diff --git a/app/constants.js b/app/constants.js new file mode 100644 index 000000000..46116ac8b --- /dev/null +++ b/app/constants.js @@ -0,0 +1,30 @@ +export const UI_SETTINGS_CONTROL_ID = { + STREAM_MODE: 'stream_mode', + HW_PROFILE: 'hw_encoder_profile', + GOP: 'gop', + VIDEO_STREAM_QUALITY: 'video_stream_quality', + PRESET: 'preset', +}; + +export const UI_SETTINGS_STREAM_MODE_QUALITY_SETTINGS_GROUPS = { + IMAGE_GROUP: 'noVNC_setting_render_mode_image_quality_group', + VIDEO_GROUP: 'noVNC_setting_render_mode_video_quality_group' +}; + +export const UI_SETTING_PROFILE_OPTIONS = { + MAIN: 0, + BASELINE: 1, + HIGH: 2 +}; + +export const UI_FPS_CHART = { + WIDTH: 200, + HEIGHT: 50, + MAX_POINTS: 60, + MAX_FPS_VALUE: 120 +}; + +export const FPS = { + MIN: 24, + MAX: 60 +}; \ No newline at end of file diff --git a/app/styles/base.css b/app/styles/base.css index 2274acec8..a5afc8ef9 100644 --- a/app/styles/base.css +++ b/app/styles/base.css @@ -352,6 +352,18 @@ select:active { background: #9fa5a2d4; color: #00ffa2d4; visibility: hidden; + z-index: 1000; +} + +#noVNC_fps_chart { + top: 40px; + left: auto; + right: 0; + position: fixed; + background: #9fa5a2d4; + z-index: 1000; + visibility: hidden; + display: flex; } /* ---------------------------------------- diff --git a/app/ui.js b/app/ui.js index 421e72691..68631ac39 100644 --- a/app/ui.js +++ b/app/ui.js @@ -6,6 +6,7 @@ * * See README.md for usage and integration instructions. */ + window._noVNC_has_module_support = true; window.addEventListener("load", function() { if (window._noVNC_has_module_support) return; @@ -36,12 +37,17 @@ import { isTouchDevice, isSafari, hasScrollbarGutter, dragThreshold, supportsBin from '../core/util/browser.js'; import { setCapture, getPointerEvent } from '../core/util/events.js'; import KeyTable from "../core/input/keysym.js"; -import keysyms from "../core/input/keysymdef.js"; -import Keyboard from "../core/input/keyboard.js"; import RFB from "../core/rfb.js"; import { MouseButtonMapper, XVNC_BUTTONS } from "../core/mousebuttonmapper.js"; import * as WebUtil from "./webutil.js"; import { uuidv4 } from '../core/util/strings.js'; +import { + UI_SETTINGS_STREAM_MODE_QUALITY_SETTINGS_GROUPS, + UI_SETTINGS_CONTROL_ID as UI_SETTINGS, + UI_FPS_CHART, FPS +} from './constants.js'; +import {encodings} from "../core/encodings.js"; +import CodecDetector, {CODEC_VARIANT_NAMES, preferredCodecs} from "../core/codecs"; const PAGE_TITLE = "KasmVNC"; @@ -73,22 +79,33 @@ const UI = { currentDisplay: null, displayWindows: new Map([['primary', 'primary']]), registeredWindows: new Map([['primary', 'primary']]), + fpsChartTicks: [], monitorDragOk: false, monitorStartX: 0, monitorStartY: 0, supportsBroadcastChannel: (typeof BroadcastChannel !== "undefined"), + codecDetector: null, + forcedCodecs: [], - prime() { - return WebUtil.initSettings().then(() => { - if (document.readyState === "interactive" || document.readyState === "complete") { - return UI.start(); - } + prime: async () => { + await WebUtil.initSettings(); + try { + const detector = await (new CodecDetector()).detect(); + UI.codecDetector = detector; - return new Promise((resolve, reject) => { - document.addEventListener('DOMContentLoaded', () => UI.start().then(resolve).catch(reject)); - }); + Log.Debug('Supported Codecs: ', detector.getSupportedCodecs()); + } catch (e) { + Log.Warn('Failed to detect codecs: ', e); + } + + if (document.readyState === "interactive" || document.readyState === "complete") { + return UI.start(); + } + + return new Promise((resolve, 
reject) => { + document.addEventListener('DOMContentLoaded', () => UI.start().then(resolve).catch(reject)); }); }, @@ -150,11 +167,9 @@ const UI = { UI.addSettingsHandlers(); UI.addDisplaysHandler(); // UI.addMultiMonitorAddHandler(); - document.getElementById("noVNC_status") - .addEventListener('click', UI.hideStatus); + document.getElementById("noVNC_status").addEventListener('click', UI.hideStatus); UI.openControlbar(); - UI.updateVisualState('init'); document.documentElement.classList.remove("noVNC_loading"); @@ -184,8 +199,23 @@ const UI = { } }); + window.addEventListener("beforeunload", (e) => { + // Clean up secondary display connection before window closes + const urlParams = new URLSearchParams(window.location.search); + const windowId = urlParams.get('windowId'); + + if (UI.rfb && windowId) { + // This is a secondary display - unregister it without disconnecting main session + UI.rfb._unregisterSecondaryDisplay(); + } + }); + window.addEventListener("unload", (e) => { - if (UI.rfb) { + // Only disconnect main window (without windowId parameter) + const urlParams = new URLSearchParams(window.location.search); + const windowId = urlParams.get('windowId'); + + if (UI.rfb && !windowId) { UI.disconnect(); } }); @@ -268,7 +298,9 @@ const UI = { UI.initSetting('video_scaling', 2); UI.initSetting('max_video_resolution_x', 960); UI.initSetting('max_video_resolution_y', 540); - UI.initSetting('framerate', 30); + UI.initSetting('framerate', FPS.MIN); + UI.initSetting('framerate_image_mode', FPS.MIN); + UI.initSetting('framerate_video_mode', FPS.MIN); UI.initSetting('compression', 2); UI.initSetting('shared', true); UI.initSetting('view_only', false); @@ -286,6 +318,13 @@ const UI = { UI.initSetting('enable_ime', false); UI.initSetting('enable_webrtc', false); UI.initSetting('enable_hidpi', false); + + UI.initSetting(UI_SETTINGS.STREAM_MODE, encodings.pseudoEncodingStreamingModeJpegWebp); + // UI.initSetting(UI_SETTINGS.HW_PROFILE, UI_SETTING_PROFILE_OPTIONS.BASELINE); + UI.initSetting(UI_SETTINGS.GOP, this.getSetting('framerate')); + UI.initSetting(UI_SETTINGS.VIDEO_STREAM_QUALITY, 43); + // UI.initSetting(UI_SETTINGS.PRESET, 3); + UI.toggleKeyboardControls(); if ((WebUtil.isInsideKasmVDI()) && (! 
WebUtil.getConfigVar('show_control_bar'))) { @@ -317,6 +356,19 @@ const UI = { UI.setupSettingLabels(); UI.updateQuality(); + + // VDI setting + let val = WebUtil.getConfigVar('kasmvnc_mode_preference'); + if (val === 'image') { + UI.forcedCodecs = [encodings.pseudoEncodingStreamingModeJpegWebp]; + Log.Debug('VDI setting: image'); + return; + } + + if (val != null) { + UI.forcedCodecs = val.split('|').map(Number); + Log.Debug('VDI setting: ' + UI.forcedCodecs); + } }, initMouseButtonMapper() { const mouseButtonMapper = new MouseButtonMapper(); @@ -542,6 +594,10 @@ const UI = { settingElem.addEventListener('change', changeFunc); }, + addSettingChangeHandlerByName(name) { + this.addSettingChangeHandler(name, UI.updatePropertyName(name)); + }, + addSettingsHandlers() { UI.addClickHandle('noVNC_settings_button', UI.toggleSettingsPanel); @@ -583,8 +639,18 @@ const UI = { UI.addSettingChangeHandler('max_video_resolution_x', UI.updateQuality); UI.addSettingChangeHandler('max_video_resolution_y'); UI.addSettingChangeHandler('max_video_resolution_y', UI.updateQuality); - UI.addSettingChangeHandler('framerate'); - UI.addSettingChangeHandler('framerate', UI.updateQuality); + UI.addSettingChangeHandler('framerate_image_mode', () => { + const settingElem = UI.getSettingElement('framerate_image_mode'); + UI.getSettingElement('framerate_streaming_mode').value = settingElem.value; + WebUtil.writeSetting('framerate', settingElem.value); + UI.updateQuality(); + }); + UI.addSettingChangeHandler('framerate_streaming_mode', () => { + const settingElem = UI.getSettingElement('framerate_streaming_mode'); + UI.getSettingElement('framerate_image_mode').value = settingElem.value; + WebUtil.writeSetting('framerate', settingElem.value); + UI.updateQuality(); + }); UI.addSettingChangeHandler('compression'); UI.addSettingChangeHandler('compression', UI.updateCompression); UI.addSettingChangeHandler('view_clip'); @@ -617,6 +683,12 @@ const UI = { UI.addSettingChangeHandler('enable_hidpi', UI.enableHiDpi); UI.addSettingChangeHandler('enable_threading'); UI.addSettingChangeHandler('enable_threading', UI.threading); + + UI.addSettingChangeHandler(UI_SETTINGS.STREAM_MODE, UI.streamMode); + // UI.addSettingChangeHandlerByName(UI_SETTINGS.HW_PROFILE); + UI.addSettingChangeHandlerByName(UI_SETTINGS.GOP); + UI.addSettingChangeHandlerByName(UI_SETTINGS.VIDEO_STREAM_QUALITY); + // UI.addSettingChangeHandlerByName(UI_SETTINGS.PRESET); }, addFullscreenHandlers() { @@ -740,11 +812,21 @@ const UI = { }, showStats() { + // Clear any existing interval first + if (UI.statsInterval) { + clearInterval(UI.statsInterval); + UI.statsInterval = null; + } + + // Read checkbox state directly and save it + const perfStatsToggle = document.getElementById('noVNC_setting_enable_perf_stats'); + const enable_stats = perfStatsToggle ? 
perfStatsToggle.checked : false; + // WebUtil.writeSetting('enable_perf_stats', enable_stats); UI.saveSetting('enable_perf_stats'); - let enable_stats = UI.getSetting('enable_perf_stats'); - if (enable_stats === true && UI.statsInterval == undefined) { + if (enable_stats) { document.getElementById("noVNC_connection_stats").style.visibility = "visible"; + document.getElementById("noVNC_fps_chart").style.visibility = 'visible'; UI.statsInterval = setInterval(function() { if (UI.rfb !== undefined) { UI.rfb.requestBottleneckStats(); @@ -752,9 +834,8 @@ const UI = { } , 5000); } else { document.getElementById("noVNC_connection_stats").style.visibility = "hidden"; - UI.statsInterval = null; + document.getElementById("noVNC_fps_chart").style.visibility = 'hidden'; } - }, threading() { @@ -768,6 +849,166 @@ const UI = { UI.saveSetting('enable_threading'); }, + updatePropertyName(propertyName) { + return UI.updateRfbProperty(propertyName, propertyName); + }, + + updateRfbProperty(propertyName, settingId) { + return (event) => { + if (UI.rfb) { + UI.rfb [propertyName] = Number(event.target.value); + } + UI.saveSetting(settingId); + + UI.updateQuality(); + } + }, + + gop(event) { + if (UI.rfb) { + UI.rfb.gop = Number(event.target.value); + } + UI.saveSetting(UI_SETTINGS.GOP); + }, + + videoStreamQuality(event) { + if (UI.rfb) { + UI.rfb.videoStreamQuality = Number(event.target.value); + } + UI.saveSetting(UI_SETTINGS.VIDEO_STREAM_QUALITY); + }, + + qualityPreset(event) { + if (UI.rfb) { + UI.rfb.qualityPreset = Number(event.target.value); + } + UI.saveSetting(UI_SETTINGS.PRESET); + }, + + streamMode(event) { + const value = Number(event.target.value); + UI.toggleStreamModeGroupVisibility(value); + + if (value !== encodings.pseudoEncodingStreamingModeJpegWebp) { + const config = event.configuration ? 
event.configuration : UI.rfb.videoCodecConfigurations[value]; + UI.updateQualitySliderRange(value, config); + } + + UI.updatePropertyName(UI_SETTINGS.STREAM_MODE); + UI.saveSetting(UI_SETTINGS.STREAM_MODE); + UI.updateQuality(); + + if (value === encodings.pseudoEncodingStreamingModeJpegWebp) { + UI.rfb._requestFullRefresh(); + } + }, + + initStreamModeSetting(codecs, configurations) { + const streamModeElem = UI.getSettingElement(UI_SETTINGS.STREAM_MODE); + if (!streamModeElem) + return; + + streamModeElem.innerHTML = ""; + + // Always include the JPEG/WEBP image mode (fallback) + const fallbackOption = { + id: encodings.pseudoEncodingStreamingModeJpegWebp, + label: "JPEG/WEBP (Images)" + }; + const availableModes = [fallbackOption]; + + const codecsAvailable = this.getAvailableStreamingModes(codecs); + availableModes.push(...codecsAvailable); + + const previousValue = Number(UI.getSetting(UI_SETTINGS.STREAM_MODE)); + const selectedValue = this.getBestStreamingMode(availableModes, fallbackOption, previousValue); + + availableModes.sort((a, b) => b.id - a.id).forEach(option => { + UI.addOption(streamModeElem, option.label, option.id); + }); + + streamModeElem.value = selectedValue; + + const config = configurations?.[selectedValue]; + UI.streamMode({target: streamModeElem, configuration: config}); + UI.sendMessage("update_codecs", {current: streamModeElem.value, codecs: availableModes}); + }, + + updateQualitySliderRange(codecId, config) { + const qualitySlider = UI.getSettingElement(UI_SETTINGS.VIDEO_STREAM_QUALITY); + if (!qualitySlider) return; + + if (!config) { + qualitySlider.min = 1; + qualitySlider.max = 63; + + return; + } + + qualitySlider.min = config.minQuality; + qualitySlider.max = config.maxQuality; + + const currentValue = parseInt(qualitySlider.value); + if (currentValue < config.minQuality) { + qualitySlider.value = config.minQuality; + } else if (currentValue > config.maxQuality) { + qualitySlider.value = config.maxQuality; + } + + const output = document.getElementById('noVNC_setting_video_stream_quality_output'); + if (output) { + output.value = qualitySlider.value; + } + + // Save updated value if it changed + if (currentValue !== parseInt(qualitySlider.value)) { + UI.saveSetting(UI_SETTINGS.VIDEO_STREAM_QUALITY); + } + }, + + getAvailableStreamingModes(codecs) { + let result = []; + if (!Array.isArray(codecs) || codecs.length === 0) + return result; + + const forcedCodecs = UI.forcedCodecs; + codecs = forcedCodecs.length > 0 + ? forcedCodecs.filter(id => codecs.includes(id)) + : codecs; + + const codecTuples = codecs.map((id) => { + const label = CODEC_VARIANT_NAMES[id] ? CODEC_VARIANT_NAMES[id] : `Codec ${id}`; + return {id, label}; + }); + + result.push(...codecTuples); + + return result; + }, + + getBestStreamingMode(availableModes, fallbackOption, previousValue) { + let result = fallbackOption.id; + if (UI.forcedCodecs.length > 0) { + const forcedMode = UI.forcedCodecs.find(id => availableModes.some(option => option.id === id)); + return forcedMode !== undefined ? forcedMode : fallbackOption.id; + } + + // Restore selection if possible; otherwise default to JPEG/WEBP + const hasPrevious = availableModes.some(option => option.id === previousValue); + + const availableIds = availableModes.map(option => option.id); + const preferredMatch = preferredCodecs.filter(c => availableIds.includes(c)); + result = hasPrevious ? 
previousValue : encodings.pseudoEncodingStreamingModeJpegWebp; + + if (preferredMatch.length > 0) { + if (result === encodings.pseudoEncodingStreamingModeJpegWebp) { + result = Math.min(...preferredMatch); + } + } + + return result; + }, + showStatus(text, statusType, time, kasm = false) { // If inside the full Kasm CDI framework, don't show messages unless explicitly told to if (WebUtil.isInsideKasmVDI() && !kasm) { @@ -911,6 +1152,18 @@ const UI = { UI.controlbarDrag = true; }, + toggleStreamModeGroupVisibility(streamModeValue) { + const isImageGroupVisible = streamModeValue === encodings.pseudoEncodingStreamingModeJpegWebp; + const imageGroup = document.getElementById(UI_SETTINGS_STREAM_MODE_QUALITY_SETTINGS_GROUPS.IMAGE_GROUP); + const videoGroup = document.getElementById(UI_SETTINGS_STREAM_MODE_QUALITY_SETTINGS_GROUPS.VIDEO_GROUP); + if (imageGroup) { + imageGroup.style.display = isImageGroupVisible ? 'block' : 'none'; + } + if (videoGroup) { + videoGroup.style.display = !isImageGroupVisible ? 'block' : 'none'; + } + }, + showControlbarHint(show) { const hint = document.getElementById('noVNC_control_bar_hint'); if (show) { @@ -1124,11 +1377,12 @@ const UI = { // Update cookie and form control setting. If value is not set, then // updates from control to current cookie setting. updateSetting(name) { - // Update the settings control let value = UI.getSetting(name); const ctrl = document.getElementById('noVNC_setting_' + name); + if (!ctrl) return; + if (ctrl.type === 'checkbox') { ctrl.checked = value; @@ -1152,6 +1406,7 @@ const UI = { // Save control setting to cookie saveSetting(name) { const ctrl = document.getElementById('noVNC_setting_' + name); + if (!ctrl) return; let val; if (ctrl.type === 'checkbox') { val = ctrl.checked; @@ -1170,7 +1425,7 @@ const UI = { const ctrl = document.getElementById('noVNC_setting_' + name); let val = WebUtil.readSetting(name); - if (val != null && ctrl.type === 'checkbox') { + if (val != null && ctrl?.type === 'checkbox') { const str = String(val).toLowerCase(); const falseStrings = [ '0', 'no', 'false']; if (falseStrings.includes(str)) { @@ -1183,6 +1438,10 @@ const UI = { return val; }, + getSettingElement(name) { + return document.getElementById('noVNC_setting_' + name); + }, + // These helpers compensate for the lack of parent-selectors and // previous-sibling-selectors in CSS which are needed when we want to // disable the labels that belong to disabled input elements. 
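
Editorial aside (not part of the patch): a minimal sketch of how the `getBestStreamingMode()` helper added above resolves the initial stream mode. It only uses names the patch itself introduces (`UI`, `preferredCodecs`, `CODEC_VARIANT_NAMES`, the `encodings.pseudoEncodingStreamingMode*` constants); the relative import paths and the concrete set of available modes are illustrative assumptions.

```js
import UI from './ui.js';                                   // assumed path, next to app/ui.js
import { encodings } from '../core/encodings.js';
import { CODEC_VARIANT_NAMES, preferredCodecs } from '../core/codecs.js';

// Hypothetical mode list as built by initStreamModeSetting(): the JPEG/WEBP
// fallback plus whatever codecs the browser/server negotiation reported.
const fallback = {
    id: encodings.pseudoEncodingStreamingModeJpegWebp,
    label: 'JPEG/WEBP (Images)'
};
const available = [
    fallback,
    { id: encodings.pseudoEncodingStreamingModeAVCSW,
      label: CODEC_VARIANT_NAMES[encodings.pseudoEncodingStreamingModeAVCSW] },
    { id: encodings.pseudoEncodingStreamingModeHEVCVAAPI,
      label: CODEC_VARIANT_NAMES[encodings.pseudoEncodingStreamingModeHEVCVAAPI] },
];

// Selection precedence implemented by getBestStreamingMode():
//  1. a forced codec (UI.forcedCodecs) that is actually available,
//  2. otherwise a previously saved selection that is still available
//     (a saved JPEG/WEBP choice is upgraded to a preferred video codec
//     when one is available),
//  3. otherwise the available preferredCodecs entry with the lowest
//     encoding id (Math.min over ids, not preferredCodecs order),
//  4. otherwise the JPEG/WEBP fallback.
UI.forcedCodecs = [];

// Previously saved SW AVC is still offered, so it is restored.
const restored = UI.getBestStreamingMode(
    available, fallback, encodings.pseudoEncodingStreamingModeAVCSW);

// No usable previous value: falls through to the best preferredCodecs match.
const negotiated = UI.getBestStreamingMode(available, fallback, -1);
```
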
@@ -1247,7 +1506,7 @@ const UI = { UI.updateSetting('video_scaling', 2); UI.updateSetting('max_video_resolution_x', 960); UI.updateSetting('max_video_resolution_y', 540); - UI.updateSetting('framerate', 30); + UI.updateSetting('framerate', FPS.MIN); UI.updateSetting('compression'); UI.updateSetting('shared'); UI.updateSetting('view_only'); @@ -1379,13 +1638,48 @@ const UI = { } }, - //recieved bottleneck stats + generateFpsChartPath() { + if (this.fpsChartTicks.length === 0) { + return ''; + } + + const stepX = UI_FPS_CHART.WIDTH / (UI_FPS_CHART.MAX_POINTS - 1); + const scaleY = UI_FPS_CHART.HEIGHT / UI_FPS_CHART.MAX_FPS_VALUE; + + let d = `M 0 ${UI_FPS_CHART.HEIGHT}`; + + for (let i = 0; i < UI.fpsChartTicks.length; i++) { + const x = i * stepX; + const y = UI_FPS_CHART.HEIGHT - UI.fpsChartTicks[i] * scaleY; + d += ` L ${x} ${y}`; + } + + + d += ` L ${(UI.fpsChartTicks.length - 1) * stepX} ${UI_FPS_CHART.HEIGHT} L 0 ${UI_FPS_CHART.HEIGHT} Z`; + + return d; + }, + + updateFpsChart(fpsValue) { + UI.fpsChartTicks.push(fpsValue); + + if (UI.fpsChartTicks.length > UI_FPS_CHART.MAX_POINTS) { + UI.fpsChartTicks.shift(); + } + const path = document.getElementById('noVNC_fps_chart_path'); + if (path) { + path.setAttribute('d', UI.generateFpsChartPath()); + } + }, + + //received bottleneck stats bottleneckStatsRecieve(e) { if (UI.rfb) { try { let obj = JSON.parse(e.detail.text); let fps = UI.rfb.statsFps; document.getElementById("noVNC_connection_stats").innerHTML = "CPU: " + obj[0] + "/" + obj[1] + " | Network: " + obj[2] + "/" + obj[3] + " | FPS: " + UI.rfb.statsFps + " Dropped FPS: " + UI.rfb.statsDroppedFps; + UI.updateFpsChart(Number(fps)); console.log(e.detail.text); } catch (err) { console.log('Invalid bottleneck stats recieved from server.') @@ -1413,6 +1707,32 @@ const UI = { Log.Debug("<< UI.clipboardSend"); }, + setConnectionQualityValues() { + UI.rfb.qualityLevel = parseInt(UI.getSetting('quality')); + UI.rfb.antiAliasing = parseInt(UI.getSetting('anti_aliasing')); + UI.rfb.dynamicQualityMin = parseInt(UI.getSetting('dynamic_quality_min')); + UI.rfb.dynamicQualityMax = parseInt(UI.getSetting('dynamic_quality_max')); + UI.rfb.jpegVideoQuality = parseInt(UI.getSetting('jpeg_video_quality')); + UI.rfb.webpVideoQuality = parseInt(UI.getSetting('webp_video_quality')); + UI.rfb.videoArea = parseInt(UI.getSetting('video_area')); + UI.rfb.videoTime = parseInt(UI.getSetting('video_time')); + UI.rfb.videoOutTime = parseInt(UI.getSetting('video_out_time')); + UI.rfb.videoScaling = parseInt(UI.getSetting('video_scaling')); + UI.rfb.treatLossless = parseInt(UI.getSetting('treat_lossless')); + UI.rfb.maxVideoResolutionX = parseInt(UI.getSetting('max_video_resolution_x')); + UI.rfb.maxVideoResolutionY = parseInt(UI.getSetting('max_video_resolution_y')); + UI.rfb.frameRate = parseInt(UI.getSetting('framerate')); + UI.rfb.enableWebP = UI.getSetting('enable_webp'); + UI.rfb.videoQuality = parseInt(UI.getSetting('video_quality')); + UI.rfb.enableHiDpi = UI.getSetting('enable_hidpi'); + UI.rfb.threading = UI.getSetting('enable_threading'); + + UI.rfb.streamMode = parseInt(UI.getSetting(UI_SETTINGS.STREAM_MODE)); + // UI.rfb.hwEncoderProfile = parseInt(UI.getSetting(UI_SETTINGS.HW_PROFILE)); + UI.rfb.gop = parseInt(UI.getSetting(UI_SETTINGS.GOP)); + UI.rfb.videoStreamQuality = parseInt(UI.getSetting(UI_SETTINGS.VIDEO_STREAM_QUALITY)); + }, + /* ------^------- * /CLIPBOARD * ============== @@ -1473,6 +1793,7 @@ const UI = { repeaterID: UI.getSetting('repeaterID'), credentials: { password: password } }, + 
UI.codecDetector?.getSupportedCodecIds(), true ); UI.rfb.addEventListener("connect", UI.connectFinished); UI.rfb.addEventListener("disconnect", UI.disconnectFinished); @@ -1488,41 +1809,32 @@ const UI = { UI.rfb.addEventListener("screenregistered", UI.screenRegistered); UI.rfb.addEventListener("sharedSessionUserJoin", UI.sharedSessionUserJoin); UI.rfb.addEventListener("sharedSessionUserLeft", UI.sharedSessionUserLeft); + UI.rfb.addEventListener("videocodecschange", (e) => { + UI.initStreamModeSetting(e.detail?.codecs, e.detail?.configurations); + }); + UI.rfb.translateShortcuts = UI.getSetting('translate_shortcuts'); UI.rfb.clipViewport = UI.getSetting('view_clip'); UI.rfb.scaleViewport = UI.getSetting('resize') === 'scale'; UI.rfb.resizeSession = UI.getSetting('resize') === 'remote'; - UI.rfb.qualityLevel = parseInt(UI.getSetting('quality')); - UI.rfb.dynamicQualityMin = parseInt(UI.getSetting('dynamic_quality_min')); - UI.rfb.dynamicQualityMax = parseInt(UI.getSetting('dynamic_quality_max')); - UI.rfb.jpegVideoQuality = parseInt(UI.getSetting('jpeg_video_quality')); - UI.rfb.webpVideoQuality = parseInt(UI.getSetting('webp_video_quality')); - UI.rfb.videoArea = parseInt(UI.getSetting('video_area')); - UI.rfb.videoTime = parseInt(UI.getSetting('video_time')); - UI.rfb.videoOutTime = parseInt(UI.getSetting('video_out_time')); - UI.rfb.videoScaling = parseInt(UI.getSetting('video_scaling')); - UI.rfb.treatLossless = parseInt(UI.getSetting('treat_lossless')); - UI.rfb.maxVideoResolutionX = parseInt(UI.getSetting('max_video_resolution_x')); - UI.rfb.maxVideoResolutionY = parseInt(UI.getSetting('max_video_resolution_y')); - UI.rfb.frameRate = parseInt(UI.getSetting('framerate')); + + UI.setConnectionQualityValues(); + UI.rfb.compressionLevel = parseInt(UI.getSetting('compression')); UI.rfb.showDotCursor = UI.getSetting('show_dot'); UI.rfb.idleDisconnect = UI.getSetting('idle_disconnect'); UI.rfb.pointerRelative = UI.getSetting('pointer_relative'); - UI.rfb.videoQuality = parseInt(UI.getSetting('video_quality')); - UI.rfb.antiAliasing = UI.getSetting('anti_aliasing'); UI.rfb.clipboardUp = UI.getSetting('clipboard_up'); UI.rfb.clipboardDown = UI.getSetting('clipboard_down'); UI.rfb.clipboardSeamless = UI.getSetting('clipboard_seamless'); UI.rfb.keyboard.enableIME = UI.getSetting('enable_ime'); UI.rfb.clipboardBinary = supportsBinaryClipboard() && UI.rfb.clipboardSeamless; UI.rfb.enableWebRTC = UI.getSetting('enable_webrtc'); - UI.rfb.enableHiDpi = UI.getSetting('enable_hidpi'); - UI.rfb.threading = UI.getSetting('enable_threading'); UI.rfb.mouseButtonMapper = UI.initMouseButtonMapper(); + // UI.rfb.qualityPreset = UI.getSetting(UI_SETTINGS.PRESET); if (UI.rfb.videoQuality === 5) { UI.rfb.enableQOI = true; - } + } //Only explicitly request permission to clipboard on browsers that support binary clipboard access if (supportsBinaryClipboard()) { @@ -1537,7 +1849,6 @@ const UI = { UI.rfb.clipboardSeamless = false; } UI.rfb.preferLocalCursor = UI.getSetting('prefer_local_cursor'); - UI.rfb.enableWebP = UI.getSetting('enable_webp'); UI.updateViewOnly(); // requires UI.rfb /**** @@ -1735,13 +2046,29 @@ const UI = { } break; case 'setvideoquality': + let value; + + if (event.data.qualityLevel !== undefined) { + value = parseInt(event.data.qualityLevel); + } else if (event.data.value !== undefined) { + value = parseInt(event.data.value); + } else { + Log.Error("Invalid message received from parent window: " + event.data.action); + break; + } + + const streamMode = 
parseInt(UI.getSetting(UI_SETTINGS.STREAM_MODE)); + const isJpegWebp = streamMode === encodings.pseudoEncodingStreamingModeJpegWebp; + const settingKey = isJpegWebp ? 'video_quality' : UI_SETTINGS.VIDEO_STREAM_QUALITY; + const settingValue = isJpegWebp ? value : UI.rfb.videoCodecConfigurations[streamMode].presets[value]; + + UI.forceSetting(settingKey, settingValue, false); + if (event.data.qualityLevel !== undefined) { //apply preset mode values, but don't apply to connection - UI.forceSetting('video_quality', parseInt(event.data.qualityLevel), false); // apply quality preset quality level and override some settings (fps) UI.updateQuality(event.data.frameRate); } else { - UI.forceSetting('video_quality', parseInt(event.data.value), false); UI.updateQuality(); } break; @@ -1850,6 +2177,19 @@ const UI = { UI.rfb.terminate(); } break; + case 'set_streaming_mode': + let mode = encodings.pseudoEncodingStreamingModeJpegWebp; + if (event.data.value !== 'image') { + mode = parseInt(event.data.value); + } + + UI.forceSetting(UI_SETTINGS.STREAM_MODE, mode, false); + UI.updateQuality(); + break; + case 'set_gop': + UI.forceSetting(UI_SETTINGS.GOP, parseInt(event.data.value), false); + UI.updateQuality(); + break; } } @@ -2524,6 +2864,15 @@ const UI = { updateQuality(fps) { let present_mode = parseInt(UI.getSetting('video_quality')); let enable_qoi = false; + const imageMode = parseInt(UI.getSetting(UI_SETTINGS.STREAM_MODE)) === encodings.pseudoEncodingStreamingModeJpegWebp; + + const forceFramerate = (fps) => { + if (imageMode) { + UI.forceSetting('framerate_image_mode', fps); + UI.forceSetting('framerate_streaming_mode', fps, false); + WebUtil.writeSetting('framerate', fps); + } + }; // video_quality preset values switch (present_mode) { @@ -2537,16 +2886,16 @@ const UI = { UI.enableSetting('max_video_resolution_y'); UI.enableSetting('jpeg_video_quality'); UI.enableSetting('webp_video_quality'); - UI.enableSetting('framerate'); + UI.enableSetting('framerate_image_mode'); UI.enableSetting('video_scaling'); UI.enableSetting('video_out_time'); break; case 5: //lossless enable_qoi = true; - fps = (fps && Number.isFinite(fps)) ? fps : 60; + fps = (fps && Number.isFinite(fps)) ? fps : FPS.MAX; UI.forceSetting('dynamic_quality_min', 9); UI.forceSetting('dynamic_quality_max', 9); - UI.forceSetting('framerate', fps); + forceFramerate(fps); UI.forceSetting('treat_lossless', 9); UI.forceSetting('video_time', 100); UI.forceSetting('video_area', 100); @@ -2558,10 +2907,10 @@ const UI = { UI.forceSetting('video_out_time', 3); break; case 4: //extreme - fps = (fps && Number.isFinite(fps)) ? fps : 60; + fps = (fps && Number.isFinite(fps)) ? fps : FPS.MAX; UI.forceSetting('dynamic_quality_min', 8); UI.forceSetting('dynamic_quality_max', 9); - UI.forceSetting('framerate', fps); + forceFramerate(fps); UI.forceSetting('treat_lossless', 9); UI.forceSetting('video_time', 100); UI.forceSetting('video_area', 100); @@ -2573,14 +2922,14 @@ const UI = { UI.forceSetting('video_out_time', 3); break; case 3: // high - fps = (fps && Number.isFinite(fps)) ? fps : 60; + fps = (fps && Number.isFinite(fps)) ? 
fps : FPS.MAX; UI.forceSetting('jpeg_video_quality', 8); UI.forceSetting('webp_video_quality', 8); UI.forceSetting('dynamic_quality_min', 7); UI.forceSetting('dynamic_quality_max', 9); UI.forceSetting('max_video_resolution_x', 1920); UI.forceSetting('max_video_resolution_y', 1080); - UI.forceSetting('framerate', fps); + forceFramerate(fps); UI.forceSetting('treat_lossless', 8); UI.forceSetting('video_time', 5); UI.forceSetting('video_area', 65); @@ -2588,14 +2937,14 @@ const UI = { UI.forceSetting('video_out_time', 3); break; case 1: // low, resolution capped at 720p keeping aspect ratio - fps = (fps && Number.isFinite(fps)) ? fps : 24; + fps = (fps && Number.isFinite(fps)) ? fps : FPS.MIN; UI.forceSetting('jpeg_video_quality', 5); UI.forceSetting('webp_video_quality', 4); UI.forceSetting('dynamic_quality_min', 3); UI.forceSetting('dynamic_quality_max', 7); UI.forceSetting('max_video_resolution_x', 960); UI.forceSetting('max_video_resolution_y', 540); - UI.forceSetting('framerate', fps); + forceFramerate(fps); UI.forceSetting('treat_lossless', 7); UI.forceSetting('video_time', 5); UI.forceSetting('video_area', 65); @@ -2605,42 +2954,28 @@ const UI = { case 2: // medium case 0: // static resolution, but same settings as medium default: - fps = (fps && Number.isFinite(fps)) ? fps : 24; - UI.forceSetting('jpeg_video_quality', 7); - UI.forceSetting('webp_video_quality', 7); - UI.forceSetting('dynamic_quality_min', 4); - UI.forceSetting('dynamic_quality_max', 9); - UI.forceSetting('max_video_resolution_x', 960); - UI.forceSetting('max_video_resolution_y', 540); - UI.forceSetting('framerate', (fps) ? fps : 24); - UI.forceSetting('treat_lossless', 7); - UI.forceSetting('video_time', 5); - UI.forceSetting('video_area', 65); - UI.forceSetting('video_scaling', 0); - UI.forceSetting('video_out_time', 3); + if (imageMode) { + fps = (fps && Number.isFinite(fps)) ? fps : FPS.MIN; + UI.forceSetting('jpeg_video_quality', 7); + UI.forceSetting('webp_video_quality', 7); + UI.forceSetting('dynamic_quality_min', 4); + UI.forceSetting('dynamic_quality_max', 9); + UI.forceSetting('max_video_resolution_x', 960); + UI.forceSetting('max_video_resolution_y', 540); + forceFramerate((fps) ? 
fps : FPS.MIN); + UI.forceSetting('treat_lossless', 7); + UI.forceSetting('video_time', 5); + UI.forceSetting('video_area', 65); + UI.forceSetting('video_scaling', 0); + UI.forceSetting('video_out_time', 3); + } break; } if (UI.rfb) { - UI.rfb.qualityLevel = parseInt(UI.getSetting('quality')); - UI.rfb.antiAliasing = parseInt(UI.getSetting('anti_aliasing')); - UI.rfb.dynamicQualityMin = parseInt(UI.getSetting('dynamic_quality_min')); - UI.rfb.dynamicQualityMax = parseInt(UI.getSetting('dynamic_quality_max')); - UI.rfb.jpegVideoQuality = parseInt(UI.getSetting('jpeg_video_quality')); - UI.rfb.webpVideoQuality = parseInt(UI.getSetting('webp_video_quality')); - UI.rfb.videoArea = parseInt(UI.getSetting('video_area')); - UI.rfb.videoTime = parseInt(UI.getSetting('video_time')); - UI.rfb.videoOutTime = parseInt(UI.getSetting('video_out_time')); - UI.rfb.videoScaling = parseInt(UI.getSetting('video_scaling')); - UI.rfb.treatLossless = parseInt(UI.getSetting('treat_lossless')); - UI.rfb.maxVideoResolutionX = parseInt(UI.getSetting('max_video_resolution_x')); - UI.rfb.maxVideoResolutionY = parseInt(UI.getSetting('max_video_resolution_y')); - UI.rfb.frameRate = parseInt(UI.getSetting('framerate')); - UI.rfb.enableWebP = UI.getSetting('enable_webp'); - UI.rfb.videoQuality = parseInt(UI.getSetting('video_quality')); + UI.setConnectionQualityValues(); + UI.rfb.enableQOI = enable_qoi; - UI.rfb.enableHiDpi = UI.getSetting('enable_hidpi'); - UI.rfb.threading = UI.getSetting('enable_threading'); // Gracefully update settings server side UI.rfb.updateConnectionSettings(); diff --git a/app/ui_screen.js b/app/ui_screen.js index af1a876d7..5ac6dc7bd 100644 --- a/app/ui_screen.js +++ b/app/ui_screen.js @@ -20,9 +20,14 @@ const UI = { //Render default UI start() { - window.addEventListener("unload", (e) => { - if (UI.rfb) { - UI.disconnect(); + window.addEventListener("beforeunload", (e) => { + // Clean up secondary display connection before window closes + const urlParams = new URLSearchParams(window.location.search); + const windowId = urlParams.get('windowId'); + + if (UI.rfb && windowId) { + // This is a secondary display - unregister it without disconnecting main session + UI.rfb._unregisterSecondaryDisplay(); } }); @@ -132,7 +137,6 @@ const UI = { }, connect() { - let details = null const initialAutoPlacementValue = window.localStorage.getItem('autoPlacement') if (initialAutoPlacementValue === null) { @@ -146,16 +150,16 @@ const UI = { UI.rfb = new RFB(document.getElementById('noVNC_container'), document.getElementById('noVNC_keyboardinput'), "", //URL - { + { shared: UI.getSetting('shared', true), repeaterID: UI.getSetting('repeaterID', false), credentials: { password: null }, hiDpi: UI.getSetting('enable_hidpi', true, false) }, + null, false // Not a primary display ); } - UI.rfb.addEventListener("connect", UI.connectFinished); //UI.rfb.addEventListener("disconnect", UI.disconnectFinished); @@ -239,7 +243,7 @@ const UI = { document.documentElement.classList.remove("noVNC_disconnected"); const transitionElem = document.getElementById("noVNC_transition_text"); - if (WebUtil.isInsideKasmVDI()) + if (WebUtil.isInsideKasmVDI()) { parent.postMessage({ action: 'connection_state', value: state}, '*' ); } @@ -367,7 +371,7 @@ const UI = { if (UI.supportsBroadcastChannel) { UI.controlChannel.removeEventListener('message', UI.handleControlMessage); UI.rfb.removeEventListener("connect", UI.connectFinished); - } + } } }, diff --git a/core/codecs.js b/core/codecs.js new file mode 100755 index 000000000..02a5b01dc --- 
/dev/null +++ b/core/codecs.js @@ -0,0 +1,114 @@ +import * as Log from './util/logging'; +import {encodings} from "./encodings.js"; + +export const CODEC_NAMES = { + AVC: 'AVC', + HEVC: 'HEVC', + AV1: 'AV1' +} + +export const CODEC_IDS = { + AVCQSV: encodings.pseudoEncodingStreamingModeAVCQSV, + AVCNVENC: encodings.pseudoEncodingStreamingModeAVCNVENC, + AVCVAAPI: encodings.pseudoEncodingStreamingModeAVCVAAPI, + AVCSW: encodings.pseudoEncodingStreamingModeAVCSW, + AVC: encodings.pseudoEncodingStreamingModeAVC, + + HEVCQSV: encodings.pseudoEncodingStreamingModeHEVCQSV, + HEVCNVENC: encodings.pseudoEncodingStreamingModeHEVCNVENC, + HEVCVAAPI: encodings.pseudoEncodingStreamingModeHEVCVAAPI, + HEVCSW: encodings.pseudoEncodingStreamingModeHEVCSW, + HEVC: encodings.pseudoEncodingStreamingModeHEVC, + + AV1QSV: encodings.pseudoEncodingStreamingModeAV1QSV, + AV1VAAPI: encodings.pseudoEncodingStreamingModeAV1VAAPI, + AV1NVENC: encodings.pseudoEncodingStreamingModeAV1NVENC, + AV1SW: encodings.pseudoEncodingStreamingModeAV1SW, + AV1: encodings.pseudoEncodingStreamingModeAV1 +} + +export const CODEC_VARIANT_NAMES = { + [CODEC_IDS.AVCQSV]: 'HW H.264/AVC (QSV)', + [CODEC_IDS.AVCNVENC]: 'HW H.264/AVC (NVENC)', + [CODEC_IDS.AVCVAAPI]: 'HW H.264/AVC (VAAPI)', + [CODEC_IDS.AVCSW]: 'SW H.264/AVC', + + [CODEC_IDS.HEVCQSV]: 'HW H.265/HEVC (QSV)', + [CODEC_IDS.HEVCNVENC]: 'HW H.265/HEVC (NVENC)', + [CODEC_IDS.HEVCVAAPI]: 'HW H.265/HEVC (VAAPI)', + [CODEC_IDS.HEVCSW]: 'SW H.265/HEVC', + + [CODEC_IDS.AV1QSV]: 'HW AV1 (QSV) (experimental)', + [CODEC_IDS.AV1NVENC]: 'HW AV1 (NVENC) (experimental)', + [CODEC_IDS.AV1VAAPI]: 'HW AV1 (VAAPI) (experimental)', + [CODEC_IDS.AV1SW]: 'SW AV1 (experimental)' +} + +export const preferredCodecs = [ + encodings.pseudoEncodingStreamingModeHEVCVAAPI, + encodings.pseudoEncodingStreamingModeAVCVAAPI, + encodings.pseudoEncodingStreamingModeHEVCSW, + encodings.pseudoEncodingStreamingModeAVCSW +]; + +export default class CodecDetector { + constructor() { + this._capabilities = null; + } + + async detect() { + this._capabilities = {}; + + + if (!('VideoDecoder' in window)) { + Log.Warn('WebCodecs API not available'); + return; + } + + const codecs = { + [CODEC_NAMES.AVC]: 'avc1.42E01E', + [CODEC_NAMES.HEVC]: 'hev1.1.6.L93.B0', + [CODEC_NAMES.AV1]: 'av01.0.04M.08' + }; + + + for (const [name, codec] of Object.entries(codecs)) { + try { + const config = { + codec: codec, + codedWidth: 1920, + codedHeight: 1080 + }; + + const support = await VideoDecoder.isConfigSupported(config); + this._capabilities[name] = support.supported; + } catch (error) { + console.warn(`Error checking ${name}:`, error); + this._capabilities[name] = false; + } + } + + return this; + } + + isSupported(codec) { + return this._capabilities[codec] || false; + } + + getSupportedCodecIds() { + return this.getSupportedCodecs().map(codec => CODEC_IDS[codec]); + } + + getSupportedCodecs() { + return Object.keys(this._capabilities).filter(codec => this._capabilities[codec]); + // return this.getPreferredCodec(); + } + + getPreferredCodec() { + if (this._capabilities.AVC) return CODEC_NAMES.AVC; + if (this._capabilities.HEVC) return CODEC_NAMES.HEVC; + if (this._capabilities.AV1) return CODEC_NAMES.AV1; + + return CODEC_NAMES.AVC; // fallback + } +} \ No newline at end of file diff --git a/core/decoders/kasmvideo.js b/core/decoders/kasmvideo.js new file mode 100644 index 000000000..cbafa8ae0 --- /dev/null +++ b/core/decoders/kasmvideo.js @@ -0,0 +1,199 @@ +/* + * KasmVNC: HTML5 VNC client + * Copyright (C) 2020 Kasm 
Technologies + * Copyright (C) 2019 The noVNC Authors + * (c) 2012 Michael Tinglof, Joe Balaz, Les Piech (Mercuri.ca) + * Licensed under MPL 2.0 (see LICENSE.txt) + * + * See README.md for usage and integration instructions. + * + */ + +import * as Log from '../util/logging.js'; + +const VIDEO_CODEC_NAMES = { + 1: 'avc1.42E01E', + 2: 'hev1.1.6.L93.B0', + 3: 'av01.0.04M.08' +} + +const TARGET_FPS = 120; +const FRAME_DURATION_US = Math.round(1_000_000 / TARGET_FPS); +//avc1.4d002a - main +/// avc1.42001E - baseline + +export default class KasmVideoDecoder { + constructor(display) { + this._len = 0; + this._keyFrame = 0; + this._screenId = null; + this._ctl = null; + this._display = display; + + this._timestamp = 0; + this._timestampMap = new Map(); + this._decoders = new Map(); + } + + // ===== Public Methods ===== + decodeRect(x, y, width, height, sock, display, depth, frame_id) { + if (this._ctl === null) { + if (sock.rQwait("KasmVideo screen and compression-control", 2)) { + return false; + } + + this._screenId = sock.rQshift8(); + this._ctl = sock.rQshift8(); + + // Figure out the filter + this._ctl = this._ctl >> 4; + } + + let ret; + + if (this._ctl === 0x00) { + ret = this._skipRect(x, y, width, height, sock, display, depth, frame_id); + } else if ((this._ctl === 0x01) || (this._ctl === 0x02) || (this._ctl === 0x03)) { + ret = this._processVideoFrameRect(this._screenId, this._ctl, x, y, width, height, sock, display, depth, frame_id); + } else { + throw new Error("Illegal KasmVideo compression received (ctl: " + this._ctl + ")"); + } + + if (ret) { + this._ctl = null; + this._screenId = null; + } + + return ret; + } + + // ===== Private Methods ===== + _configureDecoder(screen) { + Log.Debug('Configuring decoder for screen: ', screen.id, ' codec: ', VIDEO_CODEC_NAMES[screen.codec], ' width: ', screen.width, ' height: ', screen.height); + screen.decoder.configure({ + codec: VIDEO_CODEC_NAMES[screen.codec], + codedWidth: screen.width, + codedHeight: screen.height, + optimizeForLatency: true, + }) + } + + _updateSize(screen, codec, width, height) { + Log.Debug('Updated size: ', {width, height}); + + screen.width = width; + screen.height = height; + screen.codec = codec; + + this._configureDecoder(screen); + } + + _skipRect(x, y, width, height, _sock, display, _depth, frame_id) { + display.clearRect(x, y, width, height, 0, frame_id, false); + return true; + } + + _handleProcessVideoChunk(frame) { + Log.Debug('Frame ', frame); + const {screenId, frame_id, x, y, width, height} = this._timestampMap.get(frame.timestamp); + Log.Debug('frame_id: ', frame_id, 'x: ', x, 'y: ', y, 'coded width: ', frame.codedWidth, 'coded height: ', frame.codedHeight); + this._display.videoFrameRect(screenId, frame, frame_id, x, y, width, height); + this._timestampMap.delete(frame.timestamp); + } + + _processVideoFrameRect(screenId, codec, x, y, width, height, sock, display, depth, frame_id) { + let [keyFrame, dataArr] = this._readData(sock); + Log.Debug('Screen: ', screenId, ' key_frame: ', keyFrame); + if (dataArr === null) { + return false; + } + + let screen; + if (this._decoders.has(screenId)) { + screen = this._decoders.get(screenId); + } else { + screen = { + id: screenId, + width: width, + height: height, + decoder: new VideoDecoder({ + output: (frame) => { + this._handleProcessVideoChunk(frame); + // frame.close(); + }, error: (e) => { + Log.Error(`There was an error inside KasmVideoDecoder`, e) + } + }) + }; + Log.Debug('Created new decoder for screen: ', screenId); + this._decoders.set(screenId, screen); 
+ } + + if (width !== screen.width && height !== screen.height || codec !== screen.codec) + this._updateSize(screen, codec, width, height) + + const vidChunk = new EncodedVideoChunk({ + type: keyFrame ? 'key' : 'delta', + data: dataArr, + timestamp: this._timestamp, + }); + + Log.Debug('Type ', vidChunk.type, ' timestamp: ', vidChunk.timestamp, ' bytelength ', vidChunk.byteLength); + + this._timestampMap.set(this._timestamp, { + screenId, + frame_id, + x, + y, + width, + height + }); + this._timestamp += FRAME_DURATION_US; + + try { + screen.decoder.decode(vidChunk); + } catch (e) { + Log.Error('Screen: ', screenId, + 'Key frame ', keyFrame, ' frame_id: ', frame_id, ' x: ', x, ' y: ', y, ' width: ', width, ' height: ', height, ' codec: ', codec, ' ctl ', this._ctl, ' dataArr: ', dataArr, ' error: ', e); + Log.Error('There was an error inside KasmVideoDecoder: ', e) + } + return true; + } + + _readData(sock) { + if (this._len === 0) { + if (sock.rQwait("KasmVideo", 5)) { + return [0, null]; + } + + this._keyFrame = sock.rQshift8(); + let byte = sock.rQshift8(); + this._len = byte & 0x7f; + if (byte & 0x80) { + byte = sock.rQshift8(); + this._len |= (byte & 0x7f) << 7; + if (byte & 0x80) { + byte = sock.rQshift8(); + this._len |= byte << 14; + } + } + } + + if (sock.rQwait("KasmVideo", this._len)) { + return [0, null]; + } + + const data = sock.rQshiftBytes(this._len); + const keyFrame = this._keyFrame; + this._len = 0; + this._keyFrame = 0; + + return [keyFrame, data]; + } + + dispose() { + for (let screen of this._decoders.values()) { + screen.decoder.close(); + } + } +} diff --git a/core/display.js b/core/display.js index ced4f80f5..1ffc30813 100644 --- a/core/display.js +++ b/core/display.js @@ -13,6 +13,9 @@ import { toSigned32bit } from './util/int.js'; import { isWindows } from './util/browser.js'; import { uuidv4 } from './util/strings.js'; import UI from '../app/ui.js'; +import { encodings } from "./encodings.js"; +import {Canvas2DRenderer} from "./renderers/Canvas2DRenderer"; +import {WebGLRenderer} from "./renderers/WebGLRenderer"; export default class Display { constructor(target, isPrimaryDisplay) { @@ -33,8 +36,6 @@ export default class Display { this._maxAsyncFrameQueue = 3; this._clearAsyncQueue(); this._syncFrameQueue = []; - this._transparentOverlayImg = null; - this._transparentOverlayRect = null; this._lastTransparentRectId = ""; this._flushing = false; @@ -44,22 +45,11 @@ export default class Display { this._fbHeight = 0; this._renderMs = 0; - this._prevDrawStyle = ""; + this._backbuffer = document.createElement('canvas'); this._target = target; - if (!this._target) { - throw new Error("Target must be set"); - } - - if (typeof this._target === 'string') { - throw new Error('target must be a DOM element'); - } - - if (!this._target.getContext) { - throw new Error("no getContext method"); - } - - this._targetCtx = this._target.getContext('2d'); + const canvas2DRenderer = new Canvas2DRenderer(target, this._backbuffer); + this._renderer = canvas2DRenderer; Log.Debug("User Agent: " + navigator.userAgent); @@ -96,7 +86,6 @@ export default class Display { this._maxScreens = 4; this._scale = 1.0; this._clipViewport = false; - this._antiAliasing = 0; this._fps = 0; this._isPrimaryDisplay = isPrimaryDisplay; this._screenID = uuidv4(); @@ -125,10 +114,6 @@ export default class Display { this._threading = true; this._primaryChannel = null; - //optional offscreen canvas - this._enableCanvasBuffer = false; - this._backbuffer = document.createElement('canvas'); - this._drawCtx = 
this._backbuffer.getContext('2d'); this._damageBounds = { left: 0, top: 0, right: this._backbuffer.width, bottom: this._backbuffer.height }; // ===== EVENT HANDLERS ===== @@ -145,27 +130,9 @@ export default class Display { // ===== PROPERTIES ===== - get enableCanvasBuffer() { return this._enableCanvasBuffer; } + get enableCanvasBuffer() { return this._renderer.enableCanvasBuffer; } set enableCanvasBuffer(value) { - if (value === this._enableCanvasBuffer) { return; } - - this._enableCanvasBuffer = value; - - - if (value && this._target) - { - //copy current visible canvas to backbuffer - let saveImg = this._targetCtx.getImageData(0, 0, this._target.width, this._target.height); - this._drawCtx.putImageData(saveImg, 0, 0); - - if (this._transparentOverlayImg) { - this.drawImage(this._transparentOverlayImg, this._transparentOverlayRect.x, this._transparentOverlayRect.y, this._transparentOverlayRect.width, this._transparentOverlayRect.height, true); - } - } else if (!value && this._target) { - //copy backbuffer to canvas to clear any overlays - let saveImg = this._targetCtx.getImageData(0, 0, this._target.width, this._target.height); - this._drawCtx.putImageData(saveImg, 0, 0); - } + this._renderer.enableCanvasBuffer = value; } get screens() { return this._screens; } @@ -179,9 +146,9 @@ export default class Display { return this._screens[0].screenIndex; } - get antiAliasing() { return this._antiAliasing; } + get antiAliasing() { return this._renderer.antiAliasing; } set antiAliasing(value) { - this._antiAliasing = value; + this._renderer.antiAliasing = value; this._rescale(this._scale); } @@ -255,7 +222,7 @@ export default class Display { return [x, y]; } - getScreenSize(resolutionQuality, max_width, max_height, hiDpi, disableLimit, disableScaling) { + getScreenSize(resolutionQuality, max_width, max_height, hiDpi, disableLimit, disableScaling, streamMode) { let data = { screens: null, serverWidth: 0, @@ -291,12 +258,12 @@ export default class Display { height = this._screens[i].serverReportedHeight; width = this._screens[i].serverReportedWidth; } - else if (width > 1280 && !disableLimit && resolutionQuality == 1) { + else if (width > 1280 && !disableLimit && resolutionQuality == 1 && streamMode == encodings.pseudoEncodingStreamingModeJpegWebp) { height = Math.floor(1280 * (height/width)); //keeping the aspect ratio of original resolution, shrink y to match x width = 1280; } //hard coded 720p - else if (resolutionQuality == 0 && !disableLimit) { + else if (resolutionQuality == 0 && !disableLimit && streamMode == encodings.pseudoEncodingStreamingModeJpegWebp) { width = 1280; height = 720; } @@ -307,7 +274,7 @@ export default class Display { scale = 1 / this._screens[i].pixelRatio; } //physically small device with high DPI - else if (this._antiAliasing === 0 && this._screens[i].pixelRatio > 1 && width < 1000 & width > 0) { + else if (this._renderer.antiAliasing === 0 && this._screens[i].pixelRatio > 1 && width < 1000 & width > 0) { Log.Info('Device Pixel ratio: ' + this._screens[i].pixelRatio + ' Reported Resolution: ' + width + 'x' + height); let targetDevicePixelRatio = 1.5; if (this._screens[i].pixelRatio > 2) { targetDevicePixelRatio = 2; } @@ -419,7 +386,7 @@ export default class Display { x = Math.max(x, this._screens[i].x + this._screens[i].serverWidth); } - var new_screen = { + const new_screen = { screenID: screenID, screenIndex: this.screens.length, width: width, //client @@ -437,7 +404,7 @@ export default class Display { scale: scale, x2: x + serverWidth, y2: serverHeight - } + }; 
this._screens.push(new_screen); if (new_screen.channel) { @@ -538,24 +505,11 @@ export default class Display { height = this._fbHeight; } - const vp = this._screens[0]; - const canvas = this._target; - if (canvas.width !== width || canvas.height !== height) { - let saveImg = null; - if (canvas.width > 0 && canvas.height > 0) { - saveImg = this._targetCtx.getImageData(0, 0, canvas.width, canvas.height); - } - + if (this._renderer.viewportChangeSize(width, height)) { + const vp = this._screens[0]; vp.serverWidth = width; vp.serverHeight = height; - canvas.width = width; - canvas.height = height; - - if (saveImg) { - this._targetCtx.putImageData(saveImg, 0, 0); - } - // The position might need to be updated if we've grown this.viewportChangePos(0, 0); @@ -579,40 +533,10 @@ export default class Display { } resize(width, height) { - this._prevDrawStyle = ""; - this._fbWidth = width; this._fbHeight = height; - let canvas = this._backbuffer; - if (canvas == undefined) { return; } - - if (this._screens.length > 0) { - width = this._screens[0].serverWidth; - height = this._screens[0].serverHeight; - } - - if (canvas.width !== width || canvas.height !== height) { - // We have to save the canvas data since changing the size will clear it - let saveImg = null; - if (canvas.width > 0 && canvas.height > 0) { - saveImg = this._drawCtx.getImageData(0, 0, canvas.width, canvas.height); - } - - if (canvas.width !== width) { - canvas.width = width; - - } - if (canvas.height !== height) { - canvas.height = height; - } - - if (saveImg) { - this._drawCtx.putImageData(saveImg, 0, 0); - } - } - - + this._renderer.resize(width, height, this._screens); // Readjust the viewport as it may be incorrectly sized // and positioned @@ -670,10 +594,15 @@ export default class Display { * Cleans up resources, should be called on a disconnect */ dispose() { - clearInterval(this._frameStatsInterval); + if (this._frameStatsInterval) { + clearInterval(this._frameStatsInterval); + this._frameStatsInterval = null; + } this.clear(); - if (this._targetCtx && this._target) { - this._targetCtx.clearRect(0,0, this._target.width, this._target.height); + + if (this._renderer) { + this._renderer.dispose(); + this._renderer = null; } } @@ -691,12 +620,7 @@ export default class Display { this._processRectScreens(rect); this._asyncRenderQPush(rect); } else { - this._setFillColor(color); - if (this._enableCanvasBuffer) { - this._drawCtx.fillRect(x, y, width, height); - } else { - this._targetCtx.fillRect(x, y, width, height); - } + this._renderer.fillRect(x, y, width, height, color); } } @@ -715,24 +639,7 @@ export default class Display { this._processRectScreens(rect); this._asyncRenderQPush(rect); } else { - let targetCtx = ((this._enableCanvasBuffer) ? this._drawCtx : this._targetCtx); - let sourceCvs = ((this._enableCanvasBuffer) ? this._backbuffer : this._target); - - // Due to this bug among others [1] we need to disable the image-smoothing to - // avoid getting a blur effect when copying data. - // - // 1. 
https://bugzilla.mozilla.org/show_bug.cgi?id=1194719 - // - // We need to set these every time since all properties are reset - // when the the size is changed - targetCtx.mozImageSmoothingEnabled = false; - targetCtx.webkitImageSmoothingEnabled = false; - targetCtx.msImageSmoothingEnabled = false; - targetCtx.imageSmoothingEnabled = false; - - targetCtx.drawImage(sourceCvs, - oldX, oldY, w, h, - newX, newY, w, h); + this._renderer.copyImage(oldX, oldY, newX, newY, w, h); } } @@ -799,13 +706,41 @@ export default class Display { } } + videoFrameRect(screenId, frame, frame_id, x, y, width, height) { + if (frame.displayWidth === 0 || frame.displayHeight === 0 || frame.codedWidth === 0 || frame.codedHeight === 0) { + frame.close(); + return false; + } + + const rect = { + type: 'video_frame', + screenId, + frame, + x, + y, + width, + height, + frame_id + }; + // TODO: REMoVE + // this.drawVideoFrame(frame, x, y, width, height); + + if (rect.screenId < this._screens.length) { + this._processRectScreens(rect); + this._asyncRenderQPush(rect); + } else { + frame.close(); + Log.Debug(`ScreenId ${screenId} not found in display list`); + } + } + transparentRect(x, y, width, height, img, frame_id, hashId) { /* The internal logic cannot handle empty images, so bail early */ if ((width === 0) || (height === 0)) { return; } - var rect = { + const rect = { 'type': 'transparent', 'img': null, 'x': x, @@ -815,18 +750,17 @@ export default class Display { 'frame_id': frame_id, 'arr': img, 'hash_id': hashId - } + }; this._processRectScreens(rect); if (rect.inPrimary) { let imageBmpPromise = createImageBitmap(img); imageBmpPromise.then( function(bitmap) { - this._transparentOverlayImg = bitmap; - this.enableCanvasBuffer = true; + this._renderer.transparentOverlayImg = bitmap; }.bind(this) ); } - this._transparentOverlayRect = rect; + this._renderer.transparentOverlayRect = rect; this._asyncRenderQPush(rect); } @@ -846,7 +780,7 @@ export default class Display { blitImage(x, y, width, height, arr, offset, frame_id, fromQueue) { if (!fromQueue) { - var buf; + let buf; if (!ArrayBuffer.isView(arr)) { buf = arr; } else { @@ -869,25 +803,7 @@ export default class Display { this._processRectScreens(rect); this._asyncRenderQPush(rect); } else { - var data; - if (!ArrayBuffer.isView(arr)) { - data = new Uint8ClampedArray(arr, - arr.length + offset, - width * height * 4); - } else { - data = new Uint8ClampedArray(arr.buffer, - arr.byteOffset + offset, - width * height * 4); - } - // NB(directxman12): arr must be an Type Array view - let img = new ImageData(data, width, height); - if (this._enableCanvasBuffer) { - this._drawCtx.putImageData(img, x, y); - } else { - this._targetCtx.putImageData(img, x, y); - - } - + this._renderer.blitImage(x, y, width, height, arr, offset); } } @@ -905,24 +821,51 @@ export default class Display { this._processRectScreens(rect); this._asyncRenderQPush(rect); } else { - if (this._enableCanvasBuffer) { - this._drawCtx.putImageData(arr, x, y); - } else { - this._targetCtx.putImageData(arr, x, y); - } + this._renderer.blitQoi(arr, x, y); } } - drawImage(img, x, y, w, h, overlay=false) { + drawImage(img, x, y, w, h, overlay = false) { try { - let targetCtx = ((this._enableCanvasBuffer && !overlay) ? 
this._drawCtx : this._targetCtx); - if (img.width != w || img.height != h) { - targetCtx.drawImage(img, x, y, w, h); - } else { - targetCtx.drawImage(img, x, y); - } + this._renderer.drawImage(img, x, y, w, h); + } catch (error) { + Log.Error('Invalid image received.'); //KASM-2090 + } + } + + drawVideoFrame(videoFrame, x, y, width, height) { + try { + this._renderer.drawImage(videoFrame, x, y, width, height); + videoFrame.close(); } catch (error) { - Log.Error('Invalid image recieved.'); //KASM-2090 + Log.Error('Invalid video frame received. ', error); + } + } + + putImage(img, x, y) { + try { + this._renderer.putImage(img, x, y); + img = null; + } catch (error) { + Log.Error('Invalid image received.'); + img = null; + } + } + + clearRect(x, y, width, height, offset, frame_id, fromQueue) { + if (!fromQueue) { + let rect = { + 'type': 'clear', + 'x': x, + 'y': y, + 'width': width, + 'height': height, + 'frame_id': frame_id + } + this._processRectScreens(rect); + this._asyncRenderQPush(rect); + } else { + this._renderer.clearRect(x, y, width, height); } } @@ -948,12 +891,6 @@ export default class Display { // ===== PRIVATE METHODS ===== - _writeCtxBuffer() { - //TODO: KASM-5450 Damage tracking with transparent rect overlay support - if (this._backbuffer.width > 0) { - this._targetCtx.drawImage(this._backbuffer, 0, 0); - } - } _handleSecondaryDisplayMessage(event) { if (!this._isPrimaryDisplay && event.data) { @@ -976,12 +913,9 @@ export default class Display { case 'transparent': let imageBmpPromise = createImageBitmap(rect.arr); imageBmpPromise.then( function(img) { - this._transparentOverlayImg = img; - if (!this.enableCanvasBuffer) { - this._enableCanvasBuffer = true; - } + this._renderer.transparentOverlayImg = img; }.bind(this) ); - this._transparentOverlayRect = rect; + this._renderer.transparentOverlayRect = rect; break; } this._syncFrameQueue.push(rect); @@ -1047,6 +981,9 @@ export default class Display { this.drawImage(a.img, pos.x, pos.y, a.width, a.height); a.img.close(); break; + case 'video_frame': + this.drawVideoFrame(a.frame, pos.x, pos.y, a.frame.codedWidth, a.frame.codedHeight); + break; default: this._syncFrameQueue.shift(); continue; @@ -1055,11 +992,9 @@ export default class Display { this._syncFrameQueue.shift(); } - if (this._enableCanvasBuffer && drawRectCnt > 0) { - this._writeCtxBuffer(); - if (this._transparentOverlayImg) { - this.drawImage(this._transparentOverlayImg, this._transparentOverlayRect.x, this._transparentOverlayRect.y, this._transparentOverlayRect.width, this._transparentOverlayRect.height, true); - } + if (this._renderer.enableCanvasBuffer && drawRectCnt > 0) { + this._renderer._writeCtxBuffer(); + this._renderer.drawTransparentOverlayImg() } if (this._syncFrameQueue.length > 0) { @@ -1131,7 +1066,15 @@ export default class Display { if (rect.frame_id < oldestFrameID) { //rect is older than any frame in the queue, drop it this._droppedRects++; - if (rect.type == "flip") { this._lateFlipRect++; } + switch (rect.type) { + case 'video_frame': + rect.frame?.close(); + break; + case 'flip': + this._lateFlipRect++; + break; + } + return; } else if (rect.frame_id > newestFrameID) { //frame is newer than any frame in the queue, drop old frame @@ -1142,6 +1085,15 @@ export default class Display { this._forcedFrameCnt++; } else { Log.Warn("Old frame dropped"); + + // Close VideoFrames in the frame being dropped + const droppedFrame = this._asyncFrameQueue[0]; + for (const droppedRect of droppedFrame[2]) { + if (droppedRect.type === 'video_frame') { + 
droppedRect.frame?.close(); + } + } + this._asyncFrameQueue.shift(); this._droppedFrames += (rect.frame_id - newestFrameID); } @@ -1151,13 +1103,19 @@ export default class Display { } } - } /* Clear the async frame buffer */ _clearAsyncQueue() { + // Close all VideoFrames in the queue before dropping + for (const frame of this._asyncFrameQueue) { + for (const rect of frame[2]) + if (rect.type === 'video_frame') + rect.frame?.close(); + } + this._droppedFrames += this._asyncFrameQueue.length; this._asyncFrameQueue = []; @@ -1238,7 +1196,7 @@ export default class Display { for (let sI = 0; sI < a.screenLocations.length; sI++) { let screenLocation = a.screenLocations[sI]; - if (screenLocation.screenIndex == 0) { + if (screenLocation.screenIndex === 0) { switch (a.type) { case 'copy': this.copyImage(screenLocation.oldX, screenLocation.oldY, screenLocation.x, screenLocation.y, a.width, a.height, a.frame_id, true); @@ -1255,12 +1213,18 @@ export default class Display { case 'img': this.drawImage(a.img, screenLocation.x, screenLocation.y, a.width, a.height); break; + case 'clear': + this.clearRect(screenLocation.x, screenLocation.y, a.width, a.height, 0, a.frame_id, true); + break; case 'vid': this.drawImage(a.img, screenLocation.x, screenLocation.y, a.width, a.height); break; case 'bitmap': this.drawImage(a.img, screenLocation.x, screenLocation.y, a.width, a.height); break; + case 'video_frame': + this.drawVideoFrame(a.frame, screenLocation.x, screenLocation.y, a.frame.codedWidth, a.frame.codedHeight); + break; default: continue; } @@ -1334,6 +1298,38 @@ export default class Display { }, [buf]); } break; + case 'video_frame': + secondaryScreenRects++; + if (a.frame.format !== null) { + if (this._screens[screenLocation.screenIndex]?.channel) { + Log.Debug(`[PRIMARY] Converting VideoFrame to ImageBitmap`); + createImageBitmap(a.frame).then((bitmap) => { + this._screens[screenLocation.screenIndex].channel.postMessage({ + eventType: 'rect', + rect: { + type: 'bitmap', + img: bitmap, + x: a.x, + y: a.y, + width: a.width, + height: a.height, + frame_id: a.frame_id, + screenLocations: a.screenLocations + }, + screenLocationIndex: sI + }, [bitmap]); // Transfer ImageBitmap + + Log.Debug(`[PRIMARY] ImageBitmap posted to secondary screen ${screenLocation.screenIndex}`); + }).catch((error) => { + Log.Error(`[PRIMARY] Failed to create ImageBitmap from VideoFrame: ${error.message}`); + }); + } else { + a.frame.close(); + } + } else { + Log.Warn(`[PRIMARY] VideoFrame has null format, skipping`); + } + break; case 'img': case '_img': secondaryScreenRects++; @@ -1357,15 +1353,20 @@ export default class Display { break; default: secondaryScreenRects++; - if (a instanceof HTMLImageElement) { - Log.Warn("Wrong rect type: " + rect.type); + if (a instanceof HTMLImageElement || a?.img instanceof HTMLImageElement) { + Log.Warn("Wrong rect type: " + a.type); } else { if (this._screens[screenLocation.screenIndex].channel) { - this._screens[screenLocation.screenIndex].channel.postMessage({ - eventType: 'rect', - rect: a, - screenLocationIndex: sI - }); + try { + this._screens[screenLocation.screenIndex].channel.postMessage({ + eventType: 'rect', + rect: a, + screenLocationIndex: sI + }); + + } catch (e) { + Log.Error(`Failed to post rect: ${e.message}, rect type: ${a.type}`); + } } } } @@ -1373,24 +1374,24 @@ export default class Display { } } - if (this._enableCanvasBuffer) { - + if (this._renderer.enableCanvasBuffer) { if (primaryScreenRects > 0) { - this._writeCtxBuffer(); + this._renderer._writeCtxBuffer(); } - if 
(this._transparentOverlayImg) { + if (this._renderer.transparentOverlayImg) { if (primaryScreenRects > 0) { - this.drawImage(this._transparentOverlayImg, this._transparentOverlayRect.x, this._transparentOverlayRect.y, this._transparentOverlayRect.width, this._transparentOverlayRect.height, true); + this._renderer.drawTransparentOverlayImg(); } - if (secondaryScreenRects > 0 && this._lastTransparentRectId !== this._transparentOverlayRect.hash_id) { - for (let sI = 1; sI < this._transparentOverlayRect.screenLocations.length; sI++) { - if (this._screens[this._transparentOverlayRect.screenLocations[sI].screenIndex].channel) { - this._screens[this._transparentOverlayRect.screenLocations[sI].screenIndex].channel.postMessage({ eventType: 'rect', rect: this._transparentOverlayRect, screenLocationIndex: sI }); + const transparentOverlayRect = this._renderer.transparentOverlayRect; + if (secondaryScreenRects > 0 && this._lastTransparentRectId !== transparentOverlayRect.hash_id) { + for (let sI = 1; sI < transparentOverlayRect.screenLocations.length; sI++) { + if (this._screens[transparentOverlayRect.screenLocations[sI].screenIndex].channel) { + this._screens[transparentOverlayRect.screenLocations[sI].screenIndex].channel.postMessage({ eventType: 'rect', rect: transparentOverlayRect, screenLocationIndex: sI }); } } } - this._lastTransparentRectId = this._transparentOverlayRect.hash_id; + this._lastTransparentRectId = transparentOverlayRect.hash_id; } } @@ -1413,7 +1414,7 @@ export default class Display { if (this._asyncFrameQueue[0][2].length > 0) { window.requestAnimationFrame( () => { this._pushAsyncFrame(); }); } - } else if (this._asyncFrameQueue[0][1] > 0 && this._asyncFrameQueue[0][1] == this._asyncFrameQueue[0][2].length) { + } else if (this._asyncFrameQueue[0][1] > 0 && this._asyncFrameQueue[0][1] === this._asyncFrameQueue[0][2].length) { //how many times has _pushAsyncFrame been called when the frame had all rects but has not been drawn this._asyncFrameQueue[0][5] += 1; //force the frame to be drawn if it has been here too long @@ -1426,28 +1427,39 @@ export default class Display { _processRectScreens(rect) { //find which screen this rect belongs to and adjust its x and y to be relative to the destination let indexes = []; - rect.inPrimary = false; - rect.inSecondary = false; - for (let i=0; i < this._screens.length; i++) { - let screen = this._screens[i]; + if (rect.type === 'video_frame') { + const screen = this._screens[rect.screenId]; + let screenPosition = { + x: 0 - (screen.x - rect.x), //rect.x - screen.x, + y: 0 - (screen.y - rect.y), //rect.y - screen.y, + screenIndex: rect.screenId + } - if ( - !((rect.x > screen.x2 || screen.x > (rect.x + rect.width)) && (rect.y > screen.y2 || screen.y > (rect.y + rect.height))) - ) { - let screenPosition = { - x: 0 - (screen.x - rect.x), //rect.x - screen.x, - y: 0 - (screen.y - rect.y), //rect.y - screen.y, - screenIndex: i - } - if (rect.type === 'copy') { - screenPosition.oldX = 0 - (screen.x - rect.oldX); //rect.oldX - screen.x; - screenPosition.oldY = 0 - (screen.y - rect.oldY); //rect.oldY - screen.y; - } - indexes.push(screenPosition); - if (i == 0) { - rect.inPrimary = true; - } else { - rect.inSecondary = true; + indexes.push(screenPosition); + } else { + rect.inPrimary = false; + rect.inSecondary = false; + for (let i = 0; i < this._screens.length; i++) { + let screen = this._screens[i]; + + if ( + !((rect.x > screen.x2 || screen.x > (rect.x + rect.width)) && (rect.y > screen.y2 || screen.y > (rect.y + rect.height))) + ) { + let 
screenPosition = { + x: 0 - (screen.x - rect.x), //rect.x - screen.x, + y: 0 - (screen.y - rect.y), //rect.y - screen.y, + screenIndex: i + } + if (rect.type === 'copy') { + screenPosition.oldX = 0 - (screen.x - rect.oldX); //rect.oldX - screen.x; + screenPosition.oldY = 0 - (screen.y - rect.oldY); //rect.oldY - screen.y; + } + indexes.push(screenPosition); + if (i === 0) { + rect.inPrimary = true; + } else { + rect.inSecondary = true; + } } } } @@ -1466,34 +1478,8 @@ export default class Display { const width = factor * vp.serverWidth + 'px'; const height = factor * vp.serverHeight + 'px'; - if ((this._target.style.width !== width) || - (this._target.style.height !== height)) { - this._target.style.width = width; - this._target.style.height = height; - } - - Log.Info('Pixel Ratio: ' + window.devicePixelRatio + ', VNC Scale: ' + factor + 'VNC Res: ' + vp.serverWidth + 'x' + vp.serverHeight); - - var pixR = Math.abs(Math.ceil(window.devicePixelRatio)); - var isFirefox = navigator.userAgent.toLowerCase().indexOf('firefox') > -1; - - if (this.antiAliasing === 2 || (this.antiAliasing === 0 && factor === 1 && this._target.style.imageRendering !== 'pixelated' && pixR === window.devicePixelRatio && vp.width > 0)) { - this._target.style.imageRendering = ((!isFirefox) ? 'pixelated' : 'crisp-edges' ); - Log.Debug('Smoothing disabled'); - } else if (this.antiAliasing === 1 || (this.antiAliasing === 0 && factor !== 1 && this._target.style.imageRendering !== 'auto')) { - this._target.style.imageRendering = 'auto'; //auto is really smooth (blurry) using trilinear of linear - Log.Debug('Smoothing enabled'); - } + this._renderer.rescale(factor, width, height, vp.serverWidth, vp.serverHeight, vp.width); requestAnimationFrame( () => { this._pushAsyncFrame(); }); } - - _setFillColor(color) { - const newStyle = 'rgb(' + color[0] + ',' + color[1] + ',' + color[2] + ')'; - let targetCtx = ((this._enableCanvasBuffer) ? 
this._drawCtx : this._targetCtx); - if (newStyle !== this._prevDrawStyle) { - targetCtx.fillStyle = newStyle; - this._prevDrawStyle = newStyle; - } - } } diff --git a/core/encodings.js b/core/encodings.js index a4d5f3540..8ebbf1433 100644 --- a/core/encodings.js +++ b/core/encodings.js @@ -12,6 +12,7 @@ export const encodings = { encodingRRE: 2, encodingHextile: 5, encodingTight: 7, + encodingKasmVideo: 17, encodingTightPNG: -260, encodingUDP: -261, @@ -29,6 +30,45 @@ export const encodings = { pseudoEncodingCompressLevel9: -247, pseudoEncodingCompressLevel0: -256, + pseudoEncodingFrameRateLevel10: -2048, + pseudoEncodingFrameRateLevel60: -1998, + pseudoEncodingMaxVideoResolution: -1997, + pseudoEncodingVideoScalingLevel0: -1996, + pseudoEncodingVideoScalingLevel9: -1987, + pseudoEncodingVideoOutTimeLevel1: -1986, + pseudoEncodingVideoOutTimeLevel100: -1887, + pseudoEncodingQOI: -1886, + pseudoEncodingKasmDisconnectNotify: -1885, + + pseudoEncodingHardwareProfile0: -1170, + pseudoEncodingHardwareProfile4: -1166, + + pseudoEncodingGOP1: -1165, + pseudoEncodingGOP60: -1105, + pseudoEncodingStreamingVideoQualityLevel0: -1104, + pseudoEncodingStreamingVideoQualityLevel63: -1041, + + // AV1 + pseudoEncodingStreamingModeAV1QSV: -1040, + pseudoEncodingStreamingModeAV1NVENC: -1039, + pseudoEncodingStreamingModeAV1VAAPI: -1038, + pseudoEncodingStreamingModeAV1SW: -1037, + pseudoEncodingStreamingModeAV1: -1036, + // h.265 + pseudoEncodingStreamingModeHEVCQSV: -1035, + pseudoEncodingStreamingModeHEVCNVENC: -1034, + pseudoEncodingStreamingModeHEVCVAAPI: -1033, + pseudoEncodingStreamingModeHEVCSW: -1032, + pseudoEncodingStreamingModeHEVC: -1031, + // h.264 + pseudoEncodingStreamingModeAVCQSV: -1030, + pseudoEncodingStreamingModeAVCNVENC: -1029, + pseudoEncodingStreamingModeAVCVAAPI: -1028, + pseudoEncodingStreamingModeAVCSW: -1027, + pseudoEncodingStreamingModeAVC: -1026, + + pseudoEncodingStreamingModeJpegWebp: -1025, + pseudoEncodingWEBP: -1024, pseudoEncodingJpegVideoQualityLevel0: -1023, pseudoEncodingJpegVideoQualityLevel9: -1014, @@ -46,16 +86,6 @@ export const encodings = { pseudoEncodingVideoTimeLevel0: -870, pseudoEncodingVideoTimeLevel100: -770, - pseudoEncodingFrameRateLevel10: -2048, - pseudoEncodingFrameRateLevel60: -1998, - pseudoEncodingMaxVideoResolution: -1997, - pseudoEncodingVideoScalingLevel0: -1996, - pseudoEncodingVideoScalingLevel9: -1987, - pseudoEncodingVideoOutTimeLevel1: -1986, - pseudoEncodingVideoOutTimeLevel100: -1887, - pseudoEncodingQOI: -1886, - pseudoEncodingKasmDisconnectNotify: -1885, - pseudoEncodingVMwareCursor: 0x574d5664, pseudoEncodingVMwareCursorPosition: 0x574d5666, pseudoEncodingExtendedClipboard: 0xc0a1e5ce @@ -63,12 +93,30 @@ export const encodings = { export function encodingName(num) { switch (num) { - case encodings.encodingRaw: return "Raw"; - case encodings.encodingCopyRect: return "CopyRect"; - case encodings.encodingRRE: return "RRE"; - case encodings.encodingHextile: return "Hextile"; - case encodings.encodingTight: return "Tight"; - case encodings.encodingTightPNG: return "TightPNG"; - default: return "[unknown encoding " + num + "]"; + case encodings.encodingRaw: return "Raw"; + case encodings.encodingCopyRect: return "CopyRect"; + case encodings.encodingRRE: return "RRE"; + case encodings.encodingHextile: return "Hextile"; + case encodings.encodingTight: return "Tight"; + case encodings.encodingTightPNG: return "TightPNG"; + case encodings.pseudoEncodingStreamingModeAVCQSV: + case encodings.pseudoEncodingStreamingModeAVCNVENC: + case 
encodings.pseudoEncodingStreamingModeAVCVAAPI: + case encodings.pseudoEncodingStreamingModeAVCSW: + case encodings.pseudoEncodingStreamingModeAVC: + return "KasmVideo AVC"; + case encodings.pseudoEncodingStreamingModeHEVCQSV: + case encodings.pseudoEncodingStreamingModeHEVCNVENC: + case encodings.pseudoEncodingStreamingModeHEVCVAAPI: + case encodings.pseudoEncodingStreamingModeHEVCSW: + case encodings.pseudoEncodingStreamingModeHEVC: + return "KasmVideo HEVC"; + case encodings.pseudoEncodingStreamingModeAV1QSV: + case encodings.pseudoEncodingStreamingModeAV1NVENC: + case encodings.pseudoEncodingStreamingModeAV1VAAPI: + case encodings.pseudoEncodingStreamingModeAV1SW: + case encodings.pseudoEncodingStreamingModeAV1: + return "KasmVideo AV1"; + default: return "[unknown encoding " + num + "]"; } } diff --git a/core/messages.js b/core/messages.js new file mode 100644 index 000000000..618e2e931 --- /dev/null +++ b/core/messages.js @@ -0,0 +1,7 @@ +export const messages = { + msgTypeVideoEncoders: 184, + msgTypeKeepAlive: 185, + msgTypeServerDisconnect: 186, + msgTypeUserAddedToSession: 253, + msgTypeUserRemovedFromSession: 254 +}; \ No newline at end of file diff --git a/core/renderers/Canvas2DRenderer.js b/core/renderers/Canvas2DRenderer.js new file mode 100644 index 000000000..0d68b33ad --- /dev/null +++ b/core/renderers/Canvas2DRenderer.js @@ -0,0 +1,262 @@ +import * as Log from "../util/logging"; + +export class Canvas2DRenderer { + constructor(canvas, backbuffer) { + this._target = canvas; + + if (!this._target) { + throw new Error("Target must be set"); + } + + if (typeof this._target === 'string') { + throw new Error('target must be a DOM element'); + } + + if (!this._target.getContext) { + throw new Error("no getContext method"); + } + + this._targetCtx = this._target.getContext('2d'); + + //optional offscreen canvas + this._enableCanvasBuffer = false; + this._backbuffer = backbuffer; + this._drawCtx = this._backbuffer.getContext('2d'); + + this._prevDrawStyle = ""; + this._antiAliasing = 0; + + this._transparentOverlayImg = null; + this._transparentOverlayRect = null; + } + + get width() { + return this._target.width; + } + + get height() { + return this._target.height; + } + + get enableCanvasBuffer() { + return this._enableCanvasBuffer; + } + + set enableCanvasBuffer(value) { + if (value === this._enableCanvasBuffer) { + return; + } + + this._enableCanvasBuffer = value; + this._targetCtx = value ? 
this._drawCtx : this._targetCtx; + + if (value && this._target) { + //copy current visible canvas to backbuffer + let saveImg = this._targetCtx.getImageData(0, 0, this._target.width, this._target.height); + this._drawCtx.putImageData(saveImg, 0, 0); + + if (this._transparentOverlayImg) { + this.drawImage(this._transparentOverlayImg, this._transparentOverlayRect.x, this._transparentOverlayRect.y, this._transparentOverlayRect.width, this._transparentOverlayRect.height, true); + } + } else if (!value && this._target) { + //copy backbuffer to canvas to clear any overlays + let saveImg = this._targetCtx.getImageData(0, 0, this._target.width, this._target.height); + this._drawCtx.putImageData(saveImg, 0, 0); + } + } + + get antiAliasing() { + return this._antiAliasing; + } + + set antiAliasing(value) { + this._antiAliasing = value; + } + + get transparentOverlayImg() { + return this._transparentOverlayImg; + } + + set transparentOverlayImg(value) { + this._transparentOverlayImg = value; + this.enableCanvasBuffer = true; + } + + get transparentOverlayRect() { + return this._transparentOverlayRect; + } + + set transparentOverlayRect(value) { + this._transparentOverlayRect = value; + } + + drawTransparentOverlayImg() { + if (this._transparentOverlayImg) { + this.drawImage(this._transparentOverlayImg, this._transparentOverlayRect.x, this._transparentOverlayRect.y, this._transparentOverlayRect.width, this._transparentOverlayRect.height, true); + } + } + + viewportChangeSize(width, height) { + const canvas = this._target; + if (canvas.width === width && canvas.height === height) { + return false; + } + + let saveImg = null; + if (canvas.width > 0 && canvas.height > 0) { + saveImg = this._targetCtx.getImageData(0, 0, canvas.width, canvas.height); + } + + canvas.width = width; + canvas.height = height; + + if (saveImg) { + this._targetCtx.putImageData(saveImg, 0, 0); + } + + return true; + } + + rescale(factor, width, height, serverWidth, serverHeight, viewPortWidth) { + const style = this._target.style; + if ((style.width !== width) || + (style.height !== height)) { + style.width = width; + style.height = height; + } + + Log.Info('Pixel Ratio: ' + window.devicePixelRatio + ', VNC Scale: ' + factor + 'VNC Res: ' + serverWidth + 'x' + serverHeight + 'y'); + + const pixR = Math.abs(Math.ceil(window.devicePixelRatio)); + const isFirefox = navigator.userAgent.toLowerCase().indexOf('firefox') > -1; + + if (this.antiAliasing === 2 || (this.antiAliasing === 0 && factor === 1 && style.imageRendering !== 'pixelated' && pixR === window.devicePixelRatio && viewPortWidth > 0)) { + style.imageRendering = ((!isFirefox) ? 
'pixelated' : 'crisp-edges'); + Log.Debug('Smoothing disabled'); + } else if (this.antiAliasing === 1 || (this.antiAliasing === 0 && factor !== 1 && style.imageRendering !== 'auto')) { + style.imageRendering = 'auto'; //auto is really smooth (blurry) using trilinear of linear + Log.Debug('Smoothing enabled'); + } + } + + resize(width, height, screens) { + this._prevDrawStyle = ""; + + let canvas = this._backbuffer; + if (canvas === undefined) { + return; + } + + if (screens.length > 0) { + width = screens[0].serverWidth; + height = screens[0].serverHeight; + } + + if (canvas.width !== width || canvas.height !== height) { + // We have to save the canvas data since changing the size will clear it + let saveImg = null; + if (canvas.width > 0 && canvas.height > 0) { + saveImg = this._drawCtx.getImageData(0, 0, canvas.width, canvas.height); + } + + if (canvas.width !== width) { + canvas.width = width; + + } + if (canvas.height !== height) { + canvas.height = height; + } + + if (saveImg) { + this._drawCtx.putImageData(saveImg, 0, 0); + } + } + } + + blitImage(x, y, width, height, arr, offset) { + let data; + if (!ArrayBuffer.isView(arr)) { + data = new Uint8ClampedArray(arr, + arr.length + offset, + width * height * 4); + } else { + data = new Uint8ClampedArray(arr.buffer, + arr.byteOffset + offset, + width * height * 4); + } + // NB(directxman12): arr must be an Type Array view + let img = new ImageData(data, width, height); + this._targetCtx.putImageData(img, x, y); + } + + blitQoi(arr, x, y) { + this._targetCtx.putImageData(arr, x, y); + } + + clearRect(x, y, width, height) { + this._targetCtx.clearRect(x, y, width, height); + } + + copyImage(oldX, oldY, newX, newY, w, h) { + const targetCtx = this._targetCtx; + let sourceCvs = ((this._enableCanvasBuffer) ? this._backbuffer : this._target); + + // Due to this bug among others [1] we need to disable the image-smoothing to + // avoid getting a blur effect when copying data. + // + // 1. 
https://bugzilla.mozilla.org/show_bug.cgi?id=1194719 + // + // We need to set these every time since all properties are reset + // when the the size is changed + targetCtx.mozImageSmoothingEnabled = false; + targetCtx.webkitImageSmoothingEnabled = false; + targetCtx.msImageSmoothingEnabled = false; + targetCtx.imageSmoothingEnabled = false; + + targetCtx.drawImage(sourceCvs, + oldX, oldY, w, h, + newX, newY, w, h); + } + + drawImage(img, x, y, w, h) { + if (img.width !== w || img.height !== h) { + this._targetCtx.drawImage(img, x, y, w, h); + } else { + this._targetCtx.drawImage(img, x, y); + } + } + + drawVideoFrame(videoFrame, x, y, width, height) { + this._targetCtx.drawImage(videoFrame, x, y, width, height); + } + + fillRect(x, y, width, height, color) { + this._setFillColor(color); + this._targetCtx.fillRect(x, y, width, height); + } + + putImage(img, x, y) { + this._targetCtx.putImageData(img, x, y); + } + + _writeCtxBuffer() { + //TODO: KASM-5450 Damage tracking with transparent rect overlay support + if (this._backbuffer.width > 0) { + this._targetCtx.drawImage(this._backbuffer, 0, 0); + } + } + + _setFillColor(color) { + const newStyle = 'rgb(' + color[0] + ',' + color[1] + ',' + color[2] + ')'; + if (newStyle !== this._prevDrawStyle) { + this._targetCtx.fillStyle = newStyle; + this._prevDrawStyle = newStyle; + } + } + + dispose() { + if (this._targetCtx && this._target) { + this._targetCtx.clearRect(0, 0, this._target.width, this._target.height); + } + } +} \ No newline at end of file diff --git a/core/renderers/WebGLRenderer.js b/core/renderers/WebGLRenderer.js new file mode 100644 index 000000000..b1e3f376e --- /dev/null +++ b/core/renderers/WebGLRenderer.js @@ -0,0 +1,208 @@ +import {Canvas2DRenderer} from "./Canvas2DRenderer"; +import * as Log from "../util/logging"; + +export class WebGLRenderer { + static vertexShaderSource = ` + attribute vec2 xy; + + varying highp vec2 uv; + + void main(void) { + gl_Position = vec4(xy, 0.0, 1.0); + // Map vertex coordinates (-1 to +1) to UV coordinates (0 to 1). + // UV coordinates are Y-flipped relative to vertex coordinates. 
+ uv = vec2((1.0 + xy.x) / 2.0, (1.0 - xy.y) / 2.0); + } + `; + + static fragmentShaderSource = ` + varying highp vec2 uv; + + uniform sampler2D texture; + + void main(void) { + gl_FragColor = texture2D(texture, uv); + } + `; + + constructor(canvas2D, gl) { + this._canvas2D = canvas2D; + this.gl = gl; + this._webglCanvas = gl.canvas; // the canvas element the WebGL context was created from + + Log.Info("WebGL Renderer Initialized"); + Log.Info("WebGL Version: " + gl.getParameter(gl.VERSION)); + Log.Info("WebGL Color: " + gl.getParameter(gl.RED_BITS) + ", " + gl.getParameter(gl.GREEN_BITS) + ", " + gl.getParameter(gl.BLUE_BITS) + ", " + gl.getParameter(gl.ALPHA_BITS)); + Log.Info("WebGL Depth: " + gl.getParameter(gl.DEPTH_BITS) + ", Stencil: " + gl.getParameter(gl.STENCIL_BITS)); + Log.Info("WebGL GLSL Version: " + gl.getParameter(gl.SHADING_LANGUAGE_VERSION)); + Log.Info("WebGL Vendor: " + gl.getParameter(gl.VENDOR)); + Log.Info("WebGL Renderer: " + gl.getParameter(gl.RENDERER)); + Log.Info("WebGL Max Texture Size: " + gl.getParameter(gl.MAX_TEXTURE_SIZE)); + Log.Info("WebGL Max Vertex Attrib: " + gl.getParameter(gl.MAX_VERTEX_ATTRIBS)); + Log.Info("WebGL Extensions: " + gl.getSupportedExtensions()); + + const vertexShader = this.compileShader(gl, gl.VERTEX_SHADER, WebGLRenderer.vertexShaderSource); + const fragmentShader = this.compileShader(gl, gl.FRAGMENT_SHADER, WebGLRenderer.fragmentShaderSource); + + this.shaderProgram = gl.createProgram(); + gl.attachShader(this.shaderProgram, vertexShader); + gl.attachShader(this.shaderProgram, fragmentShader); + gl.linkProgram(this.shaderProgram); + if (!gl.getProgramParameter(this.shaderProgram, gl.LINK_STATUS)) { + throw gl.getProgramInfoLog(this.shaderProgram); + } + gl.useProgram(this.shaderProgram); + + // Vertex coordinates, clockwise from bottom-left. + const vertexBuffer = gl.createBuffer(); + gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer); + gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([ + -1.0, -1.0, + -1.0, +1.0, + +1.0, +1.0, + +1.0, -1.0 + ]), gl.STATIC_DRAW); + + const xyLocation = gl.getAttribLocation(this.shaderProgram, "xy"); + gl.vertexAttribPointer(xyLocation, 2, gl.FLOAT, false, 0, 0); + gl.enableVertexAttribArray(xyLocation); + + // Create one texture to upload frames to. 
+ const texture = gl.createTexture(); + gl.bindTexture(gl.TEXTURE_2D, texture); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); + + + } + + get width() { + return this._canvas2D.width; + } + + get height() { + return this._canvas2D.height; + } + + get enableCanvasBuffer() { + return this._canvas2D.enableCanvasBuffer; + } + + set enableCanvasBuffer(value) { + this._canvas2D.enableCanvasBuffer = value; + } + + get antiAliasing() { + return this._canvas2D.antiAliasing; + } + + set antiAliasing(value) { + this._canvas2D.antiAliasing = value; + } + + get transparentOverlayImg() { + return this._canvas2D.transparentOverlayImg; + } + + set transparentOverlayImg(value) { + this._canvas2D.transparentOverlayImg = value; + this._canvas2D.enableCanvasBuffer = true; + } + + get transparentOverlayRect() { + return this._canvas2D.transparentOverlayRect; + } + + set transparentOverlayRect(value) { + this._canvas2D.transparentOverlayRect = value; + } + + compileShader(gl, type, source) { + const shader = gl.createShader(type); + gl.shaderSource(shader, source); + gl.compileShader(shader); + + if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) { + const shaderType = type === gl.VERTEX_SHADER ? "Vertex" : "Fragment"; + const errorLog = gl.getShaderInfoLog(shader); + gl.deleteShader(shader); + throw new Error(`${shaderType} shader compilation failed: ${errorLog}`); + } + + return shader; + } + + drawTransparentOverlayImg() { + this._canvas2D.drawTransparentOverlayImg(); + } + + viewportChangeSize(width, height) { + return this._canvas2D.viewportChangeSize(width, height); + } + + rescale(factor, width, height, serverWidth, serverHeight, viewPortWidth) { + this._canvas2D.rescale(factor, width, height, serverWidth, serverHeight, viewPortWidth); + } + + resize(width, height, screens) { + this._canvas2D.resize(width, height, screens); + } + + // + + blitImage(x, y, width, height, arr, offset) { + this._canvas2D.blitImage(x, y, width, height, arr, offset); + } + + blitQoi(arr, x, y) { + this._canvas2D.blitQoi(arr, x, y); + } + + clearRect(x, y, width, height) { + this._canvas2D.clearRect(x, y, width, height); + } + + copyImage(oldX, oldY, newX, newY, w, h) { + this._canvas2D.copyImage(oldX, oldY, newX, newY, w, h); + } + + drawImage(img, x, y, w, h) { + this._canvas2D.drawImage(img, x, y, w, h); + } + + drawVideoFrame(frame, x, y, w, h) { + const gl = this.gl; + + // Upload the frame to texture + gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, frame); + frame.close(); + + // Set viewport to match the video region where we want to draw + gl.viewport(x, this._webglCanvas.height - y - h, w, h); + + // Clear with transparent background so Canvas2D shows through + gl.clearColor(0.0, 0.0, 0.0, 0.0); + gl.clear(gl.COLOR_BUFFER_BIT); + + // Draw the frame + gl.drawArrays(gl.TRIANGLE_FAN, 0, 4); + } + + fillRect(x, y, width, height, color) { + this._canvas2D.fillRect(x, y, width, height, color); + } + + _writeCtxBuffer() { + this._canvas2D._writeCtxBuffer(); + } + + dispose() { + // Remove WebGL canvas from DOM + if (this._webglCanvas && this._webglCanvas.parentNode) { + this._webglCanvas.parentNode.removeChild(this._webglCanvas); + } + this._canvas2D.dispose(); + } +} \ No newline at end of file diff --git a/core/rfb.js b/core/rfb.js index 58de0fe62..986a1f8fe 100644 
--- a/core/rfb.js +++ b/core/rfb.js @@ -29,25 +29,26 @@ import DES from "./des.js"; import KeyTable from "./input/keysym.js"; import XtScancode from "./input/xtscancodes.js"; import { encodings } from "./encodings.js"; +import { messages } from "./messages.js"; import { MouseButtonMapper, xvncButtonToMask } from "./mousebuttonmapper.js"; import RawDecoder from "./decoders/raw.js"; import CopyRectDecoder from "./decoders/copyrect.js"; import RREDecoder from "./decoders/rre.js"; import HextileDecoder from "./decoders/hextile.js"; +import KasmVideoDecoder from "./decoders/kasmvideo.js"; import TightDecoder from "./decoders/tight.js"; import TightPNGDecoder from "./decoders/tightpng.js"; import UDPDecoder from './decoders/udp.js'; import { toSignedRelative16bit } from './util/int.js'; +import {FPS, UI_SETTING_PROFILE_OPTIONS} from '../app/constants.js'; // How many seconds to wait for a disconnect to finish const DISCONNECT_TIMEOUT = 3; const DEFAULT_BACKGROUND = 'rgb(40, 40, 40)'; -const CLIENT_MSG_TYPE_KEEPALIVE = 184; -const SERVER_MSG_TYPE_DISCONNECT_NOTIFY = 185; // Minimum wait (ms) between two mouse moves -const MOUSE_MOVE_DELAY = 17; +const MOUSE_MOVE_DELAY = 17; // Wheel thresholds let WHEEL_LINE_HEIGHT = 19; // Pixels for one line step (on Windows) @@ -75,7 +76,7 @@ const extendedClipboardActionNotify = 1 << 27; const extendedClipboardActionProvide = 1 << 28; export default class RFB extends EventTargetMixin { - constructor(target, touchInput, urlOrChannel, options, isPrimaryDisplay) { + constructor(target, touchInput, urlOrChannel, options, videoCodecs, isPrimaryDisplay) { if (!target) { throw new Error("Must specify target"); } @@ -101,6 +102,7 @@ export default class RFB extends EventTargetMixin { this._repeaterID = options.repeaterID || ''; this._wsProtocols = options.wsProtocols || ['binary']; this._isPrimaryDisplay = (isPrimaryDisplay !== false); + this.videoCodecs = videoCodecs; // Internal state this._rfbConnectionState = ''; @@ -141,7 +143,7 @@ export default class RFB extends EventTargetMixin { this._videoTime = 5; this._videoOutTime = 3; this._videoScaling = 2; - this._frameRate = 30; + this._frameRate = FPS.MIN; this._maxVideoResolutionX = 960; this._maxVideoResolutionY = 540; this._forcedResolutionX = null; @@ -229,7 +231,7 @@ export default class RFB extends EventTargetMixin { this._controlChannel = new BroadcastChannel(this._connectionID); this._controlChannel.addEventListener('message', this._handleControlMessage.bind(this)); Log.Debug("Attached to registrationChannel for secondary displays.") - + } if (!this._isPrimaryDisplay) { this._screenIndex = 2; @@ -268,6 +270,7 @@ export default class RFB extends EventTargetMixin { this._canvas.height = 0; this._canvas.tabIndex = -1; this._canvas.overflow = 'hidden'; + // this._canvas.style.zIndex = "2"; this._screen.appendChild(this._canvas); // Cursor @@ -299,6 +302,7 @@ export default class RFB extends EventTargetMixin { this._decoders[encodings.encodingCopyRect] = new CopyRectDecoder(); this._decoders[encodings.encodingRRE] = new RREDecoder(); this._decoders[encodings.encodingHextile] = new HextileDecoder(); + this._decoders[encodings.encodingKasmVideo] = new KasmVideoDecoder(this._display); this._decoders[encodings.encodingTight] = new TightDecoder(this._display); this._decoders[encodings.encodingTightPNG] = new TightPNGDecoder(); this._decoders[encodings.encodingUDP] = new UDPDecoder(); @@ -310,7 +314,7 @@ export default class RFB extends EventTargetMixin { if (this._isPrimaryDisplay) { this._setupWebSocket(); - } + } 
Log.Debug("<< RFB.constructor"); @@ -334,10 +338,16 @@ export default class RFB extends EventTargetMixin { this._qualityLevel = 6; this._compressionLevel = 2; this._clipHash = 0; + + this._hwEncoderProfile = UI_SETTING_PROFILE_OPTIONS.BASELINE; + this._gop = this._frameRate; + this._videoStreamQuality = 23; + this._qualityPreset = 3; + this._streamMode = encodings.pseudoEncodingStreamingModeJpegWebp; } // ===== PROPERTIES ===== - + get connectionID() { return this._connectionID; } get translateShortcuts() { return this._keyboard.translateShortcuts; } @@ -367,9 +377,9 @@ export default class RFB extends EventTargetMixin { } get pointerRelative() { return this._pointerRelativeEnabled; } - set pointerRelative(value) - { - this._pointerRelativeEnabled = value; + set pointerRelative(value) + { + this._pointerRelativeEnabled = value; if (value) { let max_w = ((this._display.scale === 1) ? this._fbWidth : (this._fbWidth * this._display.scale)); let max_h = ((this._display.scale === 1) ? this._fbHeight : (this._fbHeight * this._display.scale)); @@ -388,8 +398,8 @@ export default class RFB extends EventTargetMixin { set clipboardBinary(val) { this._clipboardMode = val; } get videoQuality() { return this._videoQuality; } - set videoQuality(quality) - { + set videoQuality(quality) + { //if changing to or from a video quality mode that uses a fixed resolution server side if (this._videoQuality <= 1 || quality <= 1) { this._pendingApplyResolutionChange = true; @@ -399,8 +409,8 @@ export default class RFB extends EventTargetMixin { } get preferBandwidth() { return this._preferBandwidth; } - set preferBandwidth(val) { - this._preferBandwidth = val; + set preferBandwidth(val) { + this._preferBandwidth = val; this._pendingApplyEncodingChanges = true; } @@ -409,8 +419,7 @@ export default class RFB extends EventTargetMixin { Log.Debug("Setting viewOnly to " + viewOnly); this._viewOnly = viewOnly; - if (this._rfbConnectionState === "connecting" || - this._rfbConnectionState === "connected") { + if (this.isConnecting || this.isConnected) { if (viewOnly) { this._keyboard.ungrab(); } else { @@ -456,11 +465,11 @@ export default class RFB extends EventTargetMixin { set background(cssValue) { this._screen.style.background = cssValue; } get enableWebP() { return this._enableWebP; } - set enableWebP(enabled) { + set enableWebP(enabled) { if (this._enableWebP === enabled) { return; } - this._enableWebP = enabled; + this._enableWebP = enabled; this._pendingApplyEncodingChanges = true; } @@ -472,11 +481,11 @@ export default class RFB extends EventTargetMixin { this._decoders[encodings.encodingTight].enableQOI = enabled; this._enableQOI = this._decoders[encodings.encodingTight].enableQOI - + if (this._enableQOI === enabled) { this._pendingApplyEncodingChanges = true; } - + } get antiAliasing() { return this._display.antiAliasing; } @@ -719,11 +728,19 @@ export default class RFB extends EventTargetMixin { this._compressionLevel = compressionLevel; - if (this._rfbConnectionState === 'connected') { + if (this.isConnected) { this._sendEncodings(); } } + get isConnected() { + return this._rfbConnectionState === 'connected'; + } + + get isConnecting() { + return this._rfbConnectionState === 'connecting'; + } + get statsFps() { return this._display.fps; } get statsDroppedFps() { return this._display.droppedFps; } @@ -731,11 +748,11 @@ export default class RFB extends EventTargetMixin { set enableWebRTC(value) { this._useUdp = value; if (!value) { - if (this._rfbConnectionState === 'connected' && (this._transitConnectionState 
!== this.TransitConnectionStates.Tcp)) { + if (this.isConnected && (this._transitConnectionState !== this.TransitConnectionStates.Tcp)) { this._sendUdpDowngrade(); - } + } } else { - if (this._rfbConnectionState === 'connected' && (this._transitConnectionState !== this.TransitConnectionStates.Udp)) { + if (this.isConnected && (this._transitConnectionState !== this.TransitConnectionStates.Udp)) { this._sendUdpUpgrade(); } } @@ -758,6 +775,49 @@ export default class RFB extends EventTargetMixin { } } + get hwEncoderProfile() { return this._hwEncoderProfile; } + set hwEncoderProfile(value) { + if (value !== this._hwEncoderProfile) { + this._hwEncoderProfile = value; + this._pendingApplyEncodingChanges = true + } + } + + get gop() { return this._gop; } + set gop(value) { + if (value !== this._gop) { + this._gop = value; + this._pendingApplyEncodingChanges = true + } + } + + get videoStreamQuality() { + return this._videoStreamQuality; + } + + set videoStreamQuality(value) { + if (value !== this._videoStreamQuality) { + this._videoStreamQuality = value; + this._pendingApplyEncodingChanges = true; + } + } + + get qualityPreset() { return this._qualityPreset; } + set qualityPreset(value) { + if (value !== this._qualityPreset) { + this._qualityPreset = value; + this._pendingApplyEncodingChanges = true + } + } + + get streamMode() { return this._streamMode; } + set streamMode(value) { + if (value !== this._streamMode) { + this._streamMode = value; + this._pendingApplyEncodingChanges = true + } + } + // ===== PUBLIC METHODS ===== refreshSecondaryDisplays() { @@ -794,7 +854,7 @@ export default class RFB extends EventTargetMixin { minX = Math.min(minX, screenPlan.screens[i].x); minY = Math.min(minY, screenPlan.screens[i].y); for (let z = 0; z < fullPlan.screens.length; z++) { - if (screenPlan.screens[i].screenID == fullPlan.screens[z].screenID) { + if (screenPlan.screens[i].screenID === fullPlan.screens[z].screenID) { numScreensFound++; } } @@ -815,7 +875,7 @@ export default class RFB extends EventTargetMixin { //send updates to secondary screens for (let i = 0; i < screenPlan.screens.length; i++) { for (let z = 1; z < fullPlan.screens.length; z++) { - if (screenPlan.screens[i].screenID == fullPlan.screens[z].screenID) { + if (screenPlan.screens[i].screenID === fullPlan.screens[z].screenID) { this._proxyRFBMessage('applyScreenPlan', [ fullPlan.screens[z].screenID, fullPlan.screens[z].screenIndex, screenPlan.screens[i].width, screenPlan.screens[i].height, screenPlan.screens[i].x, screenPlan.screens[i].y ]); } } @@ -825,7 +885,7 @@ export default class RFB extends EventTargetMixin { } else { Log.Debug("Screen plan did not apply, no changes detected."); } - + return changes; } } @@ -858,8 +918,8 @@ export default class RFB extends EventTargetMixin { This function must be called after changing any properties that effect rendering quality */ updateConnectionSettings() { - if (this._rfbConnectionState === 'connected' && this._isPrimaryDisplay) { - + if (this.isConnected && this._isPrimaryDisplay) { + if (this._pendingApplyVideoRes) { RFB.messages.setMaxVideoResolution(this._sock, this._maxVideoResolutionX, this._maxVideoResolutionY); } @@ -878,7 +938,7 @@ export default class RFB extends EventTargetMixin { if (this._display.screens.length > 1) { this.refreshSecondaryDisplays(); - } + } if (this._resizeSession || (this._forcedResolutionX && this._forcedResolutionY)) { this.dispatchEvent(new CustomEvent("screenregistered", {})); @@ -909,7 +969,7 @@ export default class RFB extends EventTargetMixin { 
this._requestRemoteResize(); } } - + } disconnect() { @@ -970,7 +1030,7 @@ export default class RFB extends EventTargetMixin { if (code !== null) { this._setLastActive(); } - + if (down === undefined) { this.sendKey(keysym, code, true); this.sendKey(keysym, code, false); @@ -1028,7 +1088,7 @@ export default class RFB extends EventTargetMixin { this.clipboardPasteDataFrom(data); }, (err) => { Log.Debug("No data in clipboard: " + err); - }); + }); } else { if (navigator.clipboard && navigator.clipboard.readText) { navigator.clipboard.readText().then(function (text) { @@ -1065,7 +1125,7 @@ export default class RFB extends EventTargetMixin { } else { this._proxyRFBMessage('sendBinaryClipboard', [ dataset, mimes ]); } - + } async clipboardPasteDataFrom(clipdata) { @@ -1104,7 +1164,7 @@ export default class RFB extends EventTargetMixin { continue; } - mimes.push(mime); + mimes.push(mime); dataset.push(data); Log.Debug('Sending mime type: ' + mime); break; @@ -1135,7 +1195,7 @@ export default class RFB extends EventTargetMixin { this._proxyRFBMessage('sendBinaryClipboard', [ dataset, mimes ]); } } - + } requestBottleneckStats() { @@ -1175,7 +1235,7 @@ export default class RFB extends EventTargetMixin { this._handleMessage(); }); this._sock.on('open', () => { - if ((this._rfbConnectionState === 'connecting') && + if ((this.isConnecting) && (this._rfbInitState === '')) { this._rfbInitState = 'ProtocolVersion'; Log.Debug("Starting VNC handshake"); @@ -1403,7 +1463,7 @@ export default class RFB extends EventTargetMixin { me._handleUdpRect(u8.slice(20), frame_id); } else { // Use buffer const now = Date.now(); - + if (udpBuffer.has(id)) { let item = udpBuffer.get(id); item.recieved_pieces += 1; @@ -1482,7 +1542,7 @@ export default class RFB extends EventTargetMixin { this._primaryDisplayChannel = null; } } - + try { this._target.removeChild(this._screen); } catch (e) { @@ -1523,7 +1583,7 @@ export default class RFB extends EventTargetMixin { } else { Log.Debug("Window focused while user switched between tabs."); } - + } if (document.visibilityState === "visible" && this._lastVisibilityState === "hidden") { @@ -1666,14 +1726,18 @@ export default class RFB extends EventTargetMixin { top: window.screenTop } } - + this._registerSecondaryDisplay(this._display.screens[0], details); } } + _requestFullRefresh() { + RFB.messages.fbUpdateRequest(this._sock, false, 0, 0, this._fbWidth, this._fbHeight); + } + // Gets the the size of the available screen _screenSize (limited) { - return this._display.getScreenSize(this.videoQuality, this.forcedResolutionX, this.forcedResolutionY, this._hiDpi, limited, !this._resizeSession); + return this._display.getScreenSize(this.videoQuality, this.forcedResolutionX, this.forcedResolutionY, this._hiDpi, limited, !this._resizeSession, this._streamMode); } _fixScrollbars() { @@ -1776,7 +1840,7 @@ export default class RFB extends EventTargetMixin { this._sock.off('close'); } } - + switch (state) { case 'connecting': this._connect(); @@ -1849,7 +1913,7 @@ export default class RFB extends EventTargetMixin { } _proxyRFBMessage(messageType, data) { - let message = { + let message = { eventType: messageType, args: data, screenID: this._display.screenID, @@ -1976,22 +2040,22 @@ export default class RFB extends EventTargetMixin { window.close(); break; case 'applySettings': - if (!this._isPrimaryDisplay) { - this.enableHiDpi = event.data.args[0]; - this.clipViewport = event.data.args[1]; - this.scaleViewport = event.data.args[2]; - this.resizeSession = event.data.args[3]; - 
this.videoQuality = event.data.args[4]; - //TODO: add support for forced static resolution for multiple monitors - //this._forcedResolutionX = event.data.args[5]; - //this._forcedResolutionY = event.data.args[6]; - - //TODO, do we need to do this twice - this.scaleViewport = event.data.args[3]; - this.updateConnectionSettings(); - } - - break; + if (!this._isPrimaryDisplay) { + this.enableHiDpi = event.data.args[0]; + this.clipViewport = event.data.args[1]; + this.scaleViewport = event.data.args[2]; + this.resizeSession = event.data.args[3]; + this.videoQuality = event.data.args[4]; + //TODO: add support for forced static resolution for multiple monitors + //this._forcedResolutionX = event.data.args[5]; + //this._forcedResolutionY = event.data.args[6]; + + //TODO, do we need to do this twice + this.scaleViewport = event.data.args[3]; + this.updateConnectionSettings(); + } + + break; case 'applyScreenPlan': if (event.data.args[0] == this._display.screenID) { this._display.screens[0].screenIndex = event.data.args[1]; @@ -1999,7 +2063,7 @@ export default class RFB extends EventTargetMixin { this._display.screens[0].height = event.data.args[3]; this._display.screens[0].x = event.data.args[4]; this._display.screens[0].y = event.data.args[5]; - + this.updateConnectionSettings(); } break; @@ -2010,18 +2074,16 @@ export default class RFB extends EventTargetMixin { break; } } - } _unregisterSecondaryDisplay() { - if (!this._isPrimaryDisplay){ + if (!this._isPrimaryDisplay) { let message = { eventType: 'unregister', screenID: this._display.screenID } this._controlChannel.postMessage(message); } - } _registerSecondaryDisplay(currentScreen = false, details = null) { @@ -2031,7 +2093,7 @@ export default class RFB extends EventTargetMixin { let size = this._screenSize(); this._display.resize(size.screens[0].serverWidth, size.screens[0].serverHeight); this._display.autoscale(size.screens[0].serverWidth, size.screens[0].serverHeight, size.screens[0].scale); - + let screen = size.screens[0]; const windowId = new URLSearchParams(document.location.search).get('windowId'); @@ -2058,7 +2120,6 @@ export default class RFB extends EventTargetMixin { // return screen.screenID return screen } - } identify(screens) { @@ -2120,9 +2181,8 @@ export default class RFB extends EventTargetMixin { } else { this._requestRemoteResize(); } - } - } + } } _handleMouse(ev) { @@ -2209,7 +2269,7 @@ export default class RFB extends EventTargetMixin { } else { this._proxyRFBMessage('mousedown', [ pos.x, pos.y, xvncButtonToMask(mappedButton) ]); } - + Log.Debug('Mouse Down'); break; case 'mouseup': @@ -2219,7 +2279,7 @@ export default class RFB extends EventTargetMixin { } else { this._proxyRFBMessage('mouseup', [ pos.x, pos.y, xvncButtonToMask(mappedButton) ]); } - + Log.Debug('Mouse Up'); break; case 'mousemove': @@ -2373,14 +2433,14 @@ export default class RFB extends EventTargetMixin { var rel_16_y = toSignedRelative16bit(y - this._pointerLockPos.y); RFB.messages.pointerEvent(this._sock, rel_16_x, rel_16_y, mask); - + // reset the cursor position to center this._mousePos = { x: this._pointerLockPos.x , y: this._pointerLockPos.y }; this._cursor.move(this._pointerLockPos.x, this._pointerLockPos.y); } else { RFB.messages.pointerEvent(this._sock, this._display.absX(x), this._display.absY(y), mask); } - + } _sendScroll(x, y, dX, dY) { @@ -3151,6 +3211,7 @@ export default class RFB extends EventTargetMixin { } RFB.messages.pixelFormat(this._sock, this._fbDepth, true); + RFB.messages.videoEncodersRequest(this._sock, this.videoCodecs); 
this._sendEncodings(); RFB.messages.fbUpdateRequest(this._sock, false, 0, 0, this._fbWidth, this._fbHeight); @@ -3198,7 +3259,8 @@ export default class RFB extends EventTargetMixin { Log.Debug("Multiple displays detected, disabling copyrect encoding."); } // Only supported with full depth support - if (this._fbDepth == 24) { + if (this._fbDepth === 24) { + encs.push(encodings.encodingKasmVideoAVC); encs.push(encodings.encodingTight); encs.push(encodings.encodingTightPNG); encs.push(encodings.encodingHextile); @@ -3223,7 +3285,6 @@ export default class RFB extends EventTargetMixin { encs.push(encodings.pseudoEncodingWEBP); if (this._enableQOI) encs.push(encodings.pseudoEncodingQOI); - // kasm settings; the server may be configured to ignore these encs.push(encodings.pseudoEncodingJpegVideoQualityLevel0 + this.jpegVideoQuality); @@ -3237,12 +3298,18 @@ export default class RFB extends EventTargetMixin { encs.push(encodings.pseudoEncodingVideoScalingLevel0 + this.videoScaling); encs.push(encodings.pseudoEncodingFrameRateLevel10 + this.frameRate - 10); encs.push(encodings.pseudoEncodingMaxVideoResolution); - - // preferBandwidth choses preset settings. Since we expose all the settings, lets not pass this + + // Order is important: first options, then streaming mode + // encs.push(encodings.pseudoEncodingHardwareProfile0 + this.hwEncoderProfile); + encs.push(encodings.pseudoEncodingGOP1 + this.gop); + encs.push(encodings.pseudoEncodingStreamingVideoQualityLevel0 + this.videoStreamQuality); + encs.push(this.streamMode); + + // preferBandwidth choses preset settings. Since we expose all the settings, let's not pass this if (this.preferBandwidth) // must be last - server processes in reverse order encs.push(encodings.pseudoEncodingPreferBandwidth); - if (this._fbDepth == 24) { + if (this._fbDepth === 24) { encs.push(encodings.pseudoEncodingVMwareCursor); encs.push(encodings.pseudoEncodingCursor); } @@ -3480,7 +3547,7 @@ export default class RFB extends EventTargetMixin { Log.Info(num + ' Clipboard items recieved.'); Log.Debug('Started clipbooard processing with Client sockjs buffer size ' + this._sock.rQlen); - + for (let i = 0; i < num; i++) { if (this._sock.rQwait("Binary Clipboard op id", 4, buffByteLen)) { return false; } @@ -3502,7 +3569,7 @@ export default class RFB extends EventTargetMixin { if (this._sock.rQwait("Binary Clipboard data", Math.abs(len), buffByteLen)) { return false; } let data = this._sock.rQshiftBytes(len); buffByteLen+=len; - + switch(mime) { case "image/png": case "text/html": @@ -3524,9 +3591,9 @@ export default class RFB extends EventTargetMixin { } Log.Info("Processed binary clipboard (ID: " + clipid + ") of MIME " + mime + " of length " + len); - + if (!this.clipboardBinary) { continue; } - + clipItemData[mime] = new Blob([data], { type: mime }); break; default: @@ -3559,7 +3626,7 @@ export default class RFB extends EventTargetMixin { this._clipHash = hashUInt8Array(textdata); } }, - (err) => { + (err) => { Log.Error("Error writing to client clipboard: " + err); // Lets try writeText if (textdata.length > 0) { @@ -3700,7 +3767,7 @@ export default class RFB extends EventTargetMixin { RFB.messages.sendFrameStats(this._sock, this._display.fps, this._display.renderMs); this._trackFrameStats = false; } - + return ret; case 1: // SetColorMapEntries @@ -3748,20 +3815,23 @@ export default class RFB extends EventTargetMixin { case 183: // KASM unix relay data return this._handleUnixRelay(); - case SERVER_MSG_TYPE_DISCONNECT_NOTIFY: // KASM disconnect notice + case 
messages.msgTypeServerDisconnect: // KASM disconnect notice return this._handleDisconnectNotify(); + case messages.msgTypeVideoEncoders: + return this._handleServerVideoEncoders(); + case 248: // ServerFence return this._handleServerFenceMsg(); case 250: // XVP return this._handleXvpMsg(); - case 253: // KASM user joined a shared sessionAdd commentMore actions + case messages.msgTypeUserAddedToSession: // KASM user joined a shared sessionAdd commentMore actions return this._handleUserJoin(); - case 254: // KASM user left a shared session - return this._handleUserLeft(); + case messages.msgTypeUserRemovedFromSession: // KASM user left a shared session + return this._handleUserLeft(); default: this._fail("Unexpected server message (type " + msgType + ")"); @@ -3787,7 +3857,7 @@ export default class RFB extends EventTargetMixin { encoding: parseInt((data[8] << 24) + (data[9] << 16) + (data[10] << 8) + data[11], 10) }; - + switch (frame.encoding) { case encodings.pseudoEncodingLastRect: this._display.flip(frame_id, frame.x + 1); //Last Rect message, first 16 bytes contain rect count @@ -3915,6 +3985,58 @@ export default class RFB extends EventTargetMixin { processRelay && processRelay(payload); } + _handleServerVideoEncoders() { + if (this._sock.rQwait("VideoEncoders header", 1, 1)) + return false; + + let num = this._sock.rQshift8(); + + // Each encoder has variable length data: + // codec(4) + minQuality(4) + maxQuality(4) + numPresets(1) + presets(4*n) + // Minimum is 13 bytes per encoder + if (this._sock.rQwait("VideoEncoders data", num * 13, 1)) + return false; + + let serverSupportedEncoders = []; + let codecConfigurations = {}; + + for (let i = 0; i < num; i++) { + const codec = this._sock.rQshift32(); + + const minQuality = this._sock.rQshift32(); + const maxQuality = this._sock.rQshift32(); + + const numPresets = this._sock.rQshift8(); + if (numPresets > 0) { + if (this._sock.rQwait("VideoEncoders presets", numPresets * 4)) { + return false; + } + } + + const presets = []; + for (let j = 0; j < numPresets; j++) { + presets.push(this._sock.rQshift32()); + } + + serverSupportedEncoders.push(codec); + codecConfigurations[codec] = { + minQuality, + maxQuality, + presets + }; + } + + this.videoCodecs = serverSupportedEncoders; + this.videoCodecConfigurations = codecConfigurations; + + this.dispatchEvent(new CustomEvent("videocodecschange", { + detail: { + codecs: serverSupportedEncoders, + configurations: codecConfigurations + } + })); + } + _handleDisconnectNotify() { if (this._sock.rQwait("DisconnectNotify header", 8, 1)) { return false; } const flags = this._sock.rQshift8(); @@ -3972,8 +4094,7 @@ export default class RFB extends EventTargetMixin { this._FBU.encoding = parseInt((hdr[8] << 24) + (hdr[9] << 16) + (hdr[10] << 8) + hdr[11], 10); } - - + if (!this._handleRect()) { return false; } @@ -3985,7 +4106,7 @@ export default class RFB extends EventTargetMixin { if (this._FBU.rect_total > 1) { this._display.flip(this._FBU.frame_id, this._FBU.rect_total); } - + return true; // We finished this FBU } @@ -4023,7 +4144,7 @@ export default class RFB extends EventTargetMixin { if (this._handleDataRect()) { this._FBU.rect_total++; //only track rendered rects and last rect return true; - } + } return false; } } @@ -4513,7 +4634,7 @@ RFB.messages = { const buff = sock._sQ; const offset = sock._sQlen; - buff[offset] = CLIENT_MSG_TYPE_KEEPALIVE; + buff[offset] = messages.msgTypeKeepAlive; sock._sQlen += 1; sock.flush(); @@ -4661,7 +4782,7 @@ RFB.messages = { sendBinaryClipboard(sock, dataset, 
mimes) { - + const buff = sock._sQ; let offset = sock._sQlen; @@ -4991,6 +5112,31 @@ RFB.messages = { sock.flush(); }, + videoEncodersRequest(sock, codecs) { + if (codecs == null) + codecs = []; + + const buff = sock._sQ; + const offset = sock._sQlen; + + buff[offset] = messages.msgTypeVideoEncoders; // msg-type + buff[offset + 1] = codecs.length; + + let j = offset + 2; + for (let i = 0; i < codecs.length; i++) { + const enc = codecs[i]; + buff[j] = enc >> 24; + buff[j + 1] = enc >> 16; + buff[j + 2] = enc >> 8; + buff[j + 3] = enc; + + j += 4; + } + + sock._sQlen += j - offset; + sock.flush(); + }, + fbUpdateRequest(sock, incremental, x, y, w, h) { const buff = sock._sQ; const offset = sock._sQlen; diff --git a/index.html b/index.html index 24956467a..58a2f6628 100644 --- a/index.html +++ b/index.html @@ -77,6 +77,11 @@
Loading statistics...
+
+ + + +
@@ -89,9 +94,8 @@

- + KasmVNC Learn More

@@ -141,7 +145,7 @@

- +
@@ -211,13 +215,13 @@

Settings
-
-
+
-

-
- +
    0
    diff --git a/vite.config.js b/vite.config.js index 21e399739..0d3b57f3b 100644 --- a/vite.config.js +++ b/vite.config.js @@ -17,7 +17,8 @@ export default defineConfig({ ViteRestart({restart: ['core/**', 'app/**','kasmvnc-version.txt']}), ], build: { - sourcemap: false, + sourcemap: true, + minify: false, rollupOptions: { input: { main: './index.html',