# XR (extended reality) integration
## Overview

The XR integration system enables the iR Engine to support immersive virtual reality (VR) and augmented reality (AR) experiences. It provides a bridge between the engine and the browser's WebXR API, allowing applications to initiate XR sessions, track user movement, handle XR input devices, and render content to XR displays. This system is essential for creating applications where users can interact with virtual environments through head-mounted displays and motion controllers.

## Core components

The XR integration system consists of several key components that work together to enable immersive experiences.

### XRState

The `XRState` serves as the central repository for XR-related information. It tracks the current status of XR sessions and provides access to essential XR objects:

```typescript
// Simplified from src/xr/XRState.ts
import { defineState } from '@ir-engine/hyperflux';

export const XRState = defineState({
  name: 'XRState',
  initial: {
    // Whether an XR session is currently active
    sessionActive: false,
    // Whether a session request is in progress
    requestingSession: false,
    // The type of active session ('inline', 'immersive-ar', 'immersive-vr', or 'none')
    sessionMode: 'none' as 'inline' | 'immersive-ar' | 'immersive-vr' | 'none',
    // The browser's XRSession object
    session: null as XRSession | null,
    // The current XRFrame containing tracking data
    xrFrame: null as XRFrame | null,
    // Which session modes are supported by the current device
    supportedSessionModes: {
      'inline': true,
      'immersive-vr': false,
      'immersive-ar': false
    }
  }
});
```

### XRSessionFunctions

The `XRSessionFunctions` module provides methods for initiating and managing XR sessions:

```typescript
// Simplified from src/xr/XRSessionFunctions.ts
import { getMutableState, getState } from '@ir-engine/hyperflux';
import { XRState } from './XRState';
import { ReferenceSpaceState } from './ReferenceSpaceState';

// Request an XR session of the specified mode
export async function requestXRSession(action = { mode: 'immersive-vr' }) {
  const xrState = getMutableState(XRState);
  xrState.requestingSession.set(true);

  try {
    await setupXRSession(action.mode);
  } catch (error) {
    console.error('Failed to start XR session:', error);
  } finally {
    xrState.requestingSession.set(false);
  }
}

// End the current XR session
export async function endXRSession() {
  const xrState = getState(XRState);
  if (xrState.sessionActive && xrState.session) {
    await xrState.session.end();
  }
}

// Internal function to set up an XR session
async function setupXRSession(requestedMode) {
  // Implementation details
}
```

### WebXRManager

The `WebXRManager` serves as a bridge between the engine's rendering system and the browser's WebXR API:

```typescript
// Simplified concept from src/xr/WebXRManager.ts
export class WebXRManager {
  constructor(renderer, gl) {
    this.renderer = renderer;
    this.gl = gl;
    this.isPresenting = false;
    this.xrFrame = null;
    this.session = null;
    this.referenceSpace = null;
  }

  // Set up the renderer for XR presentation
  async setSession(session, framebufferScaleFactor = 1) {
    this.session = session;

    // Make the WebGL context compatible with XR
    await this.gl.makeXRCompatible();

    // Create appropriate render targets for XR
    const baseLayer = new XRWebGLLayer(session, this.gl, { framebufferScaleFactor });
    session.updateRenderState({ baseLayer, depthNear: 0.1, depthFar: 1000 });

    // Configure the renderer for XR
    this.renderer.setRenderTarget(this.createRenderTarget(baseLayer));
    this.isPresenting = true;
  }

  // Additional methods for handling XR rendering
}
```
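Together, `XRState`, the session functions, and the `WebXRManager` let application code observe and control XR sessions without touching the WebXR API directly. The following is a minimal sketch of how a UI handler might combine them; the `toggleImmersiveVR` helper and its import paths are illustrative, not part of the engine's API.

```typescript
// Illustrative sketch (not engine API): toggling an immersive VR session from application code
import { getState } from '@ir-engine/hyperflux';
import { XRState } from './XRState';
import { requestXRSession, endXRSession } from './XRSessionFunctions';

export async function toggleImmersiveVR() {
  const xrState = getState(XRState);

  // Ignore clicks while a session request is already in flight
  if (xrState.requestingSession) return;

  if (xrState.sessionActive) {
    // Leave XR and return to the regular 2D view
    await endXRSession();
  } else if (xrState.supportedSessionModes['immersive-vr']) {
    // Only request a session when the device reports support for immersive VR
    await requestXRSession({ mode: 'immersive-vr' });
  }
}
```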
## XR-specific components

The system includes several specialized components for XR functionality.

### XRSpaceComponent

The `XRSpaceComponent` links an entity to an XR-tracked object (like a controller or headset):

```typescript
// Simplified from src/xr/XRComponents.ts
export const XRSpaceComponent = defineComponent({
  name: 'XRSpaceComponent',
  schema: S.Object({
    // The XRSpace from the WebXR API
    space: S.Type<XRSpace>(),
    // The reference space to use for tracking
    referenceSpace: S.String({ default: 'local-floor' }),
    // Whether this space is currently being tracked
    tracked: S.Boolean({ default: false })
  }),

  // Reactor updates the entity's transform based on XR tracking data
  reactor: () => {
    const entity = useEntityContext();
    const component = useComponent(entity, XRSpaceComponent);
    const xrState = useHookstate(getState(XRState));

    useEffect(() => {
      // When xrFrame updates, get the pose for this space
      if (xrState.xrFrame.value && component.space.value) {
        const pose = xrState.xrFrame.value.getPose(
          component.space.value,
          getReferenceSpace(component.referenceSpace.value)
        );

        if (pose) {
          // Update the entity's transform with the pose data
          updateEntityTransformFromPose(entity, pose);
          component.tracked.set(true);
        } else {
          component.tracked.set(false);
        }
      }
    }, [xrState.xrFrame]);

    return null;
  }
});
```

### XRHandComponent

The `XRHandComponent` provides access to hand tracking data when available:

```typescript
// Simplified from src/xr/XRComponents.ts
export const XRLeftHandComponent = defineComponent({
  name: 'XRLeftHandComponent',
  schema: S.Object({
    // Store rotations for each joint in the hand
    rotations: S.Class(() => new Float32Array(4 * 19)),
    // Store positions for each joint
    positions: S.Class(() => new Float32Array(3 * 19)),
    // Whether the hand is currently being tracked
    tracked: S.Boolean({ default: false })
  }),

  // Reactor updates hand data from XR input sources
  reactor: () => {
    // Implementation details
    return null;
  }
});

// Similar definition for XRRightHandComponent
```

### XRAnchorComponent

The `XRAnchorComponent` enables AR applications to attach virtual objects to real-world locations:

```typescript
// Simplified from src/xr/XRAnchorComponents.ts
export const XRAnchorComponent = defineComponent({
  name: 'XRAnchorComponent',
  schema: S.Object({
    // The XRAnchor from the WebXR API
    anchor: S.Type<XRAnchor>(),
    // Whether the anchor is currently being tracked
    tracked: S.Boolean({ default: false })
  }),

  // Reactor updates the entity's transform based on anchor tracking
  reactor: () => {
    // Implementation details
    return null;
  }
});
```
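Because the hand components store joint data in flat `Float32Array`s, consumers need to unpack the values by joint index. The sketch below shows one way to do this, assuming the `getComponent` helper used later in this chapter and three.js math types; `readLeftHandJoint` and `jointIndex` are hypothetical names used only for illustration.

```typescript
// Illustrative sketch (hypothetical helper): unpacking one joint from XRLeftHandComponent.
// Positions are packed as [x, y, z] triples and rotations as [x, y, z, w] quadruples.
import { Quaternion, Vector3 } from 'three';

function readLeftHandJoint(handEntity, jointIndex: number) {
  const hand = getComponent(handEntity, XRLeftHandComponent);
  if (!hand.tracked) return null;

  const position = new Vector3(
    hand.positions[jointIndex * 3],
    hand.positions[jointIndex * 3 + 1],
    hand.positions[jointIndex * 3 + 2]
  );

  const rotation = new Quaternion(
    hand.rotations[jointIndex * 4],
    hand.rotations[jointIndex * 4 + 1],
    hand.rotations[jointIndex * 4 + 2],
    hand.rotations[jointIndex * 4 + 3]
  );

  return { position, rotation };
}
```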
## XR session workflow

The following sequence illustrates the process of starting and using an XR session.

### 1. Session initialization

```typescript
// Check if VR is supported
navigator.xr?.isSessionSupported('immersive-vr').then((supported) => {
  if (supported) {
    // Enable VR button or UI element
    vrButton.disabled = false;
  }
});

// When user clicks "Enter VR" button
async function onEnterVRClick() {
  try {
    await requestXRSession({ mode: 'immersive-vr' });
    console.log('VR session started successfully');
  } catch (error) {
    console.error('Failed to start VR session:', error);
  }
}
```

### 2. Session setup

When `requestXRSession` is called, the following steps occur:

1. `XRState.requestingSession` is set to `true`
2. `setupXRSession` is called with the requested mode
3. The browser's WebXR API is used to request a session: `navigator.xr.requestSession(mode, options)`
4. The user may see a permission prompt from the browser
5. If approved, the `WebXRManager` is configured with the new session
6. XR reference spaces are requested and linked to the reference space entities
7. `XRState` is updated with the active session information

```mermaid
sequenceDiagram
    participant App as Application code
    participant XRFunc as XRSessionFunctions
    participant Browser as Browser WebXR API
    participant XRMgr as WebXRManager
    participant XRSt as XRState

    App->>XRFunc: requestXRSession({mode: 'immersive-vr'})
    XRFunc->>XRSt: requestingSession = true
    XRFunc->>Browser: navigator.xr.requestSession('immersive-vr', options)
    Note over Browser: User permission prompt
    Browser-->>XRFunc: XRSession object
    XRFunc->>XRMgr: setSession(xrSession)
    XRMgr->>XRMgr: Configure renderer for XR
    XRFunc->>XRSt: sessionActive = true<br>sessionMode = 'immersive-vr'<br>session = xrSession
    XRFunc->>XRFunc: Request reference spaces
    XRFunc->>XRSt: requestingSession = false
```

### 3. Per-frame updates

During an active XR session, the engine performs these operations each frame:

1. The current `XRFrame` is obtained from the session's `requestAnimationFrame` callback
2. The `XRFrame` is stored in `XRState.xrFrame`
3. The viewer entity's transform is updated based on head tracking data
4. XR controller entities are updated based on input source tracking
5. Hand tracking data is processed if available
6. The scene is rendered from the XR viewpoint

```typescript
// Simplified concept of XR frame handling
function onXRFrame(time, xrFrame) {
  // Store the current frame
  getMutableState(XRState).xrFrame.set(xrFrame);

  // Get viewer pose
  const viewerPose = xrFrame.getViewerPose(referenceSpace);
  if (viewerPose) {
    // Update camera transform
    updateViewerEntityFromPose(viewerPose);

    // Process input sources
    for (const inputSource of xrFrame.session.inputSources) {
      if (inputSource.gripSpace) {
        // Update controller entity
        const pose = xrFrame.getPose(inputSource.gripSpace, referenceSpace);
        if (pose) {
          updateControllerEntityFromPose(inputSource.handedness, pose);
        }
      }

      // Process hand tracking if available
      if (inputSource.hand) {
        updateHandTracking(inputSource.handedness, inputSource.hand, xrFrame);
      }
    }

    // Render the scene for XR
    renderXRFrame(viewerPose);
  }

  // Request the next frame
  xrFrame.session.requestAnimationFrame(onXRFrame);
}
```
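The `onXRFrame` handler above re-schedules itself each frame, but it has to be kicked off once when the session becomes active, using the reference space requested during session setup. A minimal sketch of that bootstrap step, with `startXRFrameLoop` as a hypothetical helper name:

```typescript
// Illustrative sketch (hypothetical helper): starting the per-frame loop for an active session.
// onXRFrame and referenceSpace refer to the handler and space used in the snippet above.
let referenceSpace: XRReferenceSpace;

async function startXRFrameLoop(session: XRSession) {
  // The reference space is the coordinate origin that getViewerPose/getPose use each frame
  referenceSpace = await session.requestReferenceSpace('local-floor');

  // WebXR drives rendering through the session's own animation-frame callback,
  // not window.requestAnimationFrame; each callback receives the current XRFrame
  session.requestAnimationFrame(onXRFrame);
}
```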
## XR input handling

The XR system integrates with the input system to provide controller and hand tracking.

### Controller tracking

XR controllers are represented as entities with `XRSpaceComponent` and `InputSourceComponent`:

```typescript
// Create an entity for an XR controller
function createControllerEntity(inputSource) {
  const controllerEntity = createEntity();

  // Set up transform
  setComponent(controllerEntity, TransformComponent);

  // Link to XR tracking
  setComponent(controllerEntity, XRSpaceComponent, {
    space: inputSource.gripSpace,
    referenceSpace: 'local-floor'
  });

  // Set up input handling
  setComponent(controllerEntity, InputSourceComponent, {
    source: inputSource
  });

  return controllerEntity;
}
```

### Hand tracking

When hand tracking is available, hand joint data is processed and stored:

```typescript
// Update hand tracking data
function updateHandTracking(handedness, hand, xrFrame) {
  const handEntity = handedness === 'left' ? leftHandEntity : rightHandEntity;
  const handComponent = handedness === 'left'
    ? getComponent(handEntity, XRLeftHandComponent)
    : getComponent(handEntity, XRRightHandComponent);

  let tracked = false;

  // Process each joint
  for (let i = 0; i < hand.size; i++) {
    const joint = hand.get(XRHand.WRIST + i);
    if (joint) {
      const jointPose = xrFrame.getJointPose(joint, referenceSpace);
      if (jointPose) {
        // Store joint rotation
        handComponent.rotations[i * 4] = jointPose.transform.orientation.x;
        handComponent.rotations[i * 4 + 1] = jointPose.transform.orientation.y;
        handComponent.rotations[i * 4 + 2] = jointPose.transform.orientation.z;
        handComponent.rotations[i * 4 + 3] = jointPose.transform.orientation.w;

        // Store joint position
        handComponent.positions[i * 3] = jointPose.transform.position.x;
        handComponent.positions[i * 3 + 1] = jointPose.transform.position.y;
        handComponent.positions[i * 3 + 2] = jointPose.transform.position.z;

        tracked = true;
      }
    }
  }

  handComponent.tracked = tracked;
}
```

## Next steps

With an understanding of how the engine integrates with XR technologies, the next chapter explores the techniques used to adapt the Three.js library to work within the ECS architecture.

Next: Three.js monkey patching & proxies (docid 1j iapvs83 0oxvbrgelv)