# Avatar system
## Overview

The avatar system is responsible for creating and managing interactive characters within the iR Engine. It provides a comprehensive framework for defining character appearance, skeletal structure, animations, movement, and realistic behaviors. By combining specialized components and systems, the avatar system enables developers to create lifelike digital actors that can represent players or non-player characters (NPCs) in virtual environments. This chapter explores the concepts, structure, and implementation of the avatar system within the iR Engine.

## Core components

### Avatar component

The `AvatarComponent` serves as the primary identifier for avatar entities and stores fundamental character properties:

```typescript
// Simplified from src/avatar/components/AvatarComponent.ts
import { defineComponent, S } from '@ir-engine/ecs';

export const AvatarComponent = defineComponent({
  name: 'AvatarComponent',
  schema: S.Object({
    avatarHeight: S.Number(), // Overall height of the avatar
    // Additional physical properties like torsoLength, eyeHeight, etc.
  })
});
```

This component:

- Marks an entity as an avatar
- Defines basic physical characteristics
- Provides reference values for animations and interactions
- Serves as a central point for avatar-specific data

### Rig component

The `AvatarRigComponent` defines the skeletal structure of the avatar:

```typescript
// Simplified from src/avatar/components/AvatarAnimationComponent.ts
import { defineComponent, S } from '@ir-engine/ecs';
// import { VRMHumanBoneName } from '.../maps/VRMHumanBoneName';

export const AvatarRigComponent = defineComponent({
  name: 'AvatarRigComponent',
  schema: S.Object({
    // Maps standard bone names to entity IDs
    bonesToEntities: S.Record(S.String() /* VRMHumanBoneName */, S.Entity()),
    // Additional rig data for animation retargeting
  })
});
```

This component:

- Maps standardized bone names (e.g., "hips", "leftHand", "head") to actual bone entities
- Enables animation systems to find and manipulate specific body parts
- Provides a consistent interface regardless of the original model's bone naming
- Supports animation retargeting between different character models

### Animation components

Two components work together to manage avatar animations:

```typescript
// Simplified from src/avatar/components/AnimationComponent.ts
import { defineComponent, S } from '@ir-engine/ecs';
// import { AnimationMixer, AnimationClip } from 'three';

export const AnimationComponent = defineComponent({
  name: 'AnimationComponent',
  schema: S.Object({
    mixer: S.Type<any /* AnimationMixer */>(),
    animations: S.Array(S.Type<any /* AnimationClip */>())
  })
});

// Simplified from src/avatar/components/AvatarAnimationComponent.ts
export const AvatarAnimationComponent = defineComponent({
  name: 'AvatarAnimationComponent',
  schema: S.Object({
    animationGraph: S.Object({
      // Animation state machine properties
    }),
    locomotion: S.Object({
      x: S.Number({ default: 0 }),
      y: S.Number({ default: 0 }),
      z: S.Number({ default: 0 })
    })
  })
});
```

These components work together:

- `AnimationComponent` stores the raw animation data and the mixer that plays them
- `AvatarAnimationComponent` manages the animation state and blending logic
- The locomotion values drive movement-related animations
- The animation graph determines which animations play and how they blend
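To make the relationship concrete, here is a minimal sketch of how the clips and mixer held by `AnimationComponent` map onto the standard three.js animation API. The `playClipByName` helper is hypothetical and only illustrates the idea; `AnimationMixer`, `AnimationClip`, and `AnimationClip.findByName` are real three.js APIs.

```typescript
import { AnimationClip, AnimationMixer, Object3D } from 'three';

// Hypothetical helper: look up a clip by name and fade it in on the mixer
function playClipByName(mixer: AnimationMixer, clips: AnimationClip[], name: string) {
  const clip = AnimationClip.findByName(clips, name);
  if (!clip) return;
  // clipAction() caches one action per clip, so repeated calls reuse the same action
  mixer.clipAction(clip).reset().fadeIn(0.2).play();
}

// Usage sketch: the mixer is rooted at the avatar's scene object and advanced each frame
const avatarRoot = new Object3D();
const mixer = new AnimationMixer(avatarRoot);
const clips: AnimationClip[] = []; // in the engine these would come from the loaded model
playClipByName(mixer, clips, 'idle');
mixer.update(1 / 60); // advance all playing actions by one frame
```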
### Controller component

The `AvatarControllerComponent` manages movement intent and control state:

```typescript
// Simplified from src/avatar/components/AvatarControllerComponent.ts
import { defineComponent, S } from '@ir-engine/ecs';

export const AvatarControllerComponent = defineComponent({
  name: 'AvatarControllerComponent',
  schema: S.Object({
    cameraEntity: S.Entity(), // Camera associated with this avatar
    isJumping: S.Bool({ default: false }),
    isWalking: S.Bool({ default: false }),
    gamepadLocalInput: S.Object({
      x: S.Number({ default: 0 }),
      y: S.Number({ default: 0 }),
      z: S.Number({ default: 0 })
    })
  })
});
```

This component:

- Stores the player's input or an AI's movement intent
- Tracks movement states like jumping or walking
- Links to the camera entity for view-relative movement
- Provides input values that drive both movement and animations

### Inverse kinematics components

For realistic limb positioning, the avatar system includes inverse kinematics (IK) components:

```typescript
// Simplified from src/avatar/components/AvatarIKComponents.ts
import { defineComponent, S } from '@ir-engine/ecs';

export const AvatarIKComponent = defineComponent({
  name: 'AvatarIKComponent',
  schema: S.Object({
    enabled: S.Bool({ default: true }),
    // IK settings and parameters
  })
});

export const AvatarIKTargetComponent = defineComponent({
  name: 'AvatarIKTargetComponent',
  schema: S.Object({
    targetType: S.String(), // e.g., "leftFoot", "rightHand"
    weight: S.Number({ default: 1.0 })
  })
});
```

These components:

- Enable precise positioning of limbs (e.g., feet on uneven ground, hands on objects)
- Define target positions for specific body parts
- Control the influence of IK on the final pose
- Allow for dynamic adjustment of limb positions based on the environment

## Avatar systems

Several systems work together to process these components and bring avatars to life.

### Animation system

The `AvatarAnimationSystem` manages the playback and blending of animations:

```typescript
// Simplified concept from src/avatar/systems/AvatarAnimationSystem.tsx
const AvatarAnimationSystem = defineSystem({
  uuid: 'ir.engine.AvatarAnimationSystem',
  execute: (deltaSeconds) => {
    // Find all entities with animation components
    const entities = avatarAnimationQuery();

    for (const entity of entities) {
      // Get the animation state and mixer
      const animState = getComponent(entity, AvatarAnimationComponent);
      const animComp = getComponent(entity, AnimationComponent);

      // Update animation state based on movement
      updateAnimationState(entity, animState);

      // Update the animation graph (play/blend animations)
      updateAnimationGraph(entity, deltaSeconds, animComp.mixer, animState);

      // Update the animation mixer
      animComp.mixer.update(deltaSeconds);
    }
  }
});
```

This system:

- Processes all entities with avatar animation components
- Updates animation states based on movement and actions
- Manages the animation graph to determine which animations play
- Updates the animation mixer to advance the animations
- Ensures smooth blending between different animations
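The `updateAnimationState` helper referenced above is where movement data becomes animation data. A minimal sketch of one plausible implementation follows; the body is an assumption for illustration (the engine's actual logic also accounts for physics velocity, jumping, and other states), but it conveys the core idea: copy the controller's movement intent into the locomotion values the animation graph reads, with smoothing so blends do not pop.

```typescript
// Assumed smoothing factor; a real implementation would likely scale this by delta time
const LOCOMOTION_SMOOTHING = 0.2;

// Sketch of updateAnimationState: feed the controller's intent into the locomotion vector
function updateAnimationState(entity, animState) {
  const controller = getComponent(entity, AvatarControllerComponent);
  const input = controller.gamepadLocalInput;

  // Move each locomotion axis a fraction of the way toward the current input
  animState.locomotion.x += (input.x - animState.locomotion.x) * LOCOMOTION_SMOOTHING;
  animState.locomotion.y += (input.y - animState.locomotion.y) * LOCOMOTION_SMOOTHING;
  animState.locomotion.z += (input.z - animState.locomotion.z) * LOCOMOTION_SMOOTHING;
}
```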
### Movement system

The `AvatarMovementSystem` handles avatar locomotion:

```typescript
// Simplified concept from src/avatar/systems/AvatarMovementSystem.ts
const AvatarMovementSystem = defineSystem({
  uuid: 'ir.engine.AvatarMovementSystem',
  execute: (deltaSeconds) => {
    // Find all entities with controller components
    const entities = avatarControllerQuery();

    for (const entity of entities) {
      // Get the controller and transform components
      const controller = getComponent(entity, AvatarControllerComponent);
      const transform = getMutableComponent(entity, TransformComponent);

      // Calculate movement based on input
      const movement = calculateMovement(entity, controller, deltaSeconds);

      // Apply physics (gravity, collisions, etc.)
      const finalMovement = applyPhysics(entity, movement);

      // Update the avatar's position
      transform.position.x += finalMovement.x;
      transform.position.y += finalMovement.y;
      transform.position.z += finalMovement.z;

      // Update rotation to face movement direction
      if (finalMovement.lengthSquared() > 0) {
        transform.rotation = calculateFacingRotation(finalMovement);
      }
    }
  }
});
```

This system:

- Processes all entities with avatar controller components
- Translates input values into movement vectors
- Applies physics constraints like gravity and collisions
- Updates the avatar's position and rotation
- Ensures realistic movement behavior

### Inverse kinematics system

The `AvatarIKSystem` adjusts limb positions for realistic interactions:

```typescript
// Simplified concept from src/avatar/systems/AvatarIKSystem.tsx
const AvatarIKSystem = defineSystem({
  uuid: 'ir.engine.AvatarIKSystem',
  execute: () => {
    // Find all entities with IK components
    const entities = avatarIKQuery();

    for (const entity of entities) {
      // Get the rig component
      const rig = getComponent(entity, AvatarRigComponent);

      // Process foot IK for ground adaptation
      processFootIK(entity, rig);

      // Process hand IK for object interaction
      processHandIK(entity, rig);

      // Process look-at IK for head tracking
      processLookAtIK(entity, rig);
    }
  }
});
```

This system:

- Processes all entities with avatar IK components
- Adjusts foot positions to match the ground surface
- Positions hands to interact with objects
- Orients the head to look at points of interest
- Applies these adjustments after standard animations
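Before the IK system can position a limb, something has to provide a target. Below is a minimal sketch of creating a hand target with the simplified `AvatarIKTargetComponent` shown earlier; the exact component fields and setup flow are assumptions for illustration, not the engine's definitive API.

```typescript
// Sketch: create an IK target entity that asks the right hand to reach a point in space
const handTargetEntity = createEntity();

// Where the hand should go (for example, a door handle's position)
setComponent(handTargetEntity, TransformComponent, {
  position: { x: 1.2, y: 1.0, z: 0.4 }
});

// Mark the entity as an IK target for the right hand at full influence
setComponent(handTargetEntity, AvatarIKTargetComponent, {
  targetType: "rightHand",
  weight: 1.0
});

// Lowering the weight over time would blend the hand back toward its animated pose
```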
## Avatar workflow

The process of creating and animating an avatar follows this general workflow:

```mermaid
sequenceDiagram
    participant Developer
    participant AssetSystem as Asset system
    participant AvatarSetup as Avatar setup
    participant InputSystem as Input system
    participant MovementSystem as Movement system
    participant AnimationSystem as Animation system
    participant IKSystem as IK system
    participant Renderer as Renderer

    Developer->>AssetSystem: Load avatar model (GLB/GLTF)
    AssetSystem->>AvatarSetup: Model loaded
    AvatarSetup->>AvatarSetup: Create avatar entity
    AvatarSetup->>AvatarSetup: Add AvatarComponent
    AvatarSetup->>AvatarSetup: Set up AvatarRigComponent
    AvatarSetup->>AvatarSetup: Initialize AnimationComponent
    AvatarSetup->>AvatarSetup: Add AvatarControllerComponent

    loop Each frame
        InputSystem->>AvatarSetup: Update controller input
        MovementSystem->>AvatarSetup: Calculate and apply movement
        AnimationSystem->>AvatarSetup: Update animation state
        AnimationSystem->>AvatarSetup: Play/blend animations
        IKSystem->>AvatarSetup: Apply IK adjustments
        Renderer->>Renderer: Render avatar
    end
```

This diagram illustrates:

- The initial setup phase where the avatar is created and configured
- The per-frame update cycle that processes input, movement, animations, and IK
- The flow of data between the different systems that bring the avatar to life

## Practical example

Let's create a simple character that can walk and wave:

```typescript
// Create an entity for our avatar
const characterEntity = createEntity();

// Add the basic avatar component
setComponent(characterEntity, AvatarComponent, {
  avatarHeight: 1.8 // 1.8 meters tall
});

// Load the 3D model using GLTFComponent
setComponent(characterEntity, GLTFComponent, {
  src: "models/character.glb",
  castShadow: true,
  receiveShadow: true
});

// Add controller component for movement
setComponent(characterEntity, AvatarControllerComponent, {
  cameraEntity: mainCameraEntity,
  isJumping: false,
  isWalking: false,
  gamepadLocalInput: { x: 0, y: 0, z: 0 }
});

// The following components would typically be added automatically
// by systems after the model is loaded:
// - AvatarRigComponent (set up by analyzing the model's skeleton)
// - AnimationComponent (populated with animations from the model)
// - AvatarAnimationComponent (initialized with default state)

// To make the character walk forward
function moveCharacterForward() {
  const controller = getMutableComponent(characterEntity, AvatarControllerComponent);
  controller.gamepadLocalInput.z = 1; // Forward movement
  controller.isWalking = true;
}

// To make the character wave
function makeCharacterWave() {
  // This is simplified; real systems use animation graphs/layers
  const animState = getMutableComponent(characterEntity, AvatarAnimationComponent);
  animState.playEmote = "wave";
}
```

This example demonstrates:

- Creating an avatar entity with basic components
- Loading a character model with the `GLTFComponent`
- Setting up controller input for movement
- Triggering walking and waving animations

## Implementation details

### Rig setup

When a model is loaded, the system analyzes its skeleton to set up the rig:

```typescript
// Simplified concept from src/avatar/components/AvatarAnimationComponent.ts
function setupAvatarRig(avatarEntity, gltfModel) {
  // Create a new rig component
  setComponent(avatarEntity, AvatarRigComponent, {
    bonesToEntities: {}
  });

  // Get the rig component for updating
  const rig = getMutableComponent(avatarEntity, AvatarRigComponent);

  // Iterate through the model's bones
  for (const node of gltfModel.nodes) {
    if (node.isBone) {
      // Try to match the bone name to a standard name
      const standardBoneName = mapBoneNameToStandard(node.name);

      if (standardBoneName) {
        // Map the standard bone name to this bone entity
        rig.bonesToEntities[standardBoneName] = node.entity;
      }
    }
  }
}
```

This function:

- Creates an `AvatarRigComponent` for the avatar
- Analyzes the loaded model's skeleton
- Maps bone names to standardized names (e.g., "mixamo:LeftArm" → "leftUpperArm")
- Builds a dictionary of bone entities for animation and IK systems
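The `mapBoneNameToStandard` helper referenced above does the name translation. A minimal sketch is shown below; the lookup table and matching strategy are assumptions for illustration (the engine's real mapping covers many more bones and naming conventions, such as Mixamo and VRM rigs).

```typescript
// Hypothetical subset of a bone-name lookup table
const BONE_NAME_MAP: Record<string, string> = {
  'mixamo:Hips': 'hips',
  'mixamo:Spine': 'spine',
  'mixamo:Head': 'head',
  'mixamo:LeftArm': 'leftUpperArm',
  'mixamo:RightArm': 'rightUpperArm'
};

// Sketch: translate a model-specific bone name into a standard bone name
function mapBoneNameToStandard(boneName: string): string | undefined {
  // Exact match first
  if (BONE_NAME_MAP[boneName]) return BONE_NAME_MAP[boneName];

  // Fall back to a loose, case-insensitive match for unconventional rigs
  const lower = boneName.toLowerCase();
  const entry = Object.entries(BONE_NAME_MAP).find(([source]) =>
    lower.includes(source.split(':').pop()!.toLowerCase())
  );
  return entry?.[1];
}
```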
### Animation graph

The animation system uses an animation graph to manage animation states:

```typescript
// Simplified concept from src/avatar/animation/AvatarAnimationGraph.ts
function updateAnimationGraph(entity, deltaSeconds, mixer, animState) {
  // Get the current locomotion values
  const { x, y, z } = animState.locomotion;
  const speed = Math.sqrt(x * x + y * y + z * z);

  // Determine which animation to play based on speed
  if (speed < 0.1) {
    // Play idle animation
    playAnimation(mixer, "idle", 1.0);
    fadeOutAnimation(mixer, "walk");
  } else {
    // Play walk animation with intensity based on speed
    playAnimation(mixer, "walk", 1.0, speed);
    fadeOutAnimation(mixer, "idle");
  }

  // Handle emote animations (like waving)
  if (animState.playEmote) {
    playAnimation(mixer, animState.playEmote, 1.0);
    // Clear the emote after starting it
    animState.playEmote = null;
  }
}
```

This function:

- Analyzes the current movement state
- Determines which animations should play
- Manages blending between different animations
- Handles special animations like emotes

### Inverse kinematics

The IK system adjusts limb positions for realistic interactions:

```typescript
// Simplified concept from src/avatar/animation/TwoBoneIKSolver.ts
function solveTwoBoneIK(rootMatrix, upperBone, lowerBone, endBone, targetPosition, poleVector) {
  // Calculate the total chain length
  const upperLength = upperBone.length;
  const lowerLength = lowerBone.length;
  const totalLength = upperLength + lowerLength;

  // Get the root position
  const rootPosition = new Vector3().setFromMatrixPosition(rootMatrix);

  // Calculate the direction to the target
  const targetDirection = new Vector3().subVectors(targetPosition, rootPosition).normalize();

  // Calculate the distance to the target
  const targetDistance = rootPosition.distanceTo(targetPosition);

  // If the target is too far, extend the limb as far as possible
  if (targetDistance > totalLength) {
    // Fully extend the limb toward the target
    upperBone.quaternion.setFromUnitVectors(new Vector3(1, 0, 0), targetDirection);
    lowerBone.quaternion.set(0, 0, 0, 1); // No bend
  } else {
    // Calculate the joint angles using the law of cosines
    const upperAngle = Math.acos(
      (targetDistance * targetDistance + upperLength * upperLength - lowerLength * lowerLength) /
      (2 * targetDistance * upperLength)
    );
    const lowerAngle = Math.acos(
      (upperLength * upperLength + lowerLength * lowerLength - targetDistance * targetDistance) /
      (2 * upperLength * lowerLength)
    );

    // Apply the calculated angles to the bones
    // (simplified; the actual implementation would use quaternions and pole vectors)
    upperBone.rotation.y = upperAngle;
    lowerBone.rotation.y = Math.PI - lowerAngle;
  }
}
```

This function:

- Implements a two-bone IK solver (e.g., for arms or legs)
- Calculates the appropriate angles to reach a target position
- Handles cases where the target is beyond reach
- Applies the calculated rotations to the bones

## Benefits of the avatar system

The avatar system provides several key benefits:

- **Modular design**: Components can be added or removed to create different types of avatars
- **Animation flexibility**: The animation graph enables complex blending and transitions
- **Realistic movement**: IK systems ensure natural limb positioning and ground adaptation
- **Standardized interface**: The rig component provides a consistent way to work with different models
- **Separation of concerns**: Different systems handle specific aspects of avatar behavior
- **Extensibility**: New components and systems can be added to enhance avatar capabilities

These benefits make the avatar system a powerful tool for creating lifelike characters in virtual environments.

## Next steps

With an understanding of how characters are represented and animated, the next chapter explores how avatars interact with the world around them.

Next: Interaction system (docid: 9azrwtebxvwh1xk josaz)