diff --git a/README.md b/README.md
index 733d445..0c8dbf7 100644
--- a/README.md
+++ b/README.md
@@ -12,6 +12,10 @@ The library allows you to easily generate videos like this (source code is [here

+or like this (after installing a few more [dependencies](docs/human.md); [source code](examples/05_teaser_with_human.py)):
+
+![Human teaser](docs/human_teaser.gif)
+
# Installation
## From
@@ -68,6 +72,9 @@ scene.reset_camera() # reset camera such that you can control it with your mous
For complete examples of object and robot interface see our two examples: [01_object.py](examples/01_objects.py)
and [02_robots.py](examples/02_robots.py).
+It is also possible to visualize a human model after installing a few more dependencies;
+see the [installation instructions](docs/human.md) and the example [06_human.py](examples/06_human.py).
+
## Animation
This library allows you to animate all the properties of the objects and robots (e.g. position, robot configuration,
diff --git a/docs/human.md b/docs/human.md
new file mode 100644
index 0000000..a4d7969
--- /dev/null
+++ b/docs/human.md
@@ -0,0 +1,79 @@
+# Visualizing a human model in MeshCat via SMPL-X
+
+## Installation
+
+The SMPL-X library and model data are not included in the standard RoboMeshCat package.
+They must be installed as described in the [SMPL-X package](https://github.com/vchoutas/smplx), i.e. by running
+
+```bash
+pip install smplx[all]
+```
+
+and then downloading the model data from [here](https://smpl-x.is.tue.mpg.de/); we use the SMPL-X v1_1 data from that webpage.
+The path to the models must be specified when creating an instance of `Human`.
+In the examples, the following folder structure is assumed:
+
+```bash
+examples/
+ models/ # this is a folder downloaded from the webpage above
+ smplx/
+ SMPLX_FEMALE.npz
+ SMPLX_FEMALE.pkl
+ SMPLX_MALE.npz
+ ...
+ 06_human.py # this is an example script
+```
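+
+With this layout, the model path can be built relative to the example script and passed to `Human` (a minimal sketch mirroring the examples; the exact folder layout above is an assumption):
+
+```python
+from pathlib import Path
+
+from robomeshcat import Scene, Human
+
+# Path to the 'smplx' folder downloaded from the SMPL-X webpage (layout assumed above).
+smplx_models_path = str(Path(__file__).parent.joinpath('models').joinpath('smplx'))
+
+scene = Scene()
+human = Human(model_path=smplx_models_path) # model_path is forwarded to the smplx constructor
+scene.add_object(human)
+```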
+
+## Usage
+
+The human model functionality is available through the class `Human`.
+You can change the human's pose (i.e. position and orientation), color, visibility, etc. in the same way as for
+regular RoboMeshCat objects, and you can also change the body pose (i.e. configuration), shape, and expression
+through the instance of the class.
+For a complete example, have a look at [06_human.py](../examples/06_human.py).
+
+### Online usage
+
+```python
+# Assumes `import torch` and that `scene`, `human_default_pose`, and `smplx_models_path` are defined as in 06_human.py.
+human = Human(pose=human_default_pose, color=[1., 0., 0.], model_path=smplx_models_path)
+scene.add_object(human) # add the human to the scene, it will be visualized immediately
+
+# Recompute the vertices for a randomly sampled facial expression and update the viewer.
+human.update_vertices(vertices=human.get_vertices(expression=torch.randn([1, 10])))
+
+# Modify the SMPL-X body pose directly and refresh the geometry.
+human.smplx_model.body_pose.data += 0.1
+human.update_vertices()
+```
+
+### Animation
+
+The function `update_vertices` cannot be used in an animation because it modifies the geometry of the object internally.
+Instead, you need to use 'morphologies', which must be specified before adding the human to the scene:
+
+```python
+human = Human(pose=human_default_pose, color=[1., 0., 0.], model_path=smplx_models_path)
+human.smplx_model.betas.data += 1
+human.add_morph(human.get_vertices()) # the first morph changes the shape
+
+human.smplx_model.body_pose.data += 0.1
+human.add_morph(human.get_vertices()) # the second morph changes the body pose
+
+scene.add_object(human) # add human to the scene, no morphology can be added/modified after this step
+
+"Let's animate"
+with scene.animation(fps=1):
+ human.display_morph(None) # this will display the human shape that is not affected by morphologies
+ scene.render()
+
+ human.display_morph(0)
+ scene.render()
+
+ human.display_morph(1)
+ scene.render()
+```
+
+### Coloring of human
+
+You have two options to color the human mesh: (i) uniform color and (ii) per-vertex color.
+Uniform color is the default; it can be set with the `color` argument of `Human` and changed/animated via the `.color` property.
+
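+For instance, the uniform color can be changed inside an animation (a minimal sketch, assuming `human` and `scene` exist as above):
+
+```python
+with scene.animation(fps=1):
+    human.color = [0., 0.8, 0.] # animate the uniform color
+    scene.render()
+```
+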
+Per-vertex color is enabled with the argument `use_vertex_colors=True`; it cannot be animated, as it requires changing the geometry internally.
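+
+A minimal sketch of per-vertex coloring, following the pattern of [06_human.py](../examples/06_human.py) (assumes `scene`, `human_default_pose`, and `smplx_models_path` as above):
+
+```python
+import numpy as np
+
+# Per-vertex colors must be enabled when the human is created.
+human = Human(pose=human_default_pose, model_path=smplx_models_path, use_vertex_colors=True)
+scene.add_object(human)
+
+# Assign a random RGB color to every vertex and refresh the geometry (online use only, not in an animation).
+human.update_vertices(vertices_colors=np.random.rand(human.smplx_model.get_num_verts(), 3))
+```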
diff --git a/docs/human_teaser.gif b/docs/human_teaser.gif
new file mode 100644
index 0000000..cafea82
Binary files /dev/null and b/docs/human_teaser.gif differ
diff --git a/examples/05_teaser_with_human.py b/examples/05_teaser_with_human.py
new file mode 100644
index 0000000..9ca33ae
--- /dev/null
+++ b/examples/05_teaser_with_human.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python
+#
+# Copyright (c) CTU -- All Rights Reserved
+# Created on: 2022-12-8
+# Author: Vladimir Petrik
+#
+import time
+
+import numpy as np
+from pathlib import Path
+from example_robot_data.robots_loader import TalosFullLoader as TalosLoader
+import pinocchio as pin
+import torch
+from robomeshcat import Scene, Robot, Human
+
+scene = Scene()
+
+smplx_models_path = str(Path(__file__).parent.joinpath('models').joinpath('smplx'))
+pose = np.eye(4)
+human_default_pose = pin.exp6(np.array([0, 0, 0, np.pi / 2, 0., 0.])).homogeneous
+human_default_pose[2, 3] = -.2
+human = Human(pose=human_default_pose.copy(), color=[0.6] * 3, model_path=smplx_models_path)
+
+sad_face = torch.tensor([[-0.4969, -0.2114, 1.5251, 0.1503, 0.4488, 1.7344, 2.1032, -0.3620, -1.2451, 1.8487]])
+smile_face = torch.tensor([[3.4081, -1.1111, -1.4181, 0.5018, 0.0286, -0.5347, -0.0042, 0.1118, -0.2230, -0.1172]])
+neutral_face = torch.tensor([[-0.5131, 1.0546, 0.6059, -0.6452, 2.7049, 0.8512, 0.0777, 0.8451, -1.4651, 0.3700]])
+
+human.add_morph(human.get_vertices(expression=sad_face))
+human.add_morph(human.get_vertices(expression=smile_face))
+human.add_morph(human.get_vertices(expression=neutral_face))
+# add some dancing morphs
+human.add_morph(human.get_vertices(body_pose=torch.randn(human.smplx_model.body_pose.shape) * 0.1))
+human.add_morph(human.get_vertices(body_pose=torch.randn(human.smplx_model.body_pose.shape) * 0.1))
+human.add_morph(human.get_vertices(body_pose=torch.randn(human.smplx_model.body_pose.shape) * 0.1))
+scene.add_object(human)
+
+talos = Robot(urdf_path=TalosLoader().df_path, mesh_folder_path=Path(TalosLoader().model_path).parent.parent)
+scene.add_robot(talos)
+talos.pos[0] = 1.
+talos.pos[2] = 1.075
+talos.rot = pin.utils.rotate('z', np.deg2rad(-90))
+talos.opacity = 0.
+
+q1 = np.array(
+ [0.57943216, -0.1309057, -0.75505065, 0.78430028, -0.61956061, -0.27349631, 0.13615252, 0.0711049, -0.03615876,
+ 0.49826378, 0.17217602, 0.50618769, 0.44123115, -0.02707293, -1.18121182, 0.30893653, 2.01942401, -2.13127587,
+ -0.10865551, 0.30782173, -0.58293303, -0.23586322, 0.42843663, 0.3494325, 0.52727565, 0.50386685, -0.48822942,
+ 0.09145592, -0.6189864, -0.09982653, -0.33399487, -0.99386967, -0.78832615, 1.12503886, 0.4816953, -0.33853157,
+ 0.15645548, 0.77799908, 0.25617193, 0.92783777, -0.06406897, 1.03065562, 0.65546472, 0.28488222])
+
+q2 = np.array(
+ [0.67953954, -0.23498704, -0.30815908, 1.26050064, -0.75429557, 0.39308716, -0.09183746, -0.3519678, 0.6029438,
+ 1.92670204, -0.85517111, 0.31218583, 1.12134325, -0.08521749, -0.2414049, 0.41116012, 2.19232313, -0.13271861,
+ 0.13766665, 0.79690452, -0.64291739, -1.02337668, 0.74399798, 0.32299157, 0.25029159, 0.81949992, -0.4262274,
+ 0.61293056, 0.01760217, -2.08710036, 0.20761188, -0.27267571, 0.2487861, -0.8711323, -0.19324595, -0.19482248,
+ 0.06016944, 0.13445533, 1.02400687, 0.02380557, -0.13022461, 0.19958255, 0.60717046, 0.81290787])
+
+q0 = np.zeros_like(q1)
+scene.render()
+
+with scene.animation(fps=1):
+ scene.camera_pos = [0, -0.3, 0.2]
+ scene.render()
+ human.display_morph(0)
+ scene.render()
+ human.display_morph(1)
+ scene.render()
+ human.display_morph(2)
+ human.pos[0] = -.5
+ human.pos[2] = 1.3
+ scene.camera_pos = [0, -2., 2.5]
+ scene.render()
+ talos.opacity = 1.
+ scene.render()
+
+ for _ in range(2):
+ talos[:] = q1
+ human.display_morph(3)
+ scene.render()
+ talos[:] = q2
+ human.display_morph(4)
+ scene.render()
+ talos[:] = q0
+ human.display_morph(5)
+ scene.render()
+
+ human.display_morph(None)
+ human.pose = human_default_pose
+ scene.camera_pos = [0, -0.3, 0.2]
+ scene.render()
+
+time.sleep(3.)
diff --git a/examples/06_human.py b/examples/06_human.py
new file mode 100644
index 0000000..9558ddc
--- /dev/null
+++ b/examples/06_human.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python
+#
+# Copyright (c) CTU -- All Rights Reserved
+# Created on: 2022-11-30
+# Author: Vladimir Petrik
+#
+from pathlib import Path
+import numpy as np
+import pinocchio as pin
+import torch
+
+from robomeshcat import Scene, Human
+
+"This examples show how to use the human model and how to animate its pose and color. "
+"We show three case studies: "
+"(i) the first case study shows how to manipulate human online, i.e. without animation"
+"(ii) this case study shows how to animate pose and color of the human in case of uniform color"
+"(iii) this case study shows how use per vertex color of the human (only in online mode, no animation yet!)"
+
+case_study = 2 # choose which case study to visualize
+scene = Scene()
+"Set smplx_models_path to the directory where are your SMPLX models"
+smplx_models_path = str(Path(__file__).parent.joinpath('models').joinpath('smplx'))
+human_default_pose = pin.exp6(np.array([0, 0, 0, np.pi / 2, 0., 0.])).homogeneous
+human_default_pose[2, 3] = 1.2
+
+if case_study == 0:
+ "First let's create the human, arguments are forward to smplx constructor, so you can adjust the human model args"
+ human = Human(pose=human_default_pose, color=[1., 0., 0.], model_path=smplx_models_path)
+ scene.add_object(human) # add human to the scene, it will be visualized immediately
+
+ input('Press enter to change the body pose and shape of the human')
+ human.smplx_model.body_pose.data += 0.1 # modify the pose
+ human.smplx_model.betas.data += 0.1 # modify shape params
+ human.smplx_model.expression.data += 0.1 # modify expression param
+    human.update_vertices() # recreate the geometry to update the viewer; this is allowed only in online use
+
+ input('Press enter to change the color, opacity, and position of the human')
+ human.pos[0] = 1.
+ human.color = [0, 1, 0]
+ human.opacity = 0.5
+ input('Press enter to hide the model and exit.')
+ human.hide()
+
+elif case_study == 1:
+ human = Human(pose=human_default_pose, color=[1., 0., 0.], model_path=smplx_models_path)
+
+    # You need to create all the animation poses of the human before adding the human to the scene.
+    # They are called morphologies of the pose.
+ human.smplx_model.betas.data += 1
+ human.add_morph(human.get_vertices()) # the first morph changes the shape
+
+ human.smplx_model.body_pose.data += 0.1
+    human.add_morph(human.get_vertices()) # the second morph changes the body pose
+
+ scene.add_object(human) # add human to the scene, no morphology can be added/modified after this step
+
+ "Let's animate"
+ with scene.animation(fps=1):
+ human.display_morph(None) # this will display the human shape that is not affected by morphologies
+ scene.render()
+
+ human.display_morph(0)
+ scene.render()
+
+ human.display_morph(1)
+ scene.render()
+
+ human.color = [0, 0.8, 0]
+ scene.render()
+
+ # You can also change the .pos, .rot, .opacity, .visible, in animation
+elif case_study == 2:
+    # To have per-vertex colors, use the argument use_vertex_colors=True
+ human = Human(pose=human_default_pose, color=[1., 0., 0.], model_path=smplx_models_path, use_vertex_colors=True)
+ scene.add_object(human)
+
+ input('press enter to change colors to random')
+ human.update_vertices(vertices_colors=np.random.rand(human.smplx_model.get_num_verts(), 3))
+
+ input('press enter to change colors to blue')
+ human.update_vertices(vertices_colors=[[0., 0., 0.75]] * human.smplx_model.get_num_verts())
+
+ input('press enter to display wireframe')
+ human._show_wireframe = True
+ human.update_vertices(vertices_colors=[[0., 0., 0.]] * human.smplx_model.get_num_verts())
+
+    input('press enter to sample new expressions and record them into a video; rotate the camera manually to see the face')
+ # human.update_vertices(vertices=human.get_vertices(betas=torch.randn([1, 10])))
+ human._show_wireframe = False
+ human._vertex_colors[:] = 0.6
+ with scene.video_recording(filename='/tmp/face_expression.mp4', fps=1):
+ for _ in range(10):
+ human.update_vertices(vertices=human.get_vertices(expression=torch.randn([1, 10])))
+ scene.render()
diff --git a/src/robomeshcat/__init__.py b/src/robomeshcat/__init__.py
index c46cd60..62215e0 100644
--- a/src/robomeshcat/__init__.py
+++ b/src/robomeshcat/__init__.py
@@ -1,3 +1,4 @@
from .object import Object
from .robot import Robot
from .scene import Scene
+from .human import Human
diff --git a/src/robomeshcat/human.py b/src/robomeshcat/human.py
new file mode 100644
index 0000000..52220b3
--- /dev/null
+++ b/src/robomeshcat/human.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+#
+# Copyright (c) CTU -- All Rights Reserved
+# Created on: 2022-12-6
+# Author: Vladimir Petrik
+#
+
+import numpy as np
+from typing import Optional, List
+from meshcat import geometry as g
+
+from . import Object
+
+
+class Human(Object):
+
+ def __init__(self, pose=None, color: Optional[List[float]] = None, opacity: float = 1., name: str = None,
+ use_vertex_colors=False, show_wireframe=False, **kwargs) -> None:
+ from smplx import SMPLX # we import SMPLX here on purpose, so that smplx dependencies are optional
+ self.smplx_model = SMPLX(**kwargs)
+ super().__init__(None, pose, color if not use_vertex_colors else [1, 1, 1], None, opacity, name)
+
+ self._use_vertex_colors = use_vertex_colors
+ self._show_wireframe = show_wireframe
+ clr = self._color_from_input(color)[np.newaxis, :]
+ self._vertex_colors = np.repeat(clr, self.smplx_model.get_num_verts(), 0) if use_vertex_colors else None
+ self.update_vertices(set_object=False) # this will create a geometry
+
+ "Additional properties that are modifiable "
+ self._morph_target_influences = None
+
+ @property
+ def _material(self):
+ mat = super()._material
+ mat.wireframe = self._show_wireframe
+ mat.vertexColors = self._use_vertex_colors
+ return mat
+
+ def get_vertices(self, **kwargs):
+ """Return vertices of the mesh for the given smplx parameters."""
+ output = self.smplx_model(return_verts=True, **kwargs)
+ return output.vertices.detach().cpu().numpy().squeeze()
+
+ def update_vertices(self, vertices=None, vertices_colors=None, set_object=True):
+ if self._is_animation():
+            print('Updating the vertices of the mesh recreates the geometry, which cannot be done in an animation; '
+                  'use the add_morph function instead.')
+ return
+ self._geometry = TriangularMeshGeometryWithMorphAttributes(
+ vertices=np.asarray(vertices) if vertices is not None else self.get_vertices(),
+ faces=self.smplx_model.faces,
+ color=np.asarray(vertices_colors) if vertices_colors is not None else self._vertex_colors,
+ morph_positions=[], morph_colors=[],
+ )
+ if set_object:
+ self._set_object()
+
+ def add_morph(self, vertices, vertex_colors=None):
+ """Add new morphology through which we can create animations."""
+ self._geometry.morph_positions.append(vertices)
+ if vertex_colors is not None:
+ self._geometry.morph_colors.append(np.asarray(vertex_colors))
+
+ def display_morph(self, morph_id):
+ """Set morphTargetInfluences to display only the given morph_id. """
+ self._morph_target_influences = [0] * self._geometry.number_of_morphs()
+ if morph_id is not None:
+ self._morph_target_influences[morph_id] = 1
+
+ def _set_morph_property(self):
+ if self._morph_target_influences is None:
+ self._morph_target_influences = [0] * len(self._geometry.morph_positions)
+ self._set_property('morphTargetInfluences', self._morph_target_influences, 'vector')
+
+ def _reset_all_properties(self):
+ super()._reset_all_properties()
+ self._set_morph_property()
+
+
+class TriangularMeshGeometryWithMorphAttributes(g.TriangularMeshGeometry):
+ def __init__(self, morph_positions=None, morph_colors=None, **kwargs):
+ super(TriangularMeshGeometryWithMorphAttributes, self).__init__(**kwargs)
+ self.morph_positions = morph_positions
+ self.morph_colors = morph_colors
+
+ def number_of_morphs(self) -> int:
+ return max(len(self.morph_colors), len(self.morph_positions))
+
+ def lower(self, object_data):
+ ret = super(TriangularMeshGeometryWithMorphAttributes, self).lower(object_data=object_data)
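+        # Pack the morph targets into the three.js 'morphAttributes' field of the serialized geometry.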
+ ret[u"data"][u"morphAttributes"] = {}
+ if self.morph_positions is not None:
+ ret[u"data"][u"morphAttributes"][u"position"] = [g.pack_numpy_array(pos.T) for pos in self.morph_positions]
+ if self.morph_colors is not None:
+ ret[u"data"][u"morphAttributes"][u"color"] = [g.pack_numpy_array(c.T) for c in self.morph_colors]
+ return ret
diff --git a/src/robomeshcat/object.py b/src/robomeshcat/object.py
index 7a2db7f..55e2679 100644
--- a/src/robomeshcat/object.py
+++ b/src/robomeshcat/object.py
@@ -97,6 +97,8 @@ def _is_animation(self):
@property
def _material(self):
+ if isinstance(self._geometry, g.Object):
+ return None
if self._texture is not None:
return g.MeshLambertMaterial(map=self._texture, opacity=self.opacity)
color = self.color.copy() * 255