pyhri (Python)

Python wrapper library for the ROS4HRI (https://www.ros.org/reps/rep-0155.html) framework.

Each exported object is documented; to view its documentation, use print(<class>.__doc__) and/or help(<class>). The main entry point of the library is the HRIListener class.
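
For orientation, here is a minimal usage sketch. It follows the HRIListener constructor signature documented below; the node name and the polling loop are purely illustrative:

    import time
    from pyhri import HRIListener

    # Spawn a listener node that subscribes to the ROS4HRI topics.
    hri_listener = HRIListener("pyhri_example")

    # Poll the currently tracked features for a few seconds.
    for _ in range(10):
        for face_id, face in hri_listener.faces.items():
            print(f"face <{face_id}>: RoI={face.roi}, expression={face.expression}")
        for person_id, person in hri_listener.persons.items():
            print(f"person <{person_id}>: engagement={person.engagement_status}")
        time.sleep(1.0)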

class pyhri.Body

Bases: FeatureTracker

The body feature instance being tracked.

This class should be created and managed only by HRIListener; it is exposed for read access only. It inherits from FeatureTracker; check its documentation for additional properties. All its properties may return None if the information is not available.

property cropped: numpy.ndarray | None

Body image, cropped from the source image (numpy.ndarray)

property roi: Tuple[float, float, float, float] | None

Normalized 2D region of interest (RoI) of the body (Tuple (x,y,width,height))

property skeleton: Dict[SkeletalKeypoint, PointOfInterest] | None

2D skeleton keypoints (Dict[SkeletalKeypoint, PointOfInterest])
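
As a sketch of read access (assuming an HRIListener instance named hri_listener, as in the introduction), the tracked bodies' RoIs and skeletons can be inspected like this:

    from pyhri import SkeletalKeypoint

    for body_id, body in hri_listener.bodies.items():
        # Both properties may be None if the information is not yet available.
        if body.roi is not None:
            x, y, w, h = body.roi
            print(f"body <{body_id}> RoI: x={x:.2f} y={y:.2f} w={w:.2f} h={h:.2f}")
        if body.skeleton is not None and SkeletalKeypoint.NOSE in body.skeleton:
            print(f"body <{body_id}> nose keypoint: {body.skeleton[SkeletalKeypoint.NOSE]}")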

class pyhri.EngagementLevel(*values)

Bases: Enum

DISENGAGED = 0
DISENGAGING = 3
ENGAGED = 2
ENGAGING = 1

class pyhri.Expression(*values)

Bases: Enum

AMAZED = 23
ANGRY = 1
ANNOYED = 15
ASLEEP = 21
BORED = 19
CONFUSED = 22
DESPAIRED = 9
DISAPPOINTED = 11
DISGUSTED = 5
EMBARRASSED = 12
EXCITED = 24
FURIOUS = 16
GUILTY = 10
HAPPY = 3
HORRIFIED = 13
NEUTRAL = 0
PLEADING = 7
REJECTED = 18
SAD = 2
SCARED = 6
SKEPTICAL = 14
SURPRISED = 4
SUSPICIOUS = 17
TIRED = 20
VULNERABLE = 8

class pyhri.Face

Bases: FeatureTracker

The face feature instance being tracked.

This class should be created and managed only by HRIListener; it is exposed for read access only. It inherits from FeatureTracker; check its documentation for additional properties. All its properties may return None if the information is not available.

property age: float | None

Person’s age in years (float)

property aligned: numpy.ndarray | None

Face image, cropped and aligned from the source image (numpy.ndarray)

property cropped: numpy.ndarray | None

Face image, cropped from the source image (numpy.ndarray)

property expression: Expression | None

Face expression as a discrete state

property expression_confidence: float | None

Person’s expression confidence

property expression_va: ExpressionVA | None

Face expression as a continuous value in the circumplex model space (ExpressionVA)

property facial_action_units: Dict[FacialActionUnit, IntensityConfidence] | None

Facial action units (Dict[FacialActionUnit, IntensityConfidence])

property facial_landmarks: Dict[FacialLandmark, PointOfInterest] | None

Facial landmarks (Dict[FacialLandmark, PointOfInterest])

property gaze_transform: geometry_msgs.msg.TransformStamped | None

Gaze’s stamped 3D transform (geometry_msgs.msg.TransformStamped)

property gender: Gender | None

Person’s gender (Gender)

property roi: Tuple[float, float, float, float] | None

Normalized 2D region of interest (RoI) of the face (Tuple (x,y,width,height))
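
A short sketch of reading these properties (again assuming an HRIListener instance named hri_listener); every property is checked against None since the underlying information may not be published, and the 0.5 confidence threshold is arbitrary:

    from pyhri import Expression, FacialLandmark

    for face_id, face in hri_listener.faces.items():
        if face.expression == Expression.HAPPY and (face.expression_confidence or 0.0) > 0.5:
            print(f"face <{face_id}> looks happy")
        landmarks = face.facial_landmarks
        if landmarks is not None and FacialLandmark.NOSE in landmarks:
            print(f"face <{face_id}> nose landmark: {landmarks[FacialLandmark.NOSE]}")
        if face.gaze_transform is not None:
            print(f"face <{face_id}> gaze expressed in frame: {face.gaze_transform.header.frame_id}")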

class pyhri.FacialActionUnit(*values)

Bases: Enum

BROWS_AND_FOREHEAD_NOT_VISIBLE = 70
BROW_LOWERER = 4
CHEEK_BLOW = 33
CHEEK_PUFF = 34
CHEEK_RAISER = 6
CHEEK_SUCK = 35
CHEWING = 81
CHIN_RAISER = 17
CROSS_EYE = 66
DIMPLER = 14
ENTIRE_FACE_NOT_VISIBLE = 73
EYES_CLOSED = 43
EYES_DOWN = 64
EYES_NOT_VISIBLE = 71
EYES_POSITIONED_TO_LOOK_AT_OTHER_PERSON = 69
EYES_TURN_LEFT = 61
EYES_TURN_RIGHT = 62
EYES_UP = 63
FAST_UP_DOWN_LOOK = 94
FLASH = 91
HEAD_BACK = 58
HEAD_DOWN = 54
HEAD_FORWARD = 57
HEAD_NOD_UP_AND_DOWN = 84
HEAD_SHAKE_BACK_AND_FORTH = 83
HEAD_TILT_LEFT = 55
HEAD_TILT_RIGHT = 56
HEAD_TURN_LEFT = 51
HEAD_TURN_RIGHT = 52
HEAD_UP = 53
INNER_BROW_RAISER = 1
JAW_CLENCHER = 31
JAW_DROP = 26
JAW_SIDEWAYS = 30
JAW_THRUST = 29
LID_DROOP = 41
LID_TIGHTENER = 7
LIPS_PART = 25
LIPS_TOWARD_EACH_OTHER = 8
LIP_BITE = 32
LIP_CORNER_DEPRESSOR = 15
LIP_CORNER_PULLER = 12
LIP_FUNNELER = 22
LIP_PRESSOR = 24
LIP_PUCKER = 18
LIP_STRETCHER = 20
LIP_SUCK = 28
LIP_TIGHTENER = 23
LIP_WIPE = 37
LOWER_FACE_NOT_VISIBLE = 72
LOWER_LIP_DEPRESSOR = 16
MOUTH_STRETCH = 27
NASOLABIAL_DEEPENER = 11
NECK_TIGHTENER = 21
NEUTRAL_FACE = 0
NOSE_WRINKLER = 9
NOSTRIL_COMPRESSOR = 39
NOSTRIL_DILATOR = 38
OUTER_BROW_RAISER = 2
PARTIAL_FLASH = 92
SHARP_LIP_PULLER = 13
SHIVER_TREMBLE = 93
SHOULDER_SHRUG = 82
SLIT = 42
SNIFF = 40
SPEECH = 50
SQUINT = 44
SWALLOW = 80
TONGUE_BULGE = 36
TONGUE_SHOW = 19
UNSOCIABLE = 74
UPPER_LID_RAISER = 5
UPPER_LIP_RAISER = 10
WALLEYE = 65
WINK = 46

class pyhri.FacialLandmark(*values)

Bases: Enum

LEFT_EAR = 9
LEFT_EYEBROW_1 = 30
LEFT_EYEBROW_2 = 31
LEFT_EYEBROW_3 = 32
LEFT_EYEBROW_INSIDE = 33
LEFT_EYEBROW_OUTSIDE = 29
LEFT_EYE_BOTTOM_1 = 38
LEFT_EYE_BOTTOM_2 = 39
LEFT_EYE_INSIDE = 37
LEFT_EYE_OUTSIDE = 34
LEFT_EYE_TOP_1 = 35
LEFT_EYE_TOP_2 = 36
LEFT_PROFILE_1 = 10
LEFT_PROFILE_2 = 11
LEFT_PROFILE_3 = 12
LEFT_PROFILE_4 = 13
LEFT_PROFILE_5 = 14
LEFT_PROFILE_6 = 15
LEFT_PROFILE_7 = 16
LEFT_PUPIL = 40
MENTON = 8
MOUTH_INNER_BOTTOM_1 = 67
MOUTH_INNER_BOTTOM_2 = 68
MOUTH_INNER_BOTTOM_3 = 69
MOUTH_INNER_LEFT = 66
MOUTH_INNER_RIGHT = 62
MOUTH_INNER_TOP_1 = 63
MOUTH_INNER_TOP_2 = 64
MOUTH_INNER_TOP_3 = 65
MOUTH_OUTER_BOTTOM_1 = 57
MOUTH_OUTER_BOTTOM_2 = 58
MOUTH_OUTER_BOTTOM_3 = 59
MOUTH_OUTER_BOTTOM_4 = 60
MOUTH_OUTER_BOTTOM_5 = 61
MOUTH_OUTER_LEFT = 56
MOUTH_OUTER_RIGHT = 50
MOUTH_OUTER_TOP_1 = 51
MOUTH_OUTER_TOP_2 = 52
MOUTH_OUTER_TOP_3 = 53
MOUTH_OUTER_TOP_4 = 54
MOUTH_OUTER_TOP_5 = 55
NOSE = 44
NOSE_1 = 42
NOSE_2 = 43
NOSTRIL_1 = 45
NOSTRIL_2 = 46
NOSTRIL_3 = 47
NOSTRIL_4 = 48
NOSTRIL_5 = 49
RIGHT_EAR = 0
RIGHT_EYEBROW_1 = 18
RIGHT_EYEBROW_2 = 19
RIGHT_EYEBROW_3 = 20
RIGHT_EYEBROW_INSIDE = 21
RIGHT_EYEBROW_OUTSIDE = 17
RIGHT_EYE_BOTTOM_1 = 26
RIGHT_EYE_BOTTOM_2 = 27
RIGHT_EYE_INSIDE = 25
RIGHT_EYE_OUTSIDE = 22
RIGHT_EYE_TOP_1 = 23
RIGHT_EYE_TOP_2 = 24
RIGHT_PROFILE_1 = 1
RIGHT_PROFILE_2 = 2
RIGHT_PROFILE_3 = 3
RIGHT_PROFILE_4 = 4
RIGHT_PROFILE_5 = 5
RIGHT_PROFILE_6 = 6
RIGHT_PROFILE_7 = 7
RIGHT_PUPIL = 28
SELLION = 41

class pyhri.FeatureTracker

Bases: object

The generic feature instance being tracked.

This class should be created and managed only by HRIListener; it is exposed for read access only. All its properties may return None if the information is not available.

property frame: str

Name of the tf frame that corresponds to this feature

property id: str

Unique ID of this feature

property ns: str

Fully-qualified topic namespace under which this feature is published

property transform: geometry_msgs.msg.TransformStamped | None

Feature's stamped 3D transform (geometry_msgs.msg.TransformStamped)

property valid: bool

Whether the feature is still ‘valid’, i.e., existing
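
Since all tracked features (faces, bodies, persons, voices) share these properties, a small helper can summarize any of them. This is only a sketch; the function name and output format are illustrative:

    def describe_feature(feature) -> str:
        """Summarize the common FeatureTracker properties of a tracked feature."""
        if not feature.valid:
            return f"id={feature.id} (no longer valid)"
        parts = [f"id={feature.id}", f"ns={feature.ns}", f"frame={feature.frame}"]
        if feature.transform is not None:
            # TransformStamped -> Transform -> Vector3 translation
            t = feature.transform.transform.translation
            parts.append(f"position=({t.x:.2f}, {t.y:.2f}, {t.z:.2f})")
        return ", ".join(parts)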

class pyhri.Gender(*values)

Bases: Enum

FEMALE = 0
MALE = 1
OTHER = 2

class pyhri.HRIListener(node_name: str, auto_spin: bool = True, use_sim_time: bool = False)

Bases: object

Main entry point to the library.

The class must be instantiated through the factory function create. It will spawn a ROS node and use it to subscribe to all the ROS4HRI topics. The tracked features' information can then be accessed as native Python objects through this object's properties.

property bodies: Dict[str, Body]

Currently tracked bodies (Dict[str, Body])

property faces: Dict[str, Face]

Currently tracked faces (Dict[str, Face])

on_body(callback: Callable[[Body], None])

Registers a callback function, to be invoked every time a new body is tracked

on_body_lost(callback: Callable[[str], None])

Registers a callback function, to be invoked every time a tracked body is lost

on_face(callback: Callable[[Face], None])

Registers a callback function, to be invoked every time a new face is tracked

on_face_lost(callback: Callable[[str], None])

Registers a callback function, to be invoked every time a tracked face is lost

on_person(callback: Callable[[Person], None])

Registers a callback function, to be invoked every time a new person becomes known

on_person_lost(callback: Callable[[str], None])

Registers a callback function, to be invoked every time a known person is forgotten

on_tracked_person(callback: Callable[[Person], None])

Registers a callback function, to be invoked every time a new person is tracked

on_tracked_person_lost(callback: Callable[[str], None])

Registers a callback function, to be invoked every time a tracked person is lost

on_voice(callback: Callable[[Voice], None])

Registers a callback function, to be invoked every time a new voice is tracked

on_voice_lost(callback: Callable[[str], None])

Registers a callback function, to be invoked every time a tracked voice is lost

property persons: Dict[str, Person]

Currently known persons (Dict[str, Person])

set_reference_frame(frame: str)

Selects the reference frame for all the transform properties

spin_all(timeout: float)

If the class' node does not spin automatically (i.e., the listener was constructed with auto_spin=False), this function must be called regularly to spin it manually. Internally it calls rclcpp::executors::SingleThreadedExecutor::spin_all()

property tracked_persons: Dict[str, Person]

Currently tracked persons (Dict[str, Person])

property voices: Dict[str, Voice]

Currently tracked voices (Dict[str, Voice])
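
Callback registration and manual spinning can be combined as in the following sketch; the callback names, the reference frame and the timing values are illustrative:

    import time
    from pyhri import HRIListener

    def on_new_face(face):
        print(f"new face tracked: {face.id}")

    def on_face_lost(face_id):
        print(f"face lost: {face_id}")

    # With auto_spin=False, the node must be spun manually via spin_all().
    hri_listener = HRIListener("pyhri_callbacks_example", auto_spin=False)
    hri_listener.set_reference_frame("base_link")
    hri_listener.on_face(on_new_face)
    hri_listener.on_face_lost(on_face_lost)

    while True:  # loop until interrupted (e.g., Ctrl-C)
        hri_listener.spin_all(0.1)  # process pending ROS4HRI messages and callbacks
        time.sleep(0.1)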

class pyhri.Person

Bases: FeatureTracker

The person feature instance being tracked or known.

This class should be created and managed only by HRIListener; it is exposed for read access only. It inherits from FeatureTracker; check its documentation for additional properties. All its properties may return None if the information is not available.

property alias: str | None

ID of another Person object associated with the same person (str)

property anonymous: bool

Whether the person has not been identified yet (bool)

property body: Body | None

Body associated with the person (Body)

property engagement_status: EngagementLevel | None

Current engagement status with the robot (EngagementLevel)

property face: Face | None

Face associated with the person (Face)

property location_confidence: float

Confidence of the person's transform estimate (float in [0., 1.])

property voice: Voice | None

Voice associated with the person (Voice)
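
As a sketch (assuming an HRIListener instance named hri_listener), the tracked persons can be filtered by engagement and linked back to their associated face:

    from pyhri import EngagementLevel

    for person_id, person in hri_listener.tracked_persons.items():
        if person.anonymous:
            continue  # skip persons that have not been identified yet
        if person.engagement_status == EngagementLevel.ENGAGED:
            age = person.face.age if person.face is not None else None
            print(f"person <{person_id}> is engaged "
                  f"(age estimate: {age}, location confidence: {person.location_confidence:.2f})")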

class pyhri.SkeletalKeypoint(*values)

Bases: Enum

LEFT_ANKLE = 13
LEFT_EAR = 16
LEFT_ELBOW = 6
LEFT_EYE = 14
LEFT_HIP = 11
LEFT_KNEE = 12
LEFT_SHOULDER = 5
LEFT_WRIST = 7
NECK = 1
NOSE = 0
RIGHT_ANKLE = 10
RIGHT_EAR = 17
RIGHT_ELBOW = 3
RIGHT_EYE = 15
RIGHT_HIP = 8
RIGHT_KNEE = 9
RIGHT_SHOULDER = 2
RIGHT_WRIST = 4

class pyhri.Voice

Bases: FeatureTracker

The voice feature instance being tracked.

This class should be created and managed only by HRIListener; it is exposed for read access only. It inherits from FeatureTracker; check its documentation for additional properties. All its properties may return None if the information is not available.

property incremental_speech: str | None

Last recognised incremental sentence (str)

property is_speaking: bool

Whether speech is currently detected in this voice (bool)

property locale: str | None

Last recognised speech locale (str)

on_incremental_speech(callback: Callable[[str, str], None])

Registers a callback function, to be invoked every time an incremental sentence is detected

on_speaking(callback: Callable[[bool], None])

Registers a callback function, to be invoked every time speech is detected

on_speech(callback: Callable[[str, str], None])

Registers a callback function, to be invoked every time a final sentence is detected

property speech: str | None

Last recognised final sentence (str)
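
A final sketch registering speech callbacks on the tracked voices (assuming an HRIListener instance named hri_listener). The on_speech callback receives two strings, whose exact semantics are not specified above, so they are printed as-is:

    def on_final_speech(*args):
        print(f"final speech result: {args}")

    def on_speaking_changed(is_speaking):
        print("speech detected" if is_speaking else "speech stopped")

    for voice_id, voice in hri_listener.voices.items():
        voice.on_speech(on_final_speech)
        voice.on_speaking(on_speaking_changed)
        if voice.speech is not None:
            print(f"voice <{voice_id}> last sentence ({voice.locale}): {voice.speech}")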