145 changes: 110 additions & 35 deletions misty_human_interaction/HumanInteraction.js
@@ -1,3 +1,5 @@
/* ----------------- MISTY DIAGNOSTIC CAPTURE ----------------- */

// head pitch and yaw callback functions for face tracking
function _headYaw(data) {
misty.Set("headYaw", data.AdditionalResults[1], false);
@@ -6,59 +8,122 @@ function _headPitch(data) {
misty.Set("headPitch", data.AdditionalResults[1], false);
}


/* ------------------- DEFAULT CONFIGURATION ------------------ */
misty.Set("basePitch", 0, false);
misty.Set("skill", "FaceDetect", false);
misty.Set("callbackArgs", [], false);
misty.Set("faceDataLabel", "unknown person", false);
misty.Set("faceDataBearing", 0, false); // -13 right and +13 left
misty.Set("faceDataElevation", 0, false); // -13 up and +13 down




/* ----------------- MISTY INTERACTION SKILLS ----------------- */
function _FaceDetect (data){
// debugging function used to test whether the camera is able to recognize a face
// (to monitor, filter misty's debug messages by 'face_detect')
data = (typeof data.Label !== "undefined" && typeof data.bearing !== "undefined" && typeof data.elevation !== "undefined") ? data : {"Label": misty.Get("faceDataLabel"), "bearing": misty.Get("faceDataBearing"), "elevation": misty.Get("faceDataElevation")};

if (data.Label == "unknown person") {
misty.Debug("face_detect: unknown face detected");
misty.Debug(JSON.stringify(data));
}
else {
misty.Debug("face_detect: known face detected");
misty.Debug(JSON.stringify(data));
}
misty.Debug("success");
}

function _ReadOnFaceDetect(data, callbackArgs=misty.Get("callbackArgs")) {
// function that makes misty speak the passed callback args when a face is detected
misty.Speak(callbackArgs); // Speak is still not working on misty
misty.Debug("speaking: " + callbackArgs);
}

function _TrackFace(data){
// function to track a person's face once they have been identified and misty has said hello

data = (typeof data.Label !== "undefined" && typeof data.bearing !== "undefined" && typeof data.elevation !== "undefined") ? data : {"Label": misty.Get("faceDataLabel"), "bearing": misty.Get("faceDataBearing"), "elevation": misty.Get("faceDataElevation")};

const faceDetect = data.Label;
const bearing = data.bearing; // -13 right and +13 left
const elevation = data.elevation; // -13 up and +13 down

misty.Debug("bearing: " + bearing);

const update = faceDetect + " detected, following face...";
misty.Debug(update);

const headYaw = misty.Get("headYaw");
const headPitch = misty.Get("headPitch");
const yawRight = misty.Get("yawRight");
const yawLeft = misty.Get("yawLeft");
const pitchUp = misty.Get("pitchUp");
const pitchDown = misty.Get("pitchDown");

if (bearing != 0 && !(elevation < 7 && elevation > -7)) { // both pitch and yaw need adjusting: orient misty's head towards the user's face
misty.MoveHeadDegrees(headPitch + ((pitchDown - pitchUp) / 66) * elevation, 0, headYaw + ((yawLeft - yawRight) / 132) * bearing, 100); // adjust pitch and yaw based on the location of the face (100% velocity)
} else if (bearing != 0) {
if (Math.abs(bearing) > 2){ // if the bearing is offset from center by more than 2 units, rotate the entire robot to face the person
const direction = (bearing > 0) ? 1 : -1; // +1 for positive bearing, -1 for negative (used by the DriveTime call below if re-enabled)
//misty.DriveTime(0 /* linear velocity */, 100 * direction /* angular velocity */, 1500 /* time */); // rotate misty to the direction of the person
misty.MoveHeadDegrees(5 /* pitch */, 0 /* roll */, -((headYaw + (yawLeft - yawRight) / 132) * bearing)/10 /* yaw */, 100 /* velocity */); // rotate misty's head in the opposite direction to offset the rotation of the body
//misty.Pause(2000);
}
else {
misty.MoveHeadDegrees(0, 0, headYaw + ((yawLeft - yawRight) / 132) * bearing, 100);
}
} else {
misty.MoveHeadDegrees(headPitch + ((pitchDown - pitchUp) / 66) * elevation, 0, 0, 100);
}
misty.Pause(500);

}
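// Worked example of the scaling above, using hypothetical limit values (the real
// ones come from initiateHeadPhysicalLimitVariables()): with yawLeft = 81 and
// yawRight = -81, a bearing of +13 (face far to the left) gives a target yaw of
// headYaw + ((81 - (-81)) / 132) * 13, roughly headYaw + 16 degrees.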




/* ----------------------- LIBRARY CLASS ---------------------- */
class HumanInteraction {
constructor() {
// arguments of the form: basePitch=0, skill="FaceDetect", callbackArgs=[]
if (typeof arguments[0] == "number") {
misty.Set("basePitch", arguments[0], false);
}
else if (typeof arguments[0] == "string") {
misty.Set("skill", arguments[0], false);
}
else if (typeof arguments[0] == "object") {
callbackArgs = arguments[0];
misty.Set("callbackArgs", arguments[0], false);
}

if (typeof arguments[1] == "function") {
this.skill = arguments[1];
if (typeof arguments[1] == "string") {
misty.Set("skill", arguments[1], false);
}
else if (typeof arguments[1] == "object") {
callbackArgs = arguments[1];
misty.Set("callbackArgs", arguments[1], false);
}

if (typeof arguments[2] !== "undefined") {
misty.Set("callbackArgs", arguments[2], false);
}

// Global variables store the current pitch and yaw position of the head
// 'basePitch' is the default base angle of the head pitch (usually adjusted to make misty look up when it is sitting on the ground)
misty.Debug("Centering Head");
misty.MoveHeadDegrees(-misty.Get("basePitch"), 0, 0, null, 0.5);
misty.Set("headYaw", 0.0, false);
misty.Set("headPitch", -basePitch, false);
misty.Set("headPitch", -misty.Get("basePitch"), false);

// register listener for head yaw position from ActuatorPosition events
function registerYaw()
@@ -91,27 +156,37 @@
return 0;
}
initiateHeadPhysicalLimitVariables();
}

// define a method to register the face recognition events
registerFaceRec(debounce=250){
// Cancels any face recognition that's currently underway
misty.StopFaceRecognition();
// Starts face recognition
misty.StartFaceRecognition();

misty.Debug("registered");

misty.AddPropertyTest("FaceRec", "Label", "exists", "", "string"); // AddPropertyTest adds a test to determine which data will be sent to the ev
misty.RegisterEvent("FaceRec", "FaceRecognition", debounce, true); // RegisterEvent to register an event for face recognition (see callback func
}

}

/* ------------------ MAIN CALLBACK FUNCTION ------------------ */
function _FaceRec(data) { // FaceRec callback function
misty.Debug("this part is working");
FaceDetect(data);
misty.Debug("running skill: " + misty.Get("skill"));
//misty.Set("faceData", data.PropertyTestResults[0].PropertyParent, false);
misty.Set("faceDataLabel", data.PropertyTestResults[0].PropertyParent.Label, false);
misty.Set("faceDataBearing", data.PropertyTestResults[0].PropertyParent.Bearing/2, false); // -13 right and +13 left
misty.Set("faceDataElevation", data.PropertyTestResults[0].PropertyParent.Elevation/2, false); // -13 up and +13 down

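// the timer event name matches the stored skill, so Misty calls the matching "_<skill>" callback (e.g. _TrackFace) after 800 ms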
misty.RegisterTimerEvent(misty.Get("skill"), 800, false);
}

const test = new HumanInteraction(10, "TrackFace", []);
test.registerFaceRec(800);
7 changes: 7 additions & 0 deletions misty_video_tools/readme.md
@@ -0,0 +1,7 @@
# How to use video stream
- Upload the video tools script to the Misty and run it (a rough sketch of what that script might do appears below this list)
- Open the VLC media player
- Go to Media > Stream
- Click on the Network tab
- Where it asks for a URL, enter rtsp://*__ip of the robot__*:1935
- Click the Stream button near the bottom
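
The video tools script itself is not shown in this diff. As a rough, hypothetical sketch (assuming the Misty JavaScript SDK's `misty.EnableAvStreamingService()` and `misty.StartAvStreaming()` calls; check the current Misty API docs for the exact parameters), it presumably does something like:

```js
// Hypothetical sketch only; the real misty_video_tools script may differ.
// Enable Misty's AV streaming service and serve an RTSP stream on port 1935,
// which VLC can then open at rtsp://<robot ip>:1935.
misty.EnableAvStreamingService();
misty.StartAvStreaming("rtspd:1935", 640, 480); // width and height are illustrative values
misty.Debug("RTSP stream being served on port 1935");
```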