nannon-Body

Face Face Revolution 

Vimeo

(sadly the "levels" label disappears!!)

 
 
 

-----
GIF

 
 
 

----
Still

 
 
 
 

Though I was super impressed/awed by all the examples shown in class of digital interactive artworks, I personally had a hard time coming up with an idea for this project. My DDR concept still seems a bit boring to me, but I'm glad I got it to work in a reasonable amount of time (relative to how stressed I was about previous assignments). The game itself is mildly fun, if a little hard. In coming up with this concept, I wanted to make sure I wasn't scaling out of proportion, so I looked at the templates carefully first before trying to figure out what to do, which really helped me scope down my project. Moving forward, if this project were to be improved, I think it'd be really fun to try to get people to contort their faces to match what comes up in the grid--though I can imagine that being 1) really hard to play and 2) needing to randomize the grid in a really careful way, i.e. the mouth is never above the eyes.

 
 
 
 

//
// a template for receiving face tracking osc messages from
// Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker
//
// 2012 Dan Wilcox danomatika.com
// for the IACD Spring 2012 class at the CMU School of Art
//
// adapted from from Greg Borenstein's 2011 example
// http://www.gregborenstein.com/
// https://gist.github.com/1603230
//
import oscP5.*;
OscP5 oscP5;  // FaceOSC listener (port 8338, opened in setup)


// Fonts: big title text (size ~120) and smaller UI labels (size ~30).
PFont intersect50;
PFont intersect25;


// Game state flags: pressplay flips true on 'p' (title screen);
// gameOver flips true when the round timer runs out.
boolean pressplay =false;
boolean gameOver = false;

// Difficulty level 1-5, derived from `points` every frame in game().
int level;

// num faces found
int found;

// pose
float poseScale;
PVector posePosition = new PVector();
PVector poseOrientation = new PVector();




// gesture values streamed from FaceOSC (see the plug() calls in setup)
float mouthHeight;
float mouthWidth;
float eyeLeft;
float eyeRight;
float eyebrowLeft;
float eyebrowRight;
float jaw;
float nostrils;


//mouth, left eye, right eye, nose
String[] parts = {"mouth", "left eye", "right eye","nose"};
int partsActive;  // index into `parts` of the feature the player must place

// grid variables
int[][] grid = new int[10][10];  // NOTE(review): declared but never read in this tab

int gridNum = 2;  // grid cells per side (level + 1)

int gridSize = 700;  // sketch is 700x700
int margin = 100;    // border around the playing field

int sqSize;  // pixel size of one grid cell
int randX;   // target cell column
int randY;   // target cell row
int points;  // score; +10 per successful hit
//timer

int time;         // millis() at the last hit / state change
int wait = 8000;  // ms allowed between hits before game over
float countdown;  // 0..1 fraction of `wait` elapsed
 
void setup() {
  // Uncomment the following two lines to see the available fonts 
  //String[] fontList = PFont.list();
  //printArray(fontList);
  //String[] fontList = PFont.list();
  //for (int i =300;i<700; i++) { // println(fontList[i]); //} intersect50 = createFont("IntersectB44Solid", 120); intersect25 = createFont("IntersectB24", 30); translate(0,0); size(700,700); frameRate(30); oscP5 = new OscP5(this, 8338); oscP5.plug(this, "found", "/found"); oscP5.plug(this, "poseScale", "/pose/scale"); oscP5.plug(this, "posePosition", "/pose/position"); oscP5.plug(this, "poseOrientation", "/pose/orientation"); oscP5.plug(this, "mouthWidthReceived", "/gesture/mouth/width"); oscP5.plug(this, "mouthHeightReceived", "/gesture/mouth/height"); oscP5.plug(this, "eyeLeftReceived", "/gesture/eye/left"); oscP5.plug(this, "eyeRightReceived", "/gesture/eye/right"); oscP5.plug(this, "eyebrowLeftReceived", "/gesture/eyebrow/left"); oscP5.plug(this, "eyebrowRightReceived", "/gesture/eyebrow/right"); oscP5.plug(this, "jawReceived", "/gesture/jaw"); oscP5.plug(this, "nostrilsReceived", "/gesture/nostrils"); points = 0; getNewRand(); //timer time = millis();//store the current time println("time", time); //game level =1; partsActive = int(random(4)); } void draw() { if (!pressplay && !gameOver) { gameStart(); } else if (pressplay && !gameOver){ game(); } else if (pressplay && gameOver) { gameend(); } } void gameStart(){ background(1, 34, 160); textAlign(CENTER, CENTER); textFont(intersect50); fill(133,242,231); text("face", width/2-3,height/2-100); text("face", width/2-3, height/2); fill(0,222,190); text("face", width/2, height/2-100+3); text("face", width/2, height/2+3); textFont(intersect25); int passedMillis = millis() - time; // calculates passed milliseconds if(passedMillis >= 300){
      time = millis();
      fill(1, 34, 160);  // if more than 215 milliseconds passed set fill color to red
  }
  else {
    fill (0,222,190);
  }
   text("free play", width/2, 490);
   if (keyPressed) {
     if (key=='p' || key=='P') {
       pressplay = true;
     }
   }
}
 
// Game-over screen: drop-shadowed "game over" plus a blinking "good bye"
// label; pressing 's' restarts the sketch by re-running setup().
void gameend() {
  background(1, 34, 160);
   textAlign(CENTER, CENTER);
   textFont(intersect50);
   fill(133,242,231);
   text("game", width/2-3,height/2-100);
   text("over", width/2-3, height/2);
   fill(0,222,190);
   text("game", width/2, height/2-100+3);
   text("over", width/2, height/2+3);
   textFont(intersect25);
   int passedMillis = millis() - time; // calculates passed milliseconds
  if(passedMillis >= 300){
      time = millis();
      fill(1, 34, 160);  // background color, so the label blinks off
  }
  else {
    fill (0,222,190);
  }
   text("good bye", width/2, 490);
   if (keyPressed) {
     if (key=='s' || key=='S') {
       gameOver =false;
       pressplay = false;
       //level =0;
       //points =0;
       // NOTE(review): calling setup() again constructs a second OscP5
       // instance on port 8338 every restart -- confirm this is harmless.
       setup();
     }
   }
 
}
 
void game() {
 
if (points<40) {
   level =1;
 }
 
 else if (points<70) {
   level =2;
 }
 
 else if (points<100) {
   level =3;
 }
 
   else if (points<130) {
   level =4;
 }
 
 else {
 level =5;
 }
 
  pushMatrix();
  background(0);
   translate(sqSize/2,sqSize/2);
 
 
  fill(0);
 
  gridNum=level+1;
  sqSize = (gridSize-margin*2)/gridNum;
  //500/3
  //
  println(sqSize);
 
  for (int i=0; i<gridNum; i++) {
        for (int j=0; j<gridNum; j++) { //size/ grid size stroke(133,242,231); //noFill();222 //println(i, j, sqSize, sqSize*j+margin); rect(sqSize*j+margin, sqSize*i+margin, sqSize, sqSize); } } textFont(intersect25); fill (0,222,190); textAlign(LEFT); text("level "+str(level), -50,-50); fill(133,242,231); rect(sqSize*randX+margin, sqSize*randY+margin, sqSize, sqSize); fill(0); rect(sqSize*randX+margin, sqSize*randY+margin, sqSize-20, sqSize-20); fill(133,242,231); rect(sqSize*randX+margin, sqSize*randY+margin, sqSize-40, sqSize-40); //rect(300,300,40,40); popMatrix(); pushMatrix(); textFont(intersect25,40); textAlign(CENTER,CENTER); fill(0); text(parts[partsActive], sqSize*randX+margin+sqSize/2, sqSize*randY+margin+sqSize/2); popMatrix(); //write points count fill(133,242,231); textSize(60); //println(points); text(str(points), 350, 630); //timer if(millis() - time >= wait){
      gameOver = true;
      //also update the stored time
 
    }
    else {
      countdown = (millis()-time);
 
      //println("wait", wait);
      //println("test", millis()-time);
 
      countdown = countdown/wait;
      //println("countdown", countdown);
    }
    //println((3*PI)/2, ((3*PI)/2)*countdown);
    pushMatrix();
    noFill();
    stroke(133,242,231);
    strokeWeight(7);
    arc(600, 70, 30,30, 3*(PI/2)*countdown, 3*(PI/2), OPEN);
    popMatrix();
 
    strokeWeight(1);
 
  //translate(width,0);
  //  scale(-1,1);
  //facial data
  if(found > 0) {
 
    pushMatrix();
 
    translate(posePosition.x, posePosition.y);
    scale(poseScale);
    noFill();
    ellipse(-20, eyeLeft * -9, 7, 7);
    ellipse(20, eyeRight * -9, 7, 7);
    ellipse(0, 20, 7, 7);
    ellipse(0, nostrils * -1, 7, 7);
    rectMode(CENTER);
    fill(0);
    rect(-20, eyebrowLeft * -5, 25, 5);
    rect(20, eyebrowRight * -5, 25, 5);
    popMatrix();
 
 
    //gamify your LEFT eye
    float realMouthX = (posePosition.x);
    float realMouthY = posePosition.y+((eyeLeft*7)*poseScale);
 
 
    //gamify your LEFT eye
    float realLeftEyeX = (posePosition.x-(20*poseScale));
    float realLeftEyeY = posePosition.y+((eyeLeft*-9)*poseScale);
 
     //gamify your RIGHT eye
    float realRightEyeX = (posePosition.x+(20*poseScale));
    float realRightEyeY = posePosition.y+((eyeLeft*-9)*poseScale);
 
     //gamify your NOOOSE
    float realNoseX = (posePosition.x);
    float realNoseY = posePosition.y+((eyeLeft*-1)*poseScale);
 
 
    //translate(125,125);
    stroke(255,0,0);
 
 
    //MOUTH POINTS
     if (partsActive==0) {
      ellipse(realMouthX, realMouthY, 20,20);
        if (realMouthX >= sqSize*randX+margin && realMouthX <= sqSize*randX+sqSize+margin && realMouthY>= sqSize*randY+margin && realMouthY<= sqSize*randY+sqSize+margin) { //println("hello"); points+=10; getNewRand(); } } //LEFT EYE POINTS if (partsActive==1) { ellipse(realLeftEyeX, realLeftEyeY, 20,20); if (realLeftEyeX >= sqSize*randX+margin && realLeftEyeX <= sqSize*randX+sqSize+margin && realLeftEyeY>= sqSize*randY+margin && realLeftEyeY<= sqSize*randY+sqSize+margin) { //println("hello"); points+=10; getNewRand(); } } //RIGHT EYE POINTS if (partsActive==2) { ellipse(realRightEyeX, realRightEyeY, 20,20); if (realRightEyeX >= sqSize*randX+margin && realRightEyeX <= sqSize*randX+sqSize+margin && realRightEyeY>= sqSize*randY+margin && realRightEyeY<= sqSize*randY+sqSize+margin) { //println("hello"); points+=10; getNewRand(); } } if (partsActive==3) { ellipse(realNoseX, realNoseY, 20,20); if (realNoseX >= sqSize*randX+margin && realNoseX <= sqSize*randX+sqSize+margin && realNoseY>= sqSize*randY+margin && realNoseY<= sqSize*randY+sqSize+margin) {
        //println("hello");
        points+=10;
        getNewRand();
      }  
    }
 
  }
}
 
 
 
// Pick a fresh target cell and a fresh face part, and restart the
// between-hits timer.
void getNewRand() {
  randX = int(random(gridNum));
  randY = int(random(gridNum));
  partsActive = int(random(parts.length));
  time = millis();
}
 
void mouseClicked() {
  randX = int(random(0,gridNum));
  randY = int(random(0,gridNum));
  partsActive = int(random(4));
  time = millis();
}
 
// OSC CALLBACK FUNCTIONS
// Each of these is routed here by an oscP5.plug() call in setup(); they
// simply stash the most recent FaceOSC value into the matching global.

public void found(int i) {
  //println("found: " + i);
  found = i;
}
 
public void poseScale(float s) {
  //println("scale: " + s);
  poseScale = s;
}
 
public void posePosition(float x, float y) {
  //println("pose position\tX: " + x + " Y: " + y );
  posePosition.set(x, y, 0);
}
 
public void poseOrientation(float x, float y, float z) {
  //println("pose orientation\tX: " + x + " Y: " + y + " Z: " + z);
  poseOrientation.set(x, y, z);
}
 
public void mouthWidthReceived(float w) {
  //println("mouth Width: " + w);
  mouthWidth = w;
}
 
public void mouthHeightReceived(float h) {
  //println("mouth height: " + h);
  mouthHeight = h;
}
 
public void eyeLeftReceived(float f) {
  //println("eye left: " + f);
  eyeLeft = f;
}
 
public void eyeRightReceived(float f) {
  //println("eye right: " + f);
  eyeRight = f;
}
 
public void eyebrowLeftReceived(float f) {
  //println("eyebrow left: " + f);
  eyebrowLeft = f;
}
 
public void eyebrowRightReceived(float f) {
  //println("eyebrow right: " + f);
  eyebrowRight = f;
}
 
public void jawReceived(float f) {
  //println("jaw: " + f);
  jaw = f;
}
 
public void nostrilsReceived(float f) {
  //println("nostrils: " + f);
  nostrils = f;
}
 
// all other OSC messages end up here
void oscEvent(OscMessage m) {
  if(m.isPlugged() == false) {
    println("UNPLUGGED: " + m);
  }
}

 

harsh-Body

1. Wolly-Willy

This was a reference to the children's toy Wolly-Willy, but done in a digital format.

2. Beard of Boids

Here's a beard made out of tiny hair follicles that behave like boids.

3. Animate Artworks

Here I tried to bring to life already made artwork - I liked the recursion in the Dali piece and how computation enabled changing it dynamically.

Bonus: Here's one with Mona-Lisa

 

Code: Wolly Willy

//
// a template for receiving face tracking osc messages from
// Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker
//
// 2012 Dan Wilcox danomatika.com
// for the IACD Spring 2012 class at the CMU School of Art
//
// adapted from from Greg Borenstein's 2011 example
// http://www.gregborenstein.com/
// https://gist.github.com/1603230
//
import oscP5.*;
OscP5 oscP5;  // FaceOSC listener (port 8338, opened in setup)
import processing.video.*;  // webcam capture
 
 
// num faces found
int found;
 
// 66 raw (x, y) face points streamed from FaceOSC's /raw message
float[] rawArray;
 
// pose
float poseScale;
PVector posePosition = new PVector();
PVector poseOrientation = new PVector();
 
int numPoints = 4000;  // number of hair "boids"
Vehicle[] vehicles;    // the flock (Vehicle class lives in another tab)
PVector[] lowPoints;   // each boid's home position at the bottom of the frame
 
float prevMouseX;  // NOTE(review): appears unused in this tab
float prevMouseY;
 
PImage img;       // hair sprite
PImage lightOff;  // magnet-wand cursor images; only referenced from the
PImage lightOn;   // commented-out block in draw()
Capture cam;
 
float mappedPoseScale;  // poseScale mapped into 0..1
 
float[][] mouseCircle;  // circle of points around the mouse used to herd boids
 
float[][] setupPoints;  // boid positions captured while in "setup" mode
float[][] centDist;     // boid offsets from the face center, captured in "setup"
 
String mode;  // "setup" | "follow" | "reset"
 
// Load media, start the webcam, wire up the FaceOSC callbacks, and build
// the boid flock plus its per-boid bookkeeping arrays.
void setup() {
  size(640, 480);
  frameRate(30);
  rawArray = new float[132];  // 66 raw (x, y) points
  cam = new Capture(this, 640, 480, 30);
  cam.start();
  img = loadImage("hair.png");
  lightOff = loadImage("lightoff.png");
  lightOn = loadImage("lighton.png");
 
 
  oscP5 = new OscP5(this, 8338);  // FaceOSC's default port
  oscP5.plug(this, "found", "/found");
  oscP5.plug(this, "rawData", "/raw");
  oscP5.plug(this, "poseScale", "/pose/scale");
  oscP5.plug(this, "posePosition", "/pose/position");
  oscP5.plug(this, "poseOrientation", "/pose/orientation");
 
  vehicles = new Vehicle[numPoints];
  lowPoints = new PVector[numPoints];
  initializeVehicles();
 
  setupPoints = new float[numPoints][2];
  centDist = new float[numPoints][2];
 
  mode = "setup";
}
 
void draw() {  
  //println(posePosition.x,posePosition.y);
  background(255);
  stroke(0);  
  mappedPoseScale = map(poseScale,0,6,0,1);
 
  if(keyPressed){
    if(key == ' '){
      mode = "follow";
    }
    if(key == 'r'){
      mode = "reset";
    }
  }
 
 ////////////////////// DISPLAY WEBCAM IMAGE //////////////////////
  if(cam.available()) {
    cam.read();
  }
  image(cam,0,0);
  //////////////////////////////////////////////////////////////////
  if(mode == "setup"){  
    mouseCircle = getCirclePoints(30, mouseX, mouseY);   
 
    moveBoidsInside(mouseCircle, mouseX,mouseY,30);
    for(int i=0; i<numPoints; i++){
      float[] curLoc = new float[2];
      curLoc[0] = vehicles[i].location.x;
      curLoc[1] = vehicles[i].location.y;
 
      float[] curDist = new float[2];
      curDist[0] = vehicles[i].location.x-posePosition.x;
      curDist[1] = vehicles[i].location.y-posePosition.y;   
      setupPoints[i] = curLoc;
      centDist[i] = curDist;
    }
    //if(mousePressed){ 
    //    image(lightOn,mouseX,mouseY);
    //}
    //else{
    //    image(lightOff,mouseX,mouseY);
    //}
  }
 
 
  if(mode == "follow" && found != 0){
    for(int i=0; i<numPoints; i++){
      Vehicle curBoid = vehicles[i];
      float[] curBoidLoc = new float[2];
      curBoidLoc[0] = curBoid.location.x;
      curBoidLoc[1] = curBoid.location.y;
      curBoid.maxspeed = 5;
      curBoid.maxforce = 0.2;
 
      if(curBoid.location.y < height-60){
        PVector newLoc = new PVector(posePosition.x+centDist[i][0]*mappedPoseScale,posePosition.y+centDist[i][1]*mappedPoseScale);
        //println(newLoc);
        curBoid.arrive(newLoc);
        curBoid.update();
        curBoid.display();
      }
    }
  }
 
  if(mode == "reset"){
    int num = 0;
    for(int i=0; i< numPoints; i++){
      Vehicle curBoid = vehicles[i];
      curBoid.maxspeed = 50;
      curBoid.maxforce = 10;
      curBoid.jiggleRadius = 1;
      curBoid.arrive(lowPoints[i]);
      curBoid.update();
      curBoid.display();
      if((curBoid.location.y-lowPoints[i].y) < 2){ num++; } } if(num >= numPoints){
        mode = "setup";
      }
  }
 
}
 
 
 
////////////////////////////////////////////////////////////////////////
/////////////////////Vehicle Settings //////////////////////////////////
////////////////////////////////////////////////////////////////////////
 
// Scatter the boids along a 30px band at the bottom of the frame and
// remember each start point so "reset" mode can send them home.
void initializeVehicles() {
  for (int i = 0; i < numPoints; i++) {
    float startX = random(width);
    float startY = random(height - 30, height);
    lowPoints[i] = new PVector(startX, startY);
    vehicles[i] = new Vehicle(startX, startY);
  }
}
 
// Make every boid steer toward its own current position -- the arrive
// behavior overshoots slightly, so each one vibrates in place.
void jiggle() {
  for (Vehicle v : vehicles) {
    v.arrive(v.location);
    v.update();
    v.display();
  }
}
 
// Step and draw every boid without applying any steering force.
void display() {
  for (Vehicle v : vehicles) {
    v.update();
    v.display();
  }
}
 
////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////
 
// OSC CALLBACK FUNCTIONS
// Routed here by the oscP5.plug() calls in setup(); each one stores the
// latest FaceOSC value in the matching global.
 
public void found(int i) {
  //println("found: " + i);
  found = i;
}
 
public void poseScale(float s) {
  //println("scale: " + s);
  poseScale = s;
}
 
public void posePosition(float x, float y) {
  //println("pose position\tX: " + x + " Y: " + y );
  posePosition.set(x, y, 0);
}
 
public void poseOrientation(float x, float y, float z) {
  //println("pose orientation\tX: " + x + " Y: " + y + " Z: " + z);
  poseOrientation.set(x, y, z);
}
 
public void rawData(float[] raw) {
  rawArray = raw; // stash data in array
}
 
// all other OSC messages end up here
void oscEvent(OscMessage m) {
  if(m.isPlugged() == false) {
    //println("UNPLUGGED: " + m);
  }
}

Code: Artwork

 

// Processing 3.0x template for receiving raw points from
// Kyle McDonald's FaceOSC v.1.1 
// https://github.com/kylemcdonald/ofxFaceTracker
//
// Adapted by Kaleb Crawford and Golan Levin, 2016-7, after:
// 2012 Dan Wilcox danomatika.com
// for the IACD Spring 2012 class at the CMU School of Art
// adapted from from Greg Borenstein's 2011 example
// https://gist.github.com/1603230
 
import oscP5.*;
OscP5 oscP5;        // FaceOSC listener (port 8338, opened in setup)
int found;          // number of faces FaceOSC currently sees
float[] rawArray;   // 66 raw (x, y) face points
int highlighted; //which point is selected
float poseScale;
 
// Layered artwork images composited in draw()
PImage background;
PImage mouth;
PImage left_eyebrow;
PImage right_eyebrow;
PImage small_eyebrows;
PImage smallest_eyebrows;
PImage eyes;
 
 
 
float mouthOff;  // NOTE(review): appears unused in this tab
 
// Per-frame vertical offsets applied to the eyebrow layers
float rightBrowOff;
float leftBrowOff;
float smallBrowOff;
 
float prevRightBrowOff;  // previous-frame values (not read in this tab -- confirm)
float prevLeftBrowOff;
float mappedScale;       // poseScale mapped into 0..1
 
//--------------------------------------------
// Wire up FaceOSC and load the artwork layers; the canvas matches the
// base image's 800x682 pixels.
void setup() {
  size(800, 682);
  frameRate(30);
 
  rawArray = new float[132];  // 66 raw (x, y) points
  oscP5 = new OscP5(this, 8338);  // FaceOSC's default port
  oscP5.plug(this, "found", "/found");
  oscP5.plug(this, "rawData", "/raw");
  oscP5.plug(this, "poseScale", "/pose/scale");
 
  background = loadImage("base.png");
  mouth = loadImage("mouths.png");
  left_eyebrow = loadImage("left eyebrow.png");
  right_eyebrow = loadImage("right eyebrow.png");
  small_eyebrows = loadImage("small eyebrows.png");
  eyes = loadImage("eyes.png");
  smallest_eyebrows = loadImage("smallest eyebrows.png");
 
 
}
 
//--------------------------------------------
//--------------------------------------------
// Composite the artwork layers each frame: the eyebrow layers ride up
// with the viewer's brow height, the eyes layer appears on a blink, and
// the mouth layer fades with mouth movement. The brow/eye/mouth helper
// functions are defined in another tab.
void draw() {  
  mappedScale = map(poseScale, 0,5,0,1);
 
  prevRightBrowOff = rightBrowOff;
  prevLeftBrowOff = leftBrowOff;
 
  // NOTE(review): the right offset reads the LEFT brow and vice versa --
  // possibly deliberate mirroring of the webcam view; confirm.
  rightBrowOff = (getLeftBrowHeight());
 
  leftBrowOff =  (getRightBrowHeight());
 
  smallBrowOff = (getRightBrowHeight());  // NOTE(review): assigned but never used below
 
  image(background, 0, 0);
 
  // Each eyebrow layer gets its own multiplier, so they lift by
  // different amounts (a simple parallax effect).
  pushMatrix();
  translate(0,-(rightBrowOff*2));
  image(left_eyebrow, 0, -2);
  popMatrix();
 
  pushMatrix();
  translate(0,-(leftBrowOff*8));
  image(right_eyebrow, 0, 0);
  popMatrix();
 
  pushMatrix();
  translate(0,-(leftBrowOff));
  image(small_eyebrows, 0, 0);
  popMatrix();
 
  pushMatrix();
  translate(0,-(leftBrowOff*0.5));
  image(smallest_eyebrows, 0, 0);
  popMatrix();
 
  // Overlay the closed-eyes layer while either of the viewer's eyes is shut.
  if(leftEyeClosed() || rightEyeClosed()){
    image(eyes, 0, 0);
  }
 
  // Mouth fades out as the viewer's mouth moves; alpha values above 255
  // clamp, so the layer is fully opaque for small movements.
  pushMatrix();
  tint(255, 300-(getMouthMovement())*5);
  image(mouth, 0, 0);
  popMatrix();
 
 
 
 
}
 
//--------------------------------------------
//--------------------------------------------
// Debug view: one dot per raw face point. The currently selected point
// (see keyPressed) is drawn larger and in red.
void drawFacePoints() {
  for (int idx = 0; idx < rawArray.length; idx += 2) {
    boolean selected = (idx == highlighted);
    if (selected) {
      fill(255, 0, 0);
      ellipse(rawArray[idx], rawArray[idx+1], 11, 11);
    } else {
      fill(100);
      ellipse(rawArray[idx], rawArray[idx+1], 8, 8);
    }
  }
}
 
//--------------------------------------------
void drawFacePolygons() {
  noFill(); 
  stroke(100); 
 
  // Face outline
  beginShape();
  for (int i=0; i<34; i+=2) { vertex(rawArray[i], rawArray[i+1]); } for (int i=52; i>32; i-=2) {
    vertex(rawArray[i], rawArray[i+1]);
  }
  endShape(CLOSE);
 
  // Eyes
  beginShape();
  for (int i=72; i<84; i+=2) {
    vertex(rawArray[i], rawArray[i+1]);
  }
  endShape(CLOSE);
  beginShape();
  for (int i=84; i<96; i+=2) {
    vertex(rawArray[i], rawArray[i+1]);
  }
  endShape(CLOSE);
 
  // Upper lip
  beginShape();
  for (int i=96; i<110; i+=2) { vertex(rawArray[i], rawArray[i+1]); } for (int i=124; i>118; i-=2) {
    vertex(rawArray[i], rawArray[i+1]);
  }
  endShape(CLOSE);
 
  // Lower lip
  beginShape();
  for (int i=108; i<120; i+=2) { vertex(rawArray[i], rawArray[i+1]); } vertex(rawArray[96], rawArray[97]); for (int i=130; i>124; i-=2) {
    vertex(rawArray[i], rawArray[i+1]);
  }
  endShape(CLOSE);
 
  // Nose bridge
  beginShape();
  for (int i=54; i<62; i+=2) {
    vertex(rawArray[i], rawArray[i+1]);
  }
  endShape();
 
  // Nose bottom
  beginShape();
  for (int i=62; i<72; i+=2) {
    vertex(rawArray[i], rawArray[i+1]);
  }
  endShape();
}
 
 
//--------------------------------------------
// OSC callbacks, routed here by the oscP5.plug() calls in setup(); each
// stores the latest FaceOSC value in the matching global.
public void found(int i) {
  found = i;
}
public void rawData(float[] raw) {
  rawArray = raw; // stash data in array
}
 
public void poseScale(float s) {
  //println("scale: " + s);
  poseScale = s;
}
 
//--------------------------------------------
//--------------------------------------------
// Arrow keys step the highlighted raw point: the index moves by 2 (one
// (x, y) pair), wrapping around the array in either direction.
void keyPressed() {
  int len = rawArray.length;
  if (keyCode == RIGHT) {
    highlighted = (highlighted + 2) % len;
  } else if (keyCode == LEFT) {
    highlighted = (highlighted - 2 + len) % len;
  }
}

dinkolas-Body

This project takes the motion from an entire human body and maps it to a face. Hands control eyes, head controls brow, hips control jaw, and feet control lips. Technically, the way this was done was by first recording the motion using the Brekel software for the Kinect, which outputs a BVH file. Then, I brought the MoCap data into Blender, which creates an animated armature. Using bpy, the Blender Python library, I took the locations of the armature bones as input and used them to animate location, rotation, and shape keys associated with the face model.

While the bulk of the work for this project was just writing the code to hook up the input motion to the output motion, the most important work, the work that I think gives this project some personality, was everything else. The modelling of the face, the constructing of the extreme poses, and of course the performing of the MoCap "dance" ultimately have the most impact on the piece. Overall, I'm happy with the piece, but it would have been nice if I had implemented a real-time version. I would have done a real-time version if I could have found a Python real-time MoCap library so I could connect it to Blender, but most of the real-time MoCap stuff is in JavaScript.

Here are some images/GIFs/sketches:

Here's the code I wrote in Blender's Python interpreter:

import bpy

# World-space rest-pose location of every armature bone, sampled once from
# the clip's neutral frame. Motion below is expressed as offsets from
# these rest positions.
defaultBoneLocs = {'Spine1': (0.7207083106040955, 9.648646354675293, 4.532780170440674), 'LeftUpLeg': (0.9253001809120178, 9.532548904418945, 2.795626401901245), 'LeftLeg': (1.0876638889312744, 9.551751136779785, 1.751688838005066), 'LeftHand': (1.816838026046753, 8.849924087524414, 3.9350945949554443), 'Head': (0.7248507738113403, 9.63467788696289, 4.774600028991699), 'Spine': (0.7061706185340881, 9.661049842834473, 3.7947590351104736), 'RightArm': (0.17774519324302673, 9.660733222961426, 4.388589382171631), 'LeftArm': (1.259391188621521, 9.625649452209473, 4.377967834472656), 'RightUpLeg': (0.42640626430511475, 9.538918495178223, 2.812265634536743), 'LeftForeArm': (1.7386583089828491, 9.596687316894531, 3.683629274368286), 'RightShoulder': (0.6663911938667297, 9.649639129638672, 4.518487453460693), 'Hips': (0.6839468479156494, 9.647804260253906, 2.792393445968628), 'LeftShoulder': (0.7744865417480469, 9.646313667297363, 4.5172953605651855), 'LeftFoot': (1.2495332956314087, 9.810073852539062, 0.5447696447372437), 'RightForeArm': (-0.3234933316707611, 9.588683128356934, 3.855139970779419), 'RightFoot': (0.08095724135637283, 9.851096153259277, 0.5348520874977112), 'RightLeg': (0.23801341652870178, 9.571942329406738, 1.788295030593872), 'RightHand': (-0.3675239682197571, 8.814794540405273, 4.040530681610107), 'Neck': (0.7212125062942505, 9.647202491760254, 4.556796550750732)}
 
boneLocs = {}      # current frame: bone name -> world-space location
normBoneLocs = {}  # current frame: bone name -> offset from rest pose,
                   # measured relative to the hips so whole-body drift cancels
 
# Walk the clip, keyframing the face model on every 3rd frame.
for frame in range(30,1491,3):
    if frame%90 == 0:
        print(frame/1491*100)  # crude progress readout (percent)
    bpy.context.scene.frame_set(frame)
 
    # Collect the world-space location of every pose bone on this frame.
    # NOTE(review): `matrix_world * poseBone.matrix` is Blender <= 2.79
    # operator syntax; Blender 2.80+ requires `@` for matrix products.
    for arm in bpy.data.armatures[:]:
        obj = bpy.data.objects[arm.name]
        for poseBone in obj.pose.bones[:]:
            finalMatrix = obj.matrix_world * poseBone.matrix
            # Translation column of the 4x4 world matrix.
            global_location = (finalMatrix[0][3],finalMatrix[1][3],finalMatrix[2][3])
            boneLocs[poseBone.name] = global_location
 
    # Normalize: subtract the hips' offset so only motion relative to the
    # body registers. The hips themselves keep their absolute height (z),
    # which drives the jaw below.
    for key in boneLocs:
        x = (boneLocs[key][0] - boneLocs['Hips'][0]) - (defaultBoneLocs[key][0] - defaultBoneLocs['Hips'][0])
        y = (boneLocs[key][1] - boneLocs['Hips'][1]) - (defaultBoneLocs[key][1] - defaultBoneLocs['Hips'][1])
        z = (boneLocs[key][2] - boneLocs['Hips'][2]) - (defaultBoneLocs[key][2] - defaultBoneLocs['Hips'][2])
        if key == 'Hips':
            z = boneLocs[key][2] - defaultBoneLocs[key][2]
        normBoneLocs[key] = (x,y,z)
 
    # Hips height -> jaw: crouching opens the jaw, standing tall closes it.
    # Each mapping sets a shape-key value and immediately keyframes it.
    val = -0.6*(normBoneLocs['Hips'][2])
    bpy.data.meshes['Head'].shape_keys.key_blocks['JawOpen'].value = val
    bpy.data.meshes['Head'].shape_keys.key_blocks['JawOpen'].keyframe_insert("value")
    val = 0.6*(normBoneLocs['Hips'][2])
    bpy.data.meshes['Head'].shape_keys.key_blocks['JawUp'].value = val
    bpy.data.meshes['Head'].shape_keys.key_blocks['JawUp'].keyframe_insert("value")
 
    # Left foot sideways motion -> left corner of the mouth (out/in).
    val = (normBoneLocs['LeftFoot'][0])
    bpy.data.meshes['Head'].shape_keys.key_blocks['MouthLOut'].value = val
    bpy.data.meshes['Head'].shape_keys.key_blocks['MouthLOut'].keyframe_insert("value")
    val = -(normBoneLocs['LeftFoot'][0])
    bpy.data.meshes['Head'].shape_keys.key_blocks['MouthLIn'].value = val
    bpy.data.meshes['Head'].shape_keys.key_blocks['MouthLIn'].keyframe_insert("value")
 
    # Right foot sideways motion -> right corner of the mouth (out/in).
    val = -(normBoneLocs['RightFoot'][0])
    bpy.data.meshes['Head'].shape_keys.key_blocks['MouthROut'].value = val
    bpy.data.meshes['Head'].shape_keys.key_blocks['MouthROut'].keyframe_insert("value")
    val = (normBoneLocs['RightFoot'][0])
    bpy.data.meshes['Head'].shape_keys.key_blocks['MouthRIn'].value = val
    bpy.data.meshes['Head'].shape_keys.key_blocks['MouthRIn'].keyframe_insert("value")
 
    # Head height -> brow raise/lower.
    val = -(normBoneLocs['Head'][2])
    bpy.data.meshes['Head'].shape_keys.key_blocks['BrowDown'].value = val
    bpy.data.meshes['Head'].shape_keys.key_blocks['BrowDown'].keyframe_insert("value")
    val = (normBoneLocs['Head'][2])
    bpy.data.meshes['Head'].shape_keys.key_blocks['BrowUp'].value = val
    bpy.data.meshes['Head'].shape_keys.key_blocks['BrowUp'].keyframe_insert("value")
 
    # Hands steer the eye-tracking empties (constants recentre the
    # targets on each eye's rest position).
    bpy.data.objects['EyeRTrack'].location.z = 0.294833 + normBoneLocs['RightHand'][2]
    bpy.data.objects['EyeRTrack'].location.x = -0.314635 + normBoneLocs['RightHand'][0]
    bpy.data.objects["EyeRTrack"].keyframe_insert(data_path='location')
 
    bpy.data.objects['EyeLTrack'].location.z = 0.294833 + normBoneLocs['LeftHand'][2]
    bpy.data.objects['EyeLTrack'].location.x = 0.314635 + normBoneLocs['LeftHand'][0]
    bpy.data.objects["EyeLTrack"].keyframe_insert(data_path='location')
 
    # Spine orientation -> head rotation, recentered (-1.7 rad) and damped
    # (x0.3) so the head tilts rather than mirroring the whole spine.
    bpy.data.objects['Head'].rotation_euler = bpy.data.objects['Armature'].pose.bones['Spine'].matrix.to_euler()
    bpy.data.objects["Head"].rotation_euler.x -= 1.7
    bpy.data.objects["Head"].rotation_euler.x *= 0.3
    bpy.data.objects["Head"].keyframe_insert(data_path='rotation_euler')

 

Also, here's a bonus GIF I made using a walk cycle from CMU's Motion Capture Database: