FaceTracker Banana

import oscP5.*;
OscP5 oscP5;

float premouth1;
PImage topb;
PImage botb;
PImage midb;
PImage nakbot;

int found;
// pose
float poseScale;
PVector posePosition = new PVector();
PVector poseOrientation = new PVector();
// gesture
float mouthHeight;
float mouthWidth;
float eyeLeft;
float eyeRight;
float eyebrowLeft;
float eyebrowRight;
float jaw;
float nostrils;
float x = 0;
float prevmouth1;
float check;
void setup() {
  size(800, 800);
  topb   = loadImage("topbanana.png");     // height: 567, width: 650
  botb   = loadImage("bottombanana.png");
  midb   = loadImage("middlebanana.png");
  nakbot = loadImage("nakedbanana.png");
  check  = 0;

  frameRate(30);
  oscP5 = new OscP5(this, 8338);
  oscP5.plug(this, "found", "/found");
  oscP5.plug(this, "poseScale", "/pose/scale");
  oscP5.plug(this, "posePosition", "/pose/position");
  oscP5.plug(this, "poseOrientation", "/pose/orientation");
  oscP5.plug(this, "mouthWidthReceived", "/gesture/mouth/width");
  oscP5.plug(this, "mouthHeightReceived", "/gesture/mouth/height");
  oscP5.plug(this, "eyeLeftReceived", "/gesture/eye/left");
  oscP5.plug(this, "eyeRightReceived", "/gesture/eye/right");
  oscP5.plug(this, "eyebrowLeftReceived", "/gesture/eyebrow/left");
  oscP5.plug(this, "eyebrowRightReceived", "/gesture/eyebrow/right");
  oscP5.plug(this, "jawReceived", "/gesture/jaw");
  oscP5.plug(this, "nostrilsReceived", "/gesture/nostrils");
}

void draw() {
  background(255);

  // (left disabled) experiment with remembering the previous mouth width:
  // if (prevmouth1 != mouthWidth) {
  //   check = mouthWidth;
  // }
  // prevmouth1 = mouthWidth;

  // if the jaw is below a certain threshold (i.e., the mouth is closed), keep the banana closed
  if (jaw < 21.5) {
    jaw = 0;
  }

  // translate(posePosition.x, posePosition.y);
  // scale(poseScale);
  // noFill();

  // opening the jaw pushes the peel segments apart; nostrils shift everything vertically
  image(nakbot, jaw + 152,        nostrils + 155);
  image(topb,   jaw * -5.5 + 129, nostrils + 32);
  image(botb,   jaw * 5.5 + 125,  nostrils + 145);
  image(midb,   jaw * 5.5 + 127,  nostrils + 45);
}


public void found(int i) {
  //println("found: " + i);
  found = i;
}
public void poseScale(float s) {
  //println("scale: " + s);
  poseScale = s;
}
public void posePosition(float x, float y) {
  //println("pose position\tX: " + x + " Y: " + y );
  posePosition.set(x, y, 0);
}
public void poseOrientation(float x, float y, float z) {
  //println("pose orientation\tX: " + x + " Y: " + y + " Z: " + z);
  poseOrientation.set(x, y, z);
}
public void mouthWidthReceived(float w) {
  //println("mouth Width: " + w);
  mouthWidth = w;
}
public void mouthHeightReceived(float h) {
  //println("mouth height: " + h);
  mouthHeight = h;
}
public void eyeLeftReceived(float f) {
  //println("eye left: " + f);
  eyeLeft = f;
}
public void eyeRightReceived(float f) {
  //println("eye right: " + f);
  eyeRight = f;
}
public void eyebrowLeftReceived(float f) {
  //println("eyebrow left: " + f);
  eyebrowLeft = f;
}
public void eyebrowRightReceived(float f) {
  //println("eyebrow right: " + f);
  eyebrowRight = f;
}
public void jawReceived(float f) {
  println("jaw: " + f);
  jaw = f;
}
public void nostrilsReceived(float f) {
  //println("nostrils: " + f);
  nostrils = f;
}

// all other OSC messages end up here
void oscEvent(OscMessage m) {
  if (m.isPlugged() == false) {
    println("UNPLUGGED: " + m);
  }
}

 

My piece explanation: fruit. I’ve been working with, and eating, more fruit than I ever have in my life. It’s through this experience that I’ve come to understand how frustrating it is to peel fruit. My piece works in conjunction with that idea. The frustration I have when peeling fruit, the constant frowning and creased brows, is reflected in the piece: in order to peel the fruit depicted, the user must frown, a lot. In the future I would want to use the movement of the lips and the eyebrows to track how much of the fruit to peel, and the blinking of the eyes to switch to the next fruit.
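A minimal sketch of how that future version might look, bolted onto the globals in the code above (the updatePeel() helper, the thresholds, and the fruit count are my own guesses, not part of the working sketch):

// Hypothetical helper, called once per frame from draw():
// eyebrow movement accumulates into how far the fruit is peeled, and a blink
// (both eye-openness values dropping low) switches to the next fruit.
float peelAmount = 0;        // 0 = unpeeled, 1 = fully peeled
float prevBrow = 0;
int currentFruit = 0;        // index into a list of fruit images
boolean eyesWereOpen = true;

void updatePeel() {
  float brow = (eyebrowLeft + eyebrowRight) / 2.0;
  // any change in brow height counts as "effort" toward peeling
  peelAmount = constrain(peelAmount + abs(brow - prevBrow) * 0.02, 0, 1);
  prevBrow = brow;

  // a blink: both eye-openness values dip below ~2.5 (a guessed threshold, needs tuning)
  boolean eyesOpen = (eyeLeft > 2.5) && (eyeRight > 2.5);
  if (eyesWereOpen && !eyesOpen) {
    currentFruit = (currentFruit + 1) % 3;   // e.g. banana, orange, mango
    peelAmount = 0;                          // start the new fruit unpeeled
  }
  eyesWereOpen = eyesOpen;
}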
*Edited: The video now works. I just had to fix some coding errors and use someone else’s face. FaceOSC likes well-lit rooms, and other people, apparently.
*My program isn’t working. For one thing, FaceOSC can’t seem to track my face for more than two seconds. Also, I loaded an image which doesn’t change at all despite changing the position of your face. So with that in mind, here’s a picture of a banana that should have been peeled. I will note that trying to peel a banana in real life is almost as frustrating as trying to program a banana to peel. Almost.

 

csb – face-osc – text-embedded easter egg

(Screenshots of the sketch in action.)

CODE:

import oscP5.*;
OscP5 oscP5;

// num faces found
int found;

// pose
float poseScale;
PVector posePosition = new PVector();
PVector poseOrientation = new PVector();

// gesture
float mouthHeight;
float mouthWidth;
float eyeLeft;
float eyeRight;
float eyebrowLeft;
float eyebrowRight;
float jaw;
float nostrils;

String[] lines;

int string_spacing = 20;

void setup() {
size(804, 805, P3D);
background(0);
lights();

lines = loadStrings("lorem_ipsum.txt");

oscP5 = new OscP5(this, 8338);
oscP5.plug(this, "found", "/found");
oscP5.plug(this, "poseScale", "/pose/scale");
oscP5.plug(this, "posePosition", "/pose/position");
oscP5.plug(this, "poseOrientation", "/pose/orientation");
oscP5.plug(this, "mouthWidthReceived", "/gesture/mouth/width");
oscP5.plug(this, "mouthHeightReceived", "/gesture/mouth/height");
oscP5.plug(this, "eyeLeftReceived", "/gesture/eye/left");
oscP5.plug(this, "eyeRightReceived", "/gesture/eye/right");
oscP5.plug(this, "eyebrowLeftReceived", "/gesture/eyebrow/left");
oscP5.plug(this, "eyebrowRightReceived", "/gesture/eyebrow/right");
oscP5.plug(this, "jawReceived", "/gesture/jaw");
oscP5.plug(this, "nostrilsReceived", "/gesture/nostrils");
}

float i = 0;
float count = 0;

void draw() {
//baby();
lorum();
//pushMatrix();
//translate(422, height*0.52, -366);
//sphere(noise(204));
//popMatrix();
i = i + 1;
count = count + 1;
string_spacing+=100;
}

void lorum() {
  for (int n = 0; n < lines.length; n += 1) {
    baby();
    translate(0, string_spacing + posePosition.y);
    text(lines[n], poseOrientation.x, noise(20) + string_spacing);
  }
  string_spacing = 0;
  //background(0,253);
}

void baby() {
  fill(-16, 130, 5, 84);
  stroke(212, 101, 309, 429);
  // translate(eyeLeft, eyeRight, mouthHeight);
  translate(eyeLeft * 30.0, posePosition.y, -155);
  sphere(mouthHeight * 150);
  translate(eyeRight * 30.0, posePosition.y, -155);
  sphere(mouthHeight * 150);

  int sizing = abs(int(70 / poseScale));
  textSize(sizing);
  fill(0, 102, 153);
  text("word", posePosition.x, posePosition.y, posePosition.z + 100); // Specify a z-axis value
  text("Default depth, no z-value specified", posePosition.x, posePosition.y, posePosition.z); // Default depth, no z-value specified

  String s = "The quick brown fox jumped over the lazy dog.";
  fill(243);
  text(s, posePosition.x - 135, posePosition.y - 21, posePosition.z - 153, 279);
  // text("Text wraps within text box", poseOrientation.x, poseOrientation.y, poseOrientation.z, poseScale);
}

//ellipse(noise(poseScale), poseOrientation.x, mouthHeight, posePosition.y);

// OSC CALLBACK FUNCTIONS
public void found(int i) { println("found: " + i); found = i; }
public void poseScale(float s) { println("scale: " + s); poseScale = s; }
public void posePosition(float x, float y) { println("pose position\tX: " + x + " Y: " + y ); posePosition.set(x, y, 0); }
public void poseOrientation(float x, float y, float z) { println("pose orientation\tX: " + x + " Y: " + y + " Z: " + z); poseOrientation.set(x, y, z); }
public void mouthWidthReceived(float w) { println("mouth Width: " + w); mouthWidth = w; }
public void mouthHeightReceived(float h) { println("mouth height: " + h); mouthHeight = h; }
public void eyeLeftReceived(float f) { println("eye left: " + f); eyeLeft = f; }
public void eyeRightReceived(float f) { println("eye right: " + f); eyeRight = f; }
public void eyebrowLeftReceived(float f) { println("eyebrow left: " + f); eyebrowLeft = f; }
public void eyebrowRightReceived(float f) { println("eyebrow right: " + f); eyebrowRight = f; }
public void jawReceived(float f) { println("jaw: " + f); jaw = f; }
public void nostrilsReceived(float f) { println("nostrils: " + f); nostrils = f; }

// all other OSC messages end up here
void oscEvent(OscMessage m) {
  /* print the address pattern and the typetag of the received OscMessage */
  println("#received an osc message");
  println("Complete message: " + m);
  println(" addrpattern: " + m.addrPattern());
  println(" typetag: " + m.typetag());
  println(" arguments: " + m.arguments()[0].toString());

  if (m.isPlugged() == false) {
    println("UNPLUGGED: " + m);
  }
}

lorem_ipsum

FaceOSC: Anonymizing Masks for Flame Wars

Have you ever been an angry kid on YouTube? Has someone ever trolled your Let’s Play videos? Have you ever wanted to make a super angry threatening response video to your haters, but are afraid of giving away your identity?



Now you can use the Anonymizing Mask for Flame Wars and make all the angry videos you want without the embarrassment of finding your angry younger self on YouTube years down the line!

Use the default Minecraft mask, or supply the program with your own by dragging a 3D model into the /masks folder! Once you’ve chosen your mask, simply record your screen and upload the video to YouTube!
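A minimal sketch of how that mask loading could work in Processing, assuming the FaceOSC template globals shown elsewhere on this page, a P3D renderer, and .obj models placed in a data/masks folder (the folder layout and helper names are mine, not taken from the project’s code):

PShape[] masks;          // every model found in data/masks
int currentMask = 0;

void loadMasks() {
  // list every .obj file dropped into data/masks and load it as a PShape
  File dir = new File(dataPath("masks"));
  String[] names = dir.list();
  ArrayList<PShape> loaded = new ArrayList<PShape>();
  if (names != null) {
    for (String name : names) {
      if (name.toLowerCase().endsWith(".obj")) {
        loaded.add(loadShape("masks/" + name));
      }
    }
  }
  masks = loaded.toArray(new PShape[0]);
}

void drawMask() {
  if (masks.length == 0 || found == 0) return;
  pushMatrix();
  translate(posePosition.x, posePosition.y);   // follow the head reported by FaceOSC
  rotateZ(poseOrientation.z);                  // roll the mask with the head
  scale(poseScale);
  shape(masks[currentMask], 0, 0);
  popMatrix();
}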

Starry Eyes

I wanted to use particles to fill in the shapes of the eyebrows, eyes, and mouth, but this was not possible in the timeframe given. Instead I decided to fill the shapes with a starry pattern, using randomly placed points (stars) and then screening them with the face shapes. Given more time, I would make the stars blink, fade, and drift slowly in one direction.
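A minimal sketch of how those blinking, fading, drifting stars might be added later (the Star class and its numbers are my own illustration, not part of the code below):

class Star {
  float x = random(width);
  float y = random(height);
  float phase = random(TWO_PI);      // offsets the blink so stars don't pulse in unison
  float drift = random(0.05, 0.2);   // slow motion to one side, in pixels per frame

  void update() {
    x = (x + drift) % width;         // drift slowly to the right, wrapping around
  }

  void display() {
    // brightness oscillates between dim and bright to fake twinkling/fading
    float b = 150 + 100 * sin(frameCount * 0.05 + phase);
    fill(b);
    noStroke();
    ellipse(x, y, 2, 2);
  }
}

An array of these, updated and drawn each frame before the MULTIPLY-blended face shapes, would replace the fresh random points that draw() scatters in the code below.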

Below is a quick sketch of the concept.

starry_eyes

Here is the code:

 

////////////////////////////////////////
// Starry Eyes (and mouth..and Eyebrows)
// by Matthew Kellogg
// October 6, 2014
///////////////////////////////////////
//
// This is based off a template by Golan Levin for receiving face tracking osc messages from
// Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker

import oscP5.*;
OscP5 oscP5;

import java.util.ArrayList;

// num faces found
int found;

// pose
float poseScale;
PVector posePosition = new PVector();
PVector poseOrientation = new PVector();

// gesture
float mouthHeight;
float mouthWidth;
float eyeLeft;
float eyeRight;
float eyebrowLeft;
float eyebrowRight;
float jaw;
float nostrils;

void setup() {
  size(800, 600);
  frameRate(30);

  oscP5 = new OscP5(this, 8338);
  oscP5.plug(this, "found", "/found");
  oscP5.plug(this, "poseScale", "/pose/scale");
  oscP5.plug(this, "posePosition", "/pose/position");
  oscP5.plug(this, "poseOrientation", "/pose/orientation");
  oscP5.plug(this, "mouthWidthReceived", "/gesture/mouth/width");
  oscP5.plug(this, "mouthHeightReceived", "/gesture/mouth/height");
  oscP5.plug(this, "eyeLeftReceived", "/gesture/eye/left");
  oscP5.plug(this, "eyeRightReceived", "/gesture/eye/right");
  oscP5.plug(this, "eyebrowLeftReceived", "/gesture/eyebrow/left");
  oscP5.plug(this, "eyebrowRightReceived", "/gesture/eyebrow/right");
  oscP5.plug(this, "jawReceived", "/gesture/jaw");
  oscP5.plug(this, "nostrilsReceived", "/gesture/nostrils");
}

void draw() {  
  //background(100,130,210);
  background(0);
  noStroke();
  
  fill(5);
  for (int i = 0; i < 1000; i++) {
    ellipse(random(0, width), random(0, height), random(1, 3), random(1, 3));
  }

  fill(240);
  blendMode(MULTIPLY);
  if (found > 0) {
    pushMatrix();
    translate(posePosition.x, posePosition.y);
    rotate(poseOrientation.z);
    scale(poseScale);
    ellipse(-20, eyeLeft * -9, 20, 7);
    ellipse(20, eyeRight * -9, 20, 7);
    ellipse(0, 20, mouthWidth* 3, mouthHeight * 3);
    rectMode(CENTER);
    rect(-20, eyebrowLeft * -5, 25, 5);
    rect(20, eyebrowRight * -5, 25, 5);
    popMatrix();
  }
}

// OSC CALLBACK FUNCTIONS

public void found(int i) {
  println("found: " + i);
  found = i;
}

public void poseScale(float s) {
  println("scale: " + s);
  poseScale = s;
}

public void posePosition(float x, float y) {
  println("pose position\tX: " + x + " Y: " + y );
  posePosition.set(x, y, 0);
}

public void poseOrientation(float x, float y, float z) {
  println("pose orientation\tX: " + x + " Y: " + y + " Z: " + z);
  poseOrientation.set(x, y, z);
}

public void mouthWidthReceived(float w) {
  println("mouth Width: " + w);
  mouthWidth = w;
}

public void mouthHeightReceived(float h) {
  println("mouth height: " + h);
  mouthHeight = h;
}

public void eyeLeftReceived(float f) {
  println("eye left: " + f);
  eyeLeft = f;
}

public void eyeRightReceived(float f) {
  println("eye right: " + f);
  eyeRight = f;
}

public void eyebrowLeftReceived(float f) {
  println("eyebrow left: " + f);
  eyebrowLeft = f;
}

public void eyebrowRightReceived(float f) {
  println("eyebrow right: " + f);
  eyebrowRight = f;
}

public void jawReceived(float f) {
  println("jaw: " + f);
  jaw = f;
}

public void nostrilsReceived(float f) {
  println("nostrils: " + f);
  nostrils = f;
}

// all other OSC messages end up here
void oscEvent(OscMessage m) {
  
  /* print the address pattern and the typetag of the received OscMessage */
  println("#received an osc message");
  println("Complete message: "+m);
  println(" addrpattern: "+m.addrPattern());
  println(" typetag: "+m.typetag());
  println(" arguments: "+m.arguments()[0].toString());
  
  if(m.isPlugged() == false) {
    println("UNPLUGGED: " + m);
  }
}

Looking Outwards – maxmsp

The Infinite Adventure Machine (prototype 01) from David Benqué on Vimeo.

“The Infinite Adventure Machine” by David Benqué is a program designed to create fully computationally generated stories based on common storybook traits. Apparently, all aspects of Russian folk-tales (the type of stories this project was based upon) can be boiled down into 31 traits. This is very reminiscent of Joseph Campbell’s “Hero” analysis of Western literature.

The project on its own can only generate synopses of stories, but the fact that these synopses are actually functional and engaging is quite impressive. I would like to incorporate this type of computational generation into some of my own work.

This Monome-like device is actually made out of 64 arcade-style buttons. The outputs of the buttons are routed through Max/MSP to determine the final sound. I think that as a piece, the design is great. An arcade button is something that begs to be touched. The responsiveness of the button, paired with sound, lets users turn that urge to play into musical notes. This project was simple and to the point, and I really enjoy that. If I were to add anything, it would be some kind of system that could replicate a human response to this machine.

Synthpond is an application which uses Max/MSP to create sound waves and nodes that generate responsive sounds. Users can place nodes anywhere on a plane, and once activated, the nodes interact with each other: sound waves bounce off certain nodes and ignore others, creating an immersive user experience. From the documentation I have seen, though, Synthpond just seems like a quick application to use rather than an experience. I feel that to more fully realize the Synthpond idea, it should be showcased in an area much larger than a phone, so that people as well as the placed nodes can interact with the waves created by the sounds they trigger.

Looking Outwards

Project That Inspired

This project was very inspiring to me. The space is well thought out and perfect for encompassing this tiny world.

Dramatic lighting, sound, and characters make this piece seem layered in narratives and concepts. I also greatly appreciate the aesthetic of this project; I love the weirdly clean but childlike taste in materials. It still has a roughness to it, not everything is perfect, but all of the little mechanisms are tuned perfectly.

Surprised

This piece by Felix Luque is composed of 10 rhombic dodecahedra that can be combined to create a tessellation of an infinite space. Watching the implementation of these devices was far more interesting to me than stills of the possible shapes you can create. The choice of colors in the materials for some reason seemed too heavy to me. The sounds that the system makes, and the patterns of flashing lights, are what interest me most.

 

Potential

This video isn’t a piece, but a study of Max/MSP Jitter. The visual images created here have vast potential to me. The choices of color made by this artist are very interesting, and to me these images look extremely unique. I will reference this video in the future for aesthetic choices.

fidgity

For this assignment, I found something very endearing about the motion tracker and its interaction with primitives. While quite simple, I had fun watching these tiny shapes nervously twitch to the movement of my face.

import oscP5.*;
OscP5 oscP5;

// num faces found
int found;

// pose
float poseScale;
PVector posePosition = new PVector();
PVector poseOrientation = new PVector();

// gesture
float mouthHeight;
float mouthWidth;
float eyeLeft;
float eyeRight;
float eyebrowLeft;
float eyebrowRight;
float jaw;
float nostrils;

void setup() {
  size(640, 480);
  frameRate(30);
 

  oscP5 = new OscP5(this, 8338);
  oscP5.plug(this, "found", "/found");
  oscP5.plug(this, "poseScale", "/pose/scale");
  oscP5.plug(this, "posePosition", "/pose/position");
  oscP5.plug(this, "poseOrientation", "/pose/orientation");
  oscP5.plug(this, "mouthWidthReceived", "/gesture/mouth/width");
  oscP5.plug(this, "mouthHeightReceived", "/gesture/mouth/height");
  oscP5.plug(this, "eyeLeftReceived", "/gesture/eye/left");
  oscP5.plug(this, "eyeRightReceived", "/gesture/eye/right");
  oscP5.plug(this, "eyebrowLeftReceived", "/gesture/eyebrow/left");
  oscP5.plug(this, "eyebrowRightReceived", "/gesture/eyebrow/right");
  oscP5.plug(this, "jawReceived", "/gesture/jaw");
  oscP5.plug(this, "nostrilsReceived", "/gesture/nostrils");
}

void draw() {  
  
  
  background(149,186,177);
  
  
  
  if(found > 0) {
    translate(posePosition.x, posePosition.y);
    scale(poseScale);
    stroke (191, 222, 215);
    strokeWeight (1);
  
   
    for (int i=5; i< 30; i+=70){
      fill (216, 242, 236);
    ellipse (i, nostrils, 5, 5);
    }
    
    line (20, eyebrowLeft, 1, 10);
    line (21, eyebrowRight, 1, 10);
    line (22, eyebrowLeft, 1, 10);
    line (23, eyebrowRight, 1, 10);
    line (eyebrowLeft, 23, 1, 10);
    
    stroke (216, 242, 236);
    noFill ();
    ellipse (jaw, 100, 2, 2);
    ellipse (jaw, 30, 4, 4);
    ellipse (45, jaw, 2, 2);
    
    line (eyebrowLeft, 10, 25,50);
    line (10,eyebrowRight, 25, 50); 
    
    line (eyebrowLeft, eyebrowRight, 5, 70);
    ellipse (eyebrowLeft*2, 60, 1, 1);
    ellipse (eyebrowLeft*2, 10, 1, 1);
    stroke (191, 222, 215);
    ellipse (eyebrowRight, 55, 1, 1);
    ellipse (eyebrowRight, 10, 1, 1);
    
    stroke (224, 213, 216);
    ellipse (mouthHeight/height, 5, 1, 1);
    ellipse (mouthWidth/height, 1, 1, 1);
    ellipse (40, mouthHeight, 3, 3);
    

   
      }
    }
    


// OSC CALLBACK FUNCTIONS

public void found(int i) {
  println("found: " + i);
  found = i;
}

public void poseScale(float s) {
  println("scale: " + s);
  poseScale = s;
}

public void posePosition(float x, float y) {
  println("pose position\tX: " + x + " Y: " + y );
  posePosition.set(x, y, 0);
}

public void poseOrientation(float x, float y, float z) {
  println("pose orientation\tX: " + x + " Y: " + y + " Z: " + z);
  poseOrientation.set(x, y, z);
}

public void mouthWidthReceived(float w) {
  println("mouth Width: " + w);
  mouthWidth = w;
}

public void mouthHeightReceived(float h) {
  println("mouth height: " + h);
  mouthHeight = h;
}

public void eyeLeftReceived(float f) {
  println("eye left: " + f);
  eyeLeft = f;
}

public void eyeRightReceived(float f) {
  println("eye right: " + f);
  eyeRight = f;
}

public void eyebrowLeftReceived(float f) {
  println("eyebrow left: " + f);
  eyebrowLeft = f;
}

public void eyebrowRightReceived(float f) {
  println("eyebrow right: " + f);
  eyebrowRight = f;
}

public void jawReceived(float f) {
  println("jaw: " + f);
  jaw = f;
}

public void nostrilsReceived(float f) {
  println("nostrils: " + f);
  nostrils = f;
}

// all other OSC messages end up here
void oscEvent(OscMessage m) {

  /* print the address pattern and the typetag of the received OscMessage */
  println("#received an osc message");
  println("Complete message: " + m);
  println(" addrpattern: " + m.addrPattern());
  println(" typetag: " + m.typetag());
  println(" arguments: " + m.arguments()[0].toString());

  if (m.isPlugged() == false) {
    println("UNPLUGGED: " + m);
  }
}

Looking Outwards 4 MAX/MSP

Versum

Versum by Tarik Barri, an artist from the Netherlands, is a real-time 3D virtual space controlled by the user. It invites the audience to fly through and create their own musical trip. The virtual world of Versum is seen and heard from the viewpoint of a moving virtual camera with virtual microphones attached. This camera, controlled in real time by means of a joystick (or any other kind of controller), moves through space. It became the base of many other projects by Barri. One offshoot of Versum, called Tweetscape, used his software to visualize tweets in Germany; it shows how a project as open-ended and malleable as Versum can be applied to different spheres.

∆ inkblot

Inkblot by Amanda Ghassaei is very simple and very accessible for an average person not only to use but to build themselves. It is like a simplified and graceful version of Versum. It produces abstract symmetric patterns and a slowly evolving synthetic drone, layering the drones on top of each other to create a complex, layered sound. With that said, I do feel like it’s missing something. I am a little disappointed that it creates a soundscape in an environment that has its own sounds, but does not incorporate that environment.

Drawing Machine

Drawing Machine by Harvey Moon is made with Processing, Max/MSP, stepper motors, and Adafruit Arduino hardware. It takes weeks or months to draw a picture. This surprised me because it takes its feed from a live camera in Taiwan and then draws the city in real time, encapsulating single moments along the way.


SpongeHizal SquareFace

Spongebob face match project

After a bit of thinking, I decided to make the iconic SpongeBob face for this project. The hardest part was figuring out exactly why the given data points were so weird, and negative, but I got the hang of it after a bit. Another hard part was creating the Bézier curves (nose, mouth, and chin thing), purely because Bézier curves are annoyingly hard, especially when using variables.
Controls: the eyebrows control the eyelashes (their length, funny right?) and the smile controls the smile on SpongeBob. Otherwise, the rest of the gestures are mapped to locations on the face.
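For anyone else puzzled by the “weird, negative” numbers: after translate(posePosition.x, posePosition.y) and scale(poseScale), the sketch draws in a small coordinate frame centered on the face, where y grows downward, so anything above the nose needs a negative y. A hedged illustration (the ranges are guesses; FaceOSC’s raw gesture values vary by face and camera):

// eyebrowLeft usually sits somewhere around 7-10 (a rough guess, not a documented range);
// map() turns that raw value into a pixel offset above the face center.
float browRaise = map(eyebrowLeft, 7.0, 10.0, 0, 20);   // 0 = resting, 20 = fully raised
float browY = -40 - browRaise;                          // negative y = above the face center
line(-35, browY, -15, browY);                           // left eyebrow drawn as a simple bar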

import oscP5.*;
OscP5 oscP5;

// num faces found
int found;

// pose
float poseScale;
PVector posePosition = new PVector();
PVector poseOrientation = new PVector();

// gesture
float mouthHeight;
float mouthWidth;
float eyeLeft;
float eyeRight;
float eyebrowLeft;
float eyebrowRight;
float jaw;
float nostrils;
float lineradiusL = 10;
float lineradiusR = 10;

void setup() {
  size(640, 480);
  //frameRate(30);
  oscP5 = new OscP5(this, 8338);
  oscP5.plug(this, "found", "/found");
  oscP5.plug(this, "poseScale", "/pose/scale");
  oscP5.plug(this, "posePosition", "/pose/position");
  oscP5.plug(this, "poseOrientation", "/pose/orientation");
  oscP5.plug(this, "mouthWidthReceived", "/gesture/mouth/width");
  oscP5.plug(this, "mouthHeightReceived", "/gesture/mouth/height");
  oscP5.plug(this, "eyeLeftReceived", "/gesture/eye/left");
  oscP5.plug(this, "eyeRightReceived", "/gesture/eye/right");
  oscP5.plug(this, "eyebrowLeftReceived", "/gesture/eyebrow/left");
  oscP5.plug(this, "eyebrowRightReceived", "/gesture/eyebrow/right");
  oscP5.plug(this, "jawReceived", "/gesture/jaw");
  oscP5.plug(this, "nostrilsReceived", "/gesture/nostrils");
}

void draw() {  
  background(#ffee34);
  stroke(0);
  lineradiusL = eyebrowLeft*4;
  lineradiusR = eyebrowRight*4;
  if (found > 0) {
    translate(posePosition.x, posePosition.y+50);
    scale(poseScale);
    //EYEBROWS
    strokeWeight(1.6);
    line(-25, eyeLeft*-8, -25+cos(radians(270))*lineradiusL, eyeLeft*-8+sin(radians(270))*lineradiusL);
    line(-25, eyeLeft*-8, -25+cos(radians(240))*lineradiusL, eyeLeft*-8+sin(radians(240))*lineradiusL);
    line(-25, eyeLeft*-8, -25+cos(radians(300))*lineradiusL, eyeLeft*-8+sin(radians(300))*lineradiusL);
    line(25, eyeRight*-8, 25+cos(radians(270))*lineradiusR, eyeRight*-8+sin(radians(270))*lineradiusR);
    line(25, eyeRight*-8, 25+cos(radians(240))*lineradiusR, eyeRight*-8+sin(radians(240))*lineradiusR);
    line(25, eyeRight*-8, 25+cos(radians(300))*lineradiusR, eyeRight*-8+sin(radians(300))*lineradiusR);
    //EYES
    strokeWeight(.8);
    fill(255);
    ellipse(-25, eyeLeft*-8, 50, 50);
    ellipse(25, eyeRight*-8, 50, 50);
    fill(#34bcf3);
    ellipse(-23, eyeLeft*-8, 20, 20);
    ellipse(23, eyeRight*-8, 20, 20);
    fill(0);
    ellipse(-23, eyeLeft*-8, 11, 11);
    ellipse(23, eyeRight*-8, 11, 11);
    //FRECKLES
    fill(#b96119);
    noStroke();
    ellipse(-35, eyeLeft*-8+30, 3, 2);
    ellipse(-45, eyeLeft*-8+30, 4, 2);
    ellipse(-40, eyeLeft*-8+35, 1, 3);
    ellipse(35, eyeRight*-8+30, 3, 2);
    ellipse(45, eyeRight*-8+30, 4, 2);
    ellipse(40, eyeRight*-8+35, 1, 3);
    //NOSE
    fill(#ffee34);
    stroke(0);
    strokeWeight(0.8);
    beginShape();
    vertex(nostrils*-1+1, nostrils+2);
    bezierVertex(nostrils*-1-17, nostrils*-7, nostrils*-1+30, nostrils*-7, nostrils*-1+11, nostrils+2);
    endShape();
    //MOUTH
    fill(255);
    rect(-13,40-mouthWidth*.75,10,13);
    rect(0,40-mouthWidth*.75,10,13);
    if (mouthWidth*-3 + 70 > 0) {
      noFill();
    } else {
      fill(#ffee34);
    }
    bezier(-40,mouthWidth*-3+70, -30, 30, 30, 30, 40, mouthWidth*-3+70);
    //CHIN THING
    strokeWeight(2);
    stroke(#f59c9f);
    curve(-30, 13-mouthWidth*.75, -20, 56-mouthWidth*.75, 0, 58-mouthWidth*.75, 0, 63-mouthWidth*.75);
    curve(0, 63-mouthWidth*.75, 0, 58-mouthWidth*.75, 20, 56-mouthWidth*.75, 30, 13-mouthWidth*.75);
  }
}

// OSC CALLBACK FUNCTIONS

public void found(int i) {
  println("found: " + i);
  found = i;
}

public void poseScale(float s) {
  println("scale: " + s);
  poseScale = s;
}

public void posePosition(float x, float y) {
  println("pose position\tX: " + x + " Y: " + y );
  posePosition.set(x, y, 0);
}

public void poseOrientation(float x, float y, float z) {
  println("pose orientation\tX: " + x + " Y: " + y + " Z: " + z);
  poseOrientation.set(x, y, z);
}

public void mouthWidthReceived(float w) {
  println("mouth Width: " + w);
  mouthWidth = w;
}

public void mouthHeightReceived(float h) {
  println("mouth height: " + h);
  mouthHeight = h;
}

public void eyeLeftReceived(float f) {
  println("eye left: " + f);
  eyeLeft = f;
}

public void eyeRightReceived(float f) {
  println("eye right: " + f);
  eyeRight = f;
}

public void eyebrowLeftReceived(float f) {
  println("eyebrow left: " + f);
  eyebrowLeft = f;
}

public void eyebrowRightReceived(float f) {
  println("eyebrow right: " + f);
  eyebrowRight = f;
}

public void jawReceived(float f) {
  println("jaw: " + f);
  jaw = f;
}

public void nostrilsReceived(float f) {
  println("nostrils: " + f);
  nostrils = f;
}

// all other OSC messages end up here
void oscEvent(OscMessage m) {

  /* print the address pattern and the typetag of the received OscMessage */
  println("#received an osc message");
  println("Complete message: "+m);
  println(" addrpattern: "+m.addrPattern());
  println(" typetag: "+m.typetag());
  println(" arguments: "+m.arguments()[0].toString());

  if (m.isPlugged() == false) {
    println("UNPLUGGED: " + m);
  }
}

the thing with the face and the stuff

Ok yeah I know this is supes uninteresting and mostly just a cutesy cartoon me following the mapping of the actual me, only sometimes the eyebrows mess up and do a thing.

Initially, I wanted to do a thing with actual photographs of eyes and noses and stuff, and depending on the location of the eye or whatever in faceOSC, it would show a different photograph of an eye or something. As it turns out, randomly googling various facial features is all well and good until you start scrolling down and then there’s pictures of horrifying facial injuries and hospital stuff and facial surgeries and I literally cannot look at that stuff without feeling sick at the least and having a panic attack at most. Anyway the surprise gory nose surgery photos kinda put me off a) my lunch and b) the idea for the time being, but at some point I want to revisit that concept because it would be super cool to make something with all sorts of weird flickering images of eyes and stuff.

My goal for this once I got on the second trajectory, which was making a silly cartoony thing, was to be able to make the entire rest of the physical face and the hair follow the actual head around. It mostly works; the bangs do a lifty thing that normal hair doesn’t do, but for the most part it works. All I really did was edit the code we were given.

// Don't forget to install OscP5 library for Processing, 
// or nothing will work! 
//
// A template for receiving face tracking osc messages from
// Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker

import oscP5.*;
OscP5 oscP5;

// num faces found
int found;

// pose
float poseScale;
PVector posePosition = new PVector();
PVector poseOrientation = new PVector();

// gesture
float mouthHeight;
float mouthWidth;
float eyeLeft;
float eyeRight;
float eyebrowLeft;
float eyebrowRight;
float jaw;
float nostrils;

void setup() {
  size(640, 480);
  frameRate(30);

  oscP5 = new OscP5(this, 8338);
  oscP5.plug(this, "found", "/found");
  oscP5.plug(this, "poseScale", "/pose/scale");
  oscP5.plug(this, "posePosition", "/pose/position");
  oscP5.plug(this, "poseOrientation", "/pose/orientation");
  oscP5.plug(this, "mouthWidthReceived", "/gesture/mouth/width");
  oscP5.plug(this, "mouthHeightReceived", "/gesture/mouth/height");
  oscP5.plug(this, "eyeLeftReceived", "/gesture/eye/left");
  oscP5.plug(this, "eyeRightReceived", "/gesture/eye/right");
  oscP5.plug(this, "eyebrowLeftReceived", "/gesture/eyebrow/left");
  oscP5.plug(this, "eyebrowRightReceived", "/gesture/eyebrow/right");
  oscP5.plug(this, "jawReceived", "/gesture/jaw");
  oscP5.plug(this, "nostrilsReceived", "/gesture/nostrils");
}

void draw() {  
  background(255);
  stroke(0);
  
  if(found > 0) {
    translate(posePosition.x, posePosition.y);
    scale(poseScale);
    
    //face?
    shapeMode(CENTER);
      fill(55,164,193);
      //hair
        beginShape();
        curveVertex(0,-50);
        curveVertex(40,-50);
        curveVertex(50,jaw+5);
        curveVertex(50,jaw+5);
        curveVertex(-50,jaw+5);
        curveVertex(-50,jaw+5);
        curveVertex(-40,-50);
        curveVertex(0,-50);
      endShape(CLOSE);
      
      fill(234,212,197);
      //ears
        beginShape();//left ear
          curveVertex(eyeLeft-28,jaw-20);
          curveVertex(eyeLeft-28,jaw-20);
          curveVertex(eyeLeft-48,jaw-25);
          curveVertex(eyeLeft-48,jaw-55);
          curveVertex(eyeLeft-38,jaw-50);
          curveVertex(eyeLeft-38,jaw-50);
        endShape(CLOSE);
        beginShape();//right ear
          curveVertex(eyeRight+20,jaw-20);
          curveVertex(eyeRight+20,jaw-20);
          curveVertex(eyeRight+40,jaw-25);
          curveVertex(eyeRight+40,jaw-55);
          curveVertex(eyeRight+30,jaw-50);
          curveVertex(eyeRight+30,jaw-50);
        endShape(CLOSE);
      
      beginShape();//faceshape
        curveVertex(30,-100);
        curveVertex(45,-100);
        curveVertex(23,jaw-10);
        curveVertex(-23,jaw-10);
        curveVertex(-45, -100);
        curveVertex(-30, -100);
      endShape(CLOSE);
      
      fill(55,164,193);
      beginShape();//bangs
        curveVertex(0,-100);
        curveVertex(40,-100);
        curveVertex(31,jaw-5);
        curveVertex(25,jaw-5);
        curveVertex(30,(eyeRight*-9)-10);
        curveVertex(-30,(eyeLeft*-9)-10);
        curveVertex(-25,jaw-5);
        curveVertex(-31,jaw-5);
        curveVertex(-40,-100);
        curveVertex(0,-100);
      endShape(CLOSE);
      beginShape();
        curveVertex(0,-100);
        curveVertex(40,-100);
        curveVertex(28,(eyeRight*-9)-15);
        curveVertex(-28,(eyeLeft*-9)-15);
        curveVertex(-40,-100);
        curveVertex(0,-100);
      endShape(CLOSE);
      
    //eyes
      fill(100,195,240);
    ellipse(-20, eyeLeft * -9, 20, eyeLeft*-6);
    ellipse(20, eyeRight * -9, 20, eyeRight*-6);
    
    //mouth
      fill(245,161,197);
      shapeMode(CENTER);
      beginShape();
        curveVertex(mouthWidth, -mouthHeight);
        curveVertex(mouthWidth, -mouthHeight);
        curveVertex(mouthWidth, mouthHeight);
        curveVertex(-mouthWidth, mouthHeight);
        curveVertex(-mouthWidth, -mouthHeight);
        curveVertex(-mouthWidth, -mouthHeight);
      endShape(CLOSE);
      
    //eyebrows
      noFill();
      beginShape();
        curveVertex(-30, eyebrowLeft*-3);
        curveVertex(-30, eyebrowLeft*-5);
        curveVertex(-5,eyebrowLeft*-5);
        curveVertex(-5,eyebrowLeft*-3);
      endShape();  
      beginShape();
        curveVertex(30, eyebrowRight*-3);
        curveVertex(30, eyebrowRight*-5);
        curveVertex(5,eyebrowRight*-5);
        curveVertex(5,eyebrowRight*-3);
      endShape();    
  }
}

// OSC CALLBACK FUNCTIONS

public void found(int i) {
  println("found: " + i);
  found = i;
}

public void poseScale(float s) {
  println("scale: " + s);
  poseScale = s;
}

public void posePosition(float x, float y) {
  println("pose position\tX: " + x + " Y: " + y );
  posePosition.set(x, y, 0);
}

public void poseOrientation(float x, float y, float z) {
  println("pose orientation\tX: " + x + " Y: " + y + " Z: " + z);
  poseOrientation.set(x, y, z);
}

public void mouthWidthReceived(float w) {
  println("mouth Width: " + w);
  mouthWidth = w;
}

public void mouthHeightReceived(float h) {
  println("mouth height: " + h);
  mouthHeight = h;
}

public void eyeLeftReceived(float f) {
  println("eye left: " + f);
  eyeLeft = f;
}

public void eyeRightReceived(float f) {
  println("eye right: " + f);
  eyeRight = f;
}

public void eyebrowLeftReceived(float f) {
  println("eyebrow left: " + f);
  eyebrowLeft = f;
}

public void eyebrowRightReceived(float f) {
  println("eyebrow right: " + f);
  eyebrowRight = f;
}

public void jawReceived(float f) {
  println("jaw: " + f);
  jaw = f;
}

public void nostrilsReceived(float f) {
  println("nostrils: " + f);
  nostrils = f;
}

// all other OSC messages end up here
void oscEvent(OscMessage m) {
  
  /* print the address pattern and the typetag of the received OscMessage */
  println("#received an osc message");
  println("Complete message: "+m);
  println(" addrpattern: "+m.addrPattern());
  println(" typetag: "+m.typetag());
  println(" arguments: "+m.arguments()[0].toString());
  
  if(m.isPlugged() == false) {
    println("UNPLUGGED: " + m);
  }
}