FaceOSC: The Orange Gobbler


My goal was to create a creature with expressive eyes and an expressive mouth. I tracked the positions of the left eye, right eye, left eyebrow, right eyebrow, the nostrils, and the jaw. I decided to work with primitives so I could easily see which motions were changing which shapes.

The body was a bit of an afterthought, and it acts a bit like a slinky due to how the position of the jaw was used in placing shapes. If I were to revisit this project, I’d try to utilize some of the mouth parameters to make the creature more expressive. However, I’m undecided on whether or not to give “The Orange Gobbler” a set of teeth.
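A rough, untested sketch of that mouth idea might look like the helper below; it assumes the mouthWidth, mouthHeight, and jaw values the program underneath already receives, and the scale factors are guesses that would need tuning against live FaceOSC data.

// Hypothetical helper, meant to be dropped into the sketch below (it relies on
// that sketch's jaw, mouthWidth, and mouthHeight globals); the multipliers are guesses.
void drawGobblerMouth() {
  fill(58, 42, 72);                                           // dark mouth interior
  ellipse(0, jaw * 0.5, mouthWidth * 3, mouthHeight * 3);     // opening follows the tracked mouth
  fill(206, 119, 48);
  arc(0, jaw * 0.5, mouthWidth * 3, mouthHeight * 3, 0, PI);  // lower-lip shape (still no teeth)
}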

At one point, The Orange Gobbler did have highlights in its eyes, but for some reason the highlights made the Gobbler look a bit too unsettling.

//Miranda Jacoby
//EMS Interactivity Section A
//majacoby@andrew.cmu.edu
//Creature design is copyright Miranda Jacoby 2014
//Adapted from Golan Levin's "FaceOSCReceiver" program

// Don't forget to install OscP5 library for Processing, 
// or nothing will work! 
//
// A template for receiving face tracking osc messages from
// Kyle McDonald's FaceOSC https://github.com/kylemcdonald/ofxFaceTracker

import oscP5.*;
OscP5 oscP5;

// num faces found
int found;

// pose
float poseScale;
PVector posePosition = new PVector();
PVector poseOrientation = new PVector();

// gesture
float mouthHeight;
float mouthWidth;
float eyeLeft;
float eyeRight;
float eyebrowLeft;
float eyebrowRight;
float jaw;
float nostrils;

//dimensions
float nostrilHeight;

void setup() {
  size(640, 480);
  frameRate(30);

  oscP5 = new OscP5(this, 8338);
  oscP5.plug(this, "found", "/found");
  oscP5.plug(this, "poseScale", "/pose/scale");
  oscP5.plug(this, "posePosition", "/pose/position");
  oscP5.plug(this, "poseOrientation", "/pose/orientation");
  oscP5.plug(this, "mouthWidthReceived", "/gesture/mouth/width");
  oscP5.plug(this, "mouthHeightReceived", "/gesture/mouth/height");
  oscP5.plug(this, "eyeLeftReceived", "/gesture/eye/left");
  oscP5.plug(this, "eyeRightReceived", "/gesture/eye/right");
  oscP5.plug(this, "eyebrowLeftReceived", "/gesture/eyebrow/left");
  oscP5.plug(this, "eyebrowRightReceived", "/gesture/eyebrow/right");
  oscP5.plug(this, "jawReceived", "/gesture/jaw");
  oscP5.plug(this, "nostrilsReceived", "/gesture/nostrils");
  
  nostrilHeight = 7;
}

void draw() {  
  background(249, 245, 255);
  stroke(0);
  
  if(found > 0) {
    translate(posePosition.x, posePosition.y);
    scale(poseScale);
    noStroke();
 //Legs
    fill(155, 84, 14);
    pushMatrix();
    rotate(6.25);
    ellipse(20, jaw * 20, 125, 200);
    popMatrix();
    pushMatrix();
    rotate(-6.25);
    ellipse(-20, jaw * 20, 125, 200);
    popMatrix();
//Neck and Body
    fill(155, 84, 14);
    ellipse(0, jaw * 3, 110, 180);
    ellipse(0, jaw * 5, 120, 190);
    ellipse(0, jaw * 7, 130, 200);
    ellipse(0, jaw * 9, 140, 210);
    ellipse(0, jaw * 11, 150, 220);
    ellipse(0, jaw * 15, 275, 375);
    fill(206, 119, 48);
    ellipse(0, jaw * 3, 100, 150);
    ellipse(0, jaw * 7, 100, 150);
    ellipse(0, jaw * 11, 100, 150);
    ellipse(0, jaw * 15, 200, 275);
//Arms
    fill(155, 84, 14);
    pushMatrix();
    rotate(6);
    ellipse(20, jaw * 15, 125, 200);
    popMatrix();
    pushMatrix();
    rotate(-6);
    ellipse(-20, jaw * 15, 125, 200);
    popMatrix();

//Mouth Lower Jaw
    fill(155, 84, 14);
    ellipse(0, jaw * .5, 100, 100);
    fill(58, 42, 72);
    ellipse(0, jaw * .5, 73, 80);
//Mouth Lower Jaw Tip
    fill(155, 84, 14);
    ellipse(0, jaw * 2.25, 50, 30);
//Space Between the Eyes (CHECK FOR OVERLAP PROBLEMS)
    fill(155, 84, 14);
    ellipse(0, 10 * -2.7, 35, 20);
    ellipse(-20, 10 * -2.7, 35, 20);
    ellipse(20, 10 * -2.7, 35, 20);
    ellipse(-20, 5 * -2.7, 35, 20);
    ellipse(20, 5 * -2.7, 35, 20);
    quad(-20, eyeLeft * -10, 20, eyeRight * -10, 5, (nostrils + nostrilHeight * .5) * 3, -5, (nostrils + nostrilHeight * .5) * 3);
//Horns
    fill(206, 119, 48);
    pushMatrix();
    rotate(6);
    ellipse(-30, -40, 35, 55);
    popMatrix();
    pushMatrix();
    rotate(-6);
    ellipse(30, -40, 35, 55);
    popMatrix();
    fill(155, 84, 14);
    ellipse(-20, 0, 50, 30);
    ellipse(20, 0, 50, 30);
    fill(206, 119, 48);
    ellipse(-40, 0, 50, 30);
    ellipse(40, 0, 50, 30);

//Mouth Upper Jaw
    fill(206, 119, 48);
    pushMatrix();
    rotate(-5);
    ellipse(-0, 30, 40, 25);
    popMatrix();
    pushMatrix();
    rotate(5);
    ellipse(0, 30, 40, 25);
    popMatrix();
    pushMatrix();
    //rotate(-5);
    ellipse(15, 20, 30, 40);
    popMatrix();
    pushMatrix();
    //rotate(5);
    ellipse(-15, 20, 30, 40);
    popMatrix();
//    ellipse(0, jaw * .5, 100, 100);
//    ellipse(0, jaw * 2.5, 50, 30);
//Bridge of Nose
    fill(155, 84, 14);
    quad(-20, eyeLeft * -2, 20, eyeRight * -2, 5, (nostrils + nostrilHeight + (jaw * -2.5)/15) * 3, -5, (nostrils + nostrilHeight + (jaw * -2.5)/15) * 3);
    fill(206, 119, 48);
    quad(-20 + 27, eyeLeft * -5, 20 - 27, eyeRight * -5, -5, (nostrils + nostrilHeight + (jaw * -2.5)/15) * 3, 5, (nostrils + nostrilHeight + (jaw * -2.5)/15) * 3);
    fill(240, 172, 104);
    quad(-30 + 27, eyeLeft * -5, 30 - 27, eyeRight * -5, 5, (nostrils + nostrilHeight + (jaw * -2.5)/15) * 3, -5, (nostrils + nostrilHeight + (jaw * -2.5)/15) * 3);
//Eye Bags
    //fill(206, 119, 48);
    fill(58, 42, 72);
    ellipse(-20, 0, 25, 15);
    ellipse(20, 0, 25, 15);
//Eye Sclera
    fill(245, 229, 175);
    ellipse(-20, eyeLeft * -2, 25, 20);
    ellipse(20, eyeRight * -2, 25, 20);
//Eye Iris
    fill(84, 115, 134);
    ellipse(-20, eyeLeft * -2, 15, 15);
    ellipse(20, eyeRight * -2, 15, 15);
//Eye Pupil
    fill(58, 42, 72);
    ellipse(-20, eyeLeft * -2, 5, 5);
    ellipse(20, eyeRight * -2, 5, 5);
//Eye Highlight
    //fill(255, 253, 234);
    //ellipse(-27, eyeLeft * -2.5, 4, 2);
    //ellipse(13, eyeRight * -2.5, 4, 2);
    //Mouth?
    //ellipse(0, 20, mouthWidth* 3, mouthHeight * 3);
//Nostrils
    fill(58, 42, 72);
    ellipse(-5, nostrils * 3, 3, nostrilHeight);
    ellipse(5, nostrils * 3, 3, nostrilHeight);
//Mouth Upper Jaw Tip
    fill(206, 119, 48);
    ellipse(0, 80 + jaw * -2.25, 20, 30);
    fill(240, 172, 104);
    ellipse(0, 70 + jaw * -2.25, 10, 15);
    //ellipseMode(CENTER);
//Eyebrows
    fill(155, 84, 14);
    pushMatrix();
    ellipse(-20, eyebrowLeft * -2.7, 35, 20);
    popMatrix();
    pushMatrix();
    ellipse(20, eyebrowRight * -2.7, 35, 20);
    popMatrix();
    fill(206, 119, 48);
    pushMatrix();
    ellipse(-20, eyebrowLeft * -3, 35, 20);
    popMatrix();
    pushMatrix();
    ellipse(20, eyebrowRight * -3, 35, 20);
    popMatrix();

  }
}

// OSC CALLBACK FUNCTIONS

public void found(int i) {
  println("found: " + i);
  found = i;
}

public void poseScale(float s) {
  println("scale: " + s);
  poseScale = s;
}

public void posePosition(float x, float y) {
  //println("pose position\tX: " + x + " Y: " + y );
  posePosition.set(x, y, 0);
}

public void poseOrientation(float x, float y, float z) {
  //println("pose orientation\tX: " + x + " Y: " + y + " Z: " + z);
  poseOrientation.set(x, y, z);
}

public void mouthWidthReceived(float w) {
  //println("mouth Width: " + w);
  mouthWidth = w;
}

public void mouthHeightReceived(float h) {
  //println("mouth height: " + h);
  mouthHeight = h;
}

public void eyeLeftReceived(float f) {
  //println("eye left: " + f);
  eyeLeft = f;
}

public void eyeRightReceived(float f) {
  //println("eye right: " + f);
  eyeRight = f;
}

public void eyebrowLeftReceived(float f) {
  //println("eyebrow left: " + f);
  eyebrowLeft = f;
}

public void eyebrowRightReceived(float f) {
  //println("eyebrow right: " + f);
  eyebrowRight = f;
}

public void jawReceived(float f) {
  println("jaw: " + f);
  jaw = f;
}

public void nostrilsReceived(float f) {
  //println("nostrils: " + f);
  nostrils = f;
}

// all other OSC messages end up here
void oscEvent(OscMessage m) {
  
  /* print the address pattern and the typetag of the received OscMessage */
  println("#received an osc message");
  println("Complete message: "+m);
  println(" addrpattern: "+m.addrPattern());
  println(" typetag: "+m.typetag());
  println(" arguments: "+m.arguments()[0].toString());
  
  if(m.isPlugged() == false) {
    println("UNPLUGGED: " + m);
  }
}


Distorting a Classic


You can use your face to turn a classical sculpture into a glitching digital artifact: eyebrow height is mapped to noise on the sculpture's surface, and mouth openness is mapped to twisting the sculpture.
This has been a long process. The end product is a modification of my original idea, which was to manipulate specific parts of the face; I couldn't grab only a specific section of the mesh to distort, since the head was an imported object. If I had built the head in Processing using 3D objects, I would have been able to. I used Hemesh to distort the head and toxiclibs to import the STL file into Processing. The head is a model of Memory by Daniel Chester French, which is in the Metropolitan Museum of Art.
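As an aside, the alternative described above could be sketched roughly like this: if the head lived in a Processing PShape instead of an imported Hemesh object, individual vertices could be grabbed and only one region displaced. This is only an illustration; "head.obj", the y-range that picks out the region, and the displacement amount are placeholders, and a real version would drive the amount with a FaceOSC value.

// Illustration only: displace just one band of vertices of a loaded PShape model.
// "head.obj" and the y-range are placeholders; "amount" stands in for a face-driven value.
PShape head;

void setup() {
  size(800, 800, P3D);
  head = loadShape("head.obj");      // placeholder model file
}

void draw() {
  background(230);
  float amount = 2;                  // stand-in for something like mouthHeight
  for (int c = 0; c < head.getChildCount(); c++) {
    PShape part = head.getChild(c);
    for (int i = 0; i < part.getVertexCount(); i++) {
      PVector v = part.getVertex(i);
      if (v.y > -20 && v.y < 20) {   // only vertices in the chosen band get moved
        part.setVertex(i, v.x + random(-amount, amount), v.y, v.z);
      }
    }
  }
  translate(width/2, height/2);
  shape(head);                       // note: the jitter accumulates frame to frame
}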

code:

//Charlotte Stiles
//Thank you Matthew Plummer-Fernandez for the code to import stl to hemesh using toxiclibs http://www.plummerfernandez.com/
//bend and noise code from Hemesh library http://hemesh.wblut.com/
//head is from scotta3d on Thingiverse http://www.thingiverse.com/thing:24335
//the head is a model of Memory by Daniel Chester French, found in the Metropolitan Museum of Art


import wblut.math.*;
import wblut.processing.*;
import wblut.core.*;
import wblut.*;
import wblut.hemesh.*;
import wblut.geom.*;

// Toxiclibs for the import stl and save color stl

import toxi.geom.*;
import toxi.geom.mesh.*;
import toxi.math.*;
import toxi.processing.*;


import oscP5.*;
OscP5 oscP5;
//pshape is for importing my stl
PShape s;

PVector posePosition;
boolean found;
float eyeLeftHeight;
float eyeRightHeight;
float mouthHeight;
float mouthWidth;
float leftEyebrowHeight;
float rightEyebrowHeight;

float MN; //eyebrow to eye
 
float poseScale;


String stlFilename = "memory_head.stl";

//this is for noise and everything else vv
HE_Mesh mesh, copymesh;
WB_Render render;

//this is for bend vv
WB_Plane P;
WB_Line L;
HEM_Bend bendModifier;
WB_GeometryFactory gf=WB_GeometryFactory.instance();



void setup() {
  size(800, 800, P3D);
 
  createMesh();
  
  
  HEM_Noise modifier=new HEM_Noise();
  modifier.setDistance(20);
  copymesh.modify(modifier);
  //for noise^^
  
  bendModifier=new HEM_Bend();
  
  P=new WB_Plane(0,0,-200,0,0,1); 
  bendModifier.setGroundPlane(P);// Ground plane of bend modifier 
 
  L=new WB_Line(0,0,-200,-1,0,-200);
  bendModifier.setBendAxis(L);// Bending axis

  
  bendModifier.setAngleFactor(30.0/400);// Angle per unit distance (in degrees) to the ground plane
  // bend axis by an angle d*angleFactor;
 
  bendModifier.setPosOnly(false);// apply modifier only on positive side of the ground plane?
  
  mesh.modify(bendModifier);
  
  
  render=new WB_Render(this);
  
    oscP5 = new OscP5(this, 8338);
  oscP5.plug(this, "mouthWidthReceived", "/gesture/mouth/width");
  oscP5.plug(this, "mouthHeightReceived", "/gesture/mouth/height");
  oscP5.plug(this, "eyebrowLeftReceived", "/gesture/eyebrow/left");
  oscP5.plug(this, "eyebrowRightReceived", "/gesture/eyebrow/right");
  oscP5.plug(this, "eyeLeftReceived", "/gesture/eye/left");
  oscP5.plug(this, "eyeRightReceived", "/gesture/eye/right");
  oscP5.plug(this, "jawReceived", "/gesture/jaw");
  oscP5.plug(this, "nostrilsReceived", "/gesture/nostrils");
  oscP5.plug(this, "found", "/found");
  oscP5.plug(this, "poseOrientation", "/pose/orientation");
  oscP5.plug(this, "posePosition", "/pose/position");
  oscP5.plug(this, "poseScale", "/pose/scale");
  
}

void draw() {
  background(230);
  directionalLight(255, 255, 255, 1, 1, -1);
  directionalLight(127, 127, 127, -1, -1, 1);
  
  if (found) {
 
    translate(posePosition.x, posePosition.y+300);
    scale(poseScale*2);
  }
  rotateY(400*1.0/width*TWO_PI);
  rotateX(200*1.0/height*TWO_PI);
  
   HEM_Noise modifier=new HEM_Noise();
  copymesh=mesh.get();
  
  MN = rightEyebrowHeight - eyeRightHeight - 4;
// println(MN);

  if (MN < 0) MN=0; // eyebrow-to-eye distance: make sure it doesn't go negative
 
  modifier.setDistance(MN/2);
  copymesh.modify(modifier);
  
 float heightWidthRatio=mouthHeight/mouthWidth;
  println(heightWidthRatio);
  if (heightWidthRatio < .2) heightWidthRatio= 0;
  L=gf.createLineThroughPoints(0,0, heightWidthRatio-100,-1,0,heightWidthRatio-100);
  //this one controls the speed vv
  bendModifier.setAngleFactor(20* 0.030 *heightWidthRatio);
  bendModifier.setBendAxis(L);
  mesh.modify(bendModifier);
  
  noStroke();
  render.drawEdges(mesh);
  noStroke();
  render.drawFaces(copymesh);
  
  

  
}


 
public void mouthWidthReceived(float w) {
//  println("mouth Width: " + w);
  mouthWidth = w;
}
 
public void mouthHeightReceived(float h) {
 // println("mouth height: " + h);
  mouthHeight = h;
}
 
 
public void eyebrowRightReceived(float h) {
 // println("eyebrow right: " + h);
  rightEyebrowHeight = h;
}

 
public void eyeRightReceived(float h) {
 // println("eye right: " + h);
  eyeRightHeight = h;
}

public void found(int i) {
  //println("found: " + i); // 1 == found, 0 == not found
  found = i == 1;
}
 
public void posePosition(float x, float y) {
 // println("pose position\tX: " + x + " Y: " + y );
  posePosition = new PVector(x, y);
}
 
public void poseScale(float s) {
 // println("scale: " + s);
  poseScale = s;
}
 
public void poseOrientation(float x, float y, float z) {
 // println("pose orientation\tX: " + x + " Y: " + y + " Z: " + z);
}
 
 
void oscEvent(OscMessage theOscMessage) {
  if (theOscMessage.isPlugged()==false) {
   // println("UNPLUGGED: " + theOscMessage);
  }
}

void createMesh(){
  mesh = new HE_Mesh(fromStl(stlFilename));
  copymesh= mesh.get();

}



HEC_FromFacelist fromStl(String stlName) { 
  println("Start Build");
  WETriangleMesh wemesh = (WETriangleMesh) new STLReader().loadBinary(sketchPath(stlName), STLReader.WEMESH);
  //convert toxi mesh to a hemesh. Thanks to wblut
  int n=wemesh.getVertices().size();
  ArrayList points= new ArrayList(n);
  for (Vec3D v : wemesh.getVertices ()) { 
    points.add(new WB_Point(v.x, v.y, v.z));
  }
  int[] toxiFaces=wemesh.getFacesAsArray();
  int nf=toxiFaces.length/3;
  int[][] faces=new int[nf][3];
  for (int i=0; i<nf; i++) {
    // copy each triangle's three vertex indices from the toxiclibs face array
    faces[i][0]=toxiFaces[3*i];
    faces[i][1]=toxiFaces[3*i+1];
    faces[i][2]=toxiFaces[3*i+2];
  }
  // build the hemesh creator from the converted vertex and face lists
  HEC_FromFacelist creator=new HEC_FromFacelist();
  creator.setVertices(points);
  creator.setFaces(faces);
  println("Finished Build");
  return creator;
}

DragFace


 

For this assignment, I remembered that the creator of the face tracker had made something that meshed a celebrity's face with your own. I wanted to do something similar but with drag makeup. I attempted this idea by adding eye shadow, large red lips, and a contoured nose line. However, due to the short time frame, it fell far short of what I imagined, but I do not want to give up on this idea; I want to learn more so that I can apply this concept properly. I guess this assignment was just about learning the basics of this face program.

 

import oscP5.*;
OscP5 oscP5;

// num faces found
int found;

// pose
float poseScale;
PVector posePosition = new PVector();
PVector poseOrientation = new PVector();

// gesture
float mouthHeight;
float mouthWidth;
float eyeLeft;
float eyeRight;
float eyebrowLeft;
float eyebrowRight;
float jaw;
float nostrils;

void setup() {
  size(640, 480);
  frameRate(30);

  oscP5 = new OscP5(this, 8338);
  oscP5.plug(this, "found", "/found");
  oscP5.plug(this, "poseScale", "/pose/scale");
  oscP5.plug(this, "posePosition", "/pose/position");
  oscP5.plug(this, "poseOrientation", "/pose/orientation");
  oscP5.plug(this, "mouthWidthReceived", "/gesture/mouth/width");
  oscP5.plug(this, "mouthHeightReceived", "/gesture/mouth/height");
  oscP5.plug(this, "eyeLeftReceived", "/gesture/eye/left");
  oscP5.plug(this, "eyeRightReceived", "/gesture/eye/right");
  oscP5.plug(this, "eyebrowLeftReceived", "/gesture/eyebrow/left");
  oscP5.plug(this, "eyebrowRightReceived", "/gesture/eyebrow/right");
  oscP5.plug(this, "jawReceived", "/gesture/jaw");
  oscP5.plug(this, "nostrilsReceived", "/gesture/nostrils");
}

void draw() {
  background(255, 255, 255, 10);
  stroke(0);

  if (found > 0) {
    translate(posePosition.x, posePosition.y);
    scale(poseScale);
    // eye outlines
    noFill();
    ellipse(-20, eyeLeft * -9, 20, 7);
    ellipse(20, eyeRight * -9, 20, 7);
    // large red lips
    fill(255, 0, 0, 200);
    ellipse(0, 20, mouthWidth * 3, mouthHeight * 7);
    fill(255);
    ellipse(0, 20, mouthWidth * 3, mouthHeight * 3);
    // nostrils and contoured nose lines
    noFill();
    ellipse(-5, nostrils * -1, 7, 3);
    ellipse(5, nostrils * -1, 7, 3);
    line(-5, nostrils * -2, -7, eyebrowLeft * -5);
    line(5, nostrils * -2, 7, eyebrowLeft * -5);
    // eyebrows
    noStroke();
    rectMode(CENTER);
    fill(140, 100, 40);
    rect(-20, eyebrowLeft * -5, 25, 5);
    rect(20, eyebrowRight * -5, 25, 5);
    // eye shadow
    noStroke();
    fill(90, 150, 200, 150);
    ellipse(-20, (eyebrowLeft * -5) + 10, 30, eyebrowLeft * 2);
    ellipse(20, (eyebrowRight * -5) + 10, 30, eyebrowRight * 2);
    fill(10, 50, 100, 150);
    ellipse(-30, (eyebrowLeft * -5) + 10, 30, eyebrowLeft * 2);
    ellipse(30, (eyebrowRight * -5) + 10, 30, eyebrowRight * 2);
  }
}

// OSC CALLBACK FUNCTIONS

public void found(int i) {
  println("found: " + i);
  found = i;
}

public void poseScale(float s) {
  println("scale: " + s);
  poseScale = s;
}

public void posePosition(float x, float y) {
  println("pose position\tX: " + x + " Y: " + y );
  posePosition.set(x, y, 0);
}

public void poseOrientation(float x, float y, float z) {
  println("pose orientation\tX: " + x + " Y: " + y + " Z: " + z);
  poseOrientation.set(x, y, z);
}

public void mouthWidthReceived(float w) {
  println("mouth Width: " + w);
  mouthWidth = w;
}

public void mouthHeightReceived(float h) {
  println("mouth height: " + h);
  mouthHeight = h;
}

public void eyeLeftReceived(float f) {
  println("eye left: " + f);
  eyeLeft = f;
}

public void eyeRightReceived(float f) {
  println("eye right: " + f);
  eyeRight = f;
}

public void eyebrowLeftReceived(float f) {
  println("eyebrow left: " + f);
  eyebrowLeft = f;
}

public void eyebrowRightReceived(float f) {
  println("eyebrow right: " + f);
  eyebrowRight = f;
}

public void jawReceived(float f) {
  println("jaw: " + f);
  jaw = f;
}

public void nostrilsReceived(float f) {
  println("nostrils: " + f);
  nostrils = f;
}

// all other OSC messages end up here
void oscEvent(OscMessage m) {

  /* print the address pattern and the typetag of the received OscMessage */
  println("#received an osc message");
  println("Complete message: "+m);
  println(" addrpattern: "+m.addrPattern());
  println(" typetag: "+m.typetag());
  println(" arguments: "+m.arguments()[0].toString());

  if(m.isPlugged() == false) {
    println("UNPLUGGED: " + m);
  }
}

Alex Looking Outwards Max

Ambient Synthesis by Amanda Ghassaei “is a sound sculpture that responds to light stimuli to construct a unique, audiovisual interpretation of its environment.”  The light sensor data is interpreted by a MaxMSP application to produce abstract symmetrical patterns and a slowly evolving synthetic drone.  I really enjoy the minimalistic aesthetic of this cube in these landscapes.  I also enjoy the way this knee-height cube is able to interpret its surroundings and release a sound that engulfs an entire space.  One improvement that could be made is either a more developed focus on the lighting visuals or removing the lights entirely, because they distract the viewer and are not that pretty.  The documentation of this project is very effective because it draws a professional focus on the object as well as its surroundings.

 

“Commissioned by Sonos, Light House is an interactive light and sound responsive installation created by NY’s SoftLab that responds in real-time to Sonos components. The installation is constructed of a grid of 600 florescent light tubes at varying heights and lengths to create an interior volume.” It appears to be used as an interactive light display/set for musical performances.  I enjoy the clean look of the fluorescent light tubes.  However, because the installation occupies a three-dimensional volume, some of the lighting patterns are blocked by the inactive lights in front of them.  Another direction this piece could take is for the artists to think about the bulbs coming up from the floor and out from the walls in all different directions, rather than just hanging as a cloud from above.

https://www.creativeapplications.net/maxmsp/light-house-light-and-sound-installation-for-sonos-by-softlab/

 

“Object” is my favorite of the three.  This “is an interactive installation that generates an ever changing sound thanks to the dynamic relation established between its different parts.”  As the viewer gets closer to the plate, it bends away and makes a louder noise, as if it is afraid of the viewer.  It is very interesting because it is a give-and-take kind of fear: the piece physically fears the viewer, while the sound and reaction of an inanimate object make the viewer afraid to get closer.  The documentation of this piece is somewhat poor because it appears to be shot in a classroom setting.

Alex Clock

Hour Glass

For this prompt, I decided to create an hourglass-looking structure that counts the seconds in red, the minutes in yellow, and the hours (0-12) in blue. I took the smoothly progressing rectangle code from the example, multiplied it, and flipped it vertically in my version. For some reason I lost the rectangles' reset behavior when I transferred it into my hourglass structure. I originally wanted to create a particle-falling hourglass that has small particles of sand fall through a hole; once the sand reached a certain number, the grains would combine into a larger, differently colored sand particle for the minute, and when 60 minute-particles combined, they would turn into a larger particle for the hour.
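Just to record that idea, here is a bare-bones, untested sketch of the particle version (separate from the actual clock code below): one grain falls per second, and sixty landed grains merge into a single larger minute grain. The sizes, speeds, and colors are arbitrary guesses.

// Minimal sketch of the falling-sand idea: one grain drops per second, and when
// 60 grains have landed they merge into one larger "minute" grain.
ArrayList<PVector> grains = new ArrayList<PVector>();  // landed second-grains
ArrayList<Float> fallingY = new ArrayList<Float>();    // y positions of grains still falling
int minuteGrains = 0;                                  // merged minute-grains so far
int lastSecond = -1;

void setup() {
  size(400, 400);
}

void draw() {
  background(80);
  if (second() != lastSecond) {      // release one grain per second
    lastSecond = second();
    fallingY.add(0.0f);
  }
  fill(150, 0, 0);
  for (int i = fallingY.size() - 1; i >= 0; i--) {
    float y = fallingY.get(i) + 4;   // simple constant fall speed
    if (y >= height - 20) {
      grains.add(new PVector(random(150, 250), height - 20));
      fallingY.remove(i);
    } else {
      fallingY.set(i, y);
      ellipse(width/2, y, 4, 4);
    }
  }
  for (PVector g : grains) ellipse(g.x, g.y, 4, 4);
  if (grains.size() >= 60) {         // sixty second-grains become one minute-grain
    grains.clear();
    minuteGrains++;
  }
  fill(150, 120, 0);
  for (int i = 0; i < minuteGrains; i++) ellipse(60 + i * 10, height - 40, 8, 8);
}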

int prevSec;
int millisRolloverTime;
 
//--------------------------
void setup() {
  size(400,400);
  background(80);
  millisRolloverTime = 0;
}
 
//--------------------------
void draw() {
  background(80); // clear each frame so the bars reset instead of piling up
  strokeWeight(25);
  stroke(100,70,20);
  line(100,50, 300,50);
  line(100,350, 300,350);
  strokeWeight(3);
  line(120,50,120,350);
  line(280,50,280,350);
  int H = hour();
  int M = minute();
  int S = second();
  if (prevSec != S) {
    millisRolloverTime = millis();
  } 
  prevSec = S;
  int mils = millis() - millisRolloverTime;

  float secondsWithFraction = S + mils/1000.0;
  float minutesWithFraction = M;
  float hourWithFraction = H;
  float rectHeightSec = map(secondsWithFraction, 0,60, 0,277);
  float rectHeightMin = map(minutesWithFraction, 0,60, 0,277);
  float rectHeightHr = map(hourWithFraction, 0,12, 0,100);
  noStroke(); 
  fill(150,0,0); 
  rect(140,62, 20, rectHeightSec); 
  fill(150,120,0);
  rect(170,62, 35, rectHeightMin);
  fill(0,0,150);
  rect(220,62, 50, rectHeightHr);
}

GDB Looking Outwards – Max/MSP/Jitter

Hindsight is Always 20/20


 

Hindsight is Always 20/20 is a review of all of the State of the Union addresses based on the frequency of the words used in each year's address. As the name suggests, the piece serves as a forced historical re-evaluation of these presidents based, quite literally, on their choice of words.
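The underlying data step is easy to sketch in Processing, just to make the mechanism concrete: count how often each word appears in one address. This is only an illustration of the general technique, not DuBois's actual pipeline, and "address.txt" is a placeholder filename.

// Count word frequencies in a text file (placeholder name "address.txt").
import java.util.HashMap;

void setup() {
  String[] lines = loadStrings("address.txt");
  HashMap<String, Integer> counts = new HashMap<String, Integer>();
  for (String line : lines) {
    String[] words = splitTokens(line.toLowerCase(), " .,;:!?\"()-");
    for (String w : words) {
      Integer c = counts.get(w);
      counts.put(w, c == null ? 1 : c + 1);
    }
  }
  // print the counts; ranking them by value gives the eye-chart ordering
  for (String w : counts.keySet()) {
    println(w + ": " + counts.get(w));
  }
}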

I have to say that I am personally a fan of DuBois’ work. A few months back, I had the pleasure of walking into his exhibition at the Ringling Museum without prior knowledge of his body of work. Hindsight was being shown there. The piece is initially quite confusing: rows of large cards on the walls, reminiscent of something an optometrist might have hung on the far wall. You probably didn’t read the small plaque explaining what is going on, but these cards seem to be politically oriented. Then everything slides into place once you see the presidents’ names and dates in the corner.

DuBois has a strong grounding in the conversion of data into conceptually meaningful work, and this shines through in Hindsight. Also, the piece’s inherent dryness, coupled with the seeming foolishness of some of the word choices, comes together to give it a strange humour.

D.O.R.T.H.E. – Lasse Munk and Søren Andreasen


 

D.O.R.T.H.E. is a machine that converts written language into music. It consists of a number of heavily modified machines cobbled together into an input device (seen above), and a number of smaller ‘instruments’ that the main body uses to turn the writing into sound.

 

This piece fascinates me. The idea of turning words into music in this way is strong itself, but the physical implementation of this idea, from the typewriter as input to some truly strange musical inventions, is what makes this outstanding.

It’s difficult to tell from the video, but the conditions under which D.O.R.T.H.E. will function seem quite constrained. The player always seems to stage the music in terms of small phrases, and the creation of sound does not seem to be real-time; instead the machine seems to have to wait until the writer is finished.

Z-Machines – Squarepusher & Rhizomatics


Z-Machines is a set of robotic musicians, designed to be able to play conventional instruments in ways that people cannot.

I’ve heard some of Squarepusher’s stuff, and I like both his solo body of work and the resulting audio of Z-Machines, but for me this piece is ruined by the robots. The decision to constrain the machines to anthropomorphism makes them visually weaker (by riding straight into the uncanny valley), as well as hamstringing the machines’ potential to be something new. Instead they just emulate what has already come before and attempt to compete directly with human bands for performance value, which seems like wasted effort to me.

Pedro Reyes comes to mind as someone who took similar principles in a more interesting direction.

Chaos vs. Order

So oddly enough, when I was young I used to have this super vivid dream whenever I had a fever, and somehow my dad gets the same dream. Weird, right? The only way I can describe it is as an atmosphere, an environment, that is super smooth and calm, black and white, and then suddenly chaotic, choppy, and scary. I tried to make it here.

 
  void setup() {
    size(600, 200);
  }
  void draw () {
    background(255);
  
    float randomness = map(mouseX, 0, width, 0, 1);
    randomness = constrain(randomness, 0, 1); 
  
    // float rX= random(width/2, width);
    // float rWidth= random(width/2, width);
  
    for (int i=1; i<30; i++) {
      float rX= (i*20) + randomness * random (-15, 15);
      float rWidth= (i*20) + randomness *random (-15, 15);
  
      //if (mouseX>300) {
      line(rX, 0, rWidth, height);
    }
  }

LOOKING OUTWARDS

max/msp/jitter

Andrew Reed Miller is a professional musician turned installation and media artist who uses string instruments in his work.

Supercanon uses video display and audio loops.

The computer must sense when he is about to play and when the phrase ends, then automatically record the video and audio and play each clip on the tiled screen in a loop.
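To make that sensing step concrete, here is a small sketch in Processing with the bundled Minim library (this is my guess at the general approach, not Miller's actual system): watch the microphone level and report when a phrase starts and ends, which is where a looper would begin and stop recording. The threshold and timing values are arbitrary.

// Detect phrase start/end from mic level; the threshold and silence window are guesses.
import ddf.minim.*;

Minim minim;
AudioInput in;
boolean playing = false;   // are we currently inside a phrase?
float threshold = 0.05;    // assumed level threshold; tune for your microphone
int quietFrames = 0;       // how long the signal has stayed below the threshold

void setup() {
  size(200, 200);
  minim = new Minim(this);
  in = minim.getLineIn();
}

void draw() {
  background(0);
  float level = in.mix.level();        // RMS level of the incoming audio
  if (!playing && level > threshold) {
    playing = true;                    // phrase started: a looper would begin recording here
    println("phrase start");
  }
  if (playing) {
    if (level < threshold) quietFrames++;
    else quietFrames = 0;
    if (quietFrames > 30) {            // about half a second of quiet at the default 60 fps
      playing = false;                 // phrase ended: stop recording and start looping the clip
      quietFrames = 0;
      println("phrase end");
    }
  }
}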

 

I like this because it’s live and musical. This is the kind of stuff I would do in middle school with my choir music on a Friday night, minus the visual aspect. I used to try to loop a tiny ukulele part for a whole song because I wanted to record it and I was really bad at ukulele (still am). I’m excited to get to play with music.

Wallpaper


  
  size(600, 600); 
  background(255); 
  noStroke();
  
  float Xspacing =random (20, 100);
  
  for (int y=1; y<=500; y=y+100) {
    for (int n=1; n<=10; n=n+1) { 
      float flowerX = n*Xspacing;
  
      float petalSize = random(4, 16); 
      float middleSize= random (4, 10);
  
      float R= (255);
      float G= random (114, 206);
      float B= random (114, 211);
      fill(R, G, B);
      ellipse (flowerX, y, middleSize, middleSize);

      float R2= (156);
      float G2= random (248, 255);
      float B2= random (156, 255);
      fill (R2, G2, B2);
      ellipse (flowerX, y-17, petalSize, petalSize); //petal 1
      ellipse (flowerX+16, y-5, petalSize, petalSize);//petal 2
      ellipse (flowerX+11, y+12, petalSize, petalSize);//petal 3
      ellipse (flowerX-11, y+12, petalSize, petalSize);//petal 4
      ellipse (flowerX-16, y-5, petalSize, petalSize);//petal 5
    }
  }

SkyLine One Line

A skyline set to Chopin, accompanied by ambient city sounds.
code:

//skyline one line 
//cc Charlotte Stiles 2014

float lineLength = 400.0;
int [] x = new int[400];
float [] y = new float[400];
float lineX,lineY;
float times = .9;
float grab=200;

void setup() {
  size(500, 500);
  background(255);
}
void draw() {
  fill(255,10);
  noStroke();
  rect(0, 0, width, height);

  beginShape();
  for (int i = 50; i < lineLength; i++) {
    
    x[i] = i; //draws line from left to right
    
    lineX= x[i];
    lineY= y[i];
    println(x[i]);

    if (dist (lineX, lineY, mouseX, mouseY) > 400)   {
      lineX = mouseX; //this makes it have more curved lines
      for(float g = 0 ; g < 10; g++){
        if(x[i] <= grab + 200 && x[i] >= grab - 200){ //grab is the center point of the curve
        y [i] = mouseY * (g*.15);//g is made to increase little by little
        lineY = y[i];
       }
      }
    }
    
    strokeWeight(random(.1,1));
    stroke(random(0,100));
    curveVertex(lineX, lineY);//behold, one line
  }
  endShape(); 
}