For this project I wrote several JavaScript scripts using THREE.js (it's amazing, try it!), a WebGL wrapper for the browser. I am primarily a 3D thinker, so that's where my mind went when thinking about short, gif-able animations. I also had a high-poly 3D model of my head to play around with.
In the above gifs I applied a randomized parallel graph contraction algorithm to a 3D model of my head. The head I mostly made; the rhino I did not. By 'mostly made' I mean that I took a scan and added eyes, hair, and ears. Scans have unworkable topology and are by no means finished models.
I was looking for a way to collapse the model and began by randomly picking single edges to contract. It was clear this was cool; however, the number of frames would be linear in the number of vertices, and half the time the change would be hidden on the other side of the model. So I looked for ways to collapse many groups of vertices at once. It didn't take long to realize that this is exactly what we did in 15-210 last semester to find minimum spanning trees (MSTs) of graphs. A 3D mesh is nothing more than a graph, so this was perfect. After overcoming my shock at finding a real-world application for 210, I wrote a script that takes any mesh and graph-contracts it. The main challenges were working around the fact that THREE.js does not natively support dynamic geometry, and me being dumb with JS pointers (yes, JS has pointers, in the sense that objects are passed by reference).
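To make the idea concrete, here is a minimal sketch of one round of randomized star contraction on a plain adjacency-list graph, independent of THREE.js. The function name contractRound and the graph representation are mine, invented for illustration; the full mesh version is in the listing further down.

// One round of randomized star contraction on a plain undirected graph.
// graph maps each vertex id to an object whose keys are its neighbours,
// e.g. { a: { b: true }, b: { a: true, c: true }, c: { b: true } }.
// Returns a mapping from each vertex to its representative after the round.
function contractRound(graph) {
    var coins = {};    // 'H' or 'T' per vertex
    var mapping = {};  // where each vertex ends up
    for (var v in graph) {
        coins[v] = Math.random() > 0.5 ? 'T' : 'H';
        mapping[v] = v;  // default: a vertex maps to itself
    }
    for (var v in graph) {
        if (coins[v] !== 'T') continue;
        for (var n in graph[v]) {
            if (coins[n] === 'H') {  // contract this tail into the first adjacent head
                mapping[v] = n;
                break;
            }
        }
    }
    return mapping;
}

Repeating this round, and moving every vertex to the position of its representative, is essentially all the mesh version below adds on top of this.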
My main criticism of this (which I could have anticipated) is that the frames jump too much from one to the next, so the gif is not smooth and the main objective fails. Having started with single-edge contraction, I was imagining a slower, more gradual transformation. This kind of graph contraction finishes in O(log V) rounds on average, so it is no surprise that it collapses so fast. As I type this I realize I can vary the probability that each vertex is assigned an H or a T and thereby easily vary the rate of contraction. That will wait until tomorrow.
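Concretely, the tweak I have in mind would just bias the coin flip at the top of starContract(); the probability value here is illustrative, not something I have tested.

// Biased coin flip: with heads made rare, a given T vertex is less likely to
// have any H neighbour, so fewer vertices contract per round and the mesh
// collapses more gradually. (Skewing toward mostly heads slows it down too,
// since there are then few tails left to contract.)
var HEAD_PROBABILITY = 0.15;  // illustrative value; would need tuning
for (var v in mainGeo.vList) {
    mainGeo.vList[v] = Math.random() < HEAD_PROBABILITY ? 'H' : 'T';
    mapping[v] = v;
}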
In response to the above I made the gif below: a cosine wave that rides around a spherical head. This was achieved by normalizing each vertex every frame. My critique of this one is that it continues to abuse the coolness of normal mapping; also, the wave isn't very interesting and has that awkward area in between the two waves.
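For reference, the per-frame vertex update was roughly of this shape. This is a sketch of one way to do it; the radius, amplitude, wave count, and angle convention are my guesses rather than the exact values used.

// Snap every vertex onto a sphere, then modulate the radius with a cosine
// wave in longitude so the bumps travel around the head as t advances.
var RADIUS = 100, AMP = 10, t = 0;
function waveFrame(geometry) {
    for (var i = 0; i < geometry.vertices.length; i++) {
        var v = geometry.vertices[i];
        var theta = Math.atan2(v.z, v.x);  // the vertex's longitude
        v.normalize().multiplyScalar(RADIUS + AMP * Math.cos(2 * theta + t));
    }
    geometry.verticesNeedUpdate = true;
    t += 0.1;
}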
(sphere Joel looks super grumpy)
Ok I just made this.
Here is the code, which runs concurrent randomized star contraction on a THREE.js mesh.
var container, scene, camera, renderer, controls, stats;
var keyboard = new THREEx.KeyboardState();
var clock = new THREE.Clock();

// custom global variables
var mainObj, mainGeo, adjacencyList;

init();
animate();

function init() {
    // SCENE
    scene = new THREE.Scene();

    // CAMERA
    var SCREEN_WIDTH = window.innerWidth, SCREEN_HEIGHT = window.innerHeight;
    var VIEW_ANGLE = 45, ASPECT = SCREEN_WIDTH / SCREEN_HEIGHT, NEAR = 0.1, FAR = 20000;
    camera = new THREE.PerspectiveCamera( VIEW_ANGLE, ASPECT, NEAR, FAR );
    scene.add(camera);
    camera.position.set(250, -20, 442.0931145267771);
    // camera.position.set(0,150,400);
    camera.lookAt(scene.position);

    // RENDERER
    if ( Detector.webgl )
        renderer = new THREE.WebGLRenderer( { antialias: true, preserveDrawingBuffer: true } );
    else
        renderer = new THREE.CanvasRenderer();
    renderer.setSize(SCREEN_WIDTH, SCREEN_HEIGHT);
    container = document.getElementById( 'ThreeJS' );
    container.appendChild( renderer.domElement );

    // EVENTS
    THREEx.WindowResize(renderer, camera);
    THREEx.FullScreen.bindKey({ charCode : 'm'.charCodeAt(0) });

    // CONTROLS
    controls = new THREE.OrbitControls( camera, renderer.domElement );

    // LIGHT
    var light = new THREE.PointLight(0xffffff);
    light.position.set(0, 250, 0);
    scene.add(light);
    // scene.add(floor);

    // SKYBOX/FOG
    var skyBoxGeometry = new THREE.CubeGeometry( 10000, 10000, 10000 );
    var skyBoxMaterial = new THREE.MeshBasicMaterial( { color: 0x9999ff, side: THREE.BackSide } );
    var skyBox = new THREE.Mesh( skyBoxGeometry, skyBoxMaterial );
    // scene.add(skyBox);
    scene.fog = new THREE.FogExp2( 0x9999ff, 0.00025 );

    ////////////
    // CUSTOM //
    ////////////

    var materialNormal = new THREE.MeshNormalMaterial();
    var loader = new THREE.JSONLoader();

    loader.load( "./models/elephant.js", function( geo, head_materials ) {
        mainObj = new THREE.Mesh(
            // new THREE.CubeGeometry(50, 50, 50, 2, 1, 1),
            // new THREE.IcosahedronGeometry( 120, 2 ),
            geo,
            // new THREE.MeshBasicMaterial( { color: 0x00ee00, wireframe: true, transparent: true } )
            materialNormal
            // new THREE.MeshNormalMaterial({ side: THREE.DoubleSide })
            // new THREE.MeshFaceMaterial( head_materials )
        );
        mainObj.scale = new THREE.Vector3(300, 300, 300);
        mainObj.position.set(-90, -20, 0);
        mainObj.rotation.set(0, 4 * Math.PI / 4, 0);
        // mainObj.rotation.set(0, Math.PI/2, 0); // face
        scene.add( mainObj );

        mainGeo = mainObj.geometry;
        adjacencyList = makeAdjacencyList(mainObj.geometry);

        // Give every vertex an initial coin flip.
        mainGeo.vList = {};
        for (var i = 0; i < mainGeo.vertices.length; i++) {
            mainGeo.vList[i] = Math.random() > .5 ? 'T' : 'H';
        }

        // Contract, render, and screenshot ten rounds in a row.
        var bar = 10;
        animate();
        screenshot(function() { main(); });

        function main() {
            if (bar > 0) {
                starContract();
                animate();
                console.log('Contracted; rounds remaining:', bar);
                screenshot(function() {
                    bar--;
                    main();
                });
            }
        }
    });
}

var mapping = {};

function starContract() {
    var mappedTo;
    var c = 0;

    // For every vertex flip a coin (H or T) and init a mapping from v -> v.
    for (var v in mainGeo.vList) {
        mainGeo.vList[v] = Math.random() > .5 ? 'T' : 'H';
        mapping[v] = v;
    }

    // For every T vertex pick one adjacent H to contract to.
    for (var v in mainGeo.vList) {
        if (mainGeo.vList[v] == 'T') {
            // map to and return an adjacent vertex
            mappedTo = contractToHead(v);
            // record where we mapped to
            mapping[v] = mappedTo;
            if (mappedTo != v) delete mainGeo.vList[v];
            // console.log(v, mappedTo);
            c++;
        }
    }

    // Update adjacency list to account for contracted vertices.
    var newN;
    for (var vert in adjacencyList) {
        for (var neigh in adjacencyList[vert]) {
            newN = mapping[neigh];
            delete adjacencyList[vert][neigh];
            adjacencyList[vert][newN] = true;
        }
    }

    // Move every vertex to wherever its representative ended up.
    for (var i = 0; i < mainGeo.vertices.length; i++) {
        mainGeo.vertices[i].copy(mainGeo.vertices[mapping[i]]);
    }
    mainGeo.verticesNeedUpdate = true;
    return;
}

function contractToHead(i) {
    var neighbours = adjacencyList[i];
    if (Object.keys(neighbours).length == 0) {
        console.log('no neighbours.');
        return i;
    }
    for (var n in neighbours) {
        // attach to first H
        if (mainGeo.vList[n] == 'H') {
            // contract the T vert to the H vert
            mainGeo.vertices[i] = mainGeo.vertices[n];
            adjacencyList[n] = mergeObjs(adjacencyList[i], adjacencyList[n]);
            delete adjacencyList[n][n];
            delete adjacencyList[n][i];
            delete adjacencyList[i];
            return n;
        }
    }
    return i;
}

function makeAdjacencyList(geometry) {
    var m = {};
    var f;
    for (var i = 0; i < geometry.faces.length; i++) {
        f = geometry.faces[i];
        if (!m[f.a]) m[f.a] = {};
        if (!m[f.b]) m[f.b] = {};
        if (!m[f.c]) m[f.c] = {};
        m[f.a][f.b] = true;
        m[f.a][f.c] = true;
        m[f.b][f.a] = true;
        m[f.b][f.c] = true;
        m[f.c][f.a] = true;
        m[f.c][f.b] = true;
    }
    return m;
}

var rdy = true;
function update() {
    // Press 'z' to run one contraction round by hand (rate limited).
    if (keyboard.pressed("z") && rdy) {
        starContract();
        rdy = false;
        setTimeout(function() { rdy = true; }, 500);
    }
    // controls.update();
}

function randomInt(n, i) {
    i = i || 0;
    return Math.floor((Math.random() * n) + i);
}

function randomProperty(obj) {
    var result;
    var count = 0;
    for (var prop in obj) {
        if (Math.random() < 1 / ++count) {
            result = prop;
        }
    }
    return result;
}

function render() {
    renderer.render( scene, camera );
}

function animate() {
    requestAnimationFrame( animate );
    render();
    update();
}

function mergeObjs(obj1, obj2) {
    var obj3 = {};
    for (var attrname in obj1) { obj3[attrname] = obj1[attrname]; }
    for (var attrname in obj2) { obj3[attrname] = obj2[attrname]; }
    return obj3;
}

// Array Remove - By John Resig (MIT Licensed)
Array.prototype.remove = function(from, to) {
    var rest = this.slice((to || from) + 1 || this.length);
    this.length = from < 0 ? this.length + from : from;
    return this.push.apply(this, rest);
};

/*** ADDING SCREEN SHOT ABILITY ***/
function screenshot(callback) {
    var img = document.getElementById('embedImage');
    try {
        img.src = renderer.domElement.toDataURL();
        img.download = 'foo.png';
        img.onload = function() {
            window.open(img.src.replace('image/png', 'image/octet-stream'), "width=512,height=512");
            console.log('saved');
            callback();
        };
    } catch (e) {
        console.log("Browser does not support taking screenshot of 3d context");
        return;
    }
}
VERY SEGMENT
WOW
Hi Joel,
It’s a visually interesting transformation. I’m concerned that it might not work as well on the lenticular print as it does on the screen. For example, if you read Gifpop’s suggestions, they recommend that stuff move only very small amounts; by contrast, you have very big differences from frame to frame. For the print, what if you just rendered your head rotating in place (i.e. shaking your head through 15-30 degrees)? Simple, elegant, fun.
Try it? g
Consider spherical harmonics?
http://icgem.gfz-potsdam.de/ICGEM/potato/gf-tutorial-1-Dateien/image027.jpg