Skip to end of metadata
Go to start of metadata
You are viewing an old version of this page. View the current version.
Compare with Current
View Page History
« Previous
Version 4
Next »
//Spatial Interaction
//ZHdK, Interaction Design
//iad.zhdk.ch
//Beispiel 01: Volume
import ddf.minim.*;
Minim minim;
AudioInput in;
float amountOfNoise = 0;
//Initializes the sketch window and opens the default audio input
//via Minim so draw() can read live channel levels.
void setup()
{
size(512, 200, P3D);
//Minim needs a reference to the sketch for file/audio access
minim = new Minim(this);
//Open the audio input
in = minim.getLineIn();
}
//Draws the left/right input levels as two bars and brightens the
//background while either channel is above the noise threshold;
//the brightness decays again once it is quiet.
void draw()
{
  background(amountOfNoise);
  stroke(255);
  //Bar width is proportional to the channel level (levels are ~0.0..1.0)
  rect( 0, 0, in.left.level()*width, 100 );
  rect( 0, 100, in.right.level()*width, 100 );
  if(in.left.level() > 0.2 || in.right.level() > 0.2)
  {
    amountOfNoise++;
  }
  else if(amountOfNoise > 0)
  {
    amountOfNoise--;
  }
  //Clamp to the usable grayscale range: without this the counter grows
  //without bound during long noise and then takes equally long to fade
  //back, even though anything above 255 already renders as white.
  amountOfNoise = constrain(amountOfNoise, 0, 255);
  text(amountOfNoise, 20, 20);
}
//Spatial Interaction
//ZHdK, Interaction Design
//iad.zhdk.ch
//Beispiel 02: Waveform
import ddf.minim.*;
Minim minim;
AudioInput in;
//Initializes the sketch window and opens the default audio input
//via Minim so draw() can read the sample buffers.
void setup()
{
size(640, 480, P3D);
//Minim needs a reference to the sketch for file/audio access
minim = new Minim(this);
//Open the audio input
in = minim.getLineIn();
}
//Renders the current input buffer as two waveforms: the left channel
//in blue around y = 200 and the right channel in red around y = 280.
void draw()
{
  background(0);
  int lastSample = in.bufferSize() - 1;
  for (int x = 0; x < lastSample; x++)
  {
    //Scale the sample (roughly -1..1) to +/-50 pixels of amplitude
    float leftAmp  = in.left.get(x + 1) * 50;
    float rightAmp = in.right.get(x + 1) * 50;
    stroke(0, 0, 255);
    line(x, 200, x, 200 - leftAmp);
    stroke(255, 0, 0);
    line(x, 280, x, 280 + rightAmp);
  }
}
<!DOCTYPE HTML>
<html>
<head>
</head>
<body>
<script type="text/javascript">
// Speech-to-sketch bridge: forwards Chrome speech recognition results
// to a running Processing sketch over a WebSocket.
var ws = null;
// We need to check if the browser supports WebSockets
if ("WebSocket" in window) {
    // Before we can connect to the WebSocket, we need to start it in Processing.
    // Example using WebSocketP5
    // http://github.com/muthesius/WebSocketP5
    ws = new WebSocket("ws://localhost:8080/p5websocket");
} else {
    // The browser doesn't support WebSocket
    alert("WebSocket NOT supported by your Browser!");
}
// Now we can start the speech recognition
// Supported only in Chrome
// Once started, you need to allow Chrome to use the microphone
if ("webkitSpeechRecognition" in window) {
    var recognition = new webkitSpeechRecognition();
    // By default, Chrome will only return a single result.
    // By enabling "continuous", Chrome will keep the microphone active.
    recognition.continuous = true;
    recognition.onresult = function(event) {
        // Get the current result from the results object
        var transcript = event.results[event.results.length - 1][0].transcript;
        // Send the result string via WebSocket to the running Processing
        // sketch — but only if the connection exists and is actually open;
        // the original unconditionally called ws.send(), which throws when
        // WebSocket is unsupported or the socket is still connecting.
        if (ws && ws.readyState === WebSocket.OPEN) {
            ws.send(transcript);
        }
    };
    // Start the recognition
    recognition.start();
} else {
    // Guard that was missing before: on non-Chrome browsers
    // webkitSpeechRecognition is undefined and the script crashed.
    alert("Speech recognition NOT supported by your browser (Chrome only)!");
}
</script>
</body>
</html>
//Spatial Interaction
//ZHdK, Interaction Design
//iad.zhdk.ch
//Beispiel 04: Speech Recognition
//based on the example from Florian Schulz http://stt.getflourish.com
import muthesius.net.*;
import org.webbitserver.*;
WebSocketP5 socket;
String input = "";
color c = color(0,0,0);
//Initializes the sketch window and starts the WebSocket server on
//port 8080 that the browser-side speech recognition page connects to.
void setup()
{
size(640, 480);
socket = new WebSocketP5(this, 8080);
}
//Paints the background in the last recognized color and shows the
//most recent transcript received over the WebSocket.
void draw()
{
background(c);
textSize(20);
text(input, 20, 60);
}
//Called when the sketch shuts down; stops the WebSocket server so the
//port is released, then chains to the parent implementation.
void stop()
{
  socket.stop();
  //Chain to PApplet's stop() so Processing's own shutdown cleanup still
  //runs — the original override silently swallowed it.
  super.stop();
}
//Receives a transcript string from the browser page, displays it, and
//switches the background color on recognized color keywords
//(English and German).
void websocketOnMessage(WebSocketConnection con, String msg)
{
  println(msg);
  //Speech results often carry surrounding whitespace
  msg = trim(msg);
  input = msg;
  //Speech transcripts frequently arrive capitalized ("Black", "Rot"),
  //so compare case-insensitively instead of requiring an exact match.
  if (msg.equalsIgnoreCase("black") || msg.equalsIgnoreCase("schwarz"))
  {
    c = color(0, 0, 0);
  }
  else if (msg.equalsIgnoreCase("red") || msg.equalsIgnoreCase("rot"))
  {
    c = color(255, 0, 0);
  }
}
//WebSocketP5 callback: invoked when a browser client connects.
void websocketOnOpen(WebSocketConnection con)
{
println("A client joined");
}
//WebSocketP5 callback: invoked when a browser client disconnects.
void websocketOnClosed(WebSocketConnection con)
{
println("A client left");
}