Processing XY oscilloscope sketch: renders the line-in audio signal as an X/Y (vector) scope.
import ddf.minim.*; | |
import ddf.minim.effects.*; | |
// https://processing.org/discourse/beta/num_1200857589.html | |
class playBack implements AudioSignal, AudioListener{ //Just a simple "re-route" audio class. | |
float[] left, right; | |
//Getting. | |
public void samples(float[] arg0) { | |
left = arg0; | |
} | |
public void samples(float[] arg0, float[] arg1) { | |
left = arg0; | |
right = arg1; | |
} | |
//Sending back. | |
public void generate(float[] arg0) { | |
System.arraycopy(left, 0, arg0, 0, arg0.length); | |
} | |
public void generate(float[] arg0, float[] arg1) { | |
System.out.println(arg0[0]); | |
if (left!=null && right!=null) { | |
System.arraycopy(left, 0, arg0, 0, arg0.length); | |
System.arraycopy(right, 0, arg1, 0, arg1.length); | |
} | |
} | |
} | |
Minim minim; | |
AudioPlayer wav; | |
AudioInput in; | |
AudioSource audio = in; | |
AudioOutput output = null; | |
//playBack pb = new playBack(); | |
float size = 0; | |
boolean swap = true; | |
void setup() | |
{ | |
size(512, 512); | |
size = (width/2.0) * 0.9; | |
minim = new Minim(this); | |
in = minim.getLineIn(); | |
output = minim.getLineOut(); | |
//in.addListener(pb); | |
//output.addSignal(pb); | |
wav = minim.loadFile("youscope_nointro.wav", 1024); | |
//wav.cue(30 * 100000); | |
//wav.loop(); | |
audio = in; | |
} | |
void draw() | |
{ | |
//background(0); | |
fill(0,0,0, 96); | |
rect(0, 0, width, height); | |
fill(0,0,0, 255); | |
rect(0, 0, 100, 50); | |
fill(255,255,255, 255); | |
stroke(64,255,64); | |
text(audio.left.get(0), 50, 10); | |
text("fps: " + frameRate, 15, 25); | |
text("pos: " + wav.position()/1000.0 + " s", 15, 35); | |
text("bufsize: " + audio.bufferSize(), 15, 45); | |
text("swap: " + swap, 15, 15); | |
float left1 = 0; | |
float right1 = 0; | |
float left2 = 0; | |
float right2 = 0; | |
//output.addSignal(audio.left); | |
float waveform_w = 400; | |
float waveform_h = 100; | |
float waveform_x = 100; | |
float waveform_y = 60; | |
float waveform_skale = waveform_h/2; | |
float add = waveform_w / audio.bufferSize(); | |
strokeWeight(1); | |
fill(0,0,32, 255); | |
rect(waveform_x, waveform_y-waveform_h/2, waveform_w, waveform_h); | |
fill(255,255,255, 255); | |
for(int i = 1; i < audio.bufferSize() - 1; i++) | |
{ | |
right1 = audio.left.get(i -1); | |
left1 = audio.right.get(i -1); | |
right2 = audio.left.get(i); | |
left2 = audio.right.get(i); | |
float x1 = waveform_x + i * add - add; | |
float x2 = waveform_x + i * add; | |
stroke(255, 0, 0); | |
line(x1, left1 * waveform_skale + waveform_y, x2, left2 * waveform_skale + waveform_y); | |
stroke(0, 255, 0); | |
line(x1, right1 * waveform_skale + waveform_y, x2, right2 * waveform_skale + waveform_y); | |
} | |
strokeWeight(3); | |
for(int i = 1; i < audio.bufferSize() - 1; i++) | |
{ | |
if (swap) { | |
right1 = audio.left.get(i -1); | |
left1 = audio.right.get(i -1); | |
right2 = audio.left.get(i); | |
left2 = audio.right.get(i); | |
} else { | |
left1 = audio.left.get(i -1); | |
right1 = audio.right.get(i -1); | |
left2 = audio.left.get(i); | |
right2 = audio.right.get(i); | |
} | |
float px = width/2.0 + left1 * size; | |
float py = height/2.0 - right1 * size; | |
float px2 = width/2.0 + left2 * size; | |
float py2 = height/2.0 - right2 * size; | |
float len = sqrt((px-px2)*(px-px2) + (py-py2)*(py-py2)); | |
stroke(64,255,64, (1.0/((len/size)*200.0)) *64); | |
//stroke(64,255,64, (max(0.0, 0.15 - len/size))*128); | |
//stroke(64,255,64, (exp(-(len/size)-1.0))*32); | |
//point(width/2.0 + left * size, height/2.0 + right * size); | |
line(px, py, px2, py2); | |
//line(width/2.0 + left * size, height/2.0 + right * size, width, height); | |
//line(i, 50 + audio.left.get(i)*50, i+1, 50 + audio.left.get(i+1)*50); | |
//line(i, 150 + audio.right.get(i)*50, i+1, 150 + audio.right.get(i+1)*50); | |
} | |
} | |
void keyPressed() | |
{ | |
if (key == 's') swap = !swap; | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment