diff --git a/musical-leptos/src/components/dancing_lights.rs b/musical-leptos/src/components/dancing_lights.rs
index 7670faa..f63bab4 100644
--- a/musical-leptos/src/components/dancing_lights.rs
+++ b/musical-leptos/src/components/dancing_lights.rs
@@ -1,3 +1,4 @@
+use js_sys::Float64Array;
 use leptos::*;
 use musical_lights_core::{
     audio::{
@@ -38,13 +39,38 @@ async fn load_media_stream() -> Result<MediaStream, JsValue> {
     Ok(stream)
 }
 
+// #[derive(Copy, Clone, Debug, PartialEq, Eq)]
+// struct AudioOutput {
+//     signal: ReadSignal<Vec<f64>>,
+// }
+
+/// TODO: this should be done in the audio worklet, but it's easier to put here for now
+struct DancingLightsProcessor {
+    signal: WriteSignal<Vec<f64>>,
+}
+
+impl DancingLightsProcessor {
+    fn new(signal: WriteSignal<Vec<f64>>) -> Self {
+        Self { signal }
+    }
+
+    fn process(&self, inputs: Vec<f64>) {
+        // TODO: do some audio processing on the inputs to turn them into 120 outputs
+        // TODO: instead of hard coding 120, use a generic
+
+        // self.signal(inputs);
+        info!("inputs: {:?}", inputs);
+    }
+}
+
 /// Prompt the user for their microphone
 #[component]
 pub fn DancingLights() -> impl IntoView {
     // TODO: do this on button click
     let (listen, set_listen) = create_signal(false);
 
-    let (audio, set_audio) = create_signal(0);
+    // TODO: this needs to be a vec of signals
+    let (audio, set_audio) = create_signal(vec![]);
 
     // TODO: this is wrong. this runs immediately, not on first click. why?
     let start_listening = create_resource(listen, move |x| async move {
@@ -62,9 +88,17 @@ pub fn DancingLights() -> impl IntoView {
 
         let onmessage_callback = Closure::new(move |x: MessageEvent| {
             // TODO: this seems fragile. how can we be sure of the data type?
+            let data = x.data();
+
+            let data = Float64Array::new(&data);
+
+            let data = data.to_vec();
+
             // TODO: actual audio processing
             // TODO: this will actually be a vec of 120 f32s when we are done
-            let data = (x.data().as_f64().unwrap().abs() * 10000.0) as u32;
+
+            trace!("data: {:#?}", data);
+
             set_audio(data);
         });
@@ -106,10 +140,30 @@ pub fn DancingLights() -> impl IntoView {
                 Now listening to {media_stream_id}
-                <div>{audio}</div>
+                <div>
+                    // <ol>
+                    //     <li>1. {data.1}</li>
+                    //     <li>2. ...</li>
+                    // </ol>
+                    {audio().into_iter().enumerate().map(|(i, x)| audio_list_item(i, x)).collect_view()}
+                </div>
             }.into_view(),
             Some(Err(err)) => view! {
                 Error: {err}
             }.into_view(),
         }}
     }
 }
+
+/// TODO: i think this should be a component
+pub fn audio_list_item(i: usize, x: f64) -> impl IntoView {
+    // TODO: pick a color based on the index
+
+    let x = (x * 10000.0) as u64;
+
+    view! {
+        <li>{x}</li>
+    }
+}
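The `// TODO: pick a color based on the index` in `audio_list_item` could be handled by spreading the bar index across the hue wheel. A minimal sketch, assuming a hypothetical `bar_color` helper that is not part of this diff:

```rust
/// Hypothetical helper: map bar index `i` of `n` total bars onto an HSL
/// hue string, so the bars sweep the full 0..360 degree color wheel.
fn bar_color(i: usize, n: usize) -> String {
    let hue = (i as f64 / n.max(1) as f64) * 360.0;
    format!("hsl({hue:.0}, 100%, 50%)")
}
```

`audio_list_item` could then pass something like `style=format!("color: {}", bar_color(i, 120))` on the `<li>`.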
  • + } +} diff --git a/musical-leptos/src/my-wasm-processor.js b/musical-leptos/src/my-wasm-processor.js index 241695c..0a43829 100644 --- a/musical-leptos/src/my-wasm-processor.js +++ b/musical-leptos/src/my-wasm-processor.js @@ -1,12 +1,10 @@ -// TODO: should we have a minimal shim here? maybe fetch and instantiate a second wasm app here? - class MyWasmProcessor extends AudioWorkletProcessor { constructor(options) { super(); this.wasmInstance = null; - console.log("options.processorOptions:", options.processorOptions); + // console.log("options.processorOptions:", options.processorOptions); let [module, foobar] = options.processorOptions; @@ -23,10 +21,8 @@ class MyWasmProcessor extends AudioWorkletProcessor { if (this.wasmInstance) { // TODO: Call your WASM functions here to process audio. Then send it over this.port.postMessage() } else { - let sum = inputs[0][0].reduce((acc, val) => acc + val, 0); - // console.log("sum:", sum); - - this.port.postMessage(sum); + // TODO: don't post here. we want to do this in a dedicated wasm instance instead + this.port.postMessage(inputs[0][0]); } // browsers all handle this differently
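Both `DancingLightsProcessor::process` and the worklet's `wasmInstance` branch still need the actual reduction from raw samples down to 120 outputs. A minimal sketch of one possible reduction, assuming a simple per-chunk RMS rather than whatever FFT/weighting pipeline `musical_lights_core::audio` will eventually provide; the const generic also covers the "instead of hard coding 120, use a generic" TODO:

```rust
/// Hypothetical reduction: split the incoming samples into `N` chunks
/// and report the RMS level of each chunk. A real implementation would
/// likely use an FFT and perceptual bucketing instead.
fn reduce_to_bars<const N: usize>(samples: &[f64]) -> [f64; N] {
    let mut bars = [0.0; N];
    if samples.is_empty() {
        return bars;
    }
    // ceiling division so every sample lands in exactly one chunk
    let chunk_len = samples.len().div_ceil(N);
    for (bar, chunk) in bars.iter_mut().zip(samples.chunks(chunk_len)) {
        let sum_sq: f64 = chunk.iter().map(|x| x * x).sum();
        *bar = (sum_sq / chunk.len() as f64).sqrt();
    }
    bars
}
```

`process` could then call `reduce_to_bars::<120>(&inputs)` and write the result (as a `Vec`) into `self.signal` instead of just logging.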