Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions Runtime/Audio.meta

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

104 changes: 104 additions & 0 deletions Runtime/Audio/AudioSourceDataObserver.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
namespace Zinnia.Audio
{
    using UnityEngine;
    using UnityEngine.Events;
    using System;
    using Malimbe.PropertySerializationAttribute;
    using Malimbe.XmlDocumentationAttribute;

    /// <summary>
    /// Observes the <see cref="AudioSource"/> on the same <see cref="GameObject"/> and emits the raw audio data each time Unity's audio filter pipeline runs.
    /// </summary>
    [RequireComponent(typeof(AudioSource))]
    public class AudioSourceDataObserver : MonoBehaviour
    {
        /// <summary>
        /// Holds data about a <see cref="AudioSourceDataObserver"/> event.
        /// </summary>
        [Serializable]
        public class EventData
        {
            /// <summary>
            /// <see cref="AudioSettings.dspTime"/> of the last <see cref="OnAudioFilterRead"/>.
            /// </summary>
            [Serialized]
            [field: DocumentedByXml]
            public double DspTime { get; set; }
            /// <summary>
            /// Audio data array of the last <see cref="OnAudioFilterRead"/>.
            /// </summary>
            [Serialized]
            [field: DocumentedByXml]
            public float[] Data { get; set; }
            /// <summary>
            /// Number of channels of the last <see cref="OnAudioFilterRead"/>.
            /// </summary>
            [Serialized]
            [field: DocumentedByXml]
            public int Channels { get; set; }

            /// <summary>
            /// Copies the values of the given <see cref="EventData"/> into this instance.
            /// </summary>
            /// <param name="source">The data to copy from.</param>
            /// <returns>This instance, updated with the copied values.</returns>
            public EventData Set(EventData source)
            {
                return Set(source.DspTime, source.Data, source.Channels);
            }

            /// <summary>
            /// Sets the event data values.
            /// </summary>
            /// <param name="dspTime">The DSP time of the observation.</param>
            /// <param name="data">The observed audio data. Stored by reference, not copied — the array contents may be overwritten by subsequent audio callbacks.</param>
            /// <param name="channels">The number of audio channels in <paramref name="data"/>.</param>
            /// <returns>This instance, updated with the given values.</returns>
            public EventData Set(double dspTime, float[] data, int channels)
            {
                DspTime = dspTime;
                Data = data;
                Channels = channels;
                return this;
            }

            /// <summary>
            /// Resets all values to their type defaults (zero time, <see langword="null"/> data, zero channels).
            /// </summary>
            public void Clear()
            {
                Set(default, default, default);
            }
        }

        /// <summary>
        /// Defines the event with the <see cref="EventData"/>.
        /// </summary>
        [Serializable]
        public class UnityEvent : UnityEvent<EventData> { }

        /// <summary>
        /// Emitted whenever the audio data is observed.
        /// </summary>
        [DocumentedByXml]
        public UnityEvent DataObserved = new UnityEvent();

        /// <summary>
        /// The data to emit with an event. Reused across emissions to avoid per-callback allocations.
        /// </summary>
        protected readonly EventData eventData = new EventData();

        /// <summary>
        /// Returns whether the <see cref="AudioSource"/> is playing.
        /// </summary>
        /// <returns><see langword="true"/> if the cached <see cref="AudioSource"/> exists and is currently playing.</returns>
        public virtual bool IsAudioSourcePlaying() => audioSource != null && audioSource.isPlaying;

        /// <summary>
        /// The <see cref="AudioSource"/> to observe. Cached in <see cref="Awake"/>.
        /// </summary>
        protected AudioSource audioSource;

        /// <summary>
        /// Caches the <see cref="AudioSource"/> found on this <see cref="GameObject"/> (guaranteed present by <see cref="RequireComponent"/>).
        /// </summary>
        protected virtual void Awake()
        {
            audioSource = GetComponent<AudioSource>();
        }

        /// <summary>
        /// Emits audio data via <see cref="DataObserved"/>.
        /// </summary>
        /// <remarks>
        /// NOTE(review): Unity invokes <see cref="OnAudioFilterRead"/> on the audio thread, not the main thread — listeners of <see cref="DataObserved"/> must be safe to run off the main thread; confirm against the Unity scripting documentation.
        /// </remarks>
        /// <param name="data">An array of floats comprising the audio data.</param>
        /// <param name="channels">An int that stores the number of channels of audio data passed to this delegate.</param>
        protected virtual void OnAudioFilterRead(float[] data, int channels)
        {
            DataObserved?.Invoke(eventData.Set(AudioSettings.dspTime, data, channels));
        }
    }
}
11 changes: 11 additions & 0 deletions Runtime/Audio/AudioSourceDataObserver.cs.meta

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

94 changes: 71 additions & 23 deletions Runtime/Haptics/AudioSourceHapticPulser.cs
Original file line number Diff line number Diff line change
Expand Up @@ -2,38 +2,59 @@
{
using UnityEngine;
using System.Collections;
using Malimbe.MemberChangeMethod;
using Malimbe.MemberClearanceMethod;
using Malimbe.PropertySerializationAttribute;
using Malimbe.XmlDocumentationAttribute;
using Zinnia.Audio;

/// <summary>
/// Creates a haptic pattern based on the waveform of an <see cref="UnityEngine.AudioSource"/> and utilizes a <see cref="Haptics.HapticPulser"/> to create the effect.
/// </summary>
public class AudioSourceHapticPulser : RoutineHapticPulser
{
/// <summary>
/// The waveform to represent the haptic pattern.
/// Observer that provides audio data from a <see cref="AudioSource"/>.
/// </summary>
[Serialized]
[Serialized, Cleared]
[field: DocumentedByXml]
public AudioSource AudioSource { get; set; }
public AudioSourceDataObserver Observer { get; set; }

/// <summary>
/// <see cref="AudioSettings.dspTime"/> of the last <see cref="OnAudioFilterRead"/>.
/// A reused data instance.
/// </summary>
protected double filterReadDspTime;
protected readonly AudioSourceDataObserver.EventData audioData = new AudioSourceDataObserver.EventData();

/// <summary>
/// Audio data array of the last <see cref="OnAudioFilterRead"/>.
/// Subscribes as a listener to the <see cref="Observer"/>.
/// </summary>
protected float[] filterReadData;
protected virtual void SubscribeToObserver()
{
if (Observer == null)
{
return;
}

Observer.DataObserved.AddListener(Receive);
}

/// <summary>
/// Number of channels of the last <see cref="OnAudioFilterRead"/>.
/// Unsubscribes from listening to the <see cref="Observer"/>.
/// </summary>
protected int filterReadChannels;
protected virtual void UnsubscribeFromObserver()
{
if (Observer == null)
{
return;
}

Observer.DataObserved.RemoveListener(Receive);
}

/// <inheritdoc />
public override bool IsActive()
{
return base.IsActive() && AudioSource != null;
return base.IsActive() && Observer != null;
}

/// <summary>
Expand All @@ -42,36 +63,63 @@ public override bool IsActive()
/// <returns>An Enumerator to manage the running of the Coroutine.</returns>
protected override IEnumerator HapticProcessRoutine()
{
SubscribeToObserver();
int outputSampleRate = AudioSettings.outputSampleRate;
while (AudioSource.isPlaying)
while (Observer != null && Observer.IsAudioSourcePlaying())
{
int sampleIndex = (int)((AudioSettings.dspTime - filterReadDspTime) * outputSampleRate);
float currentSample = 0;
if (filterReadData != null && sampleIndex * filterReadChannels < filterReadData.Length)
if (audioData.Data != null)
{
for (int i = 0; i < filterReadChannels; ++i)
int sampleIndex = (int)((AudioSettings.dspTime - audioData.DspTime) * outputSampleRate) * audioData.Channels;
sampleIndex = Mathf.Min(sampleIndex, audioData.Data.Length - audioData.Channels);
for (int i = 0; i < audioData.Channels; ++i)
{
currentSample += filterReadData[sampleIndex + i];
currentSample += Mathf.Abs(audioData.Data[sampleIndex + i]);
}
currentSample /= filterReadChannels;
currentSample /= audioData.Channels;
}
HapticPulser.Intensity = currentSample * IntensityMultiplier;
HapticPulser.Begin();
yield return null;
}
UnsubscribeFromObserver();
ResetIntensity();
}

/// <summary>
/// Store currently playing audio data and additional data.
/// Receive audio data from <see cref="AudioSourceDataObserver"/>.
/// </summary>
protected virtual void Receive(AudioSourceDataObserver.EventData eventData)
{
audioData.Set(eventData);
}

/// <summary>
/// Called before <see cref="Observer"/> has been changed.
/// </summary>
[CalledBeforeChangeOf(nameof(Observer))]
protected virtual void OnBeforeObserverChange()
{
if (hapticRoutine == null)
{
return;
}

UnsubscribeFromObserver();
}

/// <summary>
/// Called after <see cref="Observer"/> has been changed.
/// </summary>
/// <param name="data">An array of floats comprising the audio data.</param>
/// <param name="channels">An int that stores the number of channels of audio data passed to this delegate.</param>
protected virtual void OnAudioFilterRead(float[] data, int channels)
[CalledAfterChangeOf(nameof(Observer))]
protected virtual void OnAfterObserverChange()
{
filterReadDspTime = AudioSettings.dspTime;
filterReadData = data;
filterReadChannels = channels;
if (hapticRoutine == null)
{
return;
}

SubscribeToObserver();
}
}
}