diff --git a/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/audio/Source.groovy b/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/audio/Source.groovy index f9154064..01d83627 100644 --- a/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/audio/Source.groovy +++ b/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/audio/Source.groovy @@ -38,6 +38,11 @@ abstract class Source implements AudioResource { */ abstract int buffersProcessed() + /** + * Return whether this source has an attached static buffer. + */ + abstract boolean isBufferAttached() + /** * Return whether this source is currently paused. * diff --git a/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/audio/openal/OpenALSource.groovy b/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/audio/openal/OpenALSource.groovy index 9d637e1e..6e2275e0 100644 --- a/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/audio/openal/OpenALSource.groovy +++ b/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/audio/openal/OpenALSource.groovy @@ -73,6 +73,12 @@ class OpenALSource extends Source { return alGetSourcei(sourceId, AL_SOURCE_STATE) } + @Override + boolean isBufferAttached() { + + return alGetSourcei(sourceId, AL_BUFFER) != 0 + } + @Override boolean isPaused() { diff --git a/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/scenegraph/nodes/Animation.groovy b/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/scenegraph/nodes/Animation.groovy index f78eb259..5100df9b 100644 --- a/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/scenegraph/nodes/Animation.groovy +++ b/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/scenegraph/nodes/Animation.groovy @@ -29,12 +29,19 @@ import nz.net.ultraq.redhorizon.engine.scenegraph.GraphicsElement import nz.net.ultraq.redhorizon.engine.scenegraph.Node import nz.net.ultraq.redhorizon.engine.scenegraph.Playable import nz.net.ultraq.redhorizon.engine.scenegraph.Scene +import 
nz.net.ultraq.redhorizon.engine.scenegraph.SceneEvents import nz.net.ultraq.redhorizon.engine.scenegraph.Temporal +import nz.net.ultraq.redhorizon.events.Event +import nz.net.ultraq.redhorizon.events.EventTarget import nz.net.ultraq.redhorizon.filetypes.AnimationFile import nz.net.ultraq.redhorizon.filetypes.Streaming import nz.net.ultraq.redhorizon.filetypes.StreamingDecoder import nz.net.ultraq.redhorizon.filetypes.StreamingFrameEvent +import org.joml.Vector2f +import org.joml.primitives.Rectanglef + +import groovy.transform.TupleConstructor import java.util.concurrent.Executors /** @@ -44,46 +51,41 @@ import java.util.concurrent.Executors */ class Animation extends Node implements GraphicsElement, Playable, Temporal { - AnimationFile animationFile - StreamingDecoder streamingDecoder - - // TODO: Maybe remove the need for these copies of animation properties if we - // have the streaming decoder carry them as well, ie: make both file and - // decoder an `AnimationSource` with those properties 🤔 - private final float frameRate - private final int numFrames + private final AnimationSource animationSource - private final List frames = [] private long startTimeMs private int currentFrame = -1 - private int lastFrame = -1 private Mesh mesh private Shader shader private Material material + /** + * Constructor, create an animation using data straight from an animation file. + */ Animation(AnimationFile animationFile) { - this(animationFile.width, animationFile.height, animationFile.forVgaMonitors, animationFile.frameRate, - animationFile.numFrames, animationFile instanceof Streaming ? 
animationFile.streamingDecoder : null) - - this.animationFile = animationFile + this(new StreamingAnimationSource(((Streaming)animationFile).streamingDecoder, animationFile.frameRate, + animationFile.numFrames, true), + animationFile.width, animationFile.height, animationFile.forVgaMonitors) } - Animation(int width, int height, boolean forVgaMonitors, float frameRate, int numFrames, StreamingDecoder streamingDecoder) { + /** + * Constructor, create an animation using any implementation of the + * {@link AnimationSource} interface. + */ + Animation(AnimationSource animationSource, int width, int height, boolean forVgaMonitors) { + this.animationSource = animationSource + animationSource.relay(Event, this) bounds .set(0, 0, width, forVgaMonitors ? height * 1.2f as float : height) .center() - - this.frameRate = frameRate - this.numFrames = numFrames - this.streamingDecoder = streamingDecoder } @Override void onSceneAdded(Scene scene) { - if (!animationFile && !streamingDecoder) { + if (!animationSource) { throw new IllegalStateException('Cannot add an Animation node to a scene without a streaming or file source') } @@ -95,71 +97,28 @@ class Animation extends Node implements GraphicsElement, Playable, Te .get() material = new Material() - if (streamingDecoder) { - var buffersAdded = 0 - streamingDecoder.on(StreamingFrameEvent) { event -> - frames << scene - .requestCreateOrGet(new TextureRequest(event.width, event.height, event.format, event.frameFlippedVertical)) - .get() - buffersAdded++ - if (buffersAdded == Math.ceil(frameRate) as int) { - trigger(new PlaybackReadyEvent()) - } - } - - // Run ourselves, otherwise expect the source to run this - if (animationFile) { - Executors.newVirtualThreadPerTaskExecutor().execute(streamingDecoder) - } - else { - trigger(new StreamingReadyEvent()) - } - } - else { - Executors.newVirtualThreadPerTaskExecutor().execute { -> - var width = animationFile.width - var height = animationFile.height - var format = animationFile.format - 
animationFile.frameData.each { frame -> - frames << scene - .requestCreateOrGet(new TextureRequest(width, height, format, frame.flipVertical(width, height, format))) - .get() - } - } - } + animationSource.onSceneAdded(scene) } @Override void onSceneRemoved(Scene scene) { - streamingDecoder?.cancel(true) - scene.requestDelete(mesh, *(frames.findAll { frame -> frame })) + animationSource.onSceneRemoved(scene) + scene.requestDelete(mesh) } @Override void render(GraphicsRenderer renderer) { if (mesh && shader && material && currentFrame != -1) { - - // Draw the current frame if available - var currentFrameTexture = frames[currentFrame] - if (currentFrameTexture) { - material.texture = currentFrameTexture - var globalTransform = getGlobalTransform() - renderer.draw(mesh, globalTransform, shader, material) - } - - // Delete used frames as the animation progresses to free up memory - if (streamingDecoder) { - if (currentFrame > 0) { - for (var i = lastFrame; i < currentFrame; i++) { - renderer.delete(frames[i]) - frames[i] = null - } + def (texture, region) = animationSource.getFrame(renderer, currentFrame) + if (texture) { + material.texture = texture + if (region) { + mesh.updateTextureUvs(region as Vector2f[]) } + renderer.draw(mesh, globalTransform, shader, material) } - - lastFrame = currentFrame } } @@ -173,8 +132,8 @@ class Animation extends Node implements GraphicsElement, Playable, Te startTimeMs = currentTimeMs } - var nextFrame = Math.floor((currentTimeMs - startTimeMs) / 1000 * frameRate) as int - if (nextFrame < numFrames) { + var nextFrame = Math.floor((currentTimeMs - startTimeMs) / 1000 * animationSource.frameRate) as int + if (nextFrame < animationSource.numFrames) { currentFrame = nextFrame } else { @@ -182,4 +141,82 @@ class Animation extends Node implements GraphicsElement, Playable, Te } } } + + /** + * Interface for any source from which frames of animation can be obtained. 
+ */ + static interface AnimationSource extends EventTarget, SceneEvents { + + /** + * Called during {@code render}, return the texture and an optional region + * to be used for rendering the given frame of animation. + */ + Tuple2 getFrame(GraphicsRenderer renderer, int frameNumber) + + float getFrameRate() + + int getNumFrames() + } + + /** + * An animation source using a streaming animation file. + */ + @TupleConstructor(defaults = false) + static class StreamingAnimationSource implements AnimationSource { + + final StreamingDecoder streamingDecoder + final float frameRate + final int numFrames + final boolean autoStream + + private List frames = [] + private int lastFrame = -1 + + @Override + Tuple2 getFrame(GraphicsRenderer renderer, int frameNumber) { + + var currentFrameTexture = frames[frameNumber] + + // Delete used frames as the animation progresses to free up memory + if (frameNumber > 0) { + for (var i = lastFrame; i < frameNumber; i++) { + renderer.delete(frames[i]) + frames[i] = null + } + } + lastFrame = frameNumber + + return new Tuple2<>(currentFrameTexture, null) + } + + @Override + void onSceneAdded(Scene scene) { + + var buffersAdded = 0 + streamingDecoder.on(StreamingFrameEvent) { event -> + frames << scene + .requestCreateOrGet(new TextureRequest(event.width, event.height, event.format, event.frameFlippedVertical)) + .get() + buffersAdded++ + if (buffersAdded == Math.ceil(frameRate) as int) { + trigger(new PlaybackReadyEvent()) + } + } + + // Run ourselves, otherwise expect the owner of this source to run this + if (autoStream) { + Executors.newVirtualThreadPerTaskExecutor().execute(streamingDecoder) + } + else { + trigger(new StreamingReadyEvent()) + } + } + + @Override + void onSceneRemoved(Scene scene) { + + streamingDecoder?.cancel(true) + scene.requestDelete(*(frames.findAll { frame -> frame })) + } + } } diff --git a/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/scenegraph/nodes/PlaybackReadyEvent.groovy 
b/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/scenegraph/nodes/PlaybackReadyEvent.groovy index b733d11d..a9eb42bd 100644 --- a/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/scenegraph/nodes/PlaybackReadyEvent.groovy +++ b/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/scenegraph/nodes/PlaybackReadyEvent.groovy @@ -19,8 +19,7 @@ package nz.net.ultraq.redhorizon.engine.scenegraph.nodes import nz.net.ultraq.redhorizon.events.Event /** - * Fired when a streaming source has filled its buffers and is ready for - * playback. + * Fired when a media source is ready for playback. * * @author Emanuel Rabina */ diff --git a/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/scenegraph/nodes/Sound.groovy b/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/scenegraph/nodes/Sound.groovy index 1b08b134..b9605852 100644 --- a/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/scenegraph/nodes/Sound.groovy +++ b/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/scenegraph/nodes/Sound.groovy @@ -25,12 +25,16 @@ import nz.net.ultraq.redhorizon.engine.scenegraph.AudioElement import nz.net.ultraq.redhorizon.engine.scenegraph.Node import nz.net.ultraq.redhorizon.engine.scenegraph.Playable import nz.net.ultraq.redhorizon.engine.scenegraph.Scene +import nz.net.ultraq.redhorizon.engine.scenegraph.SceneEvents import nz.net.ultraq.redhorizon.engine.scenegraph.Temporal +import nz.net.ultraq.redhorizon.events.Event +import nz.net.ultraq.redhorizon.events.EventTarget import nz.net.ultraq.redhorizon.filetypes.SoundFile import nz.net.ultraq.redhorizon.filetypes.Streaming import nz.net.ultraq.redhorizon.filetypes.StreamingDecoder import nz.net.ultraq.redhorizon.filetypes.StreamingSampleEvent +import groovy.transform.TupleConstructor import java.util.concurrent.ArrayBlockingQueue import java.util.concurrent.BlockingQueue import java.util.concurrent.Executors @@ -43,32 +47,35 @@ import java.util.concurrent.LinkedBlockingQueue */ class Sound 
extends Node implements AudioElement, Playable, Temporal { - SoundFile soundFile - StreamingDecoder streamingDecoder + private final SoundSource soundSource private Source source - private Buffer staticBuffer - private final BlockingQueue streamingBuffers = new ArrayBlockingQueue<>(10) - private final BlockingQueue streamedBuffers = new LinkedBlockingQueue<>() + /** + * Constructor, create a sound using data straight from a file. + */ Sound(SoundFile soundFile) { - this.soundFile = soundFile - if (soundFile.forStreaming) { - assert soundFile instanceof Streaming - streamingDecoder = soundFile.streamingDecoder - } + this(soundFile.forStreaming ? + new StreamingSoundSource(((Streaming)soundFile).streamingDecoder, true) : + new StaticSoundSource(soundFile) + ) } - Sound(StreamingDecoder streamingDecoder) { + /** + * Constructor, create a sound using any implementation of the + * {@link SoundSource} interface. + */ + Sound(SoundSource soundSource) { - this.streamingDecoder = streamingDecoder + this.soundSource = soundSource + soundSource.relay(Event, this) } @Override void onSceneAdded(Scene scene) { - if (!soundFile && !streamingDecoder) { + if (!soundSource) { throw new IllegalStateException('Cannot add a Sound node to a scene without a streaming or file source') } @@ -76,60 +83,21 @@ class Sound extends Node implements AudioElement, Playable, Temporal { .requestCreateOrGet(new SourceRequest()) .get() - if (streamingDecoder) { - var buffersAdded = 0 - streamingDecoder.on(StreamingSampleEvent) { event -> - streamingBuffers << scene - .requestCreateOrGet(new BufferRequest(event.bits, event.channels, event.frequency, event.sample)) - .get() - buffersAdded++ - if (buffersAdded == 10) { - trigger(new PlaybackReadyEvent()) - } - } - - // Run ourselves, otherwise expect the source to run this - if (soundFile) { - Executors.newVirtualThreadPerTaskExecutor().execute(streamingDecoder) - } - else { - trigger(new StreamingReadyEvent()) - } - } - else if (soundFile) { - 
staticBuffer = scene - .requestCreateOrGet(new BufferRequest(soundFile.bits, soundFile.channels, soundFile.frequency, soundFile.soundData)) - .get() - } + soundSource.onSceneAdded(scene) } @Override void onSceneRemoved(Scene scene) { - streamingDecoder?.cancel(true) + soundSource.onSceneRemoved(scene) scene.requestDelete(source) - if (staticBuffer) { - scene.requestDelete(staticBuffer) - } - if (streamedBuffers) { - scene.requestDelete(*streamedBuffers.drain()) - } } @Override void render(AudioRenderer renderer) { if (source) { - - // Add static or streaming buffers to the source - if (staticBuffer) { - source.attachBuffer(staticBuffer) - } - else if (streamingBuffers) { - var newBuffers = streamingBuffers.drain() - source.queueBuffers(*newBuffers) - streamedBuffers.addAll(newBuffers) - } + soundSource.prepareSource(renderer, source) // Control playback if (playing) { @@ -151,18 +119,6 @@ class Sound extends Node implements AudioElement, Playable, Temporal { source.stop() } } - - // Clean up used buffers for a streaming source - if (streamingDecoder) { - var buffersProcessed = source.buffersProcessed() - if (buffersProcessed) { - var processedBuffers = streamedBuffers.drain(buffersProcessed) - source.unqueueBuffers(*processedBuffers) - processedBuffers.each { processedBuffer -> - renderer.delete(processedBuffer) - } - } - } } } @@ -177,4 +133,110 @@ class Sound extends Node implements AudioElement, Playable, Temporal { } currentTimeMs = updatedTimeMs } + + /** + * Interface for any source from which sound data can be obtained. + */ + static interface SoundSource extends EventTarget, SceneEvents { + + /** + * Called during {@code render}, prepare the sound source for playback. + */ + void prepareSource(AudioRenderer renderer, Source source) + } + + /** + * A sound source using static buffers. 
+ */ + @TupleConstructor(defaults = false) + static class StaticSoundSource implements SoundSource { + + final SoundFile soundFile + + private Buffer staticBuffer + + @Override + void onSceneAdded(Scene scene) { + + staticBuffer = scene + .requestCreateOrGet(new BufferRequest(soundFile.bits, soundFile.channels, soundFile.frequency, soundFile.soundData)) + .get() + + trigger(new PlaybackReadyEvent()) + } + + @Override + void onSceneRemoved(Scene scene) { + + scene.requestDelete(staticBuffer) + } + + @Override + void prepareSource(AudioRenderer renderer, Source source) { + + if (!source.bufferAttached && staticBuffer) { + source.attachBuffer(staticBuffer) + } + } + } + + /** + * A sound source using streaming buffers. + */ + @TupleConstructor(defaults = false) + static class StreamingSoundSource implements SoundSource { + + final StreamingDecoder streamingDecoder + final boolean autoStream + + private final BlockingQueue streamingBuffers = new ArrayBlockingQueue<>(10) + private final BlockingQueue streamedBuffers = new LinkedBlockingQueue<>() + + @Override + void onSceneAdded(Scene scene) { + + var buffersAdded = 0 + streamingDecoder.on(StreamingSampleEvent) { event -> + streamingBuffers << scene + .requestCreateOrGet(new BufferRequest(event.bits, event.channels, event.frequency, event.sample)) + .get() + buffersAdded++ + if (buffersAdded == 10) { + trigger(new PlaybackReadyEvent()) + } + } + + // Run ourselves, otherwise expect the owner of this source to run this + if (autoStream) { + Executors.newVirtualThreadPerTaskExecutor().execute(streamingDecoder) + } + else { + trigger(new StreamingReadyEvent()) + } + } + + @Override + void onSceneRemoved(Scene scene) { + + streamingDecoder?.cancel(true) + scene.requestDelete(*streamedBuffers.drain()) + } + + @Override + void prepareSource(AudioRenderer renderer, Source source) { + + var newBuffers = streamingBuffers.drain() + source.queueBuffers(*newBuffers) + streamedBuffers.addAll(newBuffers) + + var buffersProcessed = 
source.buffersProcessed() + if (buffersProcessed) { + var processedBuffers = streamedBuffers.drain(buffersProcessed) + source.unqueueBuffers(*processedBuffers) + processedBuffers.each { processedBuffer -> + renderer.delete(processedBuffer) + } + } + } + } } diff --git a/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/scenegraph/nodes/Video.groovy b/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/scenegraph/nodes/Video.groovy index 5dd6068f..4bdab27c 100644 --- a/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/scenegraph/nodes/Video.groovy +++ b/redhorizon-engine/source/nz/net/ultraq/redhorizon/engine/scenegraph/nodes/Video.groovy @@ -23,6 +23,8 @@ import nz.net.ultraq.redhorizon.engine.scenegraph.GraphicsElement import nz.net.ultraq.redhorizon.engine.scenegraph.Node import nz.net.ultraq.redhorizon.engine.scenegraph.Playable import nz.net.ultraq.redhorizon.engine.scenegraph.Scene +import nz.net.ultraq.redhorizon.engine.scenegraph.nodes.Animation.StreamingAnimationSource +import nz.net.ultraq.redhorizon.engine.scenegraph.nodes.Sound.StreamingSoundSource import nz.net.ultraq.redhorizon.filetypes.StreamingDecoder import nz.net.ultraq.redhorizon.filetypes.VideoFile @@ -48,12 +50,12 @@ class Video extends Node