Update to v099r13 release
parent 08337e38f5
commit 0dd66ec634

16 changed files with 350 additions and 407 deletions
@@ -19,7 +19,7 @@ struct AudioALSA : Audio {
   struct {
     bool synchronize = false;
-    unsigned frequency = 22050;
+    unsigned frequency = 48000;
     unsigned latency = 60;
   } settings;

@@ -65,10 +65,10 @@ struct AudioALSA : Audio {
     return false;
   }

-  auto sample(uint16_t left, uint16_t right) -> void {
+  auto sample(int16_t left, int16_t right) -> void {
     if(!device.handle) return;

-    buffer.data[buffer.length++] = left + (right << 16);
+    buffer.data[buffer.length++] = (uint16_t)left << 0 | (uint16_t)right << 16;
     if(buffer.length < device.period_size) return;

     snd_pcm_sframes_t avail;
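Every driver's sample() hunk in this commit replaces the signed expression left + (right << 16) with explicit unsigned packing. A minimal standalone sketch (not part of the commit; the helper name is illustrative) of why the (uint16_t) casts matter once the parameters are int16_t:

    #include <cstdint>
    #include <cassert>

    //pack one stereo frame the way the updated drivers do: cast each signed
    //sample to uint16_t first so a negative right channel cannot sign-extend
    //into the left channel's bits (the extra uint32_t cast also keeps the
    //16-bit shift out of the sign bit of int)
    auto packFrame(int16_t left, int16_t right) -> uint32_t {
      return (uint16_t)left << 0 | (uint32_t)(uint16_t)right << 16;
    }

    int main() {
      assert(packFrame(0x1234, -1) == 0xffff1234u);  //right = -1 stays confined to the high half-word
      assert(packFrame(-2, 3) == 0x0003fffeu);       //left = -2 stays confined to the low half-word
    }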
@@ -8,7 +8,7 @@ struct AudioAO : Audio {
   ao_device* audio_device = nullptr;

   struct {
-    unsigned frequency = 22050;
+    unsigned frequency = 48000;
   } settings;

   auto cap(const string& name) -> bool {

@@ -31,8 +31,8 @@ struct AudioAO : Audio {
     return false;
   }

-  auto sample(uint16_t l_sample, uint16_t r_sample) -> void {
-    uint32_t samp = (l_sample << 0) + (r_sample << 16);
+  auto sample(int16_t left, int16_t right) -> void {
+    uint32_t samp = (uint16_t)left << 0 | (uint16_t)right << 0;
     ao_play(audio_device, (char*)&samp, 4); //This may need to be byte swapped for Big Endian
   }
@@ -24,7 +24,7 @@ struct AudioDS : Audio {
   struct {
     HWND handle = nullptr;
     bool synchronize = false;
-    uint frequency = 22050;
+    uint frequency = 48000;
     uint latency = 120;
   } settings;

@@ -72,8 +72,8 @@ struct AudioDS : Audio {
     return false;
   }

-  auto sample(uint16_t left, uint16_t right) -> void {
-    device.buffer[device.bufferoffset++] = left + (right << 16);
+  auto sample(int16_t left, int16_t right) -> void {
+    device.buffer[device.bufferoffset++] = (uint16_t)left << 0 | (uint16_t)right << 16;
     if(device.bufferoffset < device.latency) return;
     device.bufferoffset = 0;
@@ -26,7 +26,7 @@ struct AudioOpenAL : Audio {

   struct {
     bool synchronize = true;
-    unsigned frequency = 22050;
+    unsigned frequency = 48000;
     unsigned latency = 40;
   } settings;

@@ -66,8 +66,8 @@ struct AudioOpenAL : Audio {
     return false;
  }

-  auto sample(uint16_t left, uint16_t right) -> void {
-    buffer.data[buffer.length++] = left << 0 | right << 16;
+  auto sample(int16_t left, int16_t right) -> void {
+    buffer.data[buffer.length++] = (uint16_t)left << 0 | (uint16_t)right << 16;
     if(buffer.length < buffer.size) return;

     ALuint albuffer = 0;
@@ -11,26 +11,26 @@
 //Failing that, one can disable OSS4 ioctl calls inside init() and remove the below defines

 #ifndef SNDCTL_DSP_COOKEDMODE
-  #define SNDCTL_DSP_COOKEDMODE _IOW('P', 30, signed)
+  #define SNDCTL_DSP_COOKEDMODE _IOW('P', 30, int)
 #endif

 #ifndef SNDCTL_DSP_POLICY
-  #define SNDCTL_DSP_POLICY _IOW('P', 45, signed)
+  #define SNDCTL_DSP_POLICY _IOW('P', 45, int)
 #endif

 struct AudioOSS : Audio {
   ~AudioOSS() { term(); }

   struct {
-    signed fd = -1;
-    signed format = AFMT_S16_LE;
-    signed channels = 2;
+    int fd = -1;
+    int format = AFMT_S16_LE;
+    int channels = 2;
   } device;

   struct {
     string device = "/dev/dsp";
     bool synchronize = true;
-    unsigned frequency = 22050;
+    uint frequency = 48000;
   } settings;

   auto cap(const string& name) -> bool {

@@ -60,8 +60,8 @@ struct AudioOSS : Audio {
       return true;
     }

-    if(name == Audio::Frequency && value.is<unsigned>()) {
-      settings.frequency = value.get<unsigned>();
+    if(name == Audio::Frequency && value.is<uint>()) {
+      settings.frequency = value.get<uint>();
       if(device.fd >= 0) init();
       return true;
     }

@@ -69,8 +69,8 @@ struct AudioOSS : Audio {
     return false;
   }

-  auto sample(uint16_t left, uint16_t right) -> void {
-    uint32_t sample = left << 0 | right << 16;
+  auto sample(int16_t left, int16_t right) -> void {
+    uint32_t sample = (uint16_t)left << 0 | (uint16_t)right << 16;
     auto unused = write(device.fd, &sample, 4);
   }

@@ -84,14 +84,14 @@ struct AudioOSS : Audio {
     #if 1 //SOUND_VERSION >= 0x040000
     //attempt to enable OSS4-specific features regardless of version
     //OSS3 ioctl calls will silently fail, but sound will still work
-    signed cooked = 1, policy = 4; //policy should be 0 - 10, lower = less latency, more CPU usage
+    int cooked = 1, policy = 4; //policy should be 0 - 10, lower = less latency, more CPU usage
     ioctl(device.fd, SNDCTL_DSP_COOKEDMODE, &cooked);
     ioctl(device.fd, SNDCTL_DSP_POLICY, &policy);
     #endif
-    signed freq = settings.frequency;
+    int frequency = settings.frequency;
     ioctl(device.fd, SNDCTL_DSP_CHANNELS, &device.channels);
     ioctl(device.fd, SNDCTL_DSP_SETFMT, &device.format);
-    ioctl(device.fd, SNDCTL_DSP_SPEED, &freq);
+    ioctl(device.fd, SNDCTL_DSP_SPEED, &frequency);

     updateSynchronization();
     return true;
@@ -20,7 +20,7 @@ struct AudioPulseAudio : Audio {

   struct {
     bool synchronize = false;
-    unsigned frequency = 22050;
+    unsigned frequency = 48000;
     unsigned latency = 60;
   } settings;

@@ -64,9 +64,9 @@ struct AudioPulseAudio : Audio {
     return false;
   }

-  auto sample(uint16_t left, uint16_t right) -> void {
+  auto sample(int16_t left, int16_t right) -> void {
     pa_stream_begin_write(device.stream, (void**)&buffer.data, &buffer.size);
-    buffer.data[buffer.offset++] = left + (right << 16);
+    buffer.data[buffer.offset++] = (uint16_t)left << 0 | (uint16_t)right << 16;
     if((buffer.offset + 1) * pa_frame_size(&device.spec) <= buffer.size) return;

     while(true) {
@@ -15,7 +15,7 @@ struct AudioPulseAudioSimple : Audio {
   } buffer;

   struct {
-    unsigned frequency = 22050;
+    unsigned frequency = 48000;
   } settings;

   auto cap(const string& name) -> bool {

@@ -38,10 +38,10 @@ struct AudioPulseAudioSimple : Audio {
     return false;
   }

-  auto sample(uint16_t left, uint16_t right) -> void {
+  auto sample(int16_t left, int16_t right) -> void {
     if(!device.handle) return;

-    buffer.data[buffer.offset++] = left + (right << 16);
+    buffer.data[buffer.offset++] = (uint16_t)left << 0 | (uint16_t)right << 16;
     if(buffer.offset >= 64) {
       int error;
       pa_simple_write(device.handle, (const void*)buffer.data, buffer.offset * sizeof(uint32_t), &error);
audio/wasapi.cpp (151 changed lines)
@@ -5,19 +5,25 @@
 #include <devicetopology.h>
 #include <endpointvolume.h>

-#include <nall/dsp.hpp>
-
 struct AudioWASAPI : Audio {
   ~AudioWASAPI() { term(); }

   struct {
     bool exclusive = false;
-    bool synchronize = false;
-    uint frequency = 44100;
+    uint latency = 80;
+    bool synchronize = true;
   } settings;

+  struct {
+    uint channels = 0;
+    uint frequency = 0;
+    uint mode = 0;
+    uint precision = 0;
+  } device;
+
   auto cap(const string& name) -> bool {
     if(name == Audio::Exclusive) return true;
+    if(name == Audio::Latency) return true;
     if(name == Audio::Synchronize) return true;
     if(name == Audio::Frequency) return true;
     return false;

@@ -25,145 +31,132 @@ struct AudioWASAPI : Audio {

   auto get(const string& name) -> any {
     if(name == Audio::Exclusive) return settings.exclusive;
+    if(name == Audio::Latency) return settings.latency;
     if(name == Audio::Synchronize) return settings.synchronize;
-    if(name == Audio::Frequency) return settings.frequency;
+    if(name == Audio::Frequency) return device.frequency;
     return {};
   }

   auto set(const string& name, const any& value) -> bool {
     if(name == Audio::Exclusive && value.get<bool>()) {
+      if(audioDevice) term(), init();
       settings.exclusive = value.get<bool>();
       return true;
     }

+    if(name == Audio::Latency && value.get<uint>()) {
+      if(audioDevice) term(), init();
+      settings.latency = value.get<uint>();
+      return true;
+    }
+
     if(name == Audio::Synchronize && value.is<bool>()) {
       settings.synchronize = value.get<bool>();
       return true;
     }

-    if(name == Audio::Frequency && value.is<uint>()) {
-      settings.frequency = value.get<uint>();
-      dsp.setFrequency(settings.frequency);
-      return true;
-    }
-
     return false;
   }

-  auto sample(uint16 left, uint16 right) -> void {
-    int samples[] = {(int16)left, (int16)right};
-    dsp.sample(samples);
-    while(dsp.pending()) {
-      dsp.read(samples);
-      write(samples);
+  auto sample(int16_t left, int16_t right) -> void {
+    queuedFrames.append((uint16_t)left << 0 | (uint16_t)right << 16);
+
+    if(!available() && queuedFrames.size() >= bufferSize) {
+      if(settings.synchronize) while(!available()); //wait for free sample slot
+      else queuedFrames.takeLeft(); //drop sample (run ahead)
+    }
+
+    uint32_t cachedFrame = 0;
+    for(auto n : range(available())) {
+      if(queuedFrames) cachedFrame = queuedFrames.takeLeft();
+      write(cachedFrame >> 0, cachedFrame >> 16);
     }
   }

   auto clear() -> void {
     audioClient->Stop();
-    renderClient->GetBuffer(bufferFrameCount, &bufferData);
-    renderClient->ReleaseBuffer(bufferFrameCount, 0);
+    audioClient->Reset();
+    for(auto n : range(available())) write(0, 0);
     audioClient->Start();
   }

   auto init() -> bool {
     if(CoCreateInstance(CLSID_MMDeviceEnumerator, nullptr, CLSCTX_ALL, IID_IMMDeviceEnumerator, (void**)&enumerator) != S_OK) return false;
-    if(enumerator->GetDefaultAudioEndpoint(eRender, eConsole, &device) != S_OK) return false;
-    if(device->Activate(IID_IAudioClient, CLSCTX_ALL, nullptr, (void**)&audioClient) != S_OK) return false;
+    if(enumerator->GetDefaultAudioEndpoint(eRender, eConsole, &audioDevice) != S_OK) return false;
+    if(audioDevice->Activate(IID_IAudioClient, CLSCTX_ALL, nullptr, (void**)&audioClient) != S_OK) return false;

     if(settings.exclusive) {
-      if(device->OpenPropertyStore(STGM_READ, &propertyStore) != S_OK) return false;
+      if(audioDevice->OpenPropertyStore(STGM_READ, &propertyStore) != S_OK) return false;
       if(propertyStore->GetValue(PKEY_AudioEngine_DeviceFormat, &propVariant) != S_OK) return false;
       waveFormat = (WAVEFORMATEX*)propVariant.blob.pBlobData;
       if(audioClient->GetDevicePeriod(nullptr, &devicePeriod) != S_OK) return false;
-      if(audioClient->Initialize(AUDCLNT_SHAREMODE_EXCLUSIVE, 0, devicePeriod, devicePeriod, waveFormat, nullptr) != S_OK) return false;
+      auto latency = max(devicePeriod, (REFERENCE_TIME)settings.latency * 10'000); //1ms to 100ns units
+      if(audioClient->Initialize(AUDCLNT_SHAREMODE_EXCLUSIVE, 0, latency, latency, waveFormat, nullptr) != S_OK) return false;
+      DWORD taskIndex = 0;
       taskHandle = AvSetMmThreadCharacteristics(L"Pro Audio", &taskIndex);
     } else {
       if(audioClient->GetMixFormat(&waveFormat) != S_OK) return false;
       if(audioClient->GetDevicePeriod(&devicePeriod, nullptr)) return false;
-      if(audioClient->Initialize(AUDCLNT_SHAREMODE_SHARED, 0, devicePeriod, 0, waveFormat, nullptr) != S_OK) return false;
+      auto latency = max(devicePeriod, (REFERENCE_TIME)settings.latency * 10'000); //1ms to 100ns units
+      if(audioClient->Initialize(AUDCLNT_SHAREMODE_SHARED, 0, latency, 0, waveFormat, nullptr) != S_OK) return false;
     }

     if(audioClient->GetService(IID_IAudioRenderClient, (void**)&renderClient) != S_OK) return false;
-    if(audioClient->GetBufferSize(&bufferFrameCount) != S_OK) return false;
+    if(audioClient->GetBufferSize(&bufferSize) != S_OK) return false;

-    switch(((WAVEFORMATEXTENSIBLE*)waveFormat)->SubFormat.Data1) {
-    case 1: ieee = false; break; //fixed point
-    case 3: ieee = true; break; //floating point
-    default: return false; //unknown format; abort
-    }
-
-    dsp.setChannels(2);
-    dsp.setPrecision(16);
-    dsp.setFrequency(settings.frequency);
-
-    dsp.setResampler(DSP::ResampleEngine::Linear);
-    dsp.setResamplerFrequency(waveFormat->nSamplesPerSec);
-    dsp.setChannels(waveFormat->nChannels);
-    dsp.setPrecision(waveFormat->wBitsPerSample);
-
-    print("[WASAPI]\n");
-    print("Channels: ", waveFormat->nChannels, "\n");
-    print("Precision: ", waveFormat->wBitsPerSample, "\n");
-    print("Frequency: ", waveFormat->nSamplesPerSec, "\n");
-    print("IEEE-754: ", ieee, "\n");
-    print("Exclusive: ", settings.exclusive, "\n\n");
+    device.channels = waveFormat->nChannels;
+    device.frequency = waveFormat->nSamplesPerSec;
+    device.mode = ((WAVEFORMATEXTENSIBLE*)waveFormat)->SubFormat.Data1;
+    device.precision = waveFormat->wBitsPerSample;

     audioClient->Start();
     return true;
   }

   auto term() -> void {
-    if(audioClient) {
-      audioClient->Stop();
-    }
-
-    if(taskHandle) {
-      AvRevertMmThreadCharacteristics(taskHandle);
-      taskHandle = nullptr;
-    }
+    if(audioClient) audioClient->Stop();
+    if(renderClient) renderClient->Release(), renderClient = nullptr;
+    if(waveFormat) CoTaskMemFree(waveFormat), waveFormat = nullptr;
+    if(audioClient) audioClient->Release(), audioClient = nullptr;
+    if(audioDevice) audioDevice->Release(), audioDevice = nullptr;
+    if(taskHandle) AvRevertMmThreadCharacteristics(taskHandle), taskHandle = nullptr;
   }

 private:
-  auto write(int samples[]) -> void {
-    while(true) {
-      uint32 padding = 0;
-      audioClient->GetCurrentPadding(&padding);
-      if(bufferFrameCount - padding < 1) {
-        if(!settings.synchronize) return;
-        continue;
-      }
-      break;
-    }
-
-    renderClient->GetBuffer(1, &bufferData);
-
-    if(ieee) {
+  auto available() -> uint {
+    uint32_t padding = 0;
+    audioClient->GetCurrentPadding(&padding);
+    return bufferSize - padding;
+  }
+
+  auto write(int16_t left, int16_t right) -> void {
+    if(renderClient->GetBuffer(1, &bufferData) != S_OK) return;
+
+    if(device.channels >= 2 && device.mode == 1 && device.precision == 16) {
+      auto buffer = (int16_t*)bufferData;
+      buffer[0] = left;
+      buffer[1] = right;
+    }
+
+    if(device.channels >= 2 && device.mode == 3 && device.precision == 32) {
       auto buffer = (float*)bufferData;
-      buffer[0] = (int16)samples[0] / 32768.0;
-      buffer[1] = (int16)samples[1] / 32768.0;
-    } else {
-      auto buffer = (int16*)bufferData;
-      buffer[0] = (int16)samples[0];
-      buffer[1] = (int16)samples[1];
+      buffer[0] = left / 32768.0;
+      buffer[1] = right / 32768.0;
     }

     renderClient->ReleaseBuffer(1, 0);
   }

-  DSP dsp;
   IMMDeviceEnumerator* enumerator = nullptr;
-  IMMDevice* device = nullptr;
+  IMMDevice* audioDevice = nullptr;
   IPropertyStore* propertyStore = nullptr;
   IAudioClient* audioClient = nullptr;
   IAudioRenderClient* renderClient = nullptr;
   WAVEFORMATEX* waveFormat = nullptr;
   PROPVARIANT propVariant;
   HANDLE taskHandle = nullptr;
-  DWORD taskIndex = 0;
   REFERENCE_TIME devicePeriod = 0;
-  uint32 bufferFrameCount = 0;
-  uint8* bufferData = nullptr;
-  bool ieee = false;
+  uint32_t bufferSize = 0; //in frames
+  uint8_t* bufferData = nullptr;
+  vector<uint32_t> queuedFrames;
 };
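The reworked WASAPI driver converts settings.latency from milliseconds into REFERENCE_TIME (100-nanosecond units) before IAudioClient::Initialize(), never dropping below the device period, and treats free space as GetBufferSize() minus GetCurrentPadding(). A hedged sketch of just that arithmetic with stand-in names and no COM calls (not part of the commit):

    #include <cstdint>
    #include <algorithm>
    #include <cassert>

    using ReferenceTime = int64_t;  //stand-in for the Windows REFERENCE_TIME typedef

    //1 ms = 10,000 * 100 ns, hence the "* 10'000" in the commit
    auto latencyToReferenceTime(uint32_t latencyMs, ReferenceTime devicePeriod) -> ReferenceTime {
      return std::max<ReferenceTime>(devicePeriod, (ReferenceTime)latencyMs * 10'000);
    }

    //free frames = total buffer frames - frames still queued (the "padding")
    auto availableFrames(uint32_t bufferSize, uint32_t padding) -> uint32_t {
      return bufferSize - padding;
    }

    int main() {
      assert(latencyToReferenceTime(80, 100'000) == 800'000);  //80 ms -> 800,000 * 100 ns
      assert(latencyToReferenceTime(2, 100'000) == 100'000);   //never below the device period
      assert(availableFrames(1024, 256) == 768);
    }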
@@ -29,7 +29,7 @@ struct AudioXAudio2 : Audio, public IXAudio2VoiceCallback {

   struct {
     bool synchronize = false;
-    unsigned frequency = 22050;
+    unsigned frequency = 48000;
     unsigned latency = 120;
   } settings;

@@ -78,8 +78,8 @@ struct AudioXAudio2 : Audio, public IXAudio2VoiceCallback {
     pSourceVoice->SubmitSourceBuffer(&xa2buffer);
   }

-  auto sample(uint16_t left, uint16_t right) -> void {
-    device.buffer[device.writebuffer * device.latency + device.bufferoffset++] = left + (right << 16);
+  auto sample(int16_t left, int16_t right) -> void {
+    device.buffer[device.writebuffer * device.latency + device.bufferoffset++] = (uint16_t)left << 0 | (uint16_t)right << 16;
     if(device.bufferoffset < device.latency) return;
     device.bufferoffset = 0;
@@ -1,25 +1,20 @@
-/*
-  xaudio2.hpp (2010-08-14)
-  author: OV2
-
-  ruby-specific header to provide mingw-friendly xaudio2 interfaces
-*/
+#pragma once

-#ifndef XAUDIO2_RUBY_H
-#define XAUDIO2_RUBY_H
+//ruby-specific header to provide mingw-friendly xaudio2 interfaces

 //64-bit GCC fix
 #define GUID_EXT EXTERN_C
 #define GUID_SECT

-#include <BaseTyps.h>
+#include <audioclient.h>
+#include <basetyps.h>

-#define DEFINE_GUID_X(n,l,w1,w2,b1,b2,b3,b4,b5,b6,b7,b8) GUID_EXT const GUID n GUID_SECT = {l,w1,w2,{b1,b2,b3,b4,b5,b6,b7,b8}}
+#define DEFINE_GUID_X(n,l,w1,w2,b1,b2,b3,b4,b5,b6,b7,b8) GUID_EXT const GUID n GUID_SECT = {l, w1, w2, {b1, b2, b3, b4, b5, b6, b7, b8}}
 #define DEFINE_CLSID_X(className, l, w1, w2, b1, b2, b3, b4, b5, b6, b7, b8) \
   DEFINE_GUID_X(CLSID_##className, 0x##l, 0x##w1, 0x##w2, 0x##b1, 0x##b2, 0x##b3, 0x##b4, 0x##b5, 0x##b6, 0x##b7, 0x##b8)
 #define DEFINE_IID_X(interfaceName, l, w1, w2, b1, b2, b3, b4, b5, b6, b7, b8) \
   DEFINE_GUID_X(IID_##interfaceName, 0x##l, 0x##w1, 0x##w2, 0x##b1, 0x##b2, 0x##b3, 0x##b4, 0x##b5, 0x##b6, 0x##b7, 0x##b8)
-#define X2DEFAULT(x) =x
+#define X2DEFAULT(x) = x

 DEFINE_CLSID_X(XAudio2, e21a7345, eb21, 468e, be, 50, 80, 4d, b9, 7c, f7, 08);
 DEFINE_CLSID_X(XAudio2_Debug, f7a76c21, 53d4, 46bb, ac, 53, 8b, 45, 9c, ae, 46, bd);

@@ -27,314 +22,271 @@ DEFINE_IID_X(IXAudio2, 8bcf1f58, 9fe7, 4583, 8a, c6, e2, ad, c4, 65, c8, bb);

 DECLARE_INTERFACE(IXAudio2Voice);

 #define XAUDIO2_COMMIT_NOW 0
 #define XAUDIO2_DEFAULT_CHANNELS 0
 #define XAUDIO2_DEFAULT_SAMPLERATE 0
 #define XAUDIO2_DEFAULT_FREQ_RATIO 4.0f
 #define XAUDIO2_DEBUG_ENGINE 0x0001
 #define XAUDIO2_VOICE_NOSRC 0x0004

-typedef struct
-{
-  WAVEFORMATEX Format;
-  union
-  {
-    WORD wValidBitsPerSample;
-    WORD wSamplesPerBlock;
-    WORD wReserved;
-  } Samples;
-  DWORD dwChannelMask;
-  GUID SubFormat;
-} WAVEFORMATEXTENSIBLE, *PWAVEFORMATEXTENSIBLE, *LPPWAVEFORMATEXTENSIBLE;
-typedef const WAVEFORMATEXTENSIBLE* LPCWAVEFORMATEXTENSIBLE;
-
-typedef enum XAUDIO2_DEVICE_ROLE
-{
+typedef enum XAUDIO2_DEVICE_ROLE {
   NotDefaultDevice = 0x0,
   DefaultConsoleDevice = 0x1,
   DefaultMultimediaDevice = 0x2,
   DefaultCommunicationsDevice = 0x4,
   DefaultGameDevice = 0x8,
   GlobalDefaultDevice = 0xf,
-  InvalidDeviceRole = ~GlobalDefaultDevice
+  InvalidDeviceRole = ~GlobalDefaultDevice,
 } XAUDIO2_DEVICE_ROLE;

-typedef struct XAUDIO2_DEVICE_DETAILS
-{
+typedef struct XAUDIO2_DEVICE_DETAILS {
   WCHAR DeviceID[256];
   WCHAR DisplayName[256];
   XAUDIO2_DEVICE_ROLE Role;
   WAVEFORMATEXTENSIBLE OutputFormat;
 } XAUDIO2_DEVICE_DETAILS;

-typedef struct XAUDIO2_VOICE_DETAILS
-{
+typedef struct XAUDIO2_VOICE_DETAILS {
   UINT32 CreationFlags;
   UINT32 InputChannels;
   UINT32 InputSampleRate;
 } XAUDIO2_VOICE_DETAILS;

-typedef enum XAUDIO2_WINDOWS_PROCESSOR_SPECIFIER
-{
+typedef enum XAUDIO2_WINDOWS_PROCESSOR_SPECIFIER {
   Processor1 = 0x00000001,
   Processor2 = 0x00000002,
   Processor3 = 0x00000004,
   Processor4 = 0x00000008,
   Processor5 = 0x00000010,
   Processor6 = 0x00000020,
   Processor7 = 0x00000040,
   Processor8 = 0x00000080,
   Processor9 = 0x00000100,
   Processor10 = 0x00000200,
   Processor11 = 0x00000400,
   Processor12 = 0x00000800,
   Processor13 = 0x00001000,
   Processor14 = 0x00002000,
   Processor15 = 0x00004000,
   Processor16 = 0x00008000,
   Processor17 = 0x00010000,
   Processor18 = 0x00020000,
   Processor19 = 0x00040000,
   Processor20 = 0x00080000,
   Processor21 = 0x00100000,
   Processor22 = 0x00200000,
   Processor23 = 0x00400000,
   Processor24 = 0x00800000,
   Processor25 = 0x01000000,
   Processor26 = 0x02000000,
   Processor27 = 0x04000000,
   Processor28 = 0x08000000,
   Processor29 = 0x10000000,
   Processor30 = 0x20000000,
   Processor31 = 0x40000000,
   Processor32 = 0x80000000,
   XAUDIO2_ANY_PROCESSOR = 0xffffffff,
-  XAUDIO2_DEFAULT_PROCESSOR = XAUDIO2_ANY_PROCESSOR
+  XAUDIO2_DEFAULT_PROCESSOR = XAUDIO2_ANY_PROCESSOR,
 } XAUDIO2_WINDOWS_PROCESSOR_SPECIFIER, XAUDIO2_PROCESSOR;

-typedef struct XAUDIO2_VOICE_SENDS
-{
+typedef struct XAUDIO2_VOICE_SENDS {
   UINT32 OutputCount;
   IXAudio2Voice** pOutputVoices;
 } XAUDIO2_VOICE_SENDS;

-typedef struct XAUDIO2_EFFECT_DESCRIPTOR
-{
+typedef struct XAUDIO2_EFFECT_DESCRIPTOR {
   IUnknown* pEffect;
   BOOL InitialState;
   UINT32 OutputChannels;
 } XAUDIO2_EFFECT_DESCRIPTOR;

-typedef struct XAUDIO2_EFFECT_CHAIN
-{
+typedef struct XAUDIO2_EFFECT_CHAIN {
   UINT32 EffectCount;
   const XAUDIO2_EFFECT_DESCRIPTOR* pEffectDescriptors;
 } XAUDIO2_EFFECT_CHAIN;

-typedef enum XAUDIO2_FILTER_TYPE
-{
+typedef enum XAUDIO2_FILTER_TYPE {
   LowPassFilter,
   BandPassFilter,
-  HighPassFilter
+  HighPassFilter,
 } XAUDIO2_FILTER_TYPE;

-typedef struct XAUDIO2_FILTER_PARAMETERS
-{
+typedef struct XAUDIO2_FILTER_PARAMETERS {
   XAUDIO2_FILTER_TYPE Type;
   float Frequency;
   float OneOverQ;
 } XAUDIO2_FILTER_PARAMETERS;

-typedef struct XAUDIO2_BUFFER
-{
+typedef struct XAUDIO2_BUFFER {
   UINT32 Flags;
   UINT32 AudioBytes;
   const BYTE* pAudioData;
   UINT32 PlayBegin;
   UINT32 PlayLength;
   UINT32 LoopBegin;
   UINT32 LoopLength;
   UINT32 LoopCount;
   void* pContext;
 } XAUDIO2_BUFFER;

-typedef struct XAUDIO2_BUFFER_WMA
-{
+typedef struct XAUDIO2_BUFFER_WMA {
   const UINT32* pDecodedPacketCumulativeBytes;
   UINT32 PacketCount;
 } XAUDIO2_BUFFER_WMA;

-typedef struct XAUDIO2_VOICE_STATE
-{
+typedef struct XAUDIO2_VOICE_STATE {
   void* pCurrentBufferContext;
   UINT32 BuffersQueued;
   UINT64 SamplesPlayed;
 } XAUDIO2_VOICE_STATE;

-typedef struct XAUDIO2_PERFORMANCE_DATA
-{
+typedef struct XAUDIO2_PERFORMANCE_DATA {
   UINT64 AudioCyclesSinceLastQuery;
   UINT64 TotalCyclesSinceLastQuery;
   UINT32 MinimumCyclesPerQuantum;
   UINT32 MaximumCyclesPerQuantum;
   UINT32 MemoryUsageInBytes;
   UINT32 CurrentLatencyInSamples;
   UINT32 GlitchesSinceEngineStarted;
   UINT32 ActiveSourceVoiceCount;
   UINT32 TotalSourceVoiceCount;
   UINT32 ActiveSubmixVoiceCount;
   UINT32 TotalSubmixVoiceCount;
   UINT32 ActiveXmaSourceVoices;
   UINT32 ActiveXmaStreams;
 } XAUDIO2_PERFORMANCE_DATA;

-typedef struct XAUDIO2_DEBUG_CONFIGURATION
-{
+typedef struct XAUDIO2_DEBUG_CONFIGURATION {
   UINT32 TraceMask;
   UINT32 BreakMask;
   BOOL LogThreadID;
   BOOL LogFileline;
   BOOL LogFunctionName;
   BOOL LogTiming;
 } XAUDIO2_DEBUG_CONFIGURATION;

-DECLARE_INTERFACE(IXAudio2EngineCallback)
-{
+DECLARE_INTERFACE(IXAudio2EngineCallback) {
   STDMETHOD_(void, OnProcessingPassStart) (THIS) PURE;
   STDMETHOD_(void, OnProcessingPassEnd) (THIS) PURE;
   STDMETHOD_(void, OnCriticalError) (THIS_ HRESULT Error) PURE;
 };

-DECLARE_INTERFACE(IXAudio2VoiceCallback)
-{
+DECLARE_INTERFACE(IXAudio2VoiceCallback) {
   STDMETHOD_(void, OnVoiceProcessingPassStart) (THIS_ UINT32 BytesRequired) PURE;
   STDMETHOD_(void, OnVoiceProcessingPassEnd) (THIS) PURE;
   STDMETHOD_(void, OnStreamEnd) (THIS) PURE;
   STDMETHOD_(void, OnBufferStart) (THIS_ void* pBufferContext) PURE;
   STDMETHOD_(void, OnBufferEnd) (THIS_ void* pBufferContext) PURE;
   STDMETHOD_(void, OnLoopEnd) (THIS_ void* pBufferContext) PURE;
   STDMETHOD_(void, OnVoiceError) (THIS_ void* pBufferContext, HRESULT Error) PURE;
 };

-DECLARE_INTERFACE(IXAudio2Voice)
-{
+DECLARE_INTERFACE(IXAudio2Voice) {
 #define Declare_IXAudio2Voice_Methods() \
   STDMETHOD_(void, GetVoiceDetails) (THIS_ XAUDIO2_VOICE_DETAILS* pVoiceDetails) PURE; \
   STDMETHOD(SetOutputVoices) (THIS_ const XAUDIO2_VOICE_SENDS* pSendList) PURE; \
   STDMETHOD(SetEffectChain) (THIS_ const XAUDIO2_EFFECT_CHAIN* pEffectChain) PURE; \
   STDMETHOD(EnableEffect) (THIS_ UINT32 EffectIndex, \
     UINT32 OperationSet X2DEFAULT(XAUDIO2_COMMIT_NOW)) PURE; \
   STDMETHOD(DisableEffect) (THIS_ UINT32 EffectIndex, \
     UINT32 OperationSet X2DEFAULT(XAUDIO2_COMMIT_NOW)) PURE; \
   STDMETHOD_(void, GetEffectState) (THIS_ UINT32 EffectIndex, BOOL* pEnabled) PURE; \
   STDMETHOD(SetEffectParameters) (THIS_ UINT32 EffectIndex, \
     const void* pParameters, \
     UINT32 ParametersByteSize, \
     UINT32 OperationSet X2DEFAULT(XAUDIO2_COMMIT_NOW)) PURE; \
   STDMETHOD(GetEffectParameters) (THIS_ UINT32 EffectIndex, void* pParameters, \
     UINT32 ParametersByteSize) PURE; \
   STDMETHOD(SetFilterParameters) (THIS_ const XAUDIO2_FILTER_PARAMETERS* pParameters, \
     UINT32 OperationSet X2DEFAULT(XAUDIO2_COMMIT_NOW)) PURE; \
   STDMETHOD_(void, GetFilterParameters) (THIS_ XAUDIO2_FILTER_PARAMETERS* pParameters) PURE; \
   STDMETHOD(SetVolume) (THIS_ float Volume, \
     UINT32 OperationSet X2DEFAULT(XAUDIO2_COMMIT_NOW)) PURE; \
   STDMETHOD_(void, GetVolume) (THIS_ float* pVolume) PURE; \
   STDMETHOD(SetChannelVolumes) (THIS_ UINT32 Channels, const float* pVolumes, \
     UINT32 OperationSet X2DEFAULT(XAUDIO2_COMMIT_NOW)) PURE; \
   STDMETHOD_(void, GetChannelVolumes) (THIS_ UINT32 Channels, float* pVolumes) PURE; \
   STDMETHOD(SetOutputMatrix) (THIS_ IXAudio2Voice* pDestinationVoice, \
     UINT32 SourceChannels, UINT32 DestinationChannels, \
     const float* pLevelMatrix, \
     UINT32 OperationSet X2DEFAULT(XAUDIO2_COMMIT_NOW)) PURE; \
   STDMETHOD_(void, GetOutputMatrix) (THIS_ IXAudio2Voice* pDestinationVoice, \
     UINT32 SourceChannels, UINT32 DestinationChannels, \
     float* pLevelMatrix) PURE; \
   STDMETHOD_(void, DestroyVoice) (THIS) PURE

   Declare_IXAudio2Voice_Methods();
 };

-DECLARE_INTERFACE_(IXAudio2MasteringVoice, IXAudio2Voice)
-{
+DECLARE_INTERFACE_(IXAudio2MasteringVoice, IXAudio2Voice) {
   Declare_IXAudio2Voice_Methods();
 };

-DECLARE_INTERFACE_(IXAudio2SubmixVoice, IXAudio2Voice)
-{
+DECLARE_INTERFACE_(IXAudio2SubmixVoice, IXAudio2Voice) {
   Declare_IXAudio2Voice_Methods();
 };

-DECLARE_INTERFACE_(IXAudio2SourceVoice, IXAudio2Voice)
-{
+DECLARE_INTERFACE_(IXAudio2SourceVoice, IXAudio2Voice) {
   Declare_IXAudio2Voice_Methods();
   STDMETHOD(Start) (THIS_ UINT32 Flags, UINT32 OperationSet X2DEFAULT(XAUDIO2_COMMIT_NOW)) PURE;
   STDMETHOD(Stop) (THIS_ UINT32 Flags, UINT32 OperationSet X2DEFAULT(XAUDIO2_COMMIT_NOW)) PURE;
   STDMETHOD(SubmitSourceBuffer) (THIS_ const XAUDIO2_BUFFER* pBuffer, const XAUDIO2_BUFFER_WMA* pBufferWMA X2DEFAULT(NULL)) PURE;
   STDMETHOD(FlushSourceBuffers) (THIS) PURE;
   STDMETHOD(Discontinuity) (THIS) PURE;
   STDMETHOD(ExitLoop) (THIS_ UINT32 OperationSet X2DEFAULT(XAUDIO2_COMMIT_NOW)) PURE;
   STDMETHOD_(void, GetState) (THIS_ XAUDIO2_VOICE_STATE* pVoiceState) PURE;
   STDMETHOD(SetFrequencyRatio) (THIS_ float Ratio,
     UINT32 OperationSet X2DEFAULT(XAUDIO2_COMMIT_NOW)) PURE;
   STDMETHOD_(void, GetFrequencyRatio) (THIS_ float* pRatio) PURE;
 };

-DECLARE_INTERFACE_(IXAudio2, IUnknown)
-{
+DECLARE_INTERFACE_(IXAudio2, IUnknown) {
   STDMETHOD(QueryInterface) (THIS_ REFIID riid, void** ppvInterface) PURE;
   STDMETHOD_(ULONG, AddRef) (THIS) PURE;
   STDMETHOD_(ULONG, Release) (THIS) PURE;
   STDMETHOD(GetDeviceCount) (THIS_ UINT32* pCount) PURE;
   STDMETHOD(GetDeviceDetails) (THIS_ UINT32 Index, XAUDIO2_DEVICE_DETAILS* pDeviceDetails) PURE;
   STDMETHOD(Initialize) (THIS_ UINT32 Flags X2DEFAULT(0),
     XAUDIO2_PROCESSOR XAudio2Processor X2DEFAULT(XAUDIO2_DEFAULT_PROCESSOR)) PURE;
   STDMETHOD(RegisterForCallbacks) (IXAudio2EngineCallback* pCallback) PURE;
   STDMETHOD_(void, UnregisterForCallbacks) (IXAudio2EngineCallback* pCallback) PURE;
   STDMETHOD(CreateSourceVoice) (THIS_ IXAudio2SourceVoice** ppSourceVoice,
     const WAVEFORMATEX* pSourceFormat,
     UINT32 Flags X2DEFAULT(0),
     float MaxFrequencyRatio X2DEFAULT(XAUDIO2_DEFAULT_FREQ_RATIO),
     IXAudio2VoiceCallback* pCallback X2DEFAULT(NULL),
     const XAUDIO2_VOICE_SENDS* pSendList X2DEFAULT(NULL),
     const XAUDIO2_EFFECT_CHAIN* pEffectChain X2DEFAULT(NULL)) PURE;
   STDMETHOD(CreateSubmixVoice) (THIS_ IXAudio2SubmixVoice** ppSubmixVoice,
     UINT32 InputChannels, UINT32 InputSampleRate,
     UINT32 Flags X2DEFAULT(0), UINT32 ProcessingStage X2DEFAULT(0),
     const XAUDIO2_VOICE_SENDS* pSendList X2DEFAULT(NULL),
     const XAUDIO2_EFFECT_CHAIN* pEffectChain X2DEFAULT(NULL)) PURE;
   STDMETHOD(CreateMasteringVoice) (THIS_ IXAudio2MasteringVoice** ppMasteringVoice,
     UINT32 InputChannels X2DEFAULT(XAUDIO2_DEFAULT_CHANNELS),
     UINT32 InputSampleRate X2DEFAULT(XAUDIO2_DEFAULT_SAMPLERATE),
     UINT32 Flags X2DEFAULT(0), UINT32 DeviceIndex X2DEFAULT(0),
     const XAUDIO2_EFFECT_CHAIN* pEffectChain X2DEFAULT(NULL)) PURE;
   STDMETHOD(StartEngine) (THIS) PURE;
   STDMETHOD_(void, StopEngine) (THIS) PURE;
   STDMETHOD(CommitChanges) (THIS_ UINT32 OperationSet) PURE;
   STDMETHOD_(void, GetPerformanceData) (THIS_ XAUDIO2_PERFORMANCE_DATA* pPerfData) PURE;
   STDMETHOD_(void, SetDebugConfiguration) (THIS_ const XAUDIO2_DEBUG_CONFIGURATION* pDebugConfiguration,
     void* pReserved X2DEFAULT(NULL)) PURE;
 };

 __inline HRESULT XAudio2Create(IXAudio2** ppXAudio2, UINT32 Flags X2DEFAULT(0),
-  XAUDIO2_PROCESSOR XAudio2Processor X2DEFAULT(XAUDIO2_DEFAULT_PROCESSOR))
-{
+  XAUDIO2_PROCESSOR XAudio2Processor X2DEFAULT(XAUDIO2_DEFAULT_PROCESSOR)) {
   IXAudio2* pXAudio2;
   HRESULT hr = CoCreateInstance((Flags & XAUDIO2_DEBUG_ENGINE) ? CLSID_XAudio2_Debug : CLSID_XAudio2,
     NULL, CLSCTX_INPROC_SERVER, IID_IXAudio2, (void**)&pXAudio2);
-  if (SUCCEEDED(hr))
-  {
+  if(SUCCEEDED(hr)) {
     hr = pXAudio2->Initialize(Flags, XAudio2Processor);
-    if (SUCCEEDED(hr))
-    {
-      *ppXAudio2 = pXAudio2;
-    }
-    else
-    {
+    if(SUCCEEDED(hr)) {
+      *ppXAudio2 = pXAudio2;
+    } else {
       pXAudio2->Release();
     }
   }
   return hr;
 }
-#endif
ruby.hpp (8 changed lines)
@@ -3,10 +3,10 @@
 /* ruby
  * author: byuu
  * license: ISC
- * version: 0.14 (2015-11-19)
+ * version: 0.15 (2016-04-18)
  *
- * ruby is a cross-platform hardware abstraction layer
- * it provides a common interface to video, audio and input devices
+ * ruby is a cross-platform hardware abstraction layer.
+ * it provides a common interface to video, audio and input devices.
 */

 #include <nall/nall.hpp>

@@ -62,7 +62,7 @@ struct Audio {
   virtual auto get(const nall::string& name) -> nall::any { return false; }
   virtual auto set(const nall::string& name, const nall::any& value) -> bool { return false; }

-  virtual auto sample(uint16_t left, uint16_t right) -> void {}
+  virtual auto sample(int16_t left, int16_t right) -> void {}
   virtual auto clear() -> void {}

   virtual auto init() -> bool { return true; }
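With ruby 0.15, Audio::sample() takes signed 16-bit PCM directly instead of uint16_t. A hypothetical call site under the new signature; the float-to-int16 helper and the trimmed Audio stand-in below are illustrative and not part of ruby:

    #include <cstdint>
    #include <algorithm>

    //convert a normalized sample in [-1.0, +1.0] to the int16_t range
    //now expected by Audio::sample(int16_t, int16_t)
    auto toInt16(double sample) -> int16_t {
      double clamped = std::min(1.0, std::max(-1.0, sample));
      return (int16_t)(clamped * 32767.0);
    }

    struct Audio {  //trimmed stand-in for ruby's Audio interface
      virtual auto sample(int16_t left, int16_t right) -> void {}
    };

    //push one stereo frame from a floating-point mixer into the driver
    auto pushFrame(Audio& audio, double left, double right) -> void {
      audio.sample(toInt16(left), toInt16(right));
    }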
@@ -82,14 +82,14 @@ struct VideoGLX : Video, OpenGL {

     if(name == Video::Filter && value.is<unsigned>()) {
       settings.filter = value.get<unsigned>();
-      if(settings.shader.empty()) OpenGL::filter = settings.filter ? GL_LINEAR : GL_NEAREST;
+      if(!settings.shader) OpenGL::filter = settings.filter ? GL_LINEAR : GL_NEAREST;
       return true;
     }

     if(name == Video::Shader && value.is<string>()) {
       settings.shader = value.get<string>();
       OpenGL::shader(settings.shader);
-      if(settings.shader.empty()) OpenGL::filter = settings.filter ? GL_LINEAR : GL_NEAREST;
+      if(!settings.shader) OpenGL::filter = settings.filter ? GL_LINEAR : GL_NEAREST;
       return true;
     }

@@ -182,7 +182,6 @@ struct VideoGLX : Video, OpenGL {

     //glXSwapInterval is used to toggle Vsync
     //note that the ordering is very important! MESA declares SGI, but the SGI function does nothing
-    glXSwapInterval = (signed (*)(signed))glGetProcAddress("glXSwapIntervalEXT");
     if(!glXSwapInterval) glXSwapInterval = (signed (*)(signed))glGetProcAddress("glXSwapIntervalMESA");
     if(!glXSwapInterval) glXSwapInterval = (signed (*)(signed))glGetProcAddress("glXSwapIntervalSGI");
@@ -163,7 +163,6 @@ struct VideoGLX2 : Video {
     glxcontext = glXCreateContext(display, vi, 0, GL_TRUE);
     glXMakeCurrent(display, glxwindow = xwindow, glxcontext);

-    glXSwapInterval = (signed (*)(signed))glGetProcAddress("glXSwapIntervalEXT");
     if(!glXSwapInterval) glXSwapInterval = (signed (*)(signed))glGetProcAddress("glXSwapIntervalMESA");
     if(!glXSwapInterval) glXSwapInterval = (signed (*)(signed))glGetProcAddress("glXSwapIntervalSGI");
@@ -28,11 +28,11 @@ auto OpenGL::shader(const string& pathname) -> void {
   for(auto node : document["output"]) {
     string text = node.text();
     if(node.name() == "width") {
-      if(text.endsWith("%")) relativeWidth = real(text.rtrim("%", 1L)) / 100.0;
+      if(text.endsWith("%")) relativeWidth = real(text.trimRight("%", 1L)) / 100.0;
       else absoluteWidth = text.natural();
     }
     if(node.name() == "height") {
-      if(text.endsWith("%")) relativeHeight = real(text.rtrim("%", 1L)) / 100.0;
+      if(text.endsWith("%")) relativeHeight = real(text.trimRight("%", 1L)) / 100.0;
       else absoluteHeight = text.natural();
     }
   }

@@ -167,7 +167,7 @@ auto OpenGL::refresh() -> void {
     render(sources[0].width, sources[0].height, outputWidth, outputHeight);

   if(history.size() > 0) {
-    OpenGLTexture frame = history.takeLast();
+    OpenGLTexture frame = history.takeRight();

     glBindTexture(GL_TEXTURE_2D, frame.texture);
     if(width == frame.width && height == frame.height) {
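The rtrim/ltrim call sites now use nall's trimRight/trimLeft names; the parse itself is unchanged: strip a trailing "%" and divide by 100 to get a relative size. The same idea sketched in standard C++, purely as an illustration:

    #include <string>
    #include <optional>

    //parse "50%" into a relative scale of 0.5; return nullopt for absolute sizes
    auto parseRelative(const std::string& text) -> std::optional<double> {
      if(text.empty() || text.back() != '%') return std::nullopt;
      return std::stod(text.substr(0, text.size() - 1)) / 100.0;
    }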
@@ -4,9 +4,9 @@ auto OpenGLProgram::bind(OpenGL* instance, const Markup::Node& node, const strin
   modulo = glrModulo(node["modulo"].integer());

   string w = node["width"].text(), h = node["height"].text();
-  if(w.endsWith("%")) relativeWidth = real(w.rtrim("%", 1L)) / 100.0;
+  if(w.endsWith("%")) relativeWidth = real(w.trimRight("%", 1L)) / 100.0;
   else absoluteWidth = w.natural();
-  if(h.endsWith("%")) relativeHeight = real(h.rtrim("%", 1L)) / 100.0;
+  if(h.endsWith("%")) relativeHeight = real(h.trimRight("%", 1L)) / 100.0;
   else absoluteHeight = h.natural();

   format = glrFormat(node["format"].text());

@@ -41,8 +41,8 @@ auto OpenGLProgram::bind(OpenGL* instance, const Markup::Node& node, const strin

   for(auto& leaf : node.find("pixmap")) {
     nall::image image({pathname, leaf.text()});
+    if(!image) continue;
     image.transform();
-    if(image.empty()) continue;

     GLuint texture;
     glGenTextures(1, &texture);

@@ -78,7 +78,7 @@ auto OpenGLProgram::parse(OpenGL* instance, string& source) -> void {
     if(auto position = s.find("//")) s.resize(position()); //strip comments
     s.strip(); //remove extraneous whitespace
     if(s.match("#in ?*")) {
-      s.ltrim("#in ", 1L).strip();
+      s.trimLeft("#in ", 1L).strip();
       if(auto setting = instance->settings.find({s})) {
         line = {"#define ", setting().name, " ", setting().value};
       } else {
@@ -48,7 +48,7 @@ struct VideoWGL : Video, OpenGL {
     if(wglcontext) {
       init();
       OpenGL::shader(settings.shader);
-      if(settings.shader.empty()) OpenGL::filter = settings.filter ? GL_LINEAR : GL_NEAREST;
+      if(!settings.shader) OpenGL::filter = settings.filter ? GL_LINEAR : GL_NEAREST;
     }
   }
 }