
How can I record from multiple devices at the same time using OpenTK?

I want to record from multiple devices at the same time using OpenTK.

As you can see in the question Save to Wav file the audio recorded with .NetCore C# on Raspberry pi, I can record from a single device. But if I try to do this on two devices at the same time with the code below, I sometimes get the error "System.AccessViolationException: Attempted to read or write protected memory. This is often an indication that other memory is corrupt."

What is causing this? And how should I write the code to do what I intend?

In the code below, task1 and task2 are almost identical; only the AudioCapture device (recorders[0], recorders[2], ...) differs.

using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using OpenTK.Audio;          // AudioCapture (OpenTK 3.x)
using OpenTK.Audio.OpenAL;   // ALFormat

var recorders = AudioCapture.AvailableDevices;
for (int i = 0; i < recorders.Count; i++)
{
    Console.WriteLine(recorders[i]);
}
Console.WriteLine("-----");

const int samplingRate = 44100;     // Samples per second

const ALFormat alFormat = ALFormat.Mono16;
const ushort bitsPerSample = 16;    // Mono16 has 16 bits per sample
const ushort numChannels = 1;       // Mono16 has 1 channel

var task1 = Task.Run(() =>
{
    using (var f = File.OpenWrite("aaa.wav"))
    using (var sw = new BinaryWriter(f))
    {
        // Read This: http://soundfile.sapp.org/doc/WaveFormat/

        sw.Write(new char[] { 'R','I','F','F' });
        sw.Write(0); // will fill in later
        sw.Write(new char[] { 'W','A','V','E' });
        // "fmt " chunk (Google: WAVEFORMATEX structure)
        sw.Write(new char[] { 'f','m','t',' ' });
        sw.Write(16); // chunkSize (in bytes)
        sw.Write((ushort)1); // wFormatTag (PCM = 1)
        sw.Write(numChannels); // wChannels
        sw.Write(samplingRate); // dwSamplesPerSec
        sw.Write(samplingRate * numChannels * (bitsPerSample / 8)); // dwAvgBytesPerSec
        sw.Write((ushort)(numChannels * (bitsPerSample / 8))); // wBlockAlign
        sw.Write(bitsPerSample); // wBitsPerSample
                                    // "data" chunk
        sw.Write(new char[] { 'd','a','t','a' });
        sw.Write(0); // will fill in later

        // 10 seconds of data. Overblown, but it gets the job done
        const int bufferLength = samplingRate * 10;
        int samplesWrote = 0;

        Console.WriteLine($"Recording from: {recorders[2]}");

        using (var audioCapture = new AudioCapture(
            recorders[2], samplingRate, alFormat, bufferLength))
        {
            var buffer = new short[bufferLength];

            audioCapture.Start();
            for (int i = 0; i < 10; ++i)
            {
                Thread.Sleep(1000); // give it some time to collect samples

                var samplesAvailable = audioCapture.AvailableSamples;
                audioCapture.ReadSamples(buffer, samplesAvailable);
                for (var x = 0; x < samplesAvailable; ++x)
                {
                    sw.Write(buffer[x]);
                }

                samplesWrote += samplesAvailable;

                Console.WriteLine($"Wrote {samplesAvailable}/{samplesWrote} samples...");
            }
            audioCapture.Stop();
        }

        sw.Seek(4, SeekOrigin.Begin); // seek to overall size
        sw.Write(36 + samplesWrote * (bitsPerSample / 8) * numChannels);
        sw.Seek(40, SeekOrigin.Begin); // seek to data size position
        sw.Write(samplesWrote * (bitsPerSample / 8) * numChannels);
    }
});

Thread.Sleep(1000);

var task2 = Task.Run(() => {
    using (var g = File.OpenWrite("bbbb.wav"))
    using (var sx = new BinaryWriter(g))
    {
        // Read This: http://soundfile.sapp.org/doc/WaveFormat/

        sx.Write(new char[] { 'R','I','F','F' });
        sx.Write(0); // will fill in later
        sx.Write(new char[] { 'W','A','V','E' });
        // "fmt " chunk (Google: WAVEFORMATEX structure)
        sx.Write(new char[] { 'f','m','t',' ' });
        sx.Write(16); // chunkSize (in bytes)
        sx.Write((ushort)1); // wFormatTag (PCM = 1)
        sx.Write(numChannels); // wChannels
        sx.Write(samplingRate); // dwSamplesPerSec
        sx.Write(samplingRate * numChannels * (bitsPerSample / 8)); // dwAvgBytesPerSec
        sx.Write((ushort)(numChannels * (bitsPerSample / 8))); // wBlockAlign
        sx.Write(bitsPerSample); // wBitsPerSample
                                    // "data" chunk
        sx.Write(new char[] { 'd','a','t','a' });
        sx.Write(0); // will fill in later

        // 10 seconds of data. Overblown, but it gets the job done
        const int bufferLength = samplingRate * 10;
        int samplesWrote = 0;

        Console.WriteLine($"Recording from: {recorders[0]}");

        using (var audioCapture = new AudioCapture(
            recorders[0], samplingRate, alFormat, bufferLength))
        {
            var buffer = new short[bufferLength];

            audioCapture.Start();
            for (int i = 0; i < 10; ++i)
            {
                Thread.Sleep(1000); // give it some time to collect samples

                var samplesAvailable = audioCapture.AvailableSamples;
                audioCapture.ReadSamples(buffer, samplesAvailable);
                for (var x = 0; x < samplesAvailable; ++x)
                {
                    sx.Write(buffer[x]);
                }

                samplesWrote += samplesAvailable;

                Console.WriteLine($"Wrote {samplesAvailable}/{samplesWrote} samples...");
            }
            audioCapture.Stop();
        }

        sx.Seek(4, SeekOrigin.Begin); // seek to overall size
        sx.Write(36 + samplesWrote * (bitsPerSample / 8) * numChannels);
        sx.Seek(40, SeekOrigin.Begin); // seek to data size position
        sx.Write(samplesWrote * (bitsPerSample / 8) * numChannels);
    }
});

await Task.WhenAll(task1, task2);
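
Since task1 and task2 differ only in the capture device and the output file, the duplicated logic can be pulled into one method that takes both as parameters. The sketch below is a minimal illustration of that idea, not code from the original question: the WavRecorder/RecordToWav names are made up here, and it assumes the same OpenTK 3.x AudioCapture/ALFormat API that the question's code already uses.

using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using OpenTK.Audio;          // AudioCapture (OpenTK 3.x, as in the question)
using OpenTK.Audio.OpenAL;   // ALFormat

static class WavRecorder
{
    const int SamplingRate = 44100;          // samples per second
    const ALFormat Format = ALFormat.Mono16;
    const ushort BitsPerSample = 16;
    const ushort NumChannels = 1;

    // Records `seconds` of 16-bit mono PCM from one capture device into a WAV file,
    // using the same 44-byte header layout as the code in the question.
    public static void RecordToWav(string deviceName, string path, int seconds)
    {
        using (var f = File.OpenWrite(path))
        using (var sw = new BinaryWriter(f))
        {
            sw.Write(new char[] { 'R', 'I', 'F', 'F' });
            sw.Write(0);                                                // RIFF size, patched below
            sw.Write(new char[] { 'W', 'A', 'V', 'E' });
            sw.Write(new char[] { 'f', 'm', 't', ' ' });
            sw.Write(16);                                               // fmt chunk size
            sw.Write((ushort)1);                                        // PCM
            sw.Write(NumChannels);
            sw.Write(SamplingRate);
            sw.Write(SamplingRate * NumChannels * (BitsPerSample / 8)); // avg bytes per second
            sw.Write((ushort)(NumChannels * (BitsPerSample / 8)));      // block align
            sw.Write(BitsPerSample);
            sw.Write(new char[] { 'd', 'a', 't', 'a' });
            sw.Write(0);                                                // data size, patched below

            int bufferLength = SamplingRate * seconds;
            var buffer = new short[bufferLength];
            int samplesWrote = 0;

            using (var capture = new AudioCapture(deviceName, SamplingRate, Format, bufferLength))
            {
                capture.Start();
                for (int i = 0; i < seconds; ++i)
                {
                    Thread.Sleep(1000);                      // let samples accumulate
                    int available = capture.AvailableSamples;
                    capture.ReadSamples(buffer, available);
                    for (int x = 0; x < available; ++x)
                        sw.Write(buffer[x]);
                    samplesWrote += available;
                }
                capture.Stop();
            }

            int dataBytes = samplesWrote * (BitsPerSample / 8) * NumChannels;
            sw.Seek(4, SeekOrigin.Begin);
            sw.Write(36 + dataBytes);                                   // overall RIFF chunk size
            sw.Seek(40, SeekOrigin.Begin);
            sw.Write(dataBytes);                                        // data chunk size
        }
    }
}

With a helper like this, each task body becomes a single call, and it is also easy to run the two recordings one after the other as a sanity check: if a sequential run works while the parallel run crashes, that at least narrows the problem down to concurrent use of the capture API.

var devices = AudioCapture.AvailableDevices;

// Concurrent, as in the question:
var t1 = Task.Run(() => WavRecorder.RecordToWav(devices[2], "aaa.wav", 10));
var t2 = Task.Run(() => WavRecorder.RecordToWav(devices[0], "bbbb.wav", 10));
await Task.WhenAll(t1, t2);

// Sequential sanity check (hypothetical variant, not from the original post):
// WavRecorder.RecordToWav(devices[2], "aaa.wav", 10);
// WavRecorder.RecordToWav(devices[0], "bbbb.wav", 10);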
