C# echo cancellation with Speex

How do I fix echo cancellation in C# with Speex?

I want to make voice calls over the network in C# using NAudio. However, echo occurs: I use libspeexdsp.dll to cancel it, but the echo persists and the filter is almost useless. My own voice comes back to me, even though I have accounted for any delay. I would like to know what I am doing wrong, and whether I should consider an alternative to Speex.

class EchoFilter : IDisposable {
    [DllImport("libspeexdsp",EntryPoint = "speex_echo_state_init",CallingConvention = CallingConvention.Cdecl)]
    static extern IntPtr speex_echo_state_init(int frame_size,int filter_length);

    [DllImport("libspeexdsp",EntryPoint = "speex_echo_cancellation",CallingConvention = CallingConvention.Cdecl)]
    static extern void speex_echo_cancellation(IntPtr state,short[] inputFrame,short[] echoFrame,short[] outputFrame);

    [DllImport("libspeexdsp",EntryPoint = "speex_echo_ctl",CallingConvention = CallingConvention.Cdecl)]
    public static extern int speex_echo_ctl(IntPtr st,int id,ref int sampleRate);

    [DllImport("libspeexdsp",EntryPoint = "speex_preprocess_state_init",CallingConvention = CallingConvention.Cdecl)]
    static extern IntPtr speex_preprocess_state_init(int frame_size,int sampleRate);

    [DllImport("libspeexdsp",EntryPoint = "speex_preprocess_ctl",CallingConvention = CallingConvention.Cdecl)]
    public static extern int speex_preprocess_ctl(IntPtr state,int request,IntPtr val);

    [DllImport("libspeexdsp",EntryPoint = "speex_preprocess_run",CallingConvention = CallingConvention.Cdecl)]
    public static extern int speex_preprocess_run(IntPtr st,short[] x);

    [DllImport("libspeexdsp",EntryPoint = "speex_preprocess_state_destroy",CallingConvention = CallingConvention.Cdecl)]
    public static extern void speex_preprocess_state_destroy(IntPtr st);

    [DllImport("libspeexdsp",EntryPoint = "speex_echo_state_destroy",CallingConvention = CallingConvention.Cdecl)]
    static extern void speex_echo_state_destroy(IntPtr state);

    IntPtr st;
    IntPtr den;

    int SPEEX_ECHO_SET_SAMPLING_RATE = 24;
    int SPEEX_PREPROCESS_SET_ECHO_STATE = 24;

    /// <param name="frameSize">frameSize is the amount of data (in samples) you want to process at once.</param>
    /// <param name="filterLength">filterLength is the length (in samples) of the echo cancelling filter you want to use (also kNown as tail length).</param>
    public EchoFilter(int frameSize,int filterLength,int sampleRate) {
        st = speex_echo_state_init(frameSize,filterLength);
        den = speex_preprocess_state_init(frameSize,sampleRate);
        speex_echo_ctl(st,SPEEX_ECHO_SET_SAMPLING_RATE,ref sampleRate);
        speex_preprocess_ctl(den,SPEEX_PREPROCESS_SET_ECHO_STATE,st);
    }

    /// <summary>
    /// Method for echo cancellation
    /// </summary>
    /// <param name="inputFrame">Frame obtained from local microphone (Signal that contains echo)</param>
    /// <param name="echoFrame">Frame obtained from remote source (Source of echo)</param>
    /// <param name="outputFrame">Filtered output</param>
    public void Filter(short[] inputFrame,short[] echoFrame,short[] outputFrame) {
        speex_echo_cancellation(st,inputFrame,echoFrame,outputFrame);
        speex_preprocess_run(den,outputFrame);
    }

    public void Dispose() {
        Dispose(true);
        GC.SuppressFinalize(this);
    }

    void Dispose(bool disposing) {
        if (st != IntPtr.Zero) {
            speex_echo_state_destroy(st);
            st = IntPtr.Zero;
        }

        if (den != IntPtr.Zero) {
            speex_preprocess_state_destroy(den);
            den = IntPtr.Zero;
        }
    }
    ~EchoFilter() {
        Dispose(false);
    }
}
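
For reference, a minimal usage sketch of the EchoFilter wrapper above. The concrete numbers (16 kHz mono, 20 ms frames, a 200 ms tail) are assumptions for illustration, not values taken from the question:

    // Assumed parameters: 16 kHz mono capture, 20 ms frames, ~200 ms echo tail.
    int sampleRate = 16000;
    int frameSize = sampleRate * 20 / 1000;   // 320 samples per 20 ms frame
    int filterLength = frameSize * 10;        // tail length of 10 frames (~200 ms)

    EchoFilter aec = new EchoFilter(frameSize, filterLength, sampleRate);

    short[] mic = new short[frameSize];       // frame captured from the microphone (contains echo)
    short[] far = new short[frameSize];       // frame that was just played on the speaker (echo source)
    short[] clean = new short[frameSize];     // echo-cancelled output

    aec.Filter(mic, far, clean);              // one call per frame; mic and far must cover the same time span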

class IMicrophone : IMMNotificationClient {
    MMDeviceEnumerator deviceEnum;
    ComboBox cbbDeviceList;

    int bufferMilliseconds = 20;
    WaveIn waveIn = null;
    BufferedWaveProvider bufferedWaveProvider;

    WaveFormat waveFormat = null;
    EventHandler<WaveInEventArgs> dataAvailable;

    public MMDevice defaultSpeaker = null;
    public MMDevice defaultMicrophone = null;


    public bool noDevice() {
        return WaveIn.DeviceCount == 0;
    }

    public EchoFilter filterSpeex = null;
    Queue<byte[]> playedQueue = new Queue<byte[]>();

    public IMicrophone() {
        deviceEnum = new MMDeviceEnumerator();
        deviceEnum.RegisterEndpointNotificationCallback(this);

        waveFormat = new WaveFormat(ISetting.MIC_SAMPLE_RATE,1);

        TimeSpan frameSizeTime = TimeSpan.FromMilliseconds(bufferMilliseconds);
        int frameSize = (int)Math.Ceiling(frameSizeTime.TotalSeconds * waveFormat.SampleRate);
        int filterLength = frameSize * 25;

        filterSpeex = new EchoFilter(frameSize,filterLength,ISetting.MIC_SAMPLE_RATE);
    }

    public void echoCancellation(byte[] buffer,Action<byte[]> completed) {
        short[] bufferOut = new short[buffer.Length / 2];

        lock (playedQueue) {
            if (playedQueue.Count == 0) {
                completed(buffer);
            } else {
                filterSpeex.Filter(BytesToShorts(buffer),BytesToShorts(playedQueue.First()),bufferOut);
                completed(ShortsToBytes(bufferOut));
            }
        }
    }

    public void load(ComboBox cbbDeviceList,EventHandler<WaveInEventArgs> dataAvailable) {
        defaultSpeaker = deviceEnum.GetDefaultAudioEndpoint(DataFlow.Render,Role.Multimedia);
        defaultMicrophone = deviceEnum.GetDefaultAudioEndpoint(DataFlow.Capture,Role.Multimedia);

        this.cbbDeviceList = cbbDeviceList;
        this.dataAvailable = dataAvailable;

        List<WaveInCapabilities> waveIns = new List<WaveInCapabilities>();
        for (int i = 0; i < WaveIn.DeviceCount; i++) {
            cbbDeviceList.Items.Add(WaveIn.GetCapabilities(i).ProductName);
        }

        if (WaveIn.DeviceCount > 0) {
            cbbDeviceList.SelectedIndex = 0;
        }

        setMicrophoneMasterVolumeLevel(80);
    }

    public void addSamples(AudioInfo audioInfo) {
        if (!ISetting.ipAddress.Equals(audioInfo.ip)) {
            lock (playedQueue) {
                playedQueue.Enqueue(audioInfo.buffer);
                bufferedWaveProvider.AddSamples(audioInfo.buffer,audioInfo.buffer.Length);
            }
        }
    }

    public void connect() {
        stop();
        if (WaveIn.DeviceCount > 0) {
            waveIn = new WaveIn();
            waveIn.DeviceNumber = cbbDeviceList.SelectedIndex;
            waveIn.DataAvailable += new EventHandler<WaveInEventArgs>(dataAvailable);
            waveIn.WaveFormat = waveFormat;
            waveIn.BufferMilliseconds = bufferMilliseconds; // capture buffer size in ms
            start();

            // Mic Play
            WaveOut waveOut = new WaveOut();
            bufferedWaveProvider = new BufferedWaveProvider(waveFormat) { DiscardOnBufferOverflow = true };
            waveOut.DesiredLatency = 100;
            waveOut.Volume = 1.0f;
            waveOut.Init(bufferedWaveProvider);
            waveOut.Play();
        }
    }

    public void start() {
        waveIn.StartRecording();
    }

    public void stop() {
        if (waveIn != null) {
            waveIn.Dispose();
            waveIn = null;
        }
    }

   
}
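
The IMicrophone code calls BytesToShorts and ShortsToBytes, which are not shown in the question. A minimal sketch of what they are assumed to do (plain 16-bit little-endian PCM conversion between byte[] and short[]):

    // Assumed helpers (not part of the original post).
    static short[] BytesToShorts(byte[] input) {
        short[] output = new short[input.Length / 2];
        Buffer.BlockCopy(input, 0, output, 0, output.Length * 2);
        return output;
    }

    static byte[] ShortsToBytes(short[] input) {
        byte[] output = new byte[input.Length * 2];
        Buffer.BlockCopy(input, 0, output, 0, output.Length);
        return output;
    }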


[Serializable]
class AudioInfo {
    public string ip;
    public byte[] buffer;

    public AudioInfo(string ip,byte[] buffer) {
        this.ip = ip;
        this.buffer = buffer;
    }
}

void wave_DataAvailable(object sender,WaveInEventArgs e) {
    microphone.echoCancellation(e.Buffer,(bufferOut) => {
        if (MYInfo.info.isspeaker && MYInfo.info.startedMic) {
            byte[] bytesstream = Helper.serializetoStream(new AudioInfo(ISetting.ipAddress,bufferOut));
            foreach (UserInfo userInfo in UsersInfo.infos) { // server only
                ITCP.sendDataAsync(userInfo.ipAddress,ISetting.PORT_MIC,bytesstream);
            }
        }
    });
}

private void micStartListening(IAsyncResult ar) {
    ITCP.getListenData(listenerMic,ar,micStartListening,(buffer) => {
        AudioInfo audioInfo = (AudioInfo)Helper.desserialize(buffer);
        microphone.addSamples(audioInfo);

        if (MYInfo.info.isServer) {
            foreach (UserInfo userInfo in UsersInfo.infos.Where(info => info.isClient && !info.ipAddress.Equals(audioInfo.ip))) {
                //ITCP.sendDataAsync(userInfo.ipAddress,buffer);
            }
        }
    });
}
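
Note that echoCancellation above only peeks at the head of playedQueue (First()) and never removes anything, so the far-end reference frame never advances. One common pattern, shown here only as a hedged sketch of an alternative rather than the original code, is to dequeue one reference frame per captured frame so the reference stays roughly aligned with what was actually played:

    // Sketch: consume one far-end frame per capture frame instead of peeking.
    public void echoCancellation(byte[] buffer, Action<byte[]> completed) {
        short[] bufferOut = new short[buffer.Length / 2];
        byte[] played = null;

        lock (playedQueue) {
            if (playedQueue.Count > 0)
                played = playedQueue.Dequeue();   // advance the reference, don't just peek
        }

        if (played == null) {
            completed(buffer);                    // nothing has been played yet; pass the mic frame through
        } else {
            filterSpeex.Filter(BytesToShorts(buffer), BytesToShorts(played), bufferOut);
            completed(ShortsToBytes(bufferOut));
        }
    }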
