ARI External Media Channel creating infinite loops and multiple WebSocket connection

I’m experiencing an issue with Asterisk ARI where a single call creates multiple channels and WebSocket connections. Here’s what’s happening:

  1. When a call starts, it triggers multiple StasisStart events
  2. Each event creates new bridges and external media channels
  3. Multiple WebSocket connections are established instead of a single connection
  4. Eventually crashes with “Bridge not found” error

Current setup:

  • Asterisk ARI with external media channel for voice processing
  • WebSocket connection to a cloud service
  • Node.js client using ari-client library

Console output shows multiple channels being created:

Call started on channel 1737806957.179
Created mixing bridge: 27e257e6-be4b-48b1-979a-f73f066fb54b
Call started on channel 1737806961.180
Created mixing bridge: 7c70aeb6-d94a-4639-a13b-b5dd33cd58e4
[continues creating more channels...]

I’ve tried:

  1. Adding channel tracking with Set()
  2. Using flags to prevent duplicate processing
  3. Managing external media state in CallHandler class

Looking for help to:

  1. Understand why multiple channels are being created
  2. Fix the infinite loop issue
  3. Maintain a single WebSocket connection per call
  4. Properly handle external media setup

Has anyone encountered similar issues or can suggest a proper way to handle external media channels with ARI?

You have shown no logging or talked about how calls are getting into Asterisk. If Asterisk is actually receiving multiple calls, then this would happen - so that is the first thing to verify and isolate.

Thanks for your reply @jcolp
But I am the only one who made the call. please check the code and let me know if I am doing anything wrong.

const ari = require('ari-client');
const WebSocket = require('ws');
const express = require('express');
const app = express();
const fs = require('fs').promises;
const path = require('path');

const ariConfig = {
    url: 'http://localhost:8088',
    username: 'admin',
    password: 'Admin123'
};

const CLOUD_WS_URL = 'wss://api-dev.precallai.com/connection';
const activeCalls = new Map();

class CallHandler {
    constructor(channelId, channel, client, bridge) {
        this.channelId = channelId;
        this.channel = channel;
        this.client = client;
        this.bridge = bridge;
        this.snoopChannel = null;
        this.externalMediaChannel = null;
        this.cloudWs = null;
        this.isActive = true;
    }

    async connectToCloud() {
        if (!this.isActive) return;

        return new Promise((resolve, reject) => {
            this.cloudWs = new WebSocket(CLOUD_WS_URL);
            
            this.cloudWs.on('open', () => {
                console.log(`Connected to cloud service for channel ${this.channelId}`);
                this.sendInitialMessage();
                resolve();
            });

            this.cloudWs.on('message', async (data) => {
                await this.handleCloudMessage(data);
            });

            this.cloudWs.on('error', (error) => {
                console.error('Cloud WebSocket error:', error);
                reject(error);
            });

            this.cloudWs.on('close', () => {
                console.log('Cloud WebSocket closed');
                if (this.isActive) {
                    this.cleanup();
                }
            });
        });
    }

    async initializeCall() {
        try {
            // Create snoop channel to capture caller's audio
            this.snoopChannel = await this.client.channels.snoopChannel({
                channelId: this.channelId,
                app: 'voicebot',
                spy: 'in',
                whisper: 'none',
                snoopId: `snoop_${this.channelId}`
            });

            // Create external media for processing audio
            this.externalMediaChannel = await this.channel.externalMedia({
                app: 'voicebot',
                external_host: '127.0.0.1:3001',
                format: 'ulaw'
            });

            // Add channels to bridge
            await this.bridge.addChannel({ channel: this.channel.id });
            await this.bridge.addChannel({ channel: this.externalMediaChannel.id });

            // Connect to cloud service
            await this.connectToCloud();

            // Start recording for debug purposes
            await this.snoopChannel.record({
                name: `debug_${this.channelId}`,
                format: 'wav',
                beep: false,
                maxDurationSeconds: 3600,
                ifExists: 'overwrite'
            });

        } catch (error) {
            console.error('Call initialization error:', error);
            await this.cleanup();
            throw error;
        }
    }

    async sendInitialMessage() {
        const startMessage = {
            event: 'start',
            channelId: this.channelId,
            caller: this.channel.caller.number,
            callee: this.channel.dialplan.exten,
            start: {
                customParameters: {
                    temp_call_history_id: '1599',
                    language: 'en',
                    voice_modal: 'azure',
                    start_speech: encodeURIComponent('Hello How are you?How can I help you?'),
                    voice_name: 'en-AU-KenNeural',
                    is_back_sound: 'false',
                    voice_speed: '1.0',
                    recorded_audio: null
                }
            }
        };
        
        this.cloudWs.send(JSON.stringify(startMessage));
    }

    async handleCloudMessage(data) {
        try {
            const message = JSON.parse(data);
            
            if (message.event === 'media' && message.media?.payload) {
                const audioBuffer = Buffer.from(message.media.payload, 'base64');
                const filePath = path.join(__dirname, 'uploads', `audio_${Date.now()}`);
                
                await this.playAudioToChannel(audioBuffer, filePath);
            }
        } catch (error) {
            console.error('Error handling cloud message:', error);
        }
    }

    async playAudioToChannel(audioBuffer, filePath) {
        try {
            const wavFile = await this.createWavFile(audioBuffer);
            await fs.writeFile(filePath + '.wav', wavFile);
            
            await this.channel.play({
                media: `sound:${filePath}`,
                format: 'wav'
            });
        } catch (error) {
            console.error('Error playing audio:', error);
        }
    }

    createWavFile(audioBuffer) {
        const header = Buffer.alloc(44);
        
        header.write('RIFF', 0);
        header.writeUInt32LE(audioBuffer.length + 36, 4);
        header.write('WAVE', 8);
        header.write('fmt ', 12);
        header.writeUInt32LE(16, 16);
        header.writeUInt16LE(1, 20);      // PCM
        header.writeUInt16LE(1, 22);      // Mono
        header.writeUInt32LE(8000, 24);   // Sample rate
        header.writeUInt32LE(16000, 28);  // Byte rate
        header.writeUInt16LE(2, 32);      // Block align
        header.writeUInt16LE(16, 34);     // Bits per sample
        header.write('data', 36);
        header.writeUInt32LE(audioBuffer.length, 40);

        return Buffer.concat([header, audioBuffer]);
    }

    async cleanup() {
        this.isActive = false;
        
        if (this.cloudWs) {
            this.cloudWs.close();
            this.cloudWs = null;
        }

        if (this.snoopChannel) {
            await this.snoopChannel.hangup();
            this.snoopChannel = null;
        }

        if (this.externalMediaChannel) {
            await this.externalMediaChannel.hangup();
            this.externalMediaChannel = null;
        }
    }
}

async function initializeARI() {
    try {
        const client = await ari.connect(ariConfig.url, ariConfig.username, ariConfig.password);
        console.log('Connected to Asterisk ARI');

        client.on('StasisStart', async (event, channel) => {
            if (activeCalls.has(channel.id)) return;
            
            try {
                const bridge = await client.Bridge();
                await bridge.create({ type: 'mixing' });

                const handler = new CallHandler(channel.id, channel, client, bridge);
                activeCalls.set(channel.id, handler);
                await handler.initializeCall();
            } catch (error) {
                console.error('StasisStart error:', error);
                channel.hangup().catch(console.error);
            }
        });

        client.on('StasisEnd', async (event, channel) => {
            const handler = activeCalls.get(channel.id);
            if (handler) {
                await handler.cleanup();
                activeCalls.delete(channel.id);
            }
        });

        client.start('voicebot');

    } catch (error) {
        console.error('Error connecting to ARI:', error);
        process.exit(1);
    }
}

// Express routes
app.use(express.json());

app.post('/voicebotConnector/trigger', async (req, res) => {
    const { destinationNumber, displayNumber } = req.body;

    try {
        const client = await ari.connect(ariConfig.url, ariConfig.username, ariConfig.password);
        
        const channel = await client.channels.originate({
            endpoint: `PJSIP/${destinationNumber}@jio-trunk`,
            callerId: displayNumber || '+917313541333',
            app: 'voicebot',
            appArgs: 'outbound',
            context: 'outbound',
            priority: 1
        });

        res.json({
            status: "success",
            callUID: channel.id
        });

    } catch (error) {
        console.error('Error making outbound call:', error);
        res.status(500).json({
            status: "failed",
            message: error.message
        });
    }
});

app.post('/voicebotConnector/hangup', async (req, res) => {
    const { callUID } = req.body;

    try {
        const callHandler = activeCalls.get(callUID);
        if (callHandler) {
            await callHandler.cleanup();
            res.json({ status: "success" });
        } else {
            res.status(404).json({ status: "failed", message: "Call not found" });
        }
    } catch (error) {
        res.status(500).json({ status: "failed", message: error.message });
    }
});

const PORT = process.env.PORT || 3000;
app.listen(PORT, () => {
    console.log(`Server listening on port ${PORT}`);
    initializeARI();
});

process.on('SIGTERM', () => {
    console.log('Shutting down...');
    activeCalls.forEach(handler => handler.cleanup());
    process.exit(0);
});```

How are you preventing the external media and snoop channels from triggering this logic? When creating them you specify this ARI application. You have an activeCalls check, but those channels are never added to it.

Thank @jcolp.

I solved the problem and it works perfectly, now I need help in recording so should I need to use snoopChannel or can I do it withought it.

Please check the function and logs below:

        try {
            // Generate recording name
            const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
            const recordingName = `call_${this.channelId}_${timestamp}`;

            console.log(`Starting recording for call ${this.channelId}`);

            // Get the bridge ID for direct access
            const bridgeId = this.bridge.id;
            
            // Create recording directory
            const recordingDir = '/var/lib/asterisk/recordings/voicebot-calls';
            await fs.mkdir(recordingDir, { recursive: true });
            await fs.chmod(recordingDir, 0o777).catch(() => {});

            // Start recording directly on the bridge
            this.recording = await this.client.bridges.record({
                bridgeId: bridgeId,
                name: recordingName,
                format: 'wav',
                maxDurationSeconds: 14400,
                maxSilenceSeconds: 0,
                beep: false,
                terminateOn: 'none',
                ifExists: 'overwrite'
            });

            // Store recording details
            this.recordingName = recordingName;
            this.recordingPath = `/var/lib/asterisk/recordings/${recordingName}.wav`;

            console.log(`Recording started for bridge ${bridgeId}`);
            console.log(`Recording name: ${recordingName}`);
            console.log(`Expected path: ${this.recordingPath}`);

            // Set up event handler for recordings
            this.client.on('RecordingFinished', async (event) => {
                if (event.recording.name === recordingName) {
                    console.log(`Recording finished: ${recordingName}`);
                    try {
                        // Wait a moment for file system
                        await new Promise(resolve => setTimeout(resolve, 1000));

                        // Check recording exists
                        await fs.access(this.recordingPath);
                        console.log(`Recording found at: ${this.recordingPath}`);

                        // Move to our directory
                        const finalPath = path.join(recordingDir, `${recordingName}.wav`);
                        await fs.copyFile(this.recordingPath, finalPath);
                        console.log(`Recording copied to: ${finalPath}`);
                        
                        // Set permissions
                        await fs.chmod(finalPath, 0o666).catch(() => {});

                        // Clean up original if needed
                        await fs.unlink(this.recordingPath).catch(() => {});
                    } catch (error) {
                        console.error(`Error processing recording: ${error.message}`);
                        // Check alternative locations
                        const locations = [
                            '/var/spool/asterisk/monitor',
                            '/var/lib/asterisk/recordings',
                            '/var/spool/asterisk/recording',
                            '/var/lib/asterisk'
                        ];

                        for (const dir of locations) {
                            try {
                                const files = await fs.readdir(dir);
                                const matches = files.filter(f => f.includes(recordingName));
                                if (matches.length > 0) {
                                    console.log(`Found recording in ${dir}:`, matches);
                                    // Try to move the first match
                                    const sourcePath = path.join(dir, matches[0]);
                                    const finalPath = path.join(recordingDir, matches[0]);
                                    await fs.copyFile(sourcePath, finalPath);
                                    console.log(`Moved recording from ${sourcePath} to ${finalPath}`);
                                    break;
                                }
                            } catch (e) {
                                console.log(`Could not check directory ${dir}: ${e.message}`);
                            }
                        }
                    }
                }
            });

            return this.recordingPath;

        } catch (error) {
            console.error(`Error starting recording for ${this.channelId}:`, error);
            throw error;
        }
    }

    async stopRecording() {
        if (this.recording) {
            try {
                await this.client.recordings.stop({
                    recordingName: this.recordingName
                });
                console.log(`Recording stopped: ${this.recordingName}`);
            } catch (error) {
                console.log(`Note: Could not stop recording - it may have already been stopped`);
            }
        }

logs are

Processing new channel 1737968296.22
Initializing call for channel 1737968296.22
Skipping already processed or external channel external_1737968296.22
Starting recording for call 1737968296.22
Recording started for bridge f0a46b35-1180-4e82-b73d-2efb36a1088f
Recording name: call_1737968296.22_2025-01-27T08-58-20-852Z
Expected path: /var/lib/asterisk/recordings/call_1737968296.22_2025-01-27T08-58-20-852Z.wav
WebSocket connected for channel 1737968296.22
Registering channel 1737968296.22 for RTP handling
Call initialization complete for 1737968296.22
Playback finished
Playback finished
Playback finished
Playback finished
Starting cleanup for 1737968296.22
Recording finished: call_1737968296.22_2025-01-27T08-58-20-852Z
Recording stopped: call_1737968296.22_2025-01-27T08-58-20-852Z
Note: Could not hangup channel 1737968296.22 - it may have already been destroyed
Unregistering channel 1737968296.22 from RTP handling
WebSocket closed for 1737968296.22
Unregistering channel 1737968296.22 from RTP handling
Error processing recording: ENOENT: no such file or directory, access '/var/lib/asterisk/recordings/call_1737968296.22_2025-01-27T08-58-20-852Z.wav'
Found recording in /var/spool/asterisk/recording: [ 'call_1737968296.22_2025-01-27T08-58-20-852Z.wav' ]
Moved recording from /var/spool/asterisk/recording/call_1737968296.22_2025-01-27T08-58-20-852Z.wav to /var/lib/asterisk/recordings/voicebot-calls/call_1737968296.22_2025-01-27T08-58-20-852Z.wav

can you tell where I made mistake ?

also in previous code ai voice is getting blurry can you tell me if I am doing something wrong with heders ?

I’m not going to answer that part, because this same ask is being posted by multiple people at least once or twice a week and so at this point there have been multiple past threads going through it.

@jcolp I have manage to fix this but the call is recording on one side only like it is not recording both the side, can you help me with that

        try {
            const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
            const recordingName = `call_${this.channelId}_${timestamp}`;

            console.log(`Starting recording for call ${this.channelId}`);

            // Ensure directories exist
            await fs.mkdir(this.recordingBaseDir, { recursive: true });
            await fs.mkdir(this.recordingsDir, { recursive: true });
            
            // Set permissions
            await fs.chmod(this.recordingsDir, 0o777).catch(console.error);

            // Start recording with MixMonitor
            this.recording = await this.bridge.record({
                name: recordingName,
                format: 'ulaw',
                maxDurationSeconds: 14400,
                maxSilenceSeconds: 0,
                beep: false,
                terminateOn: 'none',
                ifExists: 'overwrite',
                options: {
                    MIXMON_FORMAT: 'ulaw',
                    MIXMON_DIR: this.recordingBaseDir,
                    AUDIOHOOK_INHERIT_LEVEL: '1',
                    MIXMON_POST: '/usr/local/bin/convert_recording.sh ${FILENAME}'
                }
            });

            // Store recording details
            this.recordingName = recordingName;
            this.recordingPath = `${this.recordingBaseDir}/${recordingName}.ulaw`;
            this.finalRecordingPath = `${this.recordingsDir}/${recordingName}.wav`;

            console.log(`Recording started with name: ${recordingName}`);
            console.log(`Temp path: ${this.recordingPath}`);
            console.log(`Final path: ${this.finalRecordingPath}`);

            // Handle recording finished event
            this.client.on('RecordingFinished', async (event) => {
                if (event.recording.name === recordingName) {
                    await this.handleRecordingFinished(event);
                }
            });

        } catch (error) {
            console.error(`Error starting recording: ${error}`);
            throw error;
        }
    }

    async handleRecordingFinished(event) {
        try {
            console.log(`Recording finished: ${this.recordingName}`);
            
            // Wait for file system
            await new Promise(resolve => setTimeout(resolve, 2000));

            if (await this.fileExists(this.recordingPath)) {
                // Convert ulaw to wav
                await this.convertUlawToWav(this.recordingPath, this.finalRecordingPath);
                console.log(`Recording converted: ${this.finalRecordingPath}`);
                
                // Set permissions
                await fs.chmod(this.finalRecordingPath, 0o666);
                
                // Clean up ulaw file
                await fs.unlink(this.recordingPath);
            } else {
                console.log(`Recording not found at: ${this.recordingPath}`);
                // Search alternative locations
                const locations = [
                    '/var/spool/asterisk/recording',
                    '/var/spool/asterisk/monitor',
                    '/var/lib/asterisk/recordings'
                ];

                for (const dir of locations) {
                    const foundPath = await this.findRecordingInDir(dir, this.recordingName);
                    if (foundPath) {
                        await this.convertUlawToWav(foundPath, this.finalRecordingPath);
                        console.log(`Recording found and converted from: ${foundPath}`);
                        await fs.chmod(this.finalRecordingPath, 0o666);
                        await fs.unlink(foundPath);
                        break;
                    }
                }
            }
        } catch (error) {
            console.error(`Error processing recording: ${error.message}`);
        }
    }
    
    // New method to convert µ-law to WAV
    async convertUlawToWav(ulawPath, wavPath) {
        try {
            // Read the µ-law file
            const ulawData = await fs.readFile(ulawPath);
            
            // Convert µ-law to PCM using the lookup table
            const pcmSamples = new Int16Array(ulawData.length);
            for (let i = 0; i < ulawData.length; i++) {
                pcmSamples[i] = MULAW_DECODE_TABLE[ulawData[i]];
            }
    
            // Create buffer for PCM data
            const pcmBuffer = Buffer.alloc(pcmSamples.length * 2);
            for (let i = 0; i < pcmSamples.length; i++) {
                pcmBuffer.writeInt16LE(pcmSamples[i], i * 2);
            }
    
            // Create WAV header
            const header = this.createWavHeader(pcmBuffer.length);
    
            // Write the complete WAV file
            await fs.writeFile(wavPath, Buffer.concat([header, pcmBuffer]));
            
            console.log(`Successfully converted ${ulawPath} to ${wavPath}`);
        } catch (error) {
            console.error(`Error converting ulaw to wav: ${error.message}`);
            throw error;
        }
    }

    // Helper method to check if file exists
    async fileExists(path) {
        try {
            await fs.access(path);
            return true;
        } catch {
            return false;
        }
    }

    // Helper method to find recording in directory
    async findRecordingInDir(dir, recordingName) {
        try {
            const files = await fs.readdir(dir);
            const match = files.find(f => f.includes(recordingName));
            return match ? `${dir}/${match}` : null;
        } catch {
            return null;
        }
    }

    async stopRecording() {
        if (this.recording) {
            try {
                await this.client.recordings.stop({
                    recordingName: this.recordingName
                });
                console.log(`Recording stopped: ${this.recordingName}`);
            } catch (error) {
                console.log(`Note: Could not stop recording - it may have already been stopped`);
            }
        }
    }

Recording both sides of… what?

actually it only records the one side of call like user voice it is not recording the AI voice

It will record what is sent to the external media channel. If nothing is sent, nothing will be recorded.

If you play back audio to a channel using sound playback, it won’t be recorded. It has to be played to the bridge, and I can’t recall if that is even recorded (I believe it is).

it is running through the external media only

async initializeCall() {
        if (this.initialized) {
            console.log(`Call ${this.channelId} already initialized`);
            return;
        }

        try {
            console.log(`Initializing call for channel ${this.channelId}`);

            // Add the caller channel to bridge first
            await this.bridge.addChannel({
                channel: this.channel.id,
                role: 'participant'
            });

            // Create external media channel
            const externalChannelId = `external_${this.channelId}`;
            this.externalMediaChannel = await this.client.channels.externalMedia({
                app: 'voicebot',
                external_host: '127.0.0.1:3001',
                format: 'ulaw',
                channelId: externalChannelId,
                variables: {
                    MIXMON_FORMAT: 'ulaw',
                    AUDIOHOOK_INHERIT_LEVEL: '1',
                    MIXMON_DIR: '/var/spool/asterisk/recording'
                }
            });

            // Add external media channel to bridge
            await this.bridge.addChannel({
                channel: this.externalMediaChannel.id,
                role: 'participant'
            });

            // Start recording after bridge setup
            await this.startRecording();

            // Connect to cloud service
            await this.connectToCloud();
            
            this.initialized = true;
            console.log(`Call initialization complete for ${this.channelId}`);

        } catch (error) {
            console.error(`Error in initializeCall for ${this.channelId}:`, error);
            await this.cleanup();
            throw error;
        }
    }
async function initializeARI() {
    try {
        const client = await ari.connect(ariConfig.url, ariConfig.username, ariConfig.password);
        console.log('Connected to Asterisk ARI');
        const processedChannels = new Set();

        client.on('StasisStart', async (event, channel) => {
            const channelId = channel.id;
            const args = event.args || [];  // Get the arguments passed to Stasis

            // Skip if we've already processed this channel or if it's an external media channel
            if (processedChannels.has(channelId) || channelId.startsWith('external_')) {
                console.log(`Skipping already processed or external channel ${channelId}`);
                return;
            }

            // Mark this channel as processed
            processedChannels.add(channelId);
            
            console.log(`Processing new channel ${channelId}`);

            if (activeCalls.has(channel.id)) {
                console.log(`Channel ${channel.id} already being handled`);
                return;
            }

            if (args.includes('transfer')) {
                console.log(`Channel ${channelId} is a transfer channel, skipping voicebot initialization`);
                return;
            }

            if (!activeCalls.has(channel.id)) {
                try {
                    // Create bridge
                    const bridge = await client.Bridge();
                    await bridge.create({ 
                        type: 'mixing,proxy_media',
                        name: `voicebot_${channelId}`
                    });

                    // Create handler
                    const handler = new CallHandler(channelId, channel, client, bridge);
                    activeCalls.set(channelId, handler);
                    
                    // Initialize the call
                    await handler.initializeCall();
                } catch (error) {
                    console.error('Error initializing call:', error);
                    await channel.hangup().catch(console.error);
                }
            }
        });

        client.on('StasisEnd', async (event, channel) => {
            const handler = activeCalls.get(channel.id);
            if (handler) {
                await handler.cleanup();
                activeCalls.delete(channel.id);
            }
        });

        client.start('voicebot');

    } catch (error) {
        console.error('Error connecting to ARI:', error);
        process.exit(1);
    }
}

According to your past code, no it’s not.

The code is downloading a file or something from the websocket, storing it in a wav file, and then playing it back. That is not sending it as a stream to the external media.

How can I manage that do we have any example for it?

There’s been threads. I suggest searching and reading and experimenting.

Hello @jcolp
May I know is there any other way to play the audio without saving the file?

That would be external media with an RTP stream into Asterisk from the ARI application.

so do we need to run 2 RTP streams one for listen and one for send, or both will work with single RTP stream?

It’s a single external media channel, and a stream in each direction. That’s how RTP works.

okay, thanks

Hello @jcolp

I want trying to do with multiple external channels I can see the package is going properly but I can’t listen it on mobile side but when I play from file it works fine, can you please check the code and help me if I am getting anything wrong, I worked whole night yesterday I am really tired now and sorry to bother you.

await this.bridge.addChannel({
                channel: this.channel.id,
                role: 'participant'
            });


            // Create external media channel
            const externalChannelId = `inbound_external_${this.channelId}`;
            this.externalMediaChannel = await this.client.channels.externalMedia({
                app: 'voicebot',
                external_host: '127.0.0.1:3001',
                format: 'ulaw',
                channelId: externalChannelId
            });

            // Add external media channel to bridge
            await this.bridge.addChannel({
                channel: this.externalMediaChannel.id,
                role: 'participant'
            });


// Create external media channel
            const externalVChannelId = `outbound_external_${this.channelId}`;
            this.externalVMediaChannel = await this.client.channels.externalMedia({
                app: 'voicebot',
                external_host: '127.0.0.1:10001',
                format: 'ulaw',
                channelId: externalVChannelId
            });

            await this.externalVMediaChannel.setChannelVar({
                variable: 'CHANNEL(rtp_use_dynamic_payload)',
                value: '0'
            });
 await this.bridge.addChannel({
                channel: this.externalVMediaChannel.id,
                role: 'participant'
            });

in this code 3001 is working but 10001 is not working my rtp.conf is

  GNU nano 7.2                                                                                                /etc/asterisk/rtp.conf                                                                                                         
[general]
rtpstart=10000
rtpend=20000
rtpchecksums=no
strictrtp=yes        ; Changed to yes for security
icesupport=no
stunaddr=
dtls_learning=no
rtptimeout=60        ; Add timeout values
rtpholdtimeout=300
rtpkeepalive=15

here is the full code of RTP Handler

const dgram = require('dgram');
const WebSocket = require('ws');

const MULAW_DECODE_TABLE = new Int16Array([
    -32124, -31100, -30076, -29052, -28028, -27004, -25980, -24956,
    -23932, -22908, -21884, -20860, -19836, -18812, -17788, -16764,
    -15996, -15484, -14972, -14460, -13948, -13436, -12924, -12412,
    -11900, -11388, -10876, -10364, -9852, -9340, -8828, -8316,
    -7932, -7676, -7420, -7164, -6908, -6652, -6396, -6140,
    -5884, -5628, -5372, -5116, -4860, -4604, -4348, -4092,
    -3900, -3772, -3644, -3516, -3388, -3260, -3132, -3004,
    -2876, -2748, -2620, -2492, -2364, -2236, -2108, -1980,
    -1884, -1820, -1756, -1692, -1628, -1564, -1500, -1436,
    -1372, -1308, -1244, -1180, -1116, -1052, -988, -924,
    -876, -844, -812, -780, -748, -716, -684, -652,
    -620, -588, -556, -524, -492, -460, -428, -396,
    -372, -356, -340, -324, -308, -292, -276, -260,
    -244, -228, -212, -196, -180, -164, -148, -132,
    -120, -112, -104, -96, -88, -80, -72, -64,
    -56, -48, -40, -32, -24, -16, -8, 0,
    32124, 31100, 30076, 29052, 28028, 27004, 25980, 24956,
    23932, 22908, 21884, 20860, 19836, 18812, 17788, 16764,
    15996, 15484, 14972, 14460, 13948, 13436, 12924, 12412,
    11900, 11388, 10876, 10364, 9852, 9340, 8828, 8316,
    7932, 7676, 7420, 7164, 6908, 6652, 6396, 6140,
    5884, 5628, 5372, 5116, 4860, 4604, 4348, 4092,
    3900, 3772, 3644, 3516, 3388, 3260, 3132, 3004,
    2876, 2748, 2620, 2492, 2364, 2236, 2108, 1980,
    1884, 1820, 1756, 1692, 1628, 1564, 1500, 1436,
    1372, 1308, 1244, 1180, 1116, 1052, 988, 924,
    876, 844, 812, 780, 748, 716, 684, 652,
    620, 588, 556, 524, 492, 460, 428, 396,
    372, 356, 340, 324, 308, 292, 276, 260,
    244, 228, 212, 196, 180, 164, 148, 132,
    120, 112, 104, 96, 88, 80, 72, 64,
    56, 48, 40, 32, 24, 16, 8, 0
]);


class RTPHandler {
    constructor() {
        this.server = dgram.createSocket('udp4');
        this.server1 = dgram.createSocket('udp4');
        this.activeChannels = new Map(); // Map to store channel-websocket pairs
        this.streamSid = null;
        this.sequenceNumber = 0;
        this.startTime = null;  // Add this
        this.setupRTPServer();
    }

    setStreamSid(streamSid) {
        this.streamSid = streamSid;
    }

    setupRTPServer() {
        this.server.on('listening', () => {
            const address = this.server.address();
            console.log(`RTP server listening on ${address.address}:${address.port}`);
        });

        this.server.on('message', (msg, rinfo) => {
            // console.log(`Received RTP packet from ${rinfo.address}:${rinfo.port}`);
            this.handleRTPPacket(msg);
        });

        this.server.bind(3001);

        this.server1.on('listening', () => {
            const address = this.server1.address();
            console.log(`RTP server listening on ${address.address}:${address.port}`);
        });

        this.server1.on('message', (msg, rinfo) => {
            // console.log(`Received RTP packet from ${rinfo.address}:${rinfo.port}`);
            // this.handleRTPPacket(msg);
        });

        this.server1.bind(10001);
    }

    handleRTPPacket(rtpPacket) {
        try {
            // Extract RTP header (first 12 bytes)
            const rtpVersion = (rtpPacket[0] >> 6) & 0x03;
            const payloadType = rtpPacket[1] & 0x7f;
            const sequenceNumber = (rtpPacket[2] << 8) | rtpPacket[3];
            const timestamp = (rtpPacket[4] << 24) | (rtpPacket[5] << 16) | 
                            (rtpPacket[6] << 8) | rtpPacket[7];

            // Extract payload (audio data) - skip RTP header
            const payload = rtpPacket.slice(12);

            // Send to all active WebSocket connections
            this.activeChannels.forEach((ws, channelId) => {
                if (ws.readyState === WebSocket.OPEN) {
                    const message = {
                        event: 'media',
                        channelId: channelId,
                        streamSid: this.streamSid,
                        media: {
                            format: 'ulaw',
                            timestamp: timestamp,
                            sequenceNumber: sequenceNumber,
                            payload: payload.toString('base64')
                        }
                    };
                    ws.send(JSON.stringify(message));
                }
            });

        } catch (error) {
            console.error('Error processing RTP packet:', error);
        }
    }
    
    async sendAudio(channelId, audioPayload) {
        const channelInfo = this.activeChannels.get(channelId);
        if (!channelInfo) {
            console.error(`No channel info found for ${channelId}`);
            return;
        }
    
        // Get RTP port from channel info
        const rtpPort = channelInfo.rtpPort || 10001; // Default to start of Asterisk RTP range
        const rtpHost = channelInfo.rtpHost || '127.0.0.1';
    
        try {
            let audioBuffer;
            audioBuffer = Buffer.from(audioPayload, 'base64');
            const CHUNK_SIZE = 160;
            let packetsReceived = 0;
    
            console.log('Starting RTP transmission:', {
                totalSize: audioBuffer.length,
                numberOfChunks: Math.ceil(audioBuffer.length / CHUNK_SIZE),
                rtpPort,
                rtpHost
            });
    
            for (let offset = 0; offset < audioBuffer.length; offset += CHUNK_SIZE) {
                let chunk = audioBuffer.slice(offset, offset + CHUNK_SIZE);
                
                if (chunk.length < CHUNK_SIZE) {
                    const padding = Buffer.alloc(CHUNK_SIZE - chunk.length, 0x7F);
                    chunk = Buffer.concat([chunk, padding]);
                }
    
                const rtpHeader = Buffer.alloc(12);
                rtpHeader[0] = 0x80;
                rtpHeader[1] = 0x00;
                rtpHeader.writeUInt16BE(channelInfo.sequenceNumber, 2);
                rtpHeader.writeUInt32BE(channelInfo.timestamp, 4);
                rtpHeader.writeUInt32BE(channelInfo.ssrc, 8);
    
                const rtpPacket = Buffer.concat([rtpHeader, chunk]);
    
                const sendPromise = new Promise((resolve, reject) => {
                    this.server1.send(rtpPacket, 0, rtpPacket.length, rtpPort, rtpHost, (err) => {
                        if (err) {
                            console.error('Failed to send RTP packet:', err);
                            reject(err);
                        } else {
                            packetsReceived++;
                            if (channelInfo.sequenceNumber % 5 === 0) {
                                console.log('RTP packet sent:', {
                                    sequenceNumber: channelInfo.sequenceNumber,
                                    timestamp: channelInfo.timestamp,
                                    packetSize: rtpPacket.length,
                                    rtpPort,
                                    rtpHost,
                                    firstPayloadByte: chunk[0],
                                    lastPayloadByte: chunk[chunk.length - 1]
                                });
                            }
                            resolve();
                        }
                    });
                });
    
                await Promise.all([
                    sendPromise,
                    new Promise(resolve => setTimeout(resolve, 20))
                ]);
    
                channelInfo.sequenceNumber = (channelInfo.sequenceNumber + 1) & 0xFFFF;
                channelInfo.timestamp = (channelInfo.timestamp + CHUNK_SIZE) & 0xFFFFFFFF;
            }
    
            console.log('RTP transmission complete:', {
                packetsReceived,
                expectedPackets: Math.ceil(audioBuffer.length / CHUNK_SIZE),
                finalSequenceNumber: channelInfo.sequenceNumber,
                finalTimestamp: channelInfo.timestamp,
                rtpPort,
                rtpHost
            });
    
        } catch (error) {
            console.error(`Error in sendAudio:`, error);
        }
    }

    // Method to register a new channel and its WebSocket
    registerChannel(channelId, webSocket) {
        console.log(`Registering channel ${channelId} for RTP handling`);
        this.activeChannels.set(channelId, webSocket);
    }

    // Method to unregister a channel
    unregisterChannel(channelId) {
        console.log(`Unregistering channel ${channelId} from RTP handling`);
        this.activeChannels.delete(channelId);
        // this.cleanup();
    }

    // Cleanup method
    cleanup() {
        try{
            this.server.close();
            this.server1.close();
        } catch(err) {
            console.log("RTP Cleanup error", err);
        }
        this.activeChannels.clear();
    }
}

module.exports = RTPHandler;

I am generating audio like this

const response = await fetch(
                    `https://api.deepgram.com/v1/speak?model=${voice_name.toLowerCase()}&encoding=mulaw&sample_rate=8000&container=none`,
                    {
                        method: "POST",
                        headers: {
                            Authorization: `Token ${process.env.DEEPGRAM_API_KEY}`,
                            "Content-Type": "application/json"
                        },
                        body: JSON.stringify({
                            text: cleanString(partialResponse)
                        }),
                        encoding: "mulaw"
                    }
                );

                const audioArrayBuffer = await response.arrayBuffer();
                this.emit(
                    "speech",
                    partialResponseIndex,
                    Buffer.from(audioArrayBuffer).toString("base64"),
                    partialResponse,
                    interactionCount
                );

the same audio is working fine with Twilio but not with asterisk external media.