How to switch the audio source with Microphone.getMicrophone() in ActionScript 3

2015-01-02 11 views

I'm running a webcam streaming app based on Wowza's webcam.fla example. The app streams audio and video from Flash to a Wowza server, which transcodes the stream.

I want to add the ability to switch the audio source to a different system audio input. So far I have built a dropdown that lists all of the audio interfaces and handled its callback, and I start and stop the stream with the doConnect() function, but the audio source appears to stay on the default device.
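
For reference, a minimal sketch of the relevant API shape (the index here is only an example; the device list varies per machine):

// Microphone.names is an Array of device-name Strings;
// Microphone.getMicrophone() takes the int index into that Array.
var names:Array = Microphone.names;
for (var i:int = 0; i < names.length; i++)
{
    trace(i + ": " + names[i]);
}
var mic:Microphone = Microphone.getMicrophone(0); // e.g. the first device

Here is my code so far: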

import flash.media.*;
import flash.geom.*;
import flash.net.*;
import flash.events.*;
import flash.external.ExternalInterface;
import flash.system.Capabilities;
import flash.utils.*; // setInterval / clearInterval

var parsed:Object = root.loaderInfo.parameters; 

var nc:NetConnection = null; 
var nsPublish:NetStream = null; 
var nsPlay:NetStream = null; 
var camera:Camera = null; 
var microphone:Microphone = null; 

// Testing 
var serverName:String = "rtmp://stream-na.example.tv:1935/live"; 
var movieName:String = "streamName"; 

var flushVideoBufferTimer:Number = 0; 

// Quality settings 

var videoBitrate:Number = 200000; 
var videoQuality:Number = 80; // Quality % 

var videoWidth:Number = 640; 
var videoHeight:Number = 360; 

var videoFrameRate:Number = 30; 


//////////////// UI Functions Below

import fl.controls.ComboBox; 
import fl.data.DataProvider; 

var aCb:ComboBox = new ComboBox(); 

function createAudioComboBox(sources:Array):void
{ 

    var sourcesArray:Array = new Array(); 

    aCb.dropdownWidth = 210; 
    aCb.width = 200; 
    aCb.move(0, 365); 
    aCb.prompt = "Change Audio Source"; 
    aCb.dataProvider = new DataProvider(sourcesArray); 
    aCb.addEventListener(Event.CHANGE, changeAudioHandler); 

    addChild(aCb); 

    for (var index:int = 0; index < sources.length; index++)
    {
     //ExternalInterface.call("logBrowserStreaming", sources[index]);

     aCb.addItem({ label: sources[index], data: index });

    }

    function changeAudioHandler(event:Event):void
    {
     // doConnect() toggles the connection: the first call disconnects...
     doConnect();

     var audioSource:int = int(ComboBox(event.target).selectedItem.data);

     microphone = Microphone.getMicrophone(audioSource);

     microphone.rate = 16;
     microphone.codec = SoundCodec.SPEEX;
     microphone.encodeQuality = 10; // consider offering better audio in a native app?
     microphone.setSilenceLevel(0, -1);
     microphone.setUseEchoSuppression(true);

     //ExternalInterface.call("logBrowserStreaming", audioSource);

     // ...and the second call reconnects with the newly selected microphone.
     doConnect();

    }

} 

//////////////// Core Streaming Functions Below


function startCamera():void
{ 
    // get the default Flash camera and microphone 
    camera = Camera.getCamera(); 
    microphone = Microphone.getMicrophone(); 

    // here are all the quality and performance settings
    if (camera != null) 
    { 
     //camera.setMode(1280, 720, 30, false); 
     camera.setMode(videoWidth, videoHeight, videoFrameRate, false); // favorArea=false keeps the frame rate at the expense of resolution: http://www.flash-communications.net/technotes/setMode/index.html
     camera.setQuality(videoBitrate, videoQuality); // max 800 kbps
     camera.setKeyFrameInterval(2);

     // List video source names
     // sourceVideoLabel.text += Camera.names; 

     // Create audio sources dropdown 

     // Hide video sources for now... 
     //createVideoComboBox(Camera.names); 


    } 
    else 
    { 
     sourceVideoLabel.text = "No Camera Found\n"; 
    } 
    if (microphone != null) 
    { 
     microphone.rate = 16; 
     microphone.codec = SoundCodec.SPEEX; 
     microphone.encodeQuality = 10; // consider offering better audio in a native app?
     microphone.setSilenceLevel(0, -1); 
     microphone.setUseEchoSuppression(true); 

     // List audio source names
     // sourceVideoLabel.text += Microphone.names; 

     // Create audio sources dropdown 
     createAudioComboBox(Microphone.names); 

     // Don't show audio slider for now... 
     // createAudioSlider(); 


     // Don't monitor audio level for now... 
     //monitorAudioLevel(); 

    } 
    else 
    { 
     sourceVideoLabel.text += "No Microphone Found\n"; 
    } 

    nameStr.text = movieName; 
    AppendCheckbox.selected = false; 
    connect.connectStr.text = serverName; 
    connect.connectButton.addEventListener(MouseEvent.CLICK, doConnect); 

    //enablePlayControls(false); 

    doConnect(); 
} 

function ncOnStatus(infoObject:NetStatusEvent):void
{ 
    trace("nc: "+infoObject.info.code+" ("+infoObject.info.description+")"); 
    if (infoObject.info.code == "NetConnection.Connect.Failed") 
    { 
     prompt.text = "Connection failed. Try again or email [email protected]"; 
    } 
    else if (infoObject.info.code == "NetConnection.Connect.Rejected") 
    { 
     // Hide connect fail... 
     prompt.text = infoObject.info.description; 
    } 
} 

// Ask for permission to use the camera and show the preview to the user.
// doConnect toggles the connection on and off; it is also used as a
// MouseEvent.CLICK listener, so it accepts an optional event parameter.
function doConnect(event:MouseEvent = null):void
{ 
    // connect to the Wowza Media Server 
    if (nc == null) 
    { 
     // create a connection to the wowza media server 
     nc = new NetConnection(); 
     nc.addEventListener(NetStatusEvent.NET_STATUS, ncOnStatus); 
     nc.connect(connect.connectStr.text); 

     //connect.connectButton.label = "Disconnect"; 

     // uncomment this to monitor frame rate and buffer length 
     //setInterval("updateStreamValues", 500); 

     // Attach camera to preview 
     videoCamera.clear(); 
     videoCamera.attachCamera(camera); 

     //enablePlayControls(true); 

     // Pass status to 
     // ExternalInterface.call("logBrowserStreaming", "cameraagreed"); 

    } 
    else 
    { 
     nsPublish = null; 
     nsPlay = null; 

     videoCamera.attachNetStream(null); 
     videoCamera.clear(); 

     videoRemote.attachNetStream(null); 
     videoRemote.clear(); 

     nc.close(); 
     nc = null; 

     //enablePlayControls(false); 

     doSubscribe.label = 'Play'; 
     doPublish.label = 'Stream'; 
     AppendCheckbox.selected = false; 

     connect.connectButton.label = "Connect"; 
     prompt.text = ""; 
    } 
} 

// function to monitor the frame rate and buffer length 
function updateStreamValues():void
{ 
    if (nsPlay != null) 
    { 
     fpsText.text = (Math.round(nsPlay.currentFPS*1000)/1000)+" fps"; 
     bufferLenText.text = (Math.round(nsPlay.bufferLength*1000)/1000)+" secs"; 
    } 
    else 
    { 
     fpsText.text = ""; 
     bufferLenText.text = ""; 
    } 
} 

function nsPlayOnStatus(infoObject:NetStatusEvent):void
{ 
    trace("nsPlay: onStatus: "+infoObject.info.code+" ("+infoObject.info.description+")"); 
    if (infoObject.info.code == "NetStream.Play.StreamNotFound" || infoObject.info.code == "NetStream.Play.Failed") 
    { 
     prompt.text = infoObject.info.description; 
    } 
} 

function doCloseRecord():void
{ 
    // after we have hit "Stop" recording and after the buffered video data has been 
    // sent to the Wowza Media Server close the publishing stream 
    nsPublish.publish("null"); 
} 

// this function gets called every 250 ms to monitor the
// progress of flushing the video buffer. Once the video
// buffer is empty we close the publishing stream
function flushVideoBuffer():void
{ 
    var buffLen:Number = nsPublish.bufferLength; 
    if (buffLen == 0) 
    { 
     clearInterval(flushVideoBufferTimer); 
     flushVideoBufferTimer = 0; 
     doCloseRecord(); 
     doPublish.label = 'Stream'; 
    } 
} 

function nsPublishOnStatus(infoObject:NetStatusEvent):void
{ 
    trace("nsPublish: "+infoObject.info.code+" ("+infoObject.info.description+")"); 

    // After calling nsPublish.publish(null) we wait for a status
    // event of "NetStream.Unpublish.Success", which tells us all the video
    // and audio data has been written to the flv file. It is at this time
    // that we can start playing the video we just recorded.
    if (infoObject.info.code == "NetStream.Unpublish.Success") 
    { 
     //doPlayStart(); 
    } 

    if (infoObject.info.code == "NetStream.Play.StreamNotFound" || infoObject.info.code == "NetStream.Play.Failed") 
    { 
     prompt.text = infoObject.info.description; 
    } 
} 

function initH264Recording(ns:NetStream):void
{
    var h264Settings:H264VideoStreamSettings = new H264VideoStreamSettings();
    h264Settings.setProfileLevel(H264Profile.BASELINE, H264Level.LEVEL_3);
    ns.videoStreamSettings = h264Settings;
}

// Start recording video to the server 
function doStreamStart():void
{ 
    //prompt.text = "Starting stream with mic..."; 
    //prompt.text = microphone; 

    ExternalInterface.call("logBrowserStreaming", "starting stream"); 

    // stop video playback 
    //doPlayStop(); 

    // create a new NetStream object for publishing 
    nsPublish = new NetStream(nc); 

    var nsPublishClient:Object = new Object(); 
    nsPublish.client = nsPublishClient; 

    // Set the H.264 encoding parameters 
    if (testVersion(11,0,0,0)) 
    { 
     initH264Recording(nsPublish); 
    } 
    else 
    { 
     prompt.text = "Flash player 11 or greater is required for H.264 encoding (" + Capabilities.version + ")."; 

    }// trace the NetStream status information 
    nsPublish.addEventListener(NetStatusEvent.NET_STATUS, nsPublicOnStatus); 

    // publish the stream by name
    nsPublish.publish(nameStr.text, (AppendCheckbox.selected ? "append" : "record"));

    // add custom metadata to the header of the .flv file
    var metaData:Object = new Object();
    metaData["description"] = "Recorded using WebcamRecording example.";
    nsPublish.send("@setDataFrame", "onMetaData", metaData);

    // attach the camera and microphone to the server
    nsPublish.attachCamera(camera);
    nsPublish.attachAudio(microphone);

    ExternalInterface.call("logBrowserStreaming", microphone);

    // set the buffer time to 20 seconds to buffer 20 seconds of video
    // data for better performance and higher quality video
    nsPublish.bufferTime = 20;

    // Disable the audio choice dropdown 
    aCb.enabled = false; 

} 

function doStreamStop():void
{ 

    ExternalInterface.call("logBrowserStreaming", "stopping stream"); 

    // stop streaming video and audio to the publishing 
    // NetStream object 
    nsPublish.attachAudio(null); 
    nsPublish.attachCamera(null); 

    // After stopping the publishing we need to check if there is
    // video content in the NetStream buffer. If there is data,
    // we are going to monitor the video upload progress by calling
    // flushVideoBuffer every 250 ms. If the buffer length is 0,
    // we close the recording immediately.
    var buffLen:Number = nsPublish.bufferLength; 
    if (buffLen > 0) 
    { 
     flushVideoBufferTimer = setInterval(flushVideoBuffer,250); 
     doPublish.label = 'Wait...'; 
    } 
    else 
    { 
     trace("nsPublish.publish(null)"); 
     doCloseRecord(); 
     doPublish.label = 'Start'; 
    } 

    // Re-enable the audio choice dropdown
    aCb.enabled = true;

} 

// testVersion checks whether the current Flash Player version is at least the
// given version (used here to gate H.264 encoding).
function testVersion(v0:Number, v1:Number, v2:Number, v3:Number):Boolean 
{ 
    var version:String = Capabilities.version; 
    var index:Number = version.indexOf(" "); 
    version = version.substr(index+1); 
    var verParts:Array = version.split(","); 

    var ret:Boolean = true;
    while (true) 
    { 
     if (Number(verParts[0]) < v0) 
     { 
      ret = false; 
      break; 
     } 
     else if (Number(verParts[0]) > v0) 
     { 
      break; 

     } 
     if (Number(verParts[1]) < v1) 
     { 
      ret = false; 
      break; 
     } 
     else if (Number(verParts[1]) > v1) 
     { 
      break; 

     } 
     if (Number(verParts[2]) < v2) 
     { 
      ret = false; 
      break; 
     } 
     else if (Number(verParts[2]) > v2) 
     { 
      break; 

     } 
     if (Number(verParts[3]) < v3) 
     { 
      ret = false; 
      break; 
     } 
     break; 
    } 
    trace("testVersion: "+Capabilities.version+">="+v0+","+v1+","+v2+","+v3+": "+ret); 

    return ret; 
} 


// External triggers from JavaScript

// Allow the stream to start with a startBrowserStreaming call from JS
ExternalInterface.addCallback("startBrowserStreaming", doStreamStart);

// Allow the stream to stop with a stopBrowserStreaming call from JS
ExternalInterface.addCallback("stopBrowserStreaming", doStreamStop);

stage.align = "TL"; 
stage.scaleMode = "noScale"; 

startCamera(); 

Answer

You can switch the audio source without touching the NetConnection and/or the NetStream. Here I use a button to change my audio source.

Take this simple example. I tested this code with Wowza Streaming Engine 4.1.1 (the free version, without the Wowza Transcoder add-on, of course) and with Flash Media Server 4.5, and it works fine:
import flash.events.AsyncErrorEvent;
import flash.events.MouseEvent;
import flash.events.NetStatusEvent;
import flash.media.Camera;
import flash.media.Microphone;
import flash.net.NetConnection;
import flash.net.NetStream;

const server:String = 'rtmp://localhost/live';
const stream:String = 'live';

var nc:NetConnection; 
var ns_publish:NetStream; 

nc = new NetConnection(); 
nc.addEventListener(
    NetStatusEvent.NET_STATUS,
    function(e:NetStatusEvent):void {
     if (e.info.code == 'NetConnection.Connect.Success') {
      publish();
     }
    }
);
nc.addEventListener(AsyncErrorEvent.ASYNC_ERROR, function(e:AsyncErrorEvent):void {});
nc.connect(server);

function publish():void { 

    var cam:Camera = Camera.getCamera(); 

    // for my case, I have 2 mic, and I start with the first 
    var mic:Microphone = Microphone.getMicrophone(0); 
    ns_publish = new NetStream(nc); 
    ns_publish.attachAudio(mic); 
    ns_publish.attachCamera(cam); 
    ns_publish.publish(stream, 'record'); 
} 

btn_switch_mic.addEventListener(MouseEvent.CLICK, function(e:MouseEvent):void {
    // I can switch to the second mic without re-initializing my NetConnection and/or my NetStream
    var mic:Microphone = Microphone.getMicrophone(1);
    ns_publish.attachAudio(mic);
});


Note: you can change the video source (the camera) in the same way.
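
For instance, a minimal sketch (the btn_switch_cam button and the camera index are my assumptions, not part of the original answer; note that Camera.getCamera() takes the index as a String into Camera.names, unlike Microphone.getMicrophone(), which takes an int):

btn_switch_cam.addEventListener(MouseEvent.CLICK, function(e:MouseEvent):void {
    // Camera.getCamera() takes a String index into Camera.names
    var cam:Camera = Camera.getCamera("1"); // hypothetical second camera
    if (cam != null) {
     // re-attach the new camera without touching nc or ns_publish
     ns_publish.attachCamera(cam);
    }
});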

Hope this helps.