
I tried calling super.init before initializing the objects, and I also tried putting self in front of my objects. Can anyone tell me what I'm doing wrong? Swift error: use of 'self' in method call before super.init() call.

This is the code I currently have:

// 
// RecordController.swift 
// TuneUpV2 
// 
// Created by Don Nijssen on 05-05-15. 
// Copyright (c) 2015 Don Nijssen. All rights reserved. 
// 

import UIKit 
import AVFoundation 

class RecordController: NSObject { 

    var audioEngine: AVAudioEngine 
    var audioInputNode : AVAudioInputNode 
    var audioPlayerNode: AVAudioPlayerNode 
    var audioMixerNode: AVAudioMixerNode 
    var audioBuffer: AVAudioPCMBuffer 

    override init(){ 

     audioEngine = AVAudioEngine() 
     audioPlayerNode = AVAudioPlayerNode() 
     audioInputNode = AVAudioInputNode() 
     audioMixerNode = AVAudioMixerNode() 
     audioBuffer = AVAudioPCMBuffer() 


     audioMixerNode = audioEngine.mainMixerNode 

     let frameLength = UInt32(256) 
     audioBuffer = AVAudioPCMBuffer(PCMFormat: audioPlayerNode.outputFormatForBus(0), frameCapacity: frameLength) 
     audioBuffer.frameLength = frameLength 

     audioInputNode = audioEngine.inputNode 

     audioInputNode.installTapOnBus(0, bufferSize:frameLength, format: audioInputNode.outputFormatForBus(0), block: {(buffer, time) in 
      //let channels = UnsafeArray(start: buffer.floatChannelData, length: Int(buffer.format.channelCount)) 
      //let floats = UnsafeArray(start: channels[0], length: Int(buffer.frameLength)) 

      for var i = 0; i < Int(self.audioBuffer.frameLength); i+=Int(self.audioMixerNode.outputFormatForBus(0).channelCount) 
      { 
       // doing my real time stuff 
       //self.audioBuffer.floatChannelData.memory[i] = floats[i]; 
       println(self.audioEngine.inputNode.rate); 
      } 
     }) 

     // setup audio engine 
     audioEngine.attachNode(audioPlayerNode) 
     audioEngine.connect(audioPlayerNode, to: audioMixerNode, format: audioPlayerNode.outputFormatForBus(0)) 

    } 


    func start() 
    { 

     audioEngine.startAndReturnError(nil) 

     // play player and buffer 
     audioPlayerNode.play() 
     audioPlayerNode.scheduleBuffer(audioBuffer, atTime: nil, options: .Loops, completionHandler: nil) 
    } 

    func stop() 
    { 
     audioEngine.stop(); 
     audioPlayerNode.stop(); 
    } 

} 

Answers


Where exactly are you calling super.init()? This works in my playground:

override init(){ 

     audioEngine = AVAudioEngine() 
     audioPlayerNode = AVAudioPlayerNode() 
     audioInputNode = AVAudioInputNode() 
     audioMixerNode = AVAudioMixerNode() 
     audioBuffer = AVAudioPCMBuffer() 

     // Call super.init() immediately after all subclass properties are initialized 
     super.init() 

     audioMixerNode = audioEngine.mainMixerNode 

     let frameLength = UInt32(256) 
     audioBuffer = AVAudioPCMBuffer(PCMFormat: audioPlayerNode.outputFormatForBus(0), frameCapacity: frameLength) 
     audioBuffer.frameLength = frameLength 

     audioInputNode = audioEngine.inputNode 

     audioInputNode.installTapOnBus(0, bufferSize:frameLength, format: audioInputNode.outputFormatForBus(0), block: {(buffer, time) in 
      //let channels = UnsafeArray(start: buffer.floatChannelData, length: Int(buffer.format.channelCount)) 
      //let floats = UnsafeArray(start: channels[0], length: Int(buffer.frameLength)) 

      for var i = 0; i < Int(self.audioBuffer.frameLength); i+=Int(self.audioMixerNode.outputFormatForBus(0).channelCount) 
      { 
       // doing my real time stuff 
       //self.audioBuffer.floatChannelData.memory[i] = floats[i]; 
       println(self.audioEngine.inputNode.rate); 
      } 
     }) 

     // setup audio engine 
     audioEngine.attachNode(audioPlayerNode) 
     audioEngine.connect(audioPlayerNode, to: audioMixerNode, format: audioPlayerNode.outputFormatForBus(0)) 

    } 

Hope this helps.


Thank you so much! I had been calling super.init() right at the start of init().
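To make the rule explicit: Swift uses two-phase initialization, so a subclass must assign values to all of its own stored properties (phase 1) before calling super.init(), and only after that call may self be referenced, which the tap closure does. A minimal sketch of the pattern, with hypothetical names unrelated to the audio code:

import Foundation

class Recorder: NSObject {

    // A non-optional stored property: it must receive a value in phase 1
    var label: String

    override init() {
        // Phase 1: initialize every stored property of this subclass
        label = "recorder"

        // Only then hand control to NSObject's designated initializer
        super.init()

        // Phase 2: self is now fully initialized, so referring to self
        // (directly, or captured in a closure such as a tap block) is allowed
        self.label += " (ready)"
    }
}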


If your class is a subclass and it has non-optional stored properties, you must give every one of those properties an initial value. Since RecordController is a subclass of NSObject and overrides its parent's init method, the super.init() call has to come after all of the subclass's own properties have been initialized, and before self is used (as it is in the tap block).

class RecordController: NSObject { 

var audioEngine: AVAudioEngine 
var audioInputNode : AVAudioInputNode 
var audioPlayerNode: AVAudioPlayerNode 
var audioMixerNode: AVAudioMixerNode 
var audioBuffer: AVAudioPCMBuffer 

override init(){ 
    // Initialize all stored properties of the subclass first (phase 1) 
    self.audioEngine = AVAudioEngine() 
    self.audioPlayerNode = AVAudioPlayerNode() 
    self.audioInputNode = AVAudioInputNode() 
    self.audioMixerNode = AVAudioMixerNode() 
    self.audioBuffer = AVAudioPCMBuffer() 

    // Only now call the superclass initializer; self may be used afterwards 
    super.init() 


    audioMixerNode = audioEngine.mainMixerNode 

    let frameLength = UInt32(256) 
    audioBuffer = AVAudioPCMBuffer(PCMFormat: audioPlayerNode.outputFormatForBus(0), frameCapacity: frameLength) 
    audioBuffer.frameLength = frameLength 

    audioInputNode = audioEngine.inputNode 

    audioInputNode.installTapOnBus(0, bufferSize:frameLength, format: audioInputNode.outputFormatForBus(0), block: {(buffer, time) in 
     //let channels = UnsafeArray(start: buffer.floatChannelData, length: Int(buffer.format.channelCount)) 
     //let floats = UnsafeArray(start: channels[0], length: Int(buffer.frameLength)) 

     for var i = 0; i < Int(self.audioBuffer.frameLength); i+=Int(self.audioMixerNode.outputFormatForBus(0).channelCount) 
     { 
      // doing my real time stuff 
      //self.audioBuffer.floatChannelData.memory[i] = floats[i]; 
      println(self.audioEngine.inputNode.rate); 
     } 
    }) 

    // setup audio engine 
    audioEngine.attachNode(audioPlayerNode) 
    audioEngine.connect(audioPlayerNode, to: audioMixerNode, format: audioPlayerNode.outputFormatForBus(0)) 

} 


func start() 
{ 

    audioEngine.startAndReturnError(nil) 

    // play player and buffer 
    audioPlayerNode.play() 
    audioPlayerNode.scheduleBuffer(audioBuffer, atTime: nil, options: .Loops, completionHandler: nil) 
} 

func stop() 
{ 
    audioEngine.stop(); 
    audioPlayerNode.stop(); 
} 

}

As per Apple's documentation.
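For completeness, a short usage sketch of the class above, assuming the corrected initializer and that the engine starts without error (Swift 1.x era API, matching the code in the answers):

let recorder = RecordController()   // runs the corrected init
recorder.start()                    // starts the engine and loops the scheduled buffer
// ... record/monitor for a while ...
recorder.stop()                     // stops the engine and the player node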
