I tried to use the super.init
before initializing the objects and tried placing self
in front of my objects. Can anyone tell me what I am doing wrong?
我在初始化对象之前尝试使用super.init,并尝试将self放在我的对象前面。谁能告诉我我做错了什么?
This is the code I currently have:
这是我目前的代码:
//
// RecordController.swift
// TuneUpV2
//
// Created by Don Nijssen on 05-05-15.
// Copyright (c) 2015 Don Nijssen. All rights reserved.
//
import UIKit
import AVFoundation
// RecordController: taps the microphone input of an AVAudioEngine and loops a
// small PCM buffer through a player node into the main mixer.
class RecordController: NSObject {
// NOTE(review): all five stored properties are non-optional. Swift's two-phase
// initialization requires every stored property to be assigned AND super.init()
// to be called inside init(). This init() never calls super.init(), which is
// the compile error the question is about.
var audioEngine: AVAudioEngine
var audioInputNode : AVAudioInputNode
var audioPlayerNode: AVAudioPlayerNode
var audioMixerNode: AVAudioMixerNode
var audioBuffer: AVAudioPCMBuffer
override init(){
// Phase 1: give every stored property an initial value (placeholders for the
// nodes that are replaced with engine-owned instances below).
audioEngine = AVAudioEngine()
audioPlayerNode = AVAudioPlayerNode()
audioInputNode = AVAudioInputNode()
audioMixerNode = AVAudioMixerNode()
audioBuffer = AVAudioPCMBuffer()
// NOTE(review): super.init() is missing here — it must appear after the
// assignments above and before `self` is captured by the tap block below.
// Phase 2: replace placeholders with the engine's own mixer/input nodes.
audioMixerNode = audioEngine.mainMixerNode
let frameLength = UInt32(256)
// Allocate the loop buffer in the player's output format, 256 frames long.
audioBuffer = AVAudioPCMBuffer(PCMFormat: audioPlayerNode.outputFormatForBus(0), frameCapacity: frameLength)
audioBuffer.frameLength = frameLength
audioInputNode = audioEngine.inputNode
// Install a tap on the input bus; the block runs on an audio thread for each
// captured buffer. It captures `self`, which is only legal after super.init().
audioInputNode.installTapOnBus(0, bufferSize:frameLength, format: audioInputNode.outputFormatForBus(0), block: {(buffer, time) in
//let channels = UnsafeArray(start: buffer.floatChannelData, length: Int(buffer.format.channelCount))
//let floats = UnsafeArray(start: channels[0], length: Int(buffer.frameLength))
// Step through the loop buffer one frame at a time (stride = channel count).
for var i = 0; i < Int(self.audioBuffer.frameLength); i+=Int(self.audioMixerNode.outputFormatForBus(0).channelCount)
{
// doing my real time stuff
//self.audioBuffer.floatChannelData.memory[i] = floats[i];
println(self.audioEngine.inputNode.rate);
}
})
// setup audio engine: attach the player and route it into the mixer.
audioEngine.attachNode(audioPlayerNode)
audioEngine.connect(audioPlayerNode, to: audioMixerNode, format: audioPlayerNode.outputFormatForBus(0))
}
// Starts the engine and schedules the buffer to loop through the player node.
func start()
{
// Swift 1.x error-out API; a failure to start is silently ignored here.
audioEngine.startAndReturnError(nil)
// play player and buffer
audioPlayerNode.play()
audioPlayerNode.scheduleBuffer(audioBuffer, atTime: nil, options: .Loops, completionHandler: nil)
}
// Stops playback and shuts the engine down.
func stop()
{
audioEngine.stop();
audioPlayerNode.stop();
}
}
2 个解决方案
#1
Where exactly did you call super.init()
? This works in my playground:
你究竟在哪里调用 super.init()？下面这段代码在我的 playground 中可以正常运行：
// Corrected initializer: Swift two-phase initialization — phase 1 assigns every
// non-optional stored property, THEN super.init() runs, and only after that may
// `self` be used (e.g. captured by the tap block).
override init(){
audioEngine = AVAudioEngine()
audioPlayerNode = AVAudioPlayerNode()
audioInputNode = AVAudioInputNode()
audioMixerNode = AVAudioMixerNode()
audioBuffer = AVAudioPCMBuffer()
// Call super.init() immediately after all subclass properties are initialized
super.init()
// Phase 2: swap the placeholder nodes for the engine-owned instances.
audioMixerNode = audioEngine.mainMixerNode
let frameLength = UInt32(256)
// Re-create the buffer in the player's actual output format, 256 frames long.
audioBuffer = AVAudioPCMBuffer(PCMFormat: audioPlayerNode.outputFormatForBus(0), frameCapacity: frameLength)
audioBuffer.frameLength = frameLength
audioInputNode = audioEngine.inputNode
// The tap block captures `self` — legal here because super.init() has run.
audioInputNode.installTapOnBus(0, bufferSize:frameLength, format: audioInputNode.outputFormatForBus(0), block: {(buffer, time) in
//let channels = UnsafeArray(start: buffer.floatChannelData, length: Int(buffer.format.channelCount))
//let floats = UnsafeArray(start: channels[0], length: Int(buffer.frameLength))
for var i = 0; i < Int(self.audioBuffer.frameLength); i+=Int(self.audioMixerNode.outputFormatForBus(0).channelCount)
{
// doing my real time stuff
//self.audioBuffer.floatChannelData.memory[i] = floats[i];
println(self.audioEngine.inputNode.rate);
}
})
// setup audio engine: attach the player and connect it to the mixer.
audioEngine.attachNode(audioPlayerNode)
audioEngine.connect(audioPlayerNode, to: audioMixerNode, format: audioPlayerNode.outputFormatForBus(0))
}
Hope this helps
希望这可以帮助
#2
If your class is a subclass and it has non-optional stored properties, then you have to give an initial value to every property. Since it is a subclass of NSObject and you are overriding the init method of its parent class, you must initialise all of your own properties first and only then call super.init() — Swift's two-phase initialization does not allow super.init() to run before the subclass's stored properties are set.
如果您的类是子类并且具有非可选存储属性，那么您必须为每个属性赋予初始值。由于它是 NSObject 的子类，并且您重写了父类的 init 方法，因此必须先初始化自己的所有属性，然后才能调用 super.init()——Swift 的两段式初始化不允许在子类存储属性赋值之前调用 super.init()。
// RecordController: taps the microphone input of an AVAudioEngine and loops a
// small PCM buffer through a player node into the main mixer.
class RecordController: NSObject {
    var audioEngine: AVAudioEngine
    var audioInputNode : AVAudioInputNode
    var audioPlayerNode: AVAudioPlayerNode
    var audioMixerNode: AVAudioMixerNode
    var audioBuffer: AVAudioPCMBuffer

    override init(){
        // Swift two-phase initialization: every non-optional stored property
        // must be assigned BEFORE super.init() is called. Calling super.init()
        // first (as the original answer did) is a compile error:
        // "property 'self.audioEngine' not initialized at super.init call".
        self.audioEngine = AVAudioEngine()
        self.audioPlayerNode = AVAudioPlayerNode()
        self.audioInputNode = AVAudioInputNode()
        self.audioMixerNode = AVAudioMixerNode()
        self.audioBuffer = AVAudioPCMBuffer()
        // Phase 1 complete — now hand off to NSObject.
        super.init()
        // Phase 2: replace the placeholders with the engine-owned nodes.
        audioMixerNode = audioEngine.mainMixerNode
        let frameLength = UInt32(256)
        // Allocate the loop buffer in the player's output format, 256 frames.
        audioBuffer = AVAudioPCMBuffer(PCMFormat: audioPlayerNode.outputFormatForBus(0), frameCapacity: frameLength)
        audioBuffer.frameLength = frameLength
        audioInputNode = audioEngine.inputNode
        // The tap block captures `self` — legal only after super.init().
        audioInputNode.installTapOnBus(0, bufferSize:frameLength, format: audioInputNode.outputFormatForBus(0), block: {(buffer, time) in
            //let channels = UnsafeArray(start: buffer.floatChannelData, length: Int(buffer.format.channelCount))
            //let floats = UnsafeArray(start: channels[0], length: Int(buffer.frameLength))
            for var i = 0; i < Int(self.audioBuffer.frameLength); i+=Int(self.audioMixerNode.outputFormatForBus(0).channelCount)
            {
                // doing my real time stuff
                //self.audioBuffer.floatChannelData.memory[i] = floats[i];
                println(self.audioEngine.inputNode.rate);
            }
        })
        // setup audio engine: attach the player and route it into the mixer.
        audioEngine.attachNode(audioPlayerNode)
        audioEngine.connect(audioPlayerNode, to: audioMixerNode, format: audioPlayerNode.outputFormatForBus(0))
    }

    // Starts the engine and schedules the buffer to loop through the player.
    func start()
    {
        audioEngine.startAndReturnError(nil)
        // play player and buffer
        audioPlayerNode.play()
        audioPlayerNode.scheduleBuffer(audioBuffer, atTime: nil, options: .Loops, completionHandler: nil)
    }

    // Stops playback and shuts the engine down.
    func stop()
    {
        audioEngine.stop();
        audioPlayerNode.stop();
    }
}
As per Apple documentation.
根据Apple文档。
#1
Where exactly did you call super.init()
? This works in my playground:
你究竟在哪里调用 super.init()？下面这段代码在我的 playground 中可以正常运行：
// Corrected initializer: Swift two-phase initialization — phase 1 assigns every
// non-optional stored property, THEN super.init() runs, and only after that may
// `self` be used (e.g. captured by the tap block).
override init(){
audioEngine = AVAudioEngine()
audioPlayerNode = AVAudioPlayerNode()
audioInputNode = AVAudioInputNode()
audioMixerNode = AVAudioMixerNode()
audioBuffer = AVAudioPCMBuffer()
// Call super.init() immediately after all subclass properties are initialized
super.init()
// Phase 2: swap the placeholder nodes for the engine-owned instances.
audioMixerNode = audioEngine.mainMixerNode
let frameLength = UInt32(256)
// Re-create the buffer in the player's actual output format, 256 frames long.
audioBuffer = AVAudioPCMBuffer(PCMFormat: audioPlayerNode.outputFormatForBus(0), frameCapacity: frameLength)
audioBuffer.frameLength = frameLength
audioInputNode = audioEngine.inputNode
// The tap block captures `self` — legal here because super.init() has run.
audioInputNode.installTapOnBus(0, bufferSize:frameLength, format: audioInputNode.outputFormatForBus(0), block: {(buffer, time) in
//let channels = UnsafeArray(start: buffer.floatChannelData, length: Int(buffer.format.channelCount))
//let floats = UnsafeArray(start: channels[0], length: Int(buffer.frameLength))
for var i = 0; i < Int(self.audioBuffer.frameLength); i+=Int(self.audioMixerNode.outputFormatForBus(0).channelCount)
{
// doing my real time stuff
//self.audioBuffer.floatChannelData.memory[i] = floats[i];
println(self.audioEngine.inputNode.rate);
}
})
// setup audio engine: attach the player and connect it to the mixer.
audioEngine.attachNode(audioPlayerNode)
audioEngine.connect(audioPlayerNode, to: audioMixerNode, format: audioPlayerNode.outputFormatForBus(0))
}
Hope this helps
希望这可以帮助
#2
If your class is a subclass and it has non-optional stored properties, then you have to give an initial value to every property. Since it is a subclass of NSObject and you are overriding the init method of its parent class, you must initialise all of your own properties first and only then call super.init() — Swift's two-phase initialization does not allow super.init() to run before the subclass's stored properties are set.
如果您的类是子类并且具有非可选存储属性，那么您必须为每个属性赋予初始值。由于它是 NSObject 的子类，并且您重写了父类的 init 方法，因此必须先初始化自己的所有属性，然后才能调用 super.init()——Swift 的两段式初始化不允许在子类存储属性赋值之前调用 super.init()。
// RecordController: taps the microphone input of an AVAudioEngine and loops a
// small PCM buffer through a player node into the main mixer.
class RecordController: NSObject {
    var audioEngine: AVAudioEngine
    var audioInputNode : AVAudioInputNode
    var audioPlayerNode: AVAudioPlayerNode
    var audioMixerNode: AVAudioMixerNode
    var audioBuffer: AVAudioPCMBuffer

    override init(){
        // Swift two-phase initialization: every non-optional stored property
        // must be assigned BEFORE super.init() is called. Calling super.init()
        // first (as the original answer did) is a compile error:
        // "property 'self.audioEngine' not initialized at super.init call".
        self.audioEngine = AVAudioEngine()
        self.audioPlayerNode = AVAudioPlayerNode()
        self.audioInputNode = AVAudioInputNode()
        self.audioMixerNode = AVAudioMixerNode()
        self.audioBuffer = AVAudioPCMBuffer()
        // Phase 1 complete — now hand off to NSObject.
        super.init()
        // Phase 2: replace the placeholders with the engine-owned nodes.
        audioMixerNode = audioEngine.mainMixerNode
        let frameLength = UInt32(256)
        // Allocate the loop buffer in the player's output format, 256 frames.
        audioBuffer = AVAudioPCMBuffer(PCMFormat: audioPlayerNode.outputFormatForBus(0), frameCapacity: frameLength)
        audioBuffer.frameLength = frameLength
        audioInputNode = audioEngine.inputNode
        // The tap block captures `self` — legal only after super.init().
        audioInputNode.installTapOnBus(0, bufferSize:frameLength, format: audioInputNode.outputFormatForBus(0), block: {(buffer, time) in
            //let channels = UnsafeArray(start: buffer.floatChannelData, length: Int(buffer.format.channelCount))
            //let floats = UnsafeArray(start: channels[0], length: Int(buffer.frameLength))
            for var i = 0; i < Int(self.audioBuffer.frameLength); i+=Int(self.audioMixerNode.outputFormatForBus(0).channelCount)
            {
                // doing my real time stuff
                //self.audioBuffer.floatChannelData.memory[i] = floats[i];
                println(self.audioEngine.inputNode.rate);
            }
        })
        // setup audio engine: attach the player and route it into the mixer.
        audioEngine.attachNode(audioPlayerNode)
        audioEngine.connect(audioPlayerNode, to: audioMixerNode, format: audioPlayerNode.outputFormatForBus(0))
    }

    // Starts the engine and schedules the buffer to loop through the player.
    func start()
    {
        audioEngine.startAndReturnError(nil)
        // play player and buffer
        audioPlayerNode.play()
        audioPlayerNode.scheduleBuffer(audioBuffer, atTime: nil, options: .Loops, completionHandler: nil)
    }

    // Stops playback and shuts the engine down.
    func stop()
    {
        audioEngine.stop();
        audioPlayerNode.stop();
    }
}
As per Apple documentation.
根据Apple文档。