Fix memory leak during playback with the main or high profile.
shogo4405 committed Jan 15, 2023
1 parent 94c98ee commit dbb57f2
Showing 3 changed files with 19 additions and 29 deletions.
Examples/iOS/LiveViewController.swift (5 additions, 0 deletions)
@@ -77,6 +77,11 @@ final class LiveViewController: UIViewController {
         super.viewWillDisappear(animated)
         rtmpStream.removeObserver(self, forKeyPath: "currentFPS")
         rtmpStream.close()
+        rtmpStream.attachAudio(nil)
+        rtmpStream.attachCamera(nil)
+        if #available(iOS 13.0, *) {
+            rtmpStream.attachMultiCamera(nil)
+        }
         // swiftlint:disable notification_center_detachment
         NotificationCenter.default.removeObserver(self)
     }
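For reference, viewWillDisappear in the live example should read roughly as follows after the patch (reconstructed from the hunk above; only the attach(nil) lines are new):

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        rtmpStream.removeObserver(self, forKeyPath: "currentFPS")
        rtmpStream.close()
        // Passing nil detaches the capture devices, so the stream no longer
        // retains the camera/microphone session once the view goes away.
        rtmpStream.attachAudio(nil)
        rtmpStream.attachCamera(nil)
        if #available(iOS 13.0, *) {
            rtmpStream.attachMultiCamera(nil)
        }
        NotificationCenter.default.removeObserver(self)
    }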
Examples/iOS/PlaybackViewController.swift (3 additions, 18 deletions)
@@ -21,19 +21,14 @@ final class PlaybackViewController: UIViewController {
     override func viewWillAppear(_ animated: Bool) {
         logger.info("viewWillAppear")
         super.viewWillAppear(animated)
-        (view as? MTHKView)?.attachStream(rtmpStream)
-        (view as? PiPHKView)?.attachStream(rtmpStream)
-        NotificationCenter.default.addObserver(self, selector: #selector(didInterruptionNotification(_:)), name: AVAudioSession.interruptionNotification, object: nil)
-        NotificationCenter.default.addObserver(self, selector: #selector(didRouteChangeNotification(_:)), name: AVAudioSession.routeChangeNotification, object: nil)
-        if let layer = view.layer as? AVSampleBufferDisplayLayer, #available(iOS 15.0, *) {
+        (view as? NetStreamDrawable)?.attachStream(rtmpStream)
+        if #available(iOS 15.0, *), let layer = view.layer as? AVSampleBufferDisplayLayer {
             pictureInPictureController = AVPictureInPictureController(contentSource: .init(sampleBufferDisplayLayer: layer, playbackDelegate: self))
         }
     }

     override func viewWillDisappear(_ animated: Bool) {
         logger.info("viewWillDisappear")
-        // swiftlint:disable notification_center_detachment
-        NotificationCenter.default.removeObserver(self)
         super.viewWillDisappear(animated)
     }

@@ -61,7 +56,7 @@ final class PlaybackViewController: UIViewController {
     @objc
     private func rtmpStatusHandler(_ notification: Notification) {
         let e = Event.from(notification)
-        guard let data: ASObject = e.data as? ASObject, let code: String = data["code"] as? String else {
+        guard let data = e.data as? ASObject, let code = data["code"] as? String else {
             return
         }
         logger.info(code)
@@ -102,16 +97,6 @@ final class PlaybackViewController: UIViewController {
             rtmpStream.receiveVideo = true
         }
     }
-
-    @objc
-    private func didInterruptionNotification(_ notification: Notification) {
-        logger.info(notification)
-    }
-
-    @objc
-    private func didRouteChangeNotification(_ notification: Notification) {
-        logger.info(notification)
-    }
 }

 extension PlaybackViewController: AVPictureInPictureSampleBufferPlaybackDelegate {
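The playback example now attaches the stream through the NetStreamDrawable protocol instead of casting to each concrete view class, and drops the AVAudioSession observers that only logged the notification. A minimal sketch of the resulting attach step, assuming (as the single cast above relies on) that both MTHKView and PiPHKView conform to NetStreamDrawable:

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        // One protocol-typed cast covers either backing view, so the
        // controller no longer needs to know which drawable is in use.
        (view as? NetStreamDrawable)?.attachStream(rtmpStream)
    }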
Sources/Codec/VideoCodec.swift (11 additions, 11 deletions)
@@ -256,11 +256,11 @@ public class VideoCodec {
             duration: duration
         ) { [unowned self] status, _, sampleBuffer in
             guard let sampleBuffer, status == noErr else {
-                self.delegate?.videoCodec(self, errorOccurred: .failedToFlame(status: status))
+                delegate?.videoCodec(self, errorOccurred: .failedToFlame(status: status))
                 return
             }
-            self.formatDescription = sampleBuffer.formatDescription
-            self.delegate?.videoCodec(self, didOutput: sampleBuffer)
+            formatDescription = sampleBuffer.formatDescription
+            delegate?.videoCodec(self, didOutput: sampleBuffer)
         }
     }

@@ -292,7 +292,7 @@ public class VideoCodec {
         )

         guard status == noErr else {
-            self.delegate?.videoCodec(self, errorOccurred: .failedToFlame(status: status))
+            delegate?.videoCodec(self, errorOccurred: .failedToFlame(status: status))
             return
         }

@@ -309,19 +309,19 @@
         )

         guard let buffer = sampleBuffer, status == noErr else {
-            self.delegate?.videoCodec(self, errorOccurred: .failedToFlame(status: status))
+            delegate?.videoCodec(self, errorOccurred: .failedToFlame(status: status))
             return
         }

-        if self.isBaseline {
-            self.delegate?.videoCodec(self, didOutput: buffer)
+        if isBaseline {
+            delegate?.videoCodec(self, didOutput: buffer)
         } else {
-            self.buffers.append(buffer)
-            self.buffers.sort {
+            buffers.append(buffer)
+            buffers.sort {
                 $0.presentationTimeStamp < $1.presentationTimeStamp
             }
-            if self.minimumGroupOfPictures <= buffers.count {
-                self.delegate?.videoCodec(self, didOutput: buffer)
+            if minimumGroupOfPictures <= buffers.count {
+                delegate?.videoCodec(self, didOutput: buffers.removeFirst())
             }
         }
     }
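This is the hunk that actually fixes the leak. The main and high profiles allow B-frames, so decode order can differ from presentation order, and VideoCodec parks decoded frames in buffers, sorted by presentation timestamp, until minimumGroupOfPictures frames have accumulated. The old code appended every frame but emitted the frame that had just arrived and never removed anything, so buffers grew for the whole playback session; the baseline branch bypasses the queue, which is why baseline playback was unaffected. The new code emits and removes the earliest queued frame instead, keeping the queue bounded. A minimal sketch of the corrected drain step, using the same property names as the hunk but a hypothetical standalone type and closure instead of the real delegate and VideoToolbox callback:

    import CoreMedia

    // Sketch only: VideoCodec's real output path runs inside a VideoToolbox
    // callback; this condenses it to show the bounded reordering queue.
    final class ReorderingSketch {
        var buffers: [CMSampleBuffer] = []
        var minimumGroupOfPictures = 2   // illustrative value
        var isBaseline = false
        var onOutput: ((CMSampleBuffer) -> Void)?

        func emit(_ buffer: CMSampleBuffer) {
            if isBaseline {
                // No B-frames: decode order equals presentation order,
                // so the frame can be passed straight through.
                onOutput?(buffer)
                return
            }
            buffers.append(buffer)
            buffers.sort { $0.presentationTimeStamp < $1.presentationTimeStamp }
            if minimumGroupOfPictures <= buffers.count {
                // Hand the oldest frame on and drop it from the queue, instead of
                // re-emitting the newest frame and leaving the queue to grow forever.
                onOutput?(buffers.removeFirst())
            }
        }
    }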
