- Camera and Microphone streaming library via RTMP, HLS for iOS, macOS, tvOS.
 - Issuesの言語は、英語か、日本語でお願いします!
 
- Authentication
 - Publish and Recording (H264/AAC)
 - Playback (Technical Preview)
 -  Adaptive bitrate streaming
- Handling (see also #126)
 - Automatic drop frames
 
 -  Action Message Format
- AMF0
 - AMF3
 
 - SharedObject
 -  RTMPS
- Native (RTMP over SSL/TLS)
 - Tunneled (RTMPT over SSL/TLS) (Technical Preview)
 
 - RTMPT (Technical Preview)
 - ReplayKit Live as a Broadcast Upload Extension (Technical Preview)
 
- HTTPService
 - HLS Publish
 
-  Support tvOS 10.2+  (Technical Preview)
- tvOS can't publish the Camera and Microphone; only the playback feature is available.
 
 - Hardware acceleration for H264 video encoding, AAC audio encoding
 - Support "Allow app extension API only" option
 - Support GPUImage framework (~> 0.5.12)
 -  
Objective-C Bridging
| - | iOS | OSX | tvOS | XCode | Swift | CocoaPods | Carthage | 
|---|---|---|---|---|---|---|---|
| 0.7.0 | 8.0+ | 10.11+ | 10.2+ | 8.3+ | 3.1 | 1.2.0 | 0.20.0+ | 
| 0.6.0 | 8.0+ | 10.11+ | - | 8.3+ | 3.1 | 1.2.0 | 0.20.0+ | 
| 0.5.0 | 8.0+ | 10.11+ | - | 8.0+ | 3.0 | 1.1.0 | 0.17.2(0.5.5+) | 
| 0.4.0 | 8.0+ | 10.11+ | - | 7.3+ | 2.3 | 1.0.0 | 0.17.2(0.4.4+) | 
iOS10.0+
- NSMicrophoneUsageDescription
 - NSCameraUsageDescription
 - NSPhotoLibraryUsageDescription
 
# CocoaPods setup: add these lines to your Podfile.
source 'https://github.com/CocoaPods/Specs.git'
use_frameworks!
# Pulls the lf pod (0.7.x) into a target.
def import_pods
    pod 'lf', '~> 0.7.0'
end
target 'Your Target'  do
    platform :ios, '8.0'
    import_pods
end

github "shogo4405/lf.swift" ~> 0.7.0
New BSD
Bitcoin
1HtWpaYkRGZMnq253QsJP6xSKZRPoJ8Hrs

Real Time Messaging Protocol (RTMP).
// Basic RTMP publishing: attach the microphone and back camera,
// preview locally, then connect to the server and publish.
var rtmpConnection:RTMPConnection = RTMPConnection()
var rtmpStream:RTMPStream = RTMPStream(connection: rtmpConnection)
rtmpStream.attachAudio(AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)) { error in
    // print(error)
}
rtmpStream.attachCamera(DeviceUtil.device(withPosition: .back)) { error in
    // print(error)
}
// Local preview view for the outgoing stream.
var lfView:LFView = LFView(frame: view.bounds)
lfView.videoGravity = AVLayerVideoGravityResizeAspectFill
lfView.attachStream(rtmpStream)
// add ViewController#view
view.addSubview(lfView)
// Connect to the server, then publish under a stream name.
rtmpConnection.connect("rtmp://localhost/appName/instanceName")
rtmpStream.publish("streamName")
// if you want to record a stream.
// rtmpStream.publish("streamName", type: .localRecord)

let sampleRate:Double = 44_100
// see: #58
// Configure the shared audio session before attaching audio to a stream.
// NOTE: `#if(iOS)` is not valid Swift conditional-compilation syntax;
// the platform condition must be spelled `os(iOS)`.
#if os(iOS)
do {
    try AVAudioSession.sharedInstance().setPreferredSampleRate(sampleRate)
    try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord)
    try AVAudioSession.sharedInstance().setMode(AVAudioSessionModeDefault)
    try AVAudioSession.sharedInstance().setActive(true)
} catch {
    // Intentionally best-effort: session configuration failures are ignored here.
}
#endif
var rtmpStream:RTMPStream = RTMPStream(connection: rtmpConnection)
// Capture (device input) settings.
rtmpStream.captureSettings = [
    "fps": 30, // FPS
    "sessionPreset": AVCaptureSessionPresetMedium, // input video width/height
    "continuousAutofocus": false, // use camera autofocus mode
    "continuousExposure": false, //  use camera exposure mode
]
// AAC audio encoder settings.
rtmpStream.audioSettings = [
    "muted": false, // mute audio
    "bitrate": 32 * 1024,
    "sampleRate": sampleRate, 
]
// H264 video encoder settings.
rtmpStream.videoSettings = [
    "width": 640, // video output width
    "height": 360, // video output height
    "bitrate": 160 * 1024, // video output bitrate
    // "dataRateLimits": [160 * 1024 / 8, 1], optional kVTCompressionPropertyKey_DataRateLimits property
    "profileLevel": kVTProfileLevel_H264_Baseline_3_1, // H264 Profile require "import VideoToolbox"
    "maxKeyFrameIntervalDuration": 2, // key frame / sec
]
// A value of 0 means "same as the input".
rtmpStream.recorderSettings = [
    AVMediaTypeAudio: [
        AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
        AVSampleRateKey: 0,
        AVNumberOfChannelsKey: 0,
        // AVEncoderBitRateKey: 128000,
    ],
    AVMediaTypeVideo: [
        AVVideoCodecKey: AVVideoCodecH264,
        AVVideoHeightKey: 0,
        AVVideoWidthKey: 0,
        /*
        AVVideoCompressionPropertiesKey: [
            AVVideoMaxKeyFrameIntervalDurationKey: 2,
            AVVideoProfileLevelKey: AVVideoProfileLevelH264Baseline30,
            AVVideoAverageBitRateKey: 512000
        ]
        */
    ],
]
// Set the 2nd argument to false.
rtmpStream.attachAudio(AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio), automaticallyConfiguresApplicationAudioSession: false)

var rtmpConnection:RTMPConnection = RTMPConnection()
rtmpConnection.connect("rtmp://username:password@localhost/appName/instanceName")

// iOS
rtmpStream.attachScreen(ScreenCaptureSession(shared: UIApplication.shared))
// macOS
rtmpStream.attachScreen(AVCaptureScreenInput(displayID: CGMainDisplayID()))

HTTP Live Streaming (HLS). Your iPhone/Mac becomes an IP camera. Basic snippet. You can see http://ip.address:8080/hello/playlist.m3u8
// HLS publishing: capture camera/mic and serve the stream over HTTP on port 8080.
var httpStream:HTTPStream = HTTPStream()
httpStream.attachCamera(DeviceUtil.device(withPosition: .back))
httpStream.attachAudio(AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio))
httpStream.publish("hello")
// Local preview view for the outgoing stream.
var lfView:LFView = LFView(frame: view.bounds)
lfView.attachStream(httpStream)
// Bonjour-advertised HTTP service hosting the HLS playlist.
var httpService:HLSService = HLSService(domain: "", type: "_http._tcp", name: "lf", port: 8080)
httpService.startRunning()
httpService.addHTTPStream(httpStream)
// add ViewController#view
view.addSubview(lfView)

Make sure you set up and activate your AVAudioSession.
import AVFoundation
...
// Activate the shared audio session; the catch clause below was
// mis-indented in the original (it closed at a different column than its `do`).
do {
    try AVAudioSession.sharedInstance().setPreferredSampleRate(44_100)
    try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord)
    try AVAudioSession.sharedInstance().setMode(AVAudioSessionModeDefault)
    try AVAudioSession.sharedInstance().setActive(true)
} catch {
    // Intentionally best-effort: activation failures are ignored here.
}
- Adobe’s Real Time Messaging Protocol
 - Action Message Format -- AMF 0
 - Action Message Format -- AMF 3
 - Video File Format Specification Version 10
 - Adobe Flash Video File Format Specification Version 10.1