Skip to content

Commit

Permalink
Merge branch 'main' into duan/feat-rpc
Browse files Browse the repository at this point in the history
  • Loading branch information
cloudwebrtc committed Jan 27, 2025
2 parents be9b9b0 + cf800fa commit 09e0b1e
Show file tree
Hide file tree
Showing 18 changed files with 204 additions and 30 deletions.
5 changes: 5 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,10 @@
# CHANGELOG

## 2.3.5

* feat: add TrackProcessor support. (#657)
* fix: bugs with mute/unmute and speaker switching. (#684)

## 2.3.4+hotfix.2

* fix: side effects for stop remote track.
Expand Down
16 changes: 16 additions & 0 deletions example/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,3 +12,19 @@ flutter pub get
# you can autofill URL and TOKEN for first run in debug mode.
flutter run --dart-define=URL=wss://${LIVEKIT_SERVER_IP_OR_DOMAIN} --dart-define=TOKEN=${YOUR_TOKEN}
```

## End-to-End Encryption (E2EE)

The example app supports end-to-end encryption for audio and video tracks. To enable E2EE:

1. Toggle the "E2EE" switch in the connect screen
2. Enter a shared key that will be used for encryption
3. All participants must use the same shared key to communicate

For web support, you'll need to compile the E2EE web worker:

```bash
dart compile js web/e2ee.worker.dart -o example/web/e2ee.worker.dart.js -m
```

Note: All participants in the room must have E2EE enabled and use the same shared key to see and hear each other. If the keys don't match, participants won't be able to decode each other's audio and video.
2 changes: 1 addition & 1 deletion ios/livekit_client.podspec
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
Pod::Spec.new do |s|
s.name = 'livekit_client'
s.version = '2.3.4'
s.version = '2.3.5'
s.summary = 'Open source platform for real-time audio and video.'
s.description = 'Open source platform for real-time audio and video.'
s.homepage = 'https://livekit.io/'
Expand Down
1 change: 1 addition & 0 deletions lib/livekit_client.dart
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ export 'src/track/remote/audio.dart';
export 'src/track/remote/remote.dart';
export 'src/track/remote/video.dart';
export 'src/track/track.dart';
export 'src/track/processor.dart';
export 'src/types/other.dart';
export 'src/types/participant_permissions.dart';
export 'src/types/video_dimensions.dart';
Expand Down
5 changes: 5 additions & 0 deletions lib/src/core/engine.dart
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,10 @@ class Engine extends Disposable with EventsEmittable<EngineEvent> {

RegionUrlProvider? _regionUrlProvider;

lk_models.ServerInfo? _serverInfo;

lk_models.ServerInfo? get serverInfo => _serverInfo;

void clearReconnectTimeout() {
if (reconnectTimeout != null) {
reconnectTimeout?.cancel();
Expand Down Expand Up @@ -911,6 +915,7 @@ class Engine extends Disposable with EventsEmittable<EngineEvent> {
..on<SignalJoinResponseEvent>((event) async {
// create peer connections
_subscriberPrimary = event.response.subscriberPrimary;
_serverInfo = event.response.serverInfo;
var iceServersFromServer =
event.response.iceServers.map((e) => e.toSDKType()).toList();

Expand Down
14 changes: 14 additions & 0 deletions lib/src/events.dart
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.

import 'package:livekit_client/src/track/processor.dart';
import 'core/engine.dart';
import 'core/room.dart';
import 'core/signal_client.dart';
Expand Down Expand Up @@ -578,3 +579,16 @@ class AudioVisualizerEvent with TrackEvent {
String toString() => '${runtimeType}'
'track: ${track})';
}

/// Emitted when a [TrackProcessor] is attached to or detached from a [Track].
///
/// [processor] is the newly attached processor, or `null` when the
/// processor was stopped/removed.
class TrackProcessorUpdateEvent with TrackEvent {
  final Track track;
  final TrackProcessor? processor;
  const TrackProcessorUpdateEvent({
    required this.track,
    this.processor,
  });

  @override
  // Fixed: the original concatenated adjacent literals into
  // '…Eventtrack: …)' — a ')' with no matching '(' and no separator.
  String toString() => '$runtimeType(track: $track)';
}
23 changes: 9 additions & 14 deletions lib/src/hardware/hardware.dart
Original file line number Diff line number Diff line change
Expand Up @@ -76,10 +76,14 @@ class Hardware {

MediaDevice? selectedVideoInput;

bool? speakerOn;
bool? _speakerOn;

bool? get speakerOn => _speakerOn;

bool _preferSpeakerOutput = false;

bool get preferSpeakerOutput => _preferSpeakerOutput;

Future<List<MediaDevice>> enumerateDevices({String? type}) async {
var infos = await rtc.navigator.mediaDevices.enumerateDevices();
var devices = infos
Expand Down Expand Up @@ -143,21 +147,12 @@ class Hardware {
}
}

bool get preferSpeakerOutput => _preferSpeakerOutput;

bool get canSwitchSpeakerphone =>
(lkPlatformIsMobile()) &&
[AudioTrackState.localOnly, AudioTrackState.localAndRemote]
.contains(audioTrackState);
bool get canSwitchSpeakerphone => lkPlatformIsMobile();

Future<void> setSpeakerphoneOn(bool enable) async {
if (lkPlatformIsMobile()) {
speakerOn = enable;
if (canSwitchSpeakerphone) {
await rtc.Helper.setSpeakerphoneOn(enable);
} else {
logger.warning('Can\'t switch speaker/earpiece');
}
if (canSwitchSpeakerphone) {
_speakerOn = enable;
await rtc.Helper.setSpeakerphoneOn(enable);
} else {
logger.warning('setSpeakerphoneOn only support on iOS/Android');
}
Expand Down
2 changes: 1 addition & 1 deletion lib/src/livekit.dart
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ import 'support/native.dart';
/// Main entry point to connect to a room.
/// {@category Room}
class LiveKitClient {
static const version = '2.3.4';
static const version = '2.3.5';

/// Initialize the WebRTC plugin. If this is not manually called, will be
/// initialized with default settings.
Expand Down
9 changes: 9 additions & 0 deletions lib/src/participant/local.dart
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,8 @@ class LocalParticipant extends Participant<LocalTrackPublication> {

// did publish
await track.onPublish();
await track.processor?.onPublish(room);

await room.applyAudioSpeakerSettings();

var listener = track.createListener();
Expand Down Expand Up @@ -336,6 +338,7 @@ class LocalParticipant extends Participant<LocalTrackPublication> {

// did publish
await track.onPublish();
await track.processor?.onPublish(room);

var listener = track.createListener();
listener.on((TrackEndedEvent event) {
Expand Down Expand Up @@ -390,6 +393,12 @@ class LocalParticipant extends Participant<LocalTrackPublication> {

// did unpublish
await track.onUnpublish();

if (track.processor != null) {
await track.processor?.onUnpublish();
await track.stopProcessor();
}

await room.applyAudioSpeakerSettings();
}

Expand Down
8 changes: 7 additions & 1 deletion lib/src/track/local/audio.dart
Original file line number Diff line number Diff line change
Expand Up @@ -136,12 +136,18 @@ class LocalAudioTrack extends LocalTrack
options ??= const AudioCaptureOptions();
final stream = await LocalTrack.createStream(options);

return LocalAudioTrack(
var track = LocalAudioTrack(
TrackSource.microphone,
stream,
stream.getAudioTracks().first,
options,
enableVisualizer: enableVisualizer,
);

if (options.processor != null) {
await track.setProcessor(options.processor);
}

return track;
}
}
57 changes: 57 additions & 0 deletions lib/src/track/local/local.dart
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ import '../../support/native.dart';
import '../../support/platform.dart';
import '../../types/other.dart';
import '../options.dart';
import '../processor.dart';
import '../remote/audio.dart';
import '../remote/video.dart';
import '../track.dart';
Expand Down Expand Up @@ -119,6 +120,10 @@ abstract class LocalTrack extends Track {

bool _stopped = false;

TrackProcessor? _processor;

TrackProcessor? get processor => _processor;

LocalTrack(
TrackType kind,
TrackSource source,
Expand Down Expand Up @@ -253,6 +258,10 @@ abstract class LocalTrack extends Track {
final newStream = await LocalTrack.createStream(currentOptions);
final newTrack = newStream.getTracks().first;

var processor = _processor;

await stopProcessor();

// replace track on sender
try {
await sender?.replaceTrack(newTrack);
Expand All @@ -267,6 +276,10 @@ abstract class LocalTrack extends Track {
// set new stream & track to this object
updateMediaStreamAndTrack(newStream, newTrack);

if (processor != null) {
await setProcessor(processor);
}

// mark as started
await start();

Expand All @@ -277,6 +290,50 @@ abstract class LocalTrack extends Track {
));
}

/// Attaches [processor] to this track, replacing any previously attached one.
///
/// Passing `null` is a no-op; use [stopProcessor] to detach the current
/// processor. Emits a [TrackProcessorUpdateEvent] once the new processor
/// has been initialized.
Future<void> setProcessor(TrackProcessor? processor) async {
  if (processor == null) return;

  // Tear down any existing processor before swapping in the new one.
  if (_processor != null) {
    await stopProcessor();
  }

  _processor = processor;

  final options = ProcessorOptions(
    kind: kind,
    track: mediaStreamTrack,
  );
  await processor.init(options);
  logger.fine('processor initialized');

  events.emit(TrackProcessorUpdateEvent(track: this, processor: _processor));
}

/// Detaches and destroys the currently attached processor, if any.
///
/// [keepElement] is currently unused here — the only code guarded by it in
/// the original port is commented out (presumably a DOM-element cleanup
/// carried over from the JS SDK — TODO confirm). Emits a
/// [TrackProcessorUpdateEvent] with a `null` processor on completion.
@internal
Future<void> stopProcessor({bool keepElement = false}) async {
  final processor = _processor;
  if (processor == null) return;

  logger.fine('stopping processor');
  await processor.destroy();
  _processor = null;

  // NOTE(review): ported-over steps not yet implemented here:
  // - remove the processor's element when !keepElement
  //   (processorElement?.remove(); processorElement = null;)
  // - re-apply the original track constraints in case the processor
  //   changed them, and force re-setting the mediaStreamTrack on the
  //   sender.

  events.emit(TrackProcessorUpdateEvent(track: this));
}

@internal
@mustCallSuper
Future<bool> onPublish() async {
Expand Down
17 changes: 10 additions & 7 deletions lib/src/track/local/video.dart
Original file line number Diff line number Diff line change
Expand Up @@ -162,12 +162,9 @@ class LocalVideoTrack extends LocalTrack with VideoTrack {
}

// Private constructor
LocalVideoTrack._(
TrackSource source,
rtc.MediaStream stream,
rtc.MediaStreamTrack track,
this.currentOptions,
) : super(
LocalVideoTrack._(TrackSource source, rtc.MediaStream stream,
rtc.MediaStreamTrack track, this.currentOptions)
: super(
TrackType.VIDEO,
source,
stream,
Expand All @@ -181,12 +178,18 @@ class LocalVideoTrack extends LocalTrack with VideoTrack {
options ??= const CameraCaptureOptions();

final stream = await LocalTrack.createStream(options);
return LocalVideoTrack._(
var track = LocalVideoTrack._(
TrackSource.camera,
stream,
stream.getVideoTracks().first,
options,
);

if (options.processor != null) {
await track.setProcessor(options.processor);
}

return track;
}

/// Creates a LocalVideoTrack from the display.
Expand Down
11 changes: 11 additions & 0 deletions lib/src/track/options.dart
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ import '../support/platform.dart';
import '../track/local/audio.dart';
import '../track/local/video.dart';
import '../types/video_parameters.dart';
import 'processor.dart';

/// A type that represents front or back of the camera.
enum CameraPosition {
Expand Down Expand Up @@ -60,10 +61,12 @@ class CameraCaptureOptions extends VideoCaptureOptions {
double? maxFrameRate,
VideoParameters params = VideoParametersPresets.h720_169,
this.stopCameraCaptureOnMute = true,
TrackProcessor<VideoProcessorOptions>? processor,
}) : super(
params: params,
deviceId: deviceId,
maxFrameRate: maxFrameRate,
processor: processor,
);

CameraCaptureOptions.from({required VideoCaptureOptions captureOptions})
Expand Down Expand Up @@ -217,10 +220,14 @@ abstract class VideoCaptureOptions extends LocalTrackOptions {
// Limit the maximum frameRate of the capture device.
final double? maxFrameRate;

/// A processor to apply to the video track.
final TrackProcessor<VideoProcessorOptions>? processor;

const VideoCaptureOptions({
this.params = VideoParametersPresets.h540_169,
this.deviceId,
this.maxFrameRate,
this.processor,
});

@override
Expand Down Expand Up @@ -269,6 +276,9 @@ class AudioCaptureOptions extends LocalTrackOptions {
/// set to false to only toggle enabled instead of stop/replaceTrack for muting
final bool stopAudioCaptureOnMute;

/// A processor to apply to the audio track.
final TrackProcessor<AudioProcessorOptions>? processor;

const AudioCaptureOptions({
this.deviceId,
this.noiseSuppression = true,
Expand All @@ -278,6 +288,7 @@ class AudioCaptureOptions extends LocalTrackOptions {
this.voiceIsolation = true,
this.typingNoiseDetection = true,
this.stopAudioCaptureOnMute = true,
this.processor,
});

@override
Expand Down
Loading

0 comments on commit 09e0b1e

Please sign in to comment.