From 1354568b2434d4261aabb0b6ffa8272654bc9657 Mon Sep 17 00:00:00 2001 From: "Pohsiang (John) Hsu" Date: Wed, 29 Nov 2023 08:47:50 -0800 Subject: [PATCH] [doc] update doc, cosmetic fixes --- .../EncodingAndPackagingTool.Cli.Test.cs | 24 +++++++++---------- EncodingAndPackagingExample/README.md | 4 ++-- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/EncodingAndPackagingExample/EncodingAndPackagingTool.Test/EncodingAndPackagingTool.Cli.Test.cs b/EncodingAndPackagingExample/EncodingAndPackagingTool.Test/EncodingAndPackagingTool.Cli.Test.cs index 58d224c..ec7952a 100644 --- a/EncodingAndPackagingExample/EncodingAndPackagingTool.Test/EncodingAndPackagingTool.Cli.Test.cs +++ b/EncodingAndPackagingExample/EncodingAndPackagingTool.Test/EncodingAndPackagingTool.Cli.Test.cs @@ -15,17 +15,17 @@ public class EncodingAndPackagingTool private static string _storageServiceUri; private static string _inputContainerUri; private static string _testDataPath; - private static DefaultAzureCredential _azureCrendentail; + private static DefaultAzureCredential _azureCredential; static EncodingAndPackagingTool() { _storageServiceUri = "https://127.0.0.1:10000/devstoreaccount1"; _inputContainerUri = $"{_storageServiceUri}/encodingandpackagingtooltest"; _testDataPath = Environment.GetEnvironmentVariable("TEST_DATA") ?? throw new Exception("TEST_DATA environment variable is missing."); - _azureCrendentail = new DefaultAzureCredential(); + _azureCredential = new DefaultAzureCredential(); // Upload test video clip. - var container = new BlobContainerClient(new Uri(_inputContainerUri), _azureCrendentail); + var container = new BlobContainerClient(new Uri(_inputContainerUri), _azureCredential); container.CreateIfNotExists(); Task.WhenAll(Directory.GetFiles(_testDataPath).Select(async file => @@ -54,7 +54,7 @@ public async Task EncodingAndPackagingToolTest() // Verify dash related files. // We should have the output mpd file. 
- var blob = new BlobClient(new Uri($"{outputContainerUri}/bunny.640x480.15fps.mpd"), _azureCrendentail); + var blob = new BlobClient(new Uri($"{outputContainerUri}/bunny.640x480.15fps.mpd"), _azureCredential); using (var stream = await blob.OpenReadAsync()) { Assert.True(stream.Length > 2000); @@ -63,7 +63,7 @@ public async Task EncodingAndPackagingToolTest() // We should have 13 chunk files for stream 0 for (var i = 1; i <= 13; ++i) { - blob = new BlobClient(new Uri($"{outputContainerUri}/chunk-stream0-{i.ToString("00000")}.m4s"), _azureCrendentail); + blob = new BlobClient(new Uri($"{outputContainerUri}/chunk-stream0-{i.ToString("00000")}.m4s"), _azureCredential); using (var stream = await blob.OpenReadAsync()) { Assert.True(stream.Length > 2000); @@ -73,7 +73,7 @@ public async Task EncodingAndPackagingToolTest() // We should have 24 chunk files for stream 1 for (var i = 1; i <= 24; ++i) { - blob = new BlobClient(new Uri($"{outputContainerUri}/chunk-stream1-{i.ToString("00000")}.m4s"), _azureCrendentail); + blob = new BlobClient(new Uri($"{outputContainerUri}/chunk-stream1-{i.ToString("00000")}.m4s"), _azureCredential); using (var stream = await blob.OpenReadAsync()) { Assert.True(stream.Length > 2000); @@ -83,7 +83,7 @@ public async Task EncodingAndPackagingToolTest() // We should have 13 chunk files for stream 2 for (var i = 1; i <= 13; ++i) { - blob = new BlobClient(new Uri($"{outputContainerUri}/chunk-stream2-{i.ToString("00000")}.m4s"), _azureCrendentail); + blob = new BlobClient(new Uri($"{outputContainerUri}/chunk-stream2-{i.ToString("00000")}.m4s"), _azureCredential); using (var stream = await blob.OpenReadAsync()) { Assert.True(stream.Length > 2000); @@ -93,7 +93,7 @@ public async Task EncodingAndPackagingToolTest() // We should have 13 chunk files for stream 3 for (var i = 1; i <= 13; ++i) { - blob = new BlobClient(new Uri($"{outputContainerUri}/chunk-stream3-{i.ToString("00000")}.m4s"), _azureCrendentail); + blob = new BlobClient(new 
Uri($"{outputContainerUri}/chunk-stream3-{i.ToString("00000")}.m4s"), _azureCredential); using (var stream = await blob.OpenReadAsync()) { Assert.True(stream.Length > 2000); @@ -103,7 +103,7 @@ public async Task EncodingAndPackagingToolTest() // We should have 4 init chunk files. for (var i = 0; i < 4; ++i) { - blob = new BlobClient(new Uri($"{outputContainerUri}/init-stream{i}.m4s"), _azureCrendentail); + blob = new BlobClient(new Uri($"{outputContainerUri}/init-stream{i}.m4s"), _azureCredential); using (var stream = await blob.OpenReadAsync()) { Assert.True(stream.Length > 500); @@ -112,7 +112,7 @@ public async Task EncodingAndPackagingToolTest() // Verify hls related files. // We should have the output master hls file. - blob = new BlobClient(new Uri($"{outputContainerUri}/bunny.640x480.15fps.m3u8"), _azureCrendentail); + blob = new BlobClient(new Uri($"{outputContainerUri}/bunny.640x480.15fps.m3u8"), _azureCredential); using (var stream = await blob.OpenReadAsync()) { Assert.True(stream.Length > 400); @@ -121,7 +121,7 @@ public async Task EncodingAndPackagingToolTest() // We should have 4 hls playlist file. for (var i = 0; i < 4; ++i) { - blob = new BlobClient(new Uri($"{outputContainerUri}/media_{i}.m3u8"), _azureCrendentail); + blob = new BlobClient(new Uri($"{outputContainerUri}/media_{i}.m3u8"), _azureCredential); using (var stream = await blob.OpenReadAsync()) { Assert.True(stream.Length > 1000); @@ -129,7 +129,7 @@ public async Task EncodingAndPackagingToolTest() } // Delete the container if success. 
- var container = new BlobContainerClient(new Uri(outputContainerUri), _azureCrendentail); + var container = new BlobContainerClient(new Uri(outputContainerUri), _azureCredential); await container.DeleteAsync(); } } diff --git a/EncodingAndPackagingExample/README.md b/EncodingAndPackagingExample/README.md index d260a69..57cf67d 100644 --- a/EncodingAndPackagingExample/README.md +++ b/EncodingAndPackagingExample/README.md @@ -3,11 +3,11 @@ Encoding and Packaging Sample ## Overview -This folder contains sample code that demonstrates how to use FFMpeg to encode and package a MP4 video file into multi-stream MPEG-DASH format that's suitable for playback in the browser. The functionality is built into a .NET library, and two sample applications are shown that demonstrates the use the library in a command line tool and in an Azure Function. +This folder contains sample code that demonstrates how to use FFmpeg to encode and package an MP4 video file into multi-stream MPEG-DASH / HLS format that's suitable for playback in the browser. The functionality is built into a .NET library, and two sample applications are shown that demonstrate the use of the library in a command line tool and in an Azure Function. Three projects: -* `EncodingAndPackagingTool.Core`: library project containing the core functionality of encoding and packaging using FFmpeg. +* `EncodingAndPackagingTool.Core`: library project containing the core functionality of encoding and packaging using FFmpeg. It demonstrates how to use FFmpeg to encode and package an input mp4 file from an input Azure blob storage container with a simple setting of encoding to 3 adaptive bitrate streams with h.264 and AAC, and generating HLS/DASH manifests for streaming and uploading the result to an output Azure blob storage. 
Please see [EncodingAndPackagingTool.cs (line 98)](https://github.com/Azure/azure-media-migration/blob/main/EncodingAndPackagingExample/EncodingAndPackagingTool.Core/EncodingAndPackagingTool.cs#L98) for further details on the FFmpeg setting used, and feel free to make customizations here to suit your scenarios. * `EncodingAndPackagingTool.Cli`: command line executable project that demonstrates how to use the library. * `EncodingAndPackagingTool.AzureFunction`: Azure Function project that demonstrates how to use the library where the Azure function is triggered by a message from an Azure message queue.