diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000..1ff0c423 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,63 @@ +############################################################################### +# Set default behavior to automatically normalize line endings. +############################################################################### +* text=auto + +############################################################################### +# Set default behavior for command prompt diff. +# +# This is need for earlier builds of msysgit that does not have it on by +# default for csharp files. +# Note: This is only used by command line +############################################################################### +#*.cs diff=csharp + +############################################################################### +# Set the merge driver for project and solution files +# +# Merging from the command prompt will add diff markers to the files if there +# are conflicts (Merging from VS is not affected by the settings below, in VS +# the diff markers are never inserted). Diff markers may cause the following +# file extensions to fail to load in VS. An alternative would be to treat +# these files as binary and thus will always conflict and require user +# intervention with every merge. To do so, just uncomment the entries below +############################################################################### +#*.sln merge=binary +#*.csproj merge=binary +#*.vbproj merge=binary +#*.vcxproj merge=binary +#*.vcproj merge=binary +#*.dbproj merge=binary +#*.fsproj merge=binary +#*.lsproj merge=binary +#*.wixproj merge=binary +#*.modelproj merge=binary +#*.sqlproj merge=binary +#*.wwaproj merge=binary + +############################################################################### +# behavior for image files +# +# image files are treated as binary by default. 
+############################################################################### +#*.jpg binary +#*.png binary +#*.gif binary + +############################################################################### +# diff behavior for common document formats +# +# Convert binary document formats to text before diffing them. This feature +# is only available from the command line. Turn it on by uncommenting the +# entries below. +############################################################################### +#*.doc diff=astextplain +#*.DOC diff=astextplain +#*.docx diff=astextplain +#*.DOCX diff=astextplain +#*.dot diff=astextplain +#*.DOT diff=astextplain +#*.pdf diff=astextplain +#*.PDF diff=astextplain +#*.rtf diff=astextplain +#*.RTF diff=astextplain diff --git a/.nuget/NuGet.Config b/.nuget/NuGet.Config new file mode 100644 index 00000000..67f8ea04 --- /dev/null +++ b/.nuget/NuGet.Config @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/.nuget/NuGet.exe b/.nuget/NuGet.exe new file mode 100644 index 00000000..9ca66594 Binary files /dev/null and b/.nuget/NuGet.exe differ diff --git a/.nuget/NuGet.targets b/.nuget/NuGet.targets new file mode 100644 index 00000000..3f8c37b2 --- /dev/null +++ b/.nuget/NuGet.targets @@ -0,0 +1,144 @@ + + + + $(MSBuildProjectDirectory)\..\ + + + false + + + false + + + true + + + false + + + + + + + + + + + $([System.IO.Path]::Combine($(SolutionDir), ".nuget")) + + + + + $(SolutionDir).nuget + + + + $(MSBuildProjectDirectory)\packages.$(MSBuildProjectName.Replace(' ', '_')).config + $(MSBuildProjectDirectory)\packages.$(MSBuildProjectName).config + + + + $(MSBuildProjectDirectory)\packages.config + $(PackagesProjectConfig) + + + + + $(NuGetToolsPath)\NuGet.exe + @(PackageSource) + + "$(NuGetExePath)" + mono --runtime=v4.0.30319 "$(NuGetExePath)" + + $(TargetDir.Trim('\\')) + + -RequireConsent + -NonInteractive + + "$(SolutionDir) " + "$(SolutionDir)" + + + $(NuGetCommand) install "$(PackagesConfig)" -source 
"$(PackageSources)" $(NonInteractiveSwitch) $(RequireConsentSwitch) -solutionDir $(PaddedSolutionDir) + $(NuGetCommand) pack "$(ProjectPath)" -Properties "Configuration=$(Configuration);Platform=$(Platform)" $(NonInteractiveSwitch) -OutputDirectory "$(PackageOutputDir)" -symbols + + + + RestorePackages; + $(BuildDependsOn); + + + + + $(BuildDependsOn); + BuildPackage; + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/DataMovement.sln b/DataMovement.sln new file mode 100644 index 00000000..0edc60f6 --- /dev/null +++ b/DataMovement.sln @@ -0,0 +1,82 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 2013 +VisualStudioVersion = 12.0.21005.1 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DataMovement", "lib\DataMovement.csproj", "{B821E031-09CC-48F0-BDC6-2793228D4027}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = ".nuget", ".nuget", "{E2E6D76F-6339-4E02-96EB-94CC8E6D62B2}" + ProjectSection(SolutionItems) = preProject + .nuget\NuGet.Config = .nuget\NuGet.Config + .nuget\NuGet.exe = .nuget\NuGet.exe + .nuget\NuGet.targets = .nuget\NuGet.targets + EndProjectSection +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Test", "Test", "{4353D299-C4E9-41FF-BB35-6769BACA424A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DMLibTest", "test\DMLibTest\DMLibTest.csproj", "{2A4656A4-F744-4653-A9D6-15112E9AB352}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DMLibTestCodeGen", "test\DMLibTestCodeGen\DMLibTestCodeGen.csproj", "{7018EE4E-D389-424E-A8DD-F9B4FFDA5194}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MsTestLib", "test\MsTestLib\MsTestLib.csproj", "{AC39B50F-DC27-4411-9ED4-A4A137190ACB}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|Mixed Platforms = Debug|Mixed Platforms + 
Debug|Win32 = Debug|Win32 + Release|Any CPU = Release|Any CPU + Release|Mixed Platforms = Release|Mixed Platforms + Release|Win32 = Release|Win32 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {B821E031-09CC-48F0-BDC6-2793228D4027}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B821E031-09CC-48F0-BDC6-2793228D4027}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B821E031-09CC-48F0-BDC6-2793228D4027}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU + {B821E031-09CC-48F0-BDC6-2793228D4027}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU + {B821E031-09CC-48F0-BDC6-2793228D4027}.Debug|Win32.ActiveCfg = Debug|Any CPU + {B821E031-09CC-48F0-BDC6-2793228D4027}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B821E031-09CC-48F0-BDC6-2793228D4027}.Release|Any CPU.Build.0 = Release|Any CPU + {B821E031-09CC-48F0-BDC6-2793228D4027}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU + {B821E031-09CC-48F0-BDC6-2793228D4027}.Release|Mixed Platforms.Build.0 = Release|Any CPU + {B821E031-09CC-48F0-BDC6-2793228D4027}.Release|Win32.ActiveCfg = Release|Any CPU + {2A4656A4-F744-4653-A9D6-15112E9AB352}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2A4656A4-F744-4653-A9D6-15112E9AB352}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2A4656A4-F744-4653-A9D6-15112E9AB352}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU + {2A4656A4-F744-4653-A9D6-15112E9AB352}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU + {2A4656A4-F744-4653-A9D6-15112E9AB352}.Debug|Win32.ActiveCfg = Debug|Any CPU + {2A4656A4-F744-4653-A9D6-15112E9AB352}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2A4656A4-F744-4653-A9D6-15112E9AB352}.Release|Any CPU.Build.0 = Release|Any CPU + {2A4656A4-F744-4653-A9D6-15112E9AB352}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU + {2A4656A4-F744-4653-A9D6-15112E9AB352}.Release|Mixed Platforms.Build.0 = Release|Any CPU + {2A4656A4-F744-4653-A9D6-15112E9AB352}.Release|Win32.ActiveCfg = Release|Any CPU + {7018EE4E-D389-424E-A8DD-F9B4FFDA5194}.Debug|Any CPU.ActiveCfg = 
Debug|Any CPU + {7018EE4E-D389-424E-A8DD-F9B4FFDA5194}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7018EE4E-D389-424E-A8DD-F9B4FFDA5194}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU + {7018EE4E-D389-424E-A8DD-F9B4FFDA5194}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU + {7018EE4E-D389-424E-A8DD-F9B4FFDA5194}.Debug|Win32.ActiveCfg = Debug|Any CPU + {7018EE4E-D389-424E-A8DD-F9B4FFDA5194}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7018EE4E-D389-424E-A8DD-F9B4FFDA5194}.Release|Any CPU.Build.0 = Release|Any CPU + {7018EE4E-D389-424E-A8DD-F9B4FFDA5194}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU + {7018EE4E-D389-424E-A8DD-F9B4FFDA5194}.Release|Mixed Platforms.Build.0 = Release|Any CPU + {7018EE4E-D389-424E-A8DD-F9B4FFDA5194}.Release|Win32.ActiveCfg = Release|Any CPU + {AC39B50F-DC27-4411-9ED4-A4A137190ACB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {AC39B50F-DC27-4411-9ED4-A4A137190ACB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {AC39B50F-DC27-4411-9ED4-A4A137190ACB}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU + {AC39B50F-DC27-4411-9ED4-A4A137190ACB}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU + {AC39B50F-DC27-4411-9ED4-A4A137190ACB}.Debug|Win32.ActiveCfg = Debug|Any CPU + {AC39B50F-DC27-4411-9ED4-A4A137190ACB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {AC39B50F-DC27-4411-9ED4-A4A137190ACB}.Release|Any CPU.Build.0 = Release|Any CPU + {AC39B50F-DC27-4411-9ED4-A4A137190ACB}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU + {AC39B50F-DC27-4411-9ED4-A4A137190ACB}.Release|Mixed Platforms.Build.0 = Release|Any CPU + {AC39B50F-DC27-4411-9ED4-A4A137190ACB}.Release|Win32.ActiveCfg = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {2A4656A4-F744-4653-A9D6-15112E9AB352} = {4353D299-C4E9-41FF-BB35-6769BACA424A} + {7018EE4E-D389-424E-A8DD-F9B4FFDA5194} = {4353D299-C4E9-41FF-BB35-6769BACA424A} + {AC39B50F-DC27-4411-9ED4-A4A137190ACB} 
= {4353D299-C4E9-41FF-BB35-6769BACA424A} + EndGlobalSection +EndGlobal diff --git a/LICENSE b/LICENSE index ad410e11..5761bc66 100644 --- a/LICENSE +++ b/LICENSE @@ -1,201 +1,21 @@ -Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. 
Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative 
Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file +The MIT License (MIT) + +Copyright (c) 2015 Microsoft Corporation + +Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 00000000..fb536c37 --- /dev/null +++ b/README.md @@ -0,0 +1,154 @@ +# Microsoft Azure Storage Data Movement Library (0.1.0) + +The Microsoft Azure Storage Data Movement Library designed for high-performance uploading, downloading and copying Azure Storage Blob and File. + +[AzCopy](https://azure.microsoft.com/documentation/articles/storage-use-azcopy/), the Azure Storage data management command line utility, is refering to this library. + +For more information about the Azure Storage, please visit [Microsoft Azure Storage Documentation](https://azure.microsoft.com/documentation/services/storage/). + +# Features + +- Blobs + - Download/Upload/Copy Blobs. + - Synchronous and asynchronous copy Blobs + - Concurrently transfer Blobs and Blob chunks, define number of concurrents + - Download Specific Blob Snapshot + +- Files + - Download/Upload/Copy Files. + - Synchronous and asynchronous copy Files + - Concurrently transfer Files and File ranges, define number of concurrents + +- General + - Track data transfer progress + - Recover the data transfer + - Set Access Condition + - Set User Agent Suffix + +# Getting started + +For the best development experience, we recommend that developers use the official Microsoft NuGet packages for libraries. NuGet packages are regularly updated with new functionality and hotfixes. + + +## Requirements + +To call Azure services, you must first have an Azure subscription. 
Sign up for a [free trial](/en-us/pricing/free-trial/) or use your [MSDN subscriber benefits](/en-us/pricing/member-offers/msdn-benefits-details/). + + +## Download & Install + + +### Via Git + +To get the source code of the SDK via git just type: + +```bash +git clone https://github.com/Azure/azure-storage-net-data-movement.git +cd azure-storage-net-data-movement +``` + +### Via NuGet + +To get the binaries of this library as distributed by Microsoft, ready for use +within your project you can also have them installed by the .NET package manager [NuGet](http://www.nuget.org/). + +`Install-Package WindowsAzure.Storage.DataMovment` + + +## Dependencies + +### Azure Storage Client Library for .NET + +This version depends on Azure Storage Client Library for .NET. + +- [WindowsAzure.Storage](https://www.nuget.org/packages/WindowsAzure.Storage/) + + + +## Code Samples + +Find more samples at [getting started with Storage Data Movement Library (TBC)]() and the [sample folder (TBC)](). + +### Upload a blob + +First, include the classes you need, here we include Storage client library, the Storage data movement library and the .NET threading because data movement libary provides Task Asynchronous interfaces to transfer storage objects: + +```csharp +using System; +using System.Threading; +using Microsoft.WindowsAzure.Storage; +using Microsoft.WindowsAzure.Storage.Auth; +using Microsoft.WindowsAzure.Storage.Blob; +using Microsoft.WindowsAzure.Storage.DataMovement; +``` + +Now use the interfaces provided by Storage client lib to setup the storage context (find more details at [how to use Blob Storage from .NET](https://azure.microsoft.com/documentation/articles/storage-dotnet-how-to-use-blobs/)): + +```csharp +CloudStorageAccount account = CloudStorageAccount.Parse( + configurationManager.ConnectionStrings["StorageConnectionString"]); +CloudBlobClient blobClient = account.CreateCloudBlobClient(); +CloudBlobContainer blobContainer = 
blobClient.GetContainerReference("mycontainer"); +blobContainer.CreateIfNotExists(); +string sourcePath = "path\\to\\test.txt"; +CloudBlockBlob destBlob = blobContainer.GetBlockBlobReference("myblob"); +``` + +Once you setup the storage blob context, you can start to use `WindowsAzure.Storage.DataMovement.TransferManager` to upload the blob and track the upload progress, + +```csharp +// Setup the number of the concurrent operations +TransferManager.Configurations.ParallelOperations = 64; +// Setup the transfer context and track the upoload progress +TransferContext context = new TransferContext(); +context.ProgressHandler = new Progress((progress) => +{ + Console.WriteLine("Bytes uploaded: {0}/{1}", + progress.BytesTransferred, progress.TotalSize); +}); +// Upload a local blob +var task = TransferManager.UploadAsync( + sourcePath, destBlob, null, context, CancellationToken.None); +task.Wait(); +``` +# Best Practice + +### Increase .NET HTTP connections limit +By default, the .Net HTTP connection limit is 2. This implies that only two concurrent connections can be maintained. It prevents more parallel connections accessing Azure blob storage from your application. + +AzCopy will set ServicePointManager.DefaultConnectionLimit to the number of eight multiple the core number by default. To have a comparable performance when using Data Movement Library alone, we recommend you set this value as well. + +```csharp +ServicePoint myServicePoint = ServicePointManager.FindServicePoint(myServiceUri); +myServicePoint.ConnectionLimit = 48 +``` + +### Turn off 100-continue +When the property "Expect100Continue" is set to true, client requests that use the PUT and POST methods will add an Expect: 100-continue header to the request and it will expect to receive a 100-Continue response from the server to indicate that the client should send the data to be posted. 
This mechanism allows clients to avoid sending large amounts of data over the network when the server, based on the request headers, intends to reject the request. + +However, once the entire payload is received on the server end, other errors may still occur. And if Windows Azure clients have tested the client well enough to ensure that it is not sending any bad requests, clients could turn off 100-continue so that the entire request is sent in one roundtrip. This is especially true when clients send small size storage objects. + +```csharp +ServicePointManager.Expect100Continue = false; +``` + +# Need Help? +Be sure to check out the Microsoft Azure [Developer Forums on MSDN](http://go.microsoft.com/fwlink/?LinkId=234489) if you have trouble with the provided code or use StackOverflow. + + +# Collaborate & Contribute + +We gladly accept community contributions. + +- Issues: Please report bugs using the Issues section of GitHub +- Forums: Interact with the development teams on StackOverflow or the Microsoft Azure Forums +- Source Code Contributions: Please follow the [contribution guidelines for Microsoft Azure open source](http://azure.github.io/guidelines.html) that details information on onboarding as a contributor + +For general suggestions about Microsoft Azure please use our [UserVoice forum](http://feedback.azure.com/forums/34192--general-feedback). 
+ + +# Learn More + +- [Storage Data Movement Library API reference (TBC)]() +- [Storage Client Library Reference for .NET - MSDN](http://msdn.microsoft.com/library/azure/dn495001(v=azure.10).aspx) +- [Azure Storage Team Blog](http://blogs.msdn.com/b/windowsazurestorage/) diff --git a/changelog.txt b/changelog.txt new file mode 100644 index 00000000..3d65d23f --- /dev/null +++ b/changelog.txt @@ -0,0 +1,2 @@ +2015.07.17 Version 0.1.0 + * Initial Release diff --git a/lib/AssemblyInfo.cs b/lib/AssemblyInfo.cs new file mode 100644 index 00000000..513b08a9 --- /dev/null +++ b/lib/AssemblyInfo.cs @@ -0,0 +1,15 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +using System; +using System.Reflection; +using System.Resources; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("Microsoft.WindowsAzure.Storage.DataMovement.dll")] +[assembly: AssemblyDescription("")] diff --git a/lib/Constants.cs b/lib/Constants.cs new file mode 100644 index 00000000..0dc9f3d6 --- /dev/null +++ b/lib/Constants.cs @@ -0,0 +1,140 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.Reflection; + + /// + /// Constants for use with the transfer classes. + /// + public static class Constants + { + /// + /// Stores the max block size, 4MB. + /// + public const int MaxBlockSize = 4 * 1024 * 1024; + + /// + /// Default block size, 4MB. 
+ /// + public const int DefaultBlockSize = 4 * 1024 * 1024; + + /// + /// Define cache size for one parallel operation. + /// + internal const long CacheSizeMultiplierInByte = 12 * 1024 * 1024; + + /// + /// Default to root container name if none is specified. + /// + internal const string DefaultContainerName = "$root"; + + /// + /// Minimum block size, 256KB. + /// + internal const int MinBlockSize = 256 * 1024; + + /// + /// Stores the max page blob file size, 1TB. + /// + internal const long MaxPageBlobFileSize = (long)1024 * 1024 * 1024 * 1024; + + /// + /// Stores the max block blob file size, 50000 * 4M. + /// + internal const long MaxBlockBlobFileSize = (long)50000 * 4 * 1024 * 1024; + + /// + /// Stores the max cloud file size, 1TB. + /// + internal const long MaxCloudFileSize = (long)1024 * 1024 * 1024 * 1024; + + /// + /// Max transfer window size. + /// There can be multiple threads to transfer a file, + /// and we need to record transfer window + /// and have constant length for a transfer entry record in restart journal, + /// so set a limitation for transfer window here. + /// + internal const int MaxCountInTransferWindow = 128; + + /// + /// Length to get page ranges in one request. + /// In blog http://blogs.msdn.com/b/windowsazurestorage/archive/2012/03/26/getting-the-page-ranges-of-a-large-page-blob-in-segments.aspx, + /// it says that it's safe to get page ranges of 150M in one request. + /// We use 148MB which is multiples of 4MB. + /// + internal const long PageRangesSpanSize = 148 * 1024 * 1024; + + /// + /// Length to get file ranges in one request. + /// Use the same number as page blob for now because cloud file leverages page blob in implementation. + /// TODO: update this number when doc for cloud file is available. + /// + internal const long FileRangeSpanSize = 148 * 1024 * 1024; + + /// + /// Percentage of available we'll try to use for our memory cache. 
+ /// + internal const double MemoryCacheMultiplier = 0.5; + + /// + /// Maximum amount of memory to use for our memory cache. + /// + internal static readonly long MemoryCacheMaximum = GetMemoryCacheMaximum(); + + /// + /// Maximum amount of cells in memory manager. + /// + internal const int MemoryManagerCellsMaximum = 8 * 1024; + + /// + /// The life time in minutes of SAS auto generated for blob to blob copy. + /// + internal const int CopySASLifeTimeInMinutes = 7 * 24 * 60; + + /// + /// The time in milliseconds to wait to refresh copy status for asynchronous copy. + /// + internal const long AsyncCopyStatusRefreshWaitTimeInMilliseconds = 100; + + internal const string BlobTypeMismatch = "Blob type of the blob reference doesn't match blob type of the blob."; + + /// + /// The product name used in UserAgent header. + /// + internal const string UserAgentProductName = "DataMovement"; + + /// + /// UserAgent header. + /// + internal static readonly string UserAgent = GetUserAgent(); + + internal static readonly string FormatVersion = GetFormatVersion(); + + /// + /// Gets the UserAgent string. + /// + /// UserAgent string. + private static string GetUserAgent() + { + AssemblyName assemblyName = Assembly.GetExecutingAssembly().GetName(); + return UserAgentProductName + "/" + assemblyName.Version.ToString(); + } + + private static string GetFormatVersion() + { + AssemblyName assemblyName = Assembly.GetExecutingAssembly().GetName(); + return assemblyName.Name + "/" + assemblyName.Version.ToString(); + } + + private static long GetMemoryCacheMaximum() + { + return Environment.Is64BitProcess ? 
(long)2 * 1024 * 1024 * 1024 : (long)512 * 1024 * 1024; + } + } +} diff --git a/lib/DataMovement.csproj b/lib/DataMovement.csproj new file mode 100644 index 00000000..bdd9ec6d --- /dev/null +++ b/lib/DataMovement.csproj @@ -0,0 +1,170 @@ + + + + + Debug + AnyCPU + {B821E031-09CC-48F0-BDC6-2793228D4027} + Library + Properties + Microsoft.WindowsAzure.Storage.DataMovement + Microsoft.WindowsAzure.Storage.DataMovement + v4.5 + 512 + ..\ + + true + + + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + false + ..\tools\analysis\fxcop\azure-storage-dm.ruleset + false + true + bin\Debug\Microsoft.WindowsAzure.Storage.DataMovement.XML + + + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + false + ..\tools\analysis\fxcop\azure-storage-dm.ruleset + true + true + true + + + true + true + ..\tools\strongnamekeys\fake\windows.snk + + + + False + ..\packages\Microsoft.Data.Edm.5.6.4\lib\net40\Microsoft.Data.Edm.dll + + + False + ..\packages\Microsoft.Data.OData.5.6.4\lib\net40\Microsoft.Data.OData.dll + + + False + ..\packages\Microsoft.Data.Services.Client.5.6.4\lib\net40\Microsoft.Data.Services.Client.dll + + + False + ..\packages\Microsoft.WindowsAzure.ConfigurationManager.1.8.0.0\lib\net35-full\Microsoft.WindowsAzure.Configuration.dll + + + False + ..\packages\WindowsAzure.Storage.5.0.0\lib\net40\Microsoft.WindowsAzure.Storage.dll + + + False + ..\packages\Newtonsoft.Json.6.0.8\lib\net45\Newtonsoft.Json.dll + + + + + + + False + ..\packages\System.Spatial.5.6.4\lib\net40\System.Spatial.dll + + + + + SharedAssemblyInfo.cs + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + True + True + Resources.resx + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ResXFileCodeGenerator + Resources.Designer.cs + Designer + + + + + + + + + + This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. 
The missing file is {0}. + + + + \ No newline at end of file diff --git a/lib/Exceptions/TransferErrorCode.cs b/lib/Exceptions/TransferErrorCode.cs new file mode 100644 index 00000000..562b30f1 --- /dev/null +++ b/lib/Exceptions/TransferErrorCode.cs @@ -0,0 +1,104 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + + /// + /// Error codes for TransferException. + /// + [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1027:MarkEnumsWithFlags")] + public enum TransferErrorCode + { + /// + /// No error. + /// + None = 0, + + /// + /// Invalid source location specified. + /// + InvalidSourceLocation = 1, + + /// + /// Invalid destination location specified. + /// + InvalidDestinationLocation = 2, + + /// + /// Failed to open file for upload or download. + /// + OpenFileFailed = 3, + + /// + /// The file to transfer is too large for the destination. + /// + UploadSourceFileSizeTooLarge = 4, + + /// + /// The file size is invalid for the specified blob type. + /// + UploadBlobSourceFileSizeInvalid = 5, + + /// + /// User canceled. + /// + OperationCanceled = 6, + + /// + /// Both Source and Destination are locally accessible locations. + /// At least one of source and destination should be an Azure Storage location. + /// + LocalToLocalTransfersUnsupported = 7, + + /// + /// Failed to do asynchronous copy. + /// + AsyncCopyFailed = 8, + + /// + /// Source and destination are the same. + /// + SameSourceAndDestination = 9, + + /// + /// AsyncCopyController detects mismatch between copy id stored in transfer entry and + /// that retrieved from server. + /// + MismatchCopyId = 10, + + /// + /// AsyncCopyControler fails to retrieve CopyState for the object which we are to monitor. 
+ /// + FailToRetrieveCopyStateForObject = 11, + + /// + /// Fails to allocate memory in MemoryManager. + /// + FailToAllocateMemory = 12, + + /// + /// Fails to get source's last write time. + /// + FailToGetSourceLastWriteTime = 13, + + /// + /// User choose not to overwrite existing destination. + /// + NotOverwriteExistingDestination = 14, + + /// + /// Transfer with the same source and destination already exists. + /// + TransferAlreadyExists = 15, + + /// + /// Uncategorized transfer error. + /// + Unknown = 32, + } +} diff --git a/lib/Exceptions/TransferException.cs b/lib/Exceptions/TransferException.cs new file mode 100644 index 00000000..07cfa787 --- /dev/null +++ b/lib/Exceptions/TransferException.cs @@ -0,0 +1,152 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.Runtime.Serialization; + + /// + /// Base exception class for exceptions thrown by Blob/FileTransferJobs. + /// + [Serializable] + public sealed class TransferException : Exception + { + /// + /// Version of current TransferException serialization format. + /// + private const int ExceptionVersion = 1; + + /// + /// Serialization field name for Version. + /// + private const string VersionFieldName = "Version"; + + /// + /// Serialization field name for ErrorCode. + /// + private const string ErrorCodeFieldName = "ErrorCode"; + + /// + /// Transfer error code. + /// + private TransferErrorCode errorCode; + + /// + /// Initializes a new instance of the class. + /// + public TransferException() + { + } + + /// + /// Initializes a new instance of the class. + /// + /// The message that describes the error. + public TransferException(string message) + : base(message) + { + } + + /// + /// Initializes a new instance of the class. 
+ /// + /// The error message that explains the reason for the exception. + /// The exception that is the cause of the current exception, or a null reference + /// if no inner exception is specified. + public TransferException(string message, Exception ex) + : base(message, ex) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// Transfer error code. + public TransferException(TransferErrorCode errorCode) + { + this.errorCode = errorCode; + } + + /// + /// Initializes a new instance of the class. + /// + /// Transfer error code. + /// Exception message. + public TransferException( + TransferErrorCode errorCode, + string message) + : base(message) + { + this.errorCode = errorCode; + } + + /// + /// Initializes a new instance of the class. + /// + /// Transfer error code. + /// Exception message. + /// Inner exception. + public TransferException( + TransferErrorCode errorCode, + string message, + Exception innerException) + : base(message, innerException) + { + this.errorCode = errorCode; + } + + /// + /// Initializes a new instance of the class. + /// + /// Serialization information. + /// Streaming context. + private TransferException( + SerializationInfo info, + StreamingContext context) + : base(info, context) + { + int exceptionVersion = info.GetInt32(VersionFieldName); + + if (exceptionVersion >= 1) + { + this.errorCode = (TransferErrorCode)info.GetInt32(ErrorCodeFieldName); + } + } + + /// + /// Gets the detailed error code. + /// + /// The error code of the exception. + public TransferErrorCode ErrorCode + { + get + { + return this.errorCode; + } + } + + /// + /// Serializes the exception. + /// + /// Serialization info object. + /// Streaming context. 
+ public override void GetObjectData( + SerializationInfo info, + StreamingContext context) + { + if (null == info) + { + throw new ArgumentNullException("info"); + } + + info.AddValue(VersionFieldName, ExceptionVersion); + info.AddValue(ErrorCodeFieldName, this.errorCode); + + base.GetObjectData(info, context); + } + } +} diff --git a/lib/Extensions/CloudBlobExtensions.cs b/lib/Extensions/CloudBlobExtensions.cs new file mode 100644 index 00000000..0f65f070 --- /dev/null +++ b/lib/Extensions/CloudBlobExtensions.cs @@ -0,0 +1,158 @@ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.IO; + using Microsoft.WindowsAzure.Storage.Blob; + using Microsoft.WindowsAzure.Storage.DataMovement.TransferJobs; + + /// + /// Defines extensions methods for ICloudBlob for use with BlobTransfer. + /// + public static class CloudBlobExtensions + { + /// + /// Creates a job to start copying from a blob. + /// + /// Destination blob to copy to. + /// User should call the method on this object. + /// Source blob to copy from. + /// Job object to start copying. + public static BlobStartCopyJob CreateStartCopyJob( + this ICloudBlob destBlob, + ICloudBlob sourceBlob) + { + return new BlobStartCopyJob() + { + SourceBlob = sourceBlob, + DestBlob = destBlob + }; + } + + /// + /// Creates a job to start copying from a URI source. + /// + /// Destination blob to copy to. + /// User should call the method on this object. + /// Source to copy from. + /// Job object to start copying. + public static BlobStartCopyJob CreateStartCopyJob( + this ICloudBlob destBlob, + Uri sourceUri) + { + return new BlobStartCopyJob() + { + SourceUri = sourceUri, + DestBlob = destBlob + }; + } + + /// + /// Creates a job to copy from a blob. + /// + /// Destination blob to copy to. + /// User should call the method on this object. 
+ /// Source blob to copy from. + /// Job object to do copying. + public static BlobCopyJob CreateCopyJob( + this ICloudBlob destBlob, + ICloudBlob sourceBlob) + { + return new BlobCopyJob() + { + SourceBlob = sourceBlob, + DestBlob = destBlob + }; + } + + /// + /// Creates a job to copy from a URI source. + /// + /// Destination blob to copy to. + /// User should call the method on this object. + /// Source to copy from. + /// Job object to do copying. + public static BlobCopyJob CreateCopyJob( + this ICloudBlob destBlob, + Uri sourceUri) + { + return new BlobCopyJob() + { + SourceUri = sourceUri, + DestBlob = destBlob + }; + } + + /// + /// Creates a job to download a blob. + /// + /// Source blob that to be downloaded. + /// Path of destination to download to. + /// Job instance to download blob. + public static BlobDownloadJob CreateDownloadJob( + this ICloudBlob sourceBlob, + string destPath) + { + return new BlobDownloadJob() + { + SourceBlob = sourceBlob, + DestPath = destPath + }; + } + + /// + /// Creates a job to download a blob. + /// + /// Source blob that to be downloaded. + /// Destination stream to download to. + /// Job instance to download blob. + public static BlobDownloadJob CreateDownloadJob( + this ICloudBlob sourceBlob, + Stream destStream) + { + return new BlobDownloadJob() + { + SourceBlob = sourceBlob, + DestStream = destStream + }; + } + + /// + /// Creates a job to upload a blob. + /// + /// Destination blob to upload to. + /// Path of source file to upload from. + /// Job instance to upload blob. + public static BlobUploadJob CreateUploadJob( + this ICloudBlob destBlob, + string sourcePath) + { + return new BlobUploadJob() + { + SourcePath = sourcePath, + DestBlob = destBlob + }; + } + + /// + /// Creates a job to upload a blob. + /// + /// Destination blob to upload to. + /// Path of source file to upload from. + /// Job instance to upload blob. 
+ public static BlobUploadJob CreateUploadJob( + this ICloudBlob destBlob, + Stream sourceStream) + { + return new BlobUploadJob() + { + SourceStream = sourceStream, + DestBlob = destBlob + }; + } + } +} diff --git a/lib/Extensions/CloudFileExtensions.cs b/lib/Extensions/CloudFileExtensions.cs new file mode 100644 index 00000000..e4a701c5 --- /dev/null +++ b/lib/Extensions/CloudFileExtensions.cs @@ -0,0 +1,100 @@ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.IO; + using Microsoft.WindowsAzure.Storage.DataMovement.TransferJobs; + using Microsoft.WindowsAzure.Storage.File; + + /// + /// Defines extensions methods for CloudFile to create FileTransferJobs. + /// + public static class CloudFileExtensions + { + /// + /// Creates a job to download a cloud file. + /// + /// Source file that to be downloaded. + /// Path of destination to download to. + /// Job instance to download file. + public static FileDownloadJob CreateDownloadJob( + this CloudFile sourceFile, + string destPath) + { + return new FileDownloadJob() + { + SourceFile = sourceFile, + DestPath = destPath + }; + } + + /// + /// Creates a job to download a cloud file. + /// + /// Source file that to be downloaded. + /// Destination stream to download to. + /// Job instance to download file. + public static FileDownloadJob CreateDownloadJob( + this CloudFile sourceFile, + Stream destStream) + { + return new FileDownloadJob() + { + SourceFile = sourceFile, + DestStream = destStream + }; + } + + /// + /// Creates a job to upload a cloud file. + /// + /// Destination file to upload to. + /// Path of source file to upload from. + /// Job instance to upload file. 
+ public static FileUploadJob CreateUploadJob( + this CloudFile destFile, + string sourcePath) + { + return new FileUploadJob() + { + DestFile = destFile, + SourcePath = sourcePath + }; + } + + /// + /// Creates a job to upload a cloud file. + /// + /// Destination file to upload to. + /// Path of source file to upload from. + /// Job instance to upload file. + public static FileUploadJob CreateUploadJob( + this CloudFile destFile, + Stream sourceStream) + { + return new FileUploadJob() + { + DestFile = destFile, + SourceStream = sourceStream + }; + } + + /// + /// Creates a job to delete a cloud file. + /// + /// File to delete. + /// Job instance to delete file. + public static FileDeleteJob CreateDeleteJob( + this CloudFile fileToDelete) + { + return new FileDeleteJob() + { + File = fileToDelete + }; + } + } +} diff --git a/lib/Extensions/StorageExtensions.cs b/lib/Extensions/StorageExtensions.cs new file mode 100644 index 00000000..d177cf3e --- /dev/null +++ b/lib/Extensions/StorageExtensions.cs @@ -0,0 +1,189 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.Globalization; + using Microsoft.WindowsAzure.Storage.Auth; + using Microsoft.WindowsAzure.Storage.Blob; + using Microsoft.WindowsAzure.Storage.File; + + /// + /// Extension methods for CloudBlobs for use with BlobTransfer. + /// + internal static class StorageExtensions + { + /// + /// Determines whether two blobs have the same Uri and SnapshotTime. + /// + /// Blob to compare. + /// Comparand object. + /// True if the two blobs have the same Uri and SnapshotTime; otherwise, false. 
+ internal static bool Equals( + CloudBlob blob, + CloudBlob comparand) + { + if (blob == comparand) + { + return true; + } + + if (null == blob || null == comparand) + { + return false; + } + + return blob.Uri.Equals(comparand.Uri) && + blob.SnapshotTime.Equals(comparand.SnapshotTime); + } + + internal static CloudFile GenerateCopySourceFile( + this CloudFile file) + { + if (null == file) + { + throw new ArgumentNullException("file"); + } + + string sasToken = GetFileSASToken(file); + + if (string.IsNullOrEmpty(sasToken)) + { + return file; + } + + return new CloudFile(file.Uri, new StorageCredentials(sasToken)); + } + + private static string GetFileSASToken(CloudFile file) + { + if (null == file.ServiceClient.Credentials + || file.ServiceClient.Credentials.IsAnonymous) + { + return string.Empty; + } + else if (file.ServiceClient.Credentials.IsSAS) + { + return file.ServiceClient.Credentials.SASToken; + } + + // SAS life time is at least 10 minutes. + TimeSpan sasLifeTime = TimeSpan.FromMinutes(Constants.CopySASLifeTimeInMinutes); + + SharedAccessFilePolicy policy = new SharedAccessFilePolicy() + { + SharedAccessExpiryTime = DateTime.Now.Add(sasLifeTime), + Permissions = SharedAccessFilePermissions.Read, + }; + + return file.GetSharedAccessSignature(policy); + } + + /// + /// Append an auto generated SAS to a blob uri. + /// + /// Blob to append SAS. + /// Blob Uri with SAS appended. + internal static CloudBlob GenerateCopySourceBlob( + this CloudBlob blob) + { + if (null == blob) + { + throw new ArgumentNullException("blob"); + } + + string sasToken = GetBlobSasToken(blob); + + if (string.IsNullOrEmpty(sasToken)) + { + return blob; + } + + Uri blobUri = null; + + if (blob.IsSnapshot) + { + blobUri = blob.SnapshotQualifiedUri; + } + else + { + blobUri = blob.Uri; + } + + return Utils.GetBlobReference(blobUri, new StorageCredentials(sasToken), blob.BlobType); + } + + /// + /// Append an auto generated SAS to a blob uri. + /// + /// Blob to append SAS. 
+ /// Blob Uri with SAS appended. + internal static Uri GenerateUriWithCredentials( + this CloudBlob blob) + { + if (null == blob) + { + throw new ArgumentNullException("blob"); + } + + string sasToken = GetBlobSasToken(blob); + + if (string.IsNullOrEmpty(sasToken)) + { + return blob.SnapshotQualifiedUri; + } + + string uriStr = null; + + if (blob.IsSnapshot) + { + uriStr = string.Format(CultureInfo.InvariantCulture, "{0}&{1}", blob.SnapshotQualifiedUri.AbsoluteUri, sasToken.Substring(1)); + } + else + { + uriStr = string.Format(CultureInfo.InvariantCulture, "{0}{1}", blob.Uri.AbsoluteUri, sasToken); + } + + return new Uri(uriStr); + } + + private static string GetBlobSasToken(CloudBlob blob) + { + if (null == blob.ServiceClient.Credentials + || blob.ServiceClient.Credentials.IsAnonymous) + { + return string.Empty; + } + else if (blob.ServiceClient.Credentials.IsSAS) + { + return blob.ServiceClient.Credentials.SASToken; + } + + // SAS life time is at least 10 minutes. + TimeSpan sasLifeTime = TimeSpan.FromMinutes(Constants.CopySASLifeTimeInMinutes); + + SharedAccessBlobPolicy policy = new SharedAccessBlobPolicy() + { + SharedAccessExpiryTime = DateTime.Now.Add(sasLifeTime), + Permissions = SharedAccessBlobPermissions.Read, + }; + + CloudBlob rootBlob = null; + + if (!blob.IsSnapshot) + { + rootBlob = blob; + } + else + { + rootBlob = Utils.GetBlobReference(blob.Uri, blob.ServiceClient.Credentials, blob.BlobType); + } + + return rootBlob.GetSharedAccessSignature(policy); + } + } +} diff --git a/lib/GlobalMemoryStatusNativeMethods.cs b/lib/GlobalMemoryStatusNativeMethods.cs new file mode 100644 index 00000000..50d35491 --- /dev/null +++ b/lib/GlobalMemoryStatusNativeMethods.cs @@ -0,0 +1,52 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using 
System.Runtime.InteropServices; + + internal class GlobalMemoryStatusNativeMethods + { + private MEMORYSTATUSEX memStatus; + + public GlobalMemoryStatusNativeMethods() + { + this.memStatus = new MEMORYSTATUSEX(); + if (GlobalMemoryStatusEx(this.memStatus)) + { + this.AvailablePhysicalMemory = this.memStatus.ullAvailPhys; + } + } + + public ulong AvailablePhysicalMemory + { + get; + private set; + } + + [return: MarshalAs(UnmanagedType.Bool)] + [DllImport("kernel32.dll", CharSet = CharSet.Auto, SetLastError = true)] + private static extern bool GlobalMemoryStatusEx([In, Out] MEMORYSTATUSEX lpBuffer); + + [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Auto)] + private class MEMORYSTATUSEX + { + public uint dwLength; + public uint dwMemoryLoad; + public ulong ullTotalPhys; + public ulong ullAvailPhys; + public ulong ullTotalPageFile; + public ulong ullAvailPageFile; + public ulong ullTotalVirtual; + public ulong ullAvailVirtual; + public ulong ullAvailExtendedVirtual; + + public MEMORYSTATUSEX() + { + this.dwLength = (uint)Marshal.SizeOf(typeof(MEMORYSTATUSEX)); + } + } + } +} diff --git a/lib/MD5HashStream.cs b/lib/MD5HashStream.cs new file mode 100644 index 00000000..c0784b40 --- /dev/null +++ b/lib/MD5HashStream.cs @@ -0,0 +1,462 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.Diagnostics; + using System.Diagnostics.CodeAnalysis; + using System.Globalization; + using System.IO; + using System.Security.Cryptography; + using System.Threading; + using System.Threading.Tasks; + + /// + /// Class to make thread safe stream access and calculate MD5 hash. + /// + internal class MD5HashStream : IDisposable + { + /// + /// Stream object. + /// + private Stream stream; + + /// + /// Semaphore object. 
In our case, we can only have one operation at the same time. + /// + private SemaphoreSlim semaphore; + + /// + /// In restart mode, we start a separate thread to calculate MD5hash of transferred part. + /// This variable indicates whether finished to calculate this part of MD5hash. + /// + private volatile bool finishedSeparateMd5Calculator = false; + + /// + /// Indicates whether succeeded in calculating MD5hash of the transferred bytes. + /// + private bool succeededSeparateMd5Calculator = false; + + /// + /// Running md5 hash of the blob being downloaded. + /// + private MD5CryptoServiceProvider md5hash; + + /// + /// Offset of the transferred bytes. We should calculate MD5hash on all bytes before this offset. + /// + private long md5hashOffset; + + /// + /// Initializes a new instance of the class. + /// + /// Stream object. + /// Offset of the transferred bytes. + /// Whether need to calculate MD5Hash. + public MD5HashStream( + Stream stream, + long lastTransferOffset, + bool md5hashCheck) + { + this.stream = stream; + this.md5hashOffset = lastTransferOffset; + + if ((0 == this.md5hashOffset) + || (!md5hashCheck)) + { + this.finishedSeparateMd5Calculator = true; + this.succeededSeparateMd5Calculator = true; + } + else + { + this.semaphore = new SemaphoreSlim(1, 1); + } + + if (md5hashCheck) + { + this.md5hash = new MD5CryptoServiceProvider(); + } + + if ((!this.finishedSeparateMd5Calculator) + && (!this.stream.CanRead)) + { + throw new NotSupportedException(string.Format( + CultureInfo.CurrentCulture, + Resources.StreamMustSupportReadException, + "Stream")); + } + + if (!this.stream.CanSeek) + { + throw new NotSupportedException(string.Format( + CultureInfo.CurrentCulture, + Resources.StreamMustSupportSeekException, + "Stream")); + } + } + + /// + /// Gets a value indicating whether need to calculate MD5 hash. + /// + public bool CheckMd5Hash + { + get + { + return null != this.md5hash; + } + } + + /// + /// Gets MD5 hash bytes. 
+ /// + public byte[] Hash + { + get + { + return null == this.md5hash ? null : this.md5hash.Hash; + } + } + + /// + /// Gets a value indicating whether already finished to calculate MD5 hash of transferred bytes. + /// + public bool FinishedSeparateMd5Calculator + { + get + { + return this.finishedSeparateMd5Calculator; + } + } + + /// + /// Gets a value indicating whether already succeeded in calculating MD5 hash of transferred bytes. + /// + public bool SucceededSeparateMd5Calculator + { + get + { + this.WaitMD5CalculationToFinish(); + return this.succeededSeparateMd5Calculator; + } + } + + /// + /// Calculate MD5 hash of transferred bytes. + /// + /// Reference to MemoryManager object to require buffer from. + /// Action to check whether to cancel this calculation. + public void CalculateMd5(MemoryManager memoryManager, Action checkCancellation) + { + if (null == this.md5hash) + { + return; + } + + byte[] buffer = null; + + try + { + buffer = Utils.RequireBuffer(memoryManager, checkCancellation); + } + catch (Exception) + { + lock (this.md5hash) + { + this.finishedSeparateMd5Calculator = true; + } + + throw; + } + + long offset = 0; + int readLength = 0; + + while (true) + { + lock (this.md5hash) + { + if (offset >= this.md5hashOffset) + { + Debug.Assert( + offset == this.md5hashOffset, + "We should stop the separate calculator thread just at the transferred offset"); + + this.succeededSeparateMd5Calculator = true; + this.finishedSeparateMd5Calculator = true; + break; + } + + readLength = (int)Math.Min(this.md5hashOffset - offset, buffer.Length); + } + + try + { + checkCancellation(); + readLength = this.Read(offset, buffer, 0, readLength); + + lock (this.md5hash) + { + this.md5hash.TransformBlock(buffer, 0, readLength, null, 0); + } + } + catch (Exception) + { + lock (this.md5hash) + { + this.finishedSeparateMd5Calculator = true; + } + + memoryManager.ReleaseBuffer(buffer); + + throw; + } + + offset += readLength; + } + + memoryManager.ReleaseBuffer(buffer); + 
} + + /// + /// Begin async read from stream. + /// + /// Offset in stream to read from. + /// The buffer to read the data into. + /// The byte offset in buffer at which to begin writing data read from the stream. + /// The maximum number of bytes to read. + /// Token used to cancel the asynchronous reading. + /// A task that represents the asynchronous read operation. The value of the + /// TResult parameter contains the total number of bytes read into the buffer. + public async Task ReadAsync(long readOffset, byte[] buffer, int offset, int count, CancellationToken cancellationToken) + { + await this.WaitOnSemaphoreAsync(cancellationToken); + + try + { + this.stream.Position = readOffset; + + return await this.stream.ReadAsync( + buffer, + offset, + count, + cancellationToken); + } + finally + { + this.ReleaseSemaphore(); + } + } + + /// + /// Begin async write to stream. + /// + /// Offset in stream to write to. + /// The buffer to write the data from. + /// The byte offset in buffer from which to begin writing. + /// The maximum number of bytes to write. + /// Token used to cancel the asynchronous writing. + /// A task that represents the asynchronous write operation. + public async Task WriteAsync(long writeOffset, byte[] buffer, int offset, int count, CancellationToken cancellationToken) + { + await this.WaitOnSemaphoreAsync(cancellationToken); + + try + { + this.stream.Position = writeOffset; + await this.stream.WriteAsync( + buffer, + offset, + count, + cancellationToken); + } + finally + { + this.ReleaseSemaphore(); + } + } + + /// + /// Computes the hash value for the specified region of the input byte array + /// and copies the specified region of the input byte array to the specified + /// region of the output byte array. + /// + /// Offset in stream of the block on which to calculate MD5 hash. + /// The input to compute the hash code for. + /// The offset into the input byte array from which to begin using data. 
+ /// The number of bytes in the input byte array to use as data. + /// A copy of the part of the input array used to compute the hash code. + /// The offset into the output byte array from which to begin writing data. + /// Whether succeeded in calculating MD5 hash + /// or not finished the separate thread to calculate MD5 hash at the time. + public bool MD5HashTransformBlock(long streamOffset, byte[] inputBuffer, int inputOffset, int inputCount, byte[] outputBuffer, int outputOffset) + { + if (null == this.md5hash) + { + return true; + } + + if (!this.finishedSeparateMd5Calculator) + { + lock (this.md5hash) + { + if (!this.finishedSeparateMd5Calculator) + { + if (streamOffset == this.md5hashOffset) + { + this.md5hashOffset += inputCount; + } + + return true; + } + else + { + if (!this.succeededSeparateMd5Calculator) + { + return false; + } + } + } + } + + if (streamOffset >= this.md5hashOffset) + { + Debug.Assert( + this.finishedSeparateMd5Calculator, + "The separate thread to calculate MD5 hash should have finished or md5hashOffset should get updated."); + + this.md5hash.TransformBlock(inputBuffer, inputOffset, inputCount, outputBuffer, outputOffset); + } + + return true; + } + + /// + /// Computes the hash value for the specified region of the specified byte array. + /// + /// The input to compute the hash code for. + /// The offset into the byte array from which to begin using data. + /// The number of bytes in the byte array to use as data. + /// An array that is a copy of the part of the input that is hashed. + public byte[] MD5HashTransformFinalBlock(byte[] inputBuffer, int inputOffset, int inputCount) + { + this.WaitMD5CalculationToFinish(); + + if (!this.succeededSeparateMd5Calculator) + { + return null; + } + + return null == this.md5hash ? null : this.md5hash.TransformFinalBlock(inputBuffer, inputOffset, inputCount); + } + + /// + /// Releases or resets unmanaged resources. 
+ /// + public virtual void Dispose() + { + this.Dispose(true); + } + + /// + /// Private dispose method to release managed/unmanaged objects. + /// If disposing = true clean up managed resources as well as unmanaged resources. + /// If disposing = false only clean up unmanaged resources. + /// + /// Indicates whether or not to dispose managed resources. + protected virtual void Dispose(bool disposing) + { + if (disposing) + { + if (null != this.md5hash) + { + this.md5hash.Clear(); + this.md5hash = null; + } + + if (null != this.semaphore) + { + this.semaphore.Dispose(); + this.semaphore = null; + } + } + } + + /// + /// Read from stream. + /// + /// Offset in stream to read from. + /// An array of bytes. When this method returns, the buffer contains the specified + /// byte array with the values between offset and (offset + count - 1) replaced + /// by the bytes read from the current source. + /// The zero-based byte offset in buffer at which to begin storing the data read from the current stream. + /// The maximum number of bytes to be read from the current stream. + /// The total number of bytes read into the buffer. + private int Read(long readOffset, byte[] buffer, int offset, int count) + { + if (!this.finishedSeparateMd5Calculator) + { + this.semaphore.Wait(); + } + + try + { + this.stream.Position = readOffset; + int readBytes = this.stream.Read(buffer, offset, count); + + return readBytes; + } + finally + { + this.ReleaseSemaphore(); + } + } + + /// + /// Wait for one semaphore. + /// + /// Token used to cancel waiting on the semaphore. + private async Task WaitOnSemaphoreAsync(CancellationToken cancellationToken) + { + if (!this.finishedSeparateMd5Calculator) + { + await this.semaphore.WaitAsync(cancellationToken); + } + } + + /// + /// Release semaphore. + /// + private void ReleaseSemaphore() + { + if (!this.finishedSeparateMd5Calculator) + { + this.semaphore.Release(); + } + } + + /// + /// Wait for MD5 calculation to be finished. 
+ /// In our test, MD5 calculation is really fast, + /// and SpinOnce has sleep mechanism, so use Spin instead of sleep here. + /// + private void WaitMD5CalculationToFinish() + { + if (this.finishedSeparateMd5Calculator) + { + return; + } + + SpinWait sw = new SpinWait(); + + while (!this.finishedSeparateMd5Calculator) + { + sw.SpinOnce(); + } + + sw.Reset(); + } + } +} diff --git a/lib/MemoryManager.cs b/lib/MemoryManager.cs new file mode 100644 index 00000000..04ec40f3 --- /dev/null +++ b/lib/MemoryManager.cs @@ -0,0 +1,139 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.Collections.Concurrent; + + /// + /// Class for maintaining a pool of memory buffer objects. + /// + internal class MemoryManager + { + private MemoryPool memoryPool; + + public MemoryManager( + long capacity, int bufferSize) + { + long availableCells = capacity / bufferSize; + + int cellNumber = (int)Math.Min((long)Constants.MemoryManagerCellsMaximum, availableCells); + + this.memoryPool = new MemoryPool(cellNumber, bufferSize); + } + + public byte[] RequireBuffer() + { + return this.memoryPool.GetBuffer(); + } + + public void ReleaseBuffer(byte[] buffer) + { + this.memoryPool.AddBuffer(buffer); + } + + private class MemoryPool + { + public readonly int BufferSize; + + private int availableCells; + private int allocatedCells; + private object cellsListLock; + private MemoryCell cellsListHeadCell; + private ConcurrentDictionary cellsInUse; + + public MemoryPool(int cellsCount, int bufferSize) + { + this.BufferSize = bufferSize; + + this.availableCells = cellsCount; + this.allocatedCells = 0; + this.cellsListLock = new object(); + this.cellsListHeadCell = null; + this.cellsInUse = new ConcurrentDictionary(); + } + + public byte[] GetBuffer() + { 
+ if (this.availableCells > 0) + { + MemoryCell retCell = null; + + lock (this.cellsListLock) + { + if (this.availableCells > 0) + { + if (null != this.cellsListHeadCell) + { + retCell = this.cellsListHeadCell; + this.cellsListHeadCell = retCell.NextCell; + retCell.NextCell = null; + } + else + { + retCell = new MemoryCell(this.BufferSize); + ++this.allocatedCells; + } + + --this.availableCells; + } + } + + if (null != retCell) + { + this.cellsInUse.TryAdd(retCell.Buffer, retCell); + return retCell.Buffer; + } + } + + return null; + } + + public void AddBuffer(byte[] buffer) + { + if (null == buffer) + { + throw new ArgumentNullException("buffer"); + } + + MemoryCell cell; + if (this.cellsInUse.TryRemove(buffer, out cell)) + { + lock (this.cellsListLock) + { + cell.NextCell = this.cellsListHeadCell; + this.cellsListHeadCell = cell; + ++this.availableCells; + } + } + } + } + + private class MemoryCell + { + private byte[] buffer; + + public MemoryCell(int size) + { + this.buffer = new byte[size]; + } + + public MemoryCell NextCell + { + get; + set; + } + + public byte[] Buffer + { + get + { + return this.buffer; + } + } + } + } +} diff --git a/lib/OverwriteCallback.cs b/lib/OverwriteCallback.cs new file mode 100644 index 00000000..a3f70e65 --- /dev/null +++ b/lib/OverwriteCallback.cs @@ -0,0 +1,17 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + /// + /// Callback invoked to tell whether to overwrite an existing destination + /// + /// Path of the source file used to overwrite the destination. + /// Path of the file to be overwritten. + /// True if the file should be overwritten; otherwise false. 
+ public delegate bool OverwriteCallback( + string sourcePath, + string destinationPath); +} diff --git a/lib/Resources.Designer.cs b/lib/Resources.Designer.cs new file mode 100644 index 00000000..e7e704fa --- /dev/null +++ b/lib/Resources.Designer.cs @@ -0,0 +1,569 @@ +//------------------------------------------------------------------------------ +// +// This code was generated by a tool. +// Runtime Version:4.0.30319.42000 +// +// Changes to this file may cause incorrect behavior and will be lost if +// the code is regenerated. +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement { + using System; + + + /// + /// A strongly-typed resource class, for looking up localized strings, etc. + /// + // This class was auto-generated by the StronglyTypedResourceBuilder + // class via a tool like ResGen or Visual Studio. + // To add or remove a member, edit your .ResX file then rerun ResGen + // with the /str option, or rebuild your VS project. + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")] + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()] + internal class Resources { + + private static global::System.Resources.ResourceManager resourceMan; + + private static global::System.Globalization.CultureInfo resourceCulture; + + [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")] + internal Resources() { + } + + /// + /// Returns the cached ResourceManager instance used by this class. 
+ /// + [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] + internal static global::System.Resources.ResourceManager ResourceManager { + get { + if (object.ReferenceEquals(resourceMan, null)) { + global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("Microsoft.WindowsAzure.Storage.DataMovement.Resources", typeof(Resources).Assembly); + resourceMan = temp; + } + return resourceMan; + } + } + + /// + /// Overrides the current thread's CurrentUICulture property for all + /// resource lookups using this strongly typed resource class. + /// + [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] + internal static global::System.Globalization.CultureInfo Culture { + get { + return resourceCulture; + } + set { + resourceCulture = value; + } + } + + /// + /// Looks up a localized string similar to AppendBlob. + /// + internal static string AppendBlob { + get { + return ResourceManager.GetString("AppendBlob", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Copying from File Storage to append Blob Storage asynchronously is not supported.. + /// + internal static string AsyncCopyFromFileToAppendBlobNotSupportException { + get { + return ResourceManager.GetString("AsyncCopyFromFileToAppendBlobNotSupportException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Copying from File Storage to page Blob Storage asynchronously is not supported.. + /// + internal static string AsyncCopyFromFileToPageBlobNotSupportException { + get { + return ResourceManager.GetString("AsyncCopyFromFileToPageBlobNotSupportException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to File size {0} is invalid for {1}, must be a multiple of {2}.. 
+ /// + internal static string BlobFileSizeInvalidException { + get { + return ResourceManager.GetString("BlobFileSizeInvalidException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to File size {0} is larger than {1} maximum size {2}.. + /// + internal static string BlobFileSizeTooLargeException { + get { + return ResourceManager.GetString("BlobFileSizeTooLargeException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to The blob transfer has been cancelled.. + /// + internal static string BlobTransferCancelledException { + get { + return ResourceManager.GetString("BlobTransferCancelledException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to BlockBlob. + /// + internal static string BlockBlob { + get { + return ResourceManager.GetString("BlockBlob", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to BlockSize must be between {0} and {1}.. + /// + internal static string BlockSizeOutOfRangeException { + get { + return ResourceManager.GetString("BlockSizeOutOfRangeException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Cannot deserialize to TransferLocation when its TransferLocationType is {0}.. + /// + internal static string CannotDeserializeLocationType { + get { + return ResourceManager.GetString("CannotDeserializeLocationType", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to The TransferLocation cannot be serialized when it represents a stream location.. + /// + internal static string CannotSerializeStreamLocation { + get { + return ResourceManager.GetString("CannotSerializeStreamLocation", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Destination of asynchronous copying must be File Storage or Blob Storage.. 
+ /// + internal static string CanOnlyCopyToFileOrBlobException { + get { + return ResourceManager.GetString("CanOnlyCopyToFileOrBlobException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to File size {0} is larger than cloud file maximum size {1} bytes.. + /// + internal static string CloudFileSizeTooLargeException { + get { + return ResourceManager.GetString("CloudFileSizeTooLargeException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to {0} Deserialization failed: Version number doesn't match. Version number:{1}, expect:{2}.. + /// + internal static string DeserializationVersionNotMatchException { + get { + return ResourceManager.GetString("DeserializationVersionNotMatchException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to User specified blob type does not match the blob type of the existing destination blob.. + /// + internal static string DestinationBlobTypeNotMatch { + get { + return ResourceManager.GetString("DestinationBlobTypeNotMatch", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Destination might be changed by other process or application.. + /// + internal static string DestinationChangedException { + get { + return ResourceManager.GetString("DestinationChangedException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Destination must be a base blob.. + /// + internal static string DestinationMustBeBaseBlob { + get { + return ResourceManager.GetString("DestinationMustBeBaseBlob", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to The MD5 hash calculated from the downloaded data does not match the MD5 hash stored in the property of source: {0}. Please refer to help or documentation for detail. + ///MD5 calculated: {1} + ///MD5 in property: {2}. 
+ /// + internal static string DownloadedMd5MismatchException { + get { + return ResourceManager.GetString("DownloadedMd5MismatchException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Failed to allocate required memory.. + /// + internal static string FailedToAllocateMemoryException { + get { + return ResourceManager.GetString("FailedToAllocateMemoryException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Failed to copy from "{0}" to "{1}". Copy status: {2}; Description: {3}.. + /// + internal static string FailedToAsyncCopyObjectException { + get { + return ResourceManager.GetString("FailedToAsyncCopyObjectException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Failed to retrieve the original BlobType.. + /// + internal static string FailedToGetBlobTypeException { + get { + return ResourceManager.GetString("FailedToGetBlobTypeException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Failed to open file {0}: {1}.. + /// + internal static string FailedToOpenFileException { + get { + return ResourceManager.GetString("FailedToOpenFileException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Failed to retrieve CopyState for object "{0}".. + /// + internal static string FailedToRetrieveCopyStateForObjectException { + get { + return ResourceManager.GetString("FailedToRetrieveCopyStateForObjectException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to The initial entry status {0} is invalid for {1}.. + /// + internal static string InvalidInitialEntryStatusForControllerException { + get { + return ResourceManager.GetString("InvalidInitialEntryStatusForControllerException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Both Source and Destination are locally accessible locations. 
At least one of source and destination should be an Azure Storage location.. + /// + internal static string LocalToLocalTransferUnsupportedException { + get { + return ResourceManager.GetString("LocalToLocalTransferUnsupportedException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to The local copy id is different from the one returned from the server.. + /// + internal static string MismatchFoundBetweenLocalAndServerCopyIdsException { + get { + return ResourceManager.GetString("MismatchFoundBetweenLocalAndServerCopyIdsException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Blob type '{0}' is not supported.. + /// + internal static string NotSupportedBlobType { + get { + return ResourceManager.GetString("NotSupportedBlobType", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Skiped file "{0}" because target "{1}" already exists.. + /// + internal static string OverwriteCallbackCancelTransferException { + get { + return ResourceManager.GetString("OverwriteCallbackCancelTransferException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to PageBlob. + /// + internal static string PageBlob { + get { + return ResourceManager.GetString("PageBlob", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Parallel operations count must be positive.. + /// + internal static string ParallelCountNotPositiveException { + get { + return ResourceManager.GetString("ParallelCountNotPositiveException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to {0} cannot be null.. + /// + internal static string ParameterCannotBeNullException { + get { + return ResourceManager.GetString("ParameterCannotBeNullException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Exactly one of these parameters must be provided: {0}, {1}, {2}.. 
+ /// + internal static string ProvideExactlyOneOfThreeParameters { + get { + return ResourceManager.GetString("ProvideExactlyOneOfThreeParameters", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to {0:0.##} bytes. + /// + internal static string ReadableSizeFormatBytes { + get { + return ResourceManager.GetString("ReadableSizeFormatBytes", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to {0:0.##}EB. + /// + internal static string ReadableSizeFormatExaBytes { + get { + return ResourceManager.GetString("ReadableSizeFormatExaBytes", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to {0:0.##}GB. + /// + internal static string ReadableSizeFormatGigaBytes { + get { + return ResourceManager.GetString("ReadableSizeFormatGigaBytes", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to {0:0.##}KB. + /// + internal static string ReadableSizeFormatKiloBytes { + get { + return ResourceManager.GetString("ReadableSizeFormatKiloBytes", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to {0:0.##}MB. + /// + internal static string ReadableSizeFormatMegaBytes { + get { + return ResourceManager.GetString("ReadableSizeFormatMegaBytes", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to {0:0.##}PB. + /// + internal static string ReadableSizeFormatPetaBytes { + get { + return ResourceManager.GetString("ReadableSizeFormatPetaBytes", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to {0:0.##}TB. + /// + internal static string ReadableSizeFormatTeraBytes { + get { + return ResourceManager.GetString("ReadableSizeFormatTeraBytes", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Failed to read restartable info from file.. 
+ /// + internal static string RestartableInfoCorruptedException { + get { + return ResourceManager.GetString("RestartableInfoCorruptedException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to MaximumCacheSize cannot be less than {0}.. + /// + internal static string SmallMemoryCacheSizeLimitationException { + get { + return ResourceManager.GetString("SmallMemoryCacheSizeLimitationException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Blob type of source and destination must be the same.. + /// + internal static string SourceAndDestinationBlobTypeDifferent { + get { + return ResourceManager.GetString("SourceAndDestinationBlobTypeDifferent", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Source and destination cannot be the same.. + /// + internal static string SourceAndDestinationLocationCannotBeEqualException { + get { + return ResourceManager.GetString("SourceAndDestinationLocationCannotBeEqualException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Source blob does not exist.. + /// + internal static string SourceBlobDoesNotExistException { + get { + return ResourceManager.GetString("SourceBlobDoesNotExistException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to User specified blob type does not match the blob type of the existing source blob.. + /// + internal static string SourceBlobTypeNotMatch { + get { + return ResourceManager.GetString("SourceBlobTypeNotMatch", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Source does not exist.. + /// + internal static string SourceDoesNotExistException { + get { + return ResourceManager.GetString("SourceDoesNotExistException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to {0} must support Read.. 
+ /// + internal static string StreamMustSupportReadException { + get { + return ResourceManager.GetString("StreamMustSupportReadException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to {0} must support Seek.. + /// + internal static string StreamMustSupportSeekException { + get { + return ResourceManager.GetString("StreamMustSupportSeekException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to {0} must support Write.. + /// + internal static string StreamMustSupportWriteException { + get { + return ResourceManager.GetString("StreamMustSupportWriteException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to The stream is not expandable.. + /// + internal static string StreamNotExpandable { + get { + return ResourceManager.GetString("StreamNotExpandable", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Copying from uri to Azure Blob Storage synchronously is not supported.. + /// + internal static string SyncCopyFromUriToAzureBlobNotSupportedException { + get { + return ResourceManager.GetString("SyncCopyFromUriToAzureBlobNotSupportedException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Copying from uri to Azure File Storage synchronously is not supported.. + /// + internal static string SyncCopyFromUriToAzureFileNotSupportedException { + get { + return ResourceManager.GetString("SyncCopyFromUriToAzureFileNotSupportedException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to A transfer operation with the same source and destination already exists.. + /// + internal static string TransferAlreadyExists { + get { + return ResourceManager.GetString("TransferAlreadyExists", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to TransferEntry.CopyId cannot be null or empty because we need it to verify we are monitoring the right blob copying process.. 
+ /// + internal static string TransferEntryCopyIdCannotBeNullOrEmptyException { + get { + return ResourceManager.GetString("TransferEntryCopyIdCannotBeNullOrEmptyException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to The transfer failed.. + /// + internal static string UncategorizedException { + get { + return ResourceManager.GetString("UncategorizedException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to The given blob type {0} is not supported.. + /// + internal static string UnsupportedBlobTypeException { + get { + return ResourceManager.GetString("UnsupportedBlobTypeException", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to The given transfer location type {0} is not supported.. + /// + internal static string UnsupportedTransferLocationException { + get { + return ResourceManager.GetString("UnsupportedTransferLocationException", resourceCulture); + } + } + } +} diff --git a/lib/Resources.resx b/lib/Resources.resx new file mode 100644 index 00000000..9e7ab94d --- /dev/null +++ b/lib/Resources.resx @@ -0,0 +1,322 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + text/microsoft-resx + + + 2.0 + + + System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + + System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + + File size {0} is invalid for {1}, must be a multiple of {2}. + {0} is file size. {1} is the destination blob type. {2} should be 512 bytes for pageblob. + + + File size {0} is larger than {1} maximum size {2}. + {0} is file size. {1} is the destination blob type. {2} is the size limit of the destination. + + + The blob transfer has been cancelled. + + + BlockSize must be between {0} and {1}. 
+ + + Cannot deserialize to TransferLocation when its TransferLocationType is {0}. + + + The TransferLocation cannot be serialized when it represents a stream location. + + + File size {0} is larger than cloud file maximum size {1} bytes. + {0} is file size. {1} is the size limit of the destination. + + + Destination of asynchronous copying must be File Storage or Blob Storage. + + + Copying from File Storage to page Blob Storage asynchronously is not supported. + + + {0} Deserialization failed: Version number doesn't match. Version number:{1}, expect:{2}. + {0} is the class name. + {1} is the version number in serialization binary. + {2} is the expect version number. + + + Destination might be changed by other process or application. + + + The MD5 hash calculated from the downloaded data does not match the MD5 hash stored in the property of source: {0}. Please refer to help or documentation for detail. +MD5 calculated: {1} +MD5 in property: {2} + {0} is the uri of source, {1} is the calculated MD5, {2} is the MD5 stored in the source property + + + User specified blob type does not match the blob type of the existing destination blob. + + + Destination must be a base blob. + + + Failed to allocate required memory. + + + Failed to copy from "{0}" to "{1}". Copy status: {2}; Description: {3}. + {0} is uri of source, {1} is uri of destination. {2} is the copy status, {3} is the copy status description. + + + Failed to retrieve CopyState for object "{0}". + {0} is uri of target object. + + + Failed to retrieve the original BlobType. + + + Failed to open file {0}: {1}. + {0} is file name, {1} is detailed error message. + + + The initial entry status {0} is invalid for {1}. + {0} is the initial entry status, {1} is the controller. + + + Both Source and Destination are locally accessible locations. At least one of source and destination should be an Azure Storage location. + + + The local copy id is different from the one returned from the server. 
+ + + Blob type '{0}' is not supported. + {0} is the blob type name. + + + Skiped file "{0}" because target "{1}" already exists. + {0} is source file name, {1} is destination file name. + + + Parallel operations count must be positive. + + + {0} cannot be null. + {0} is the property or parameter name. + + + Exactly one of these parameters must be provided: {0}, {1}, {2}. + {0} is the first parameter, {1} is the second one, {2} is the third one. + + + {0:0.##} bytes + {0: -> take value from the first parameter. +0.##} ->Display at least 1 digit before the decimal point and up to 2 digits after the decimal point. + + + {0:0.##}EB + {0: -> take value from the first parameter. +0.##} ->Display at least 1 digit before the decimal point and up to 2 digits after the decimal point. + + + {0:0.##}GB + {0: -> take value from the first parameter. +0.##} ->Display at least 1 digit before the decimal point and up to 2 digits after the decimal point. + + + {0:0.##}KB + {0: -> take value from the first parameter. +0.##} ->Display at least 1 digit before the decimal point and up to 2 digits after the decimal point. + + + {0:0.##}MB + {0: -> take value from the first parameter. +0.##} ->Display at least 1 digit before the decimal point and up to 2 digits after the decimal point. + + + {0:0.##}PB + {0: -> take value from the first parameter. +0.##} ->Display at least 1 digit before the decimal point and up to 2 digits after the decimal point. + + + {0:0.##}TB + {0: -> take value from the first parameter. +0.##} ->Display at least 1 digit before the decimal point and up to 2 digits after the decimal point. + + + Failed to read restartable info from file. + + + MaximumCacheSize cannot be less than {0}. + {0} is minimum memory cache size limitation + + + Blob type of source and destination must be the same. + + + Source and destination cannot be the same. + + + Source does not exist. + + + Source blob does not exist. 
+ + + User specified blob type does not match the blob type of the existing source blob. + + + {0} must support Read. + + + {0} must support Seek. + + + {0} must support Write. + + + The stream is not expandable. + + + TransferEntry.CopyId cannot be null or empty because we need it to verify we are monitoring the right blob copying process. + + + The given blob type {0} is not supported. + {0} is the given blob type. + + + The given transfer location type {0} is not supported. + {0} is the given transfer location type. + + + Copying from File Storage to append Blob Storage asynchronously is not supported. + + + AppendBlob + + + BlockBlob + + + PageBlob + + + Copying from uri to Azure Blob Storage synchronously is not supported. + + + Copying from uri to Azure File Storage synchronously is not supported. + + + A transfer operation with the same source and destination already exists. + + + The transfer failed. + + \ No newline at end of file diff --git a/lib/SerializationHelper/SerializableAccessCondition.cs b/lib/SerializationHelper/SerializableAccessCondition.cs new file mode 100644 index 00000000..88650747 --- /dev/null +++ b/lib/SerializationHelper/SerializableAccessCondition.cs @@ -0,0 +1,161 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement.SerializationHelper +{ + using System; + using System.Runtime.Serialization; + + [Serializable] + internal sealed class SerializableAccessCondition : ISerializable + { + private const string IfMatchETagName = "IfMatchETag"; + private const string IfModifiedSinceTimeName = "IfModifiedSinceTime"; + private const string IfNoneMatchETagName = "IfNoneMatchETag"; + private const string IfNotModifiedSinceTimeName = "IfNotModifiedSinceTime"; + private const string IfSequenceNumberEqualName = 
"IfSequenceNumberEqual"; + private const string IfSequenceNumberLessThanName = "IfSequenceNumberLessThan"; + private const string IfSequenceNumberLessThanOrEqualName = "IfSequenceNumberLessThanOrEqual"; + private const string LeaseIdName = "LeaseId"; + + private AccessCondition accessCondition; + + public SerializableAccessCondition() + { + } + + /// + /// Initializes a new instance of the class. + /// + /// Serialization information. + /// Streaming context. + private SerializableAccessCondition(SerializationInfo info, StreamingContext context) + { + if (info == null) + { + throw new ArgumentNullException("info"); + } + + string ifMatchETag = info.GetString(IfMatchETagName); + DateTimeOffset? ifModifiedSinceTime = (DateTimeOffset?)info.GetValue(IfModifiedSinceTimeName, typeof(DateTimeOffset?)); + string ifNoneMatchETag = info.GetString(IfNoneMatchETagName); + DateTimeOffset? ifNotModifiedSinceTime = (DateTimeOffset?)info.GetValue(IfNotModifiedSinceTimeName, typeof(DateTimeOffset?)); + long? ifSequenceNumberEqual = (long?)info.GetValue(IfSequenceNumberEqualName, typeof(long?)); + long? ifSequenceNumberLessThan = (long?)info.GetValue(IfSequenceNumberLessThanName, typeof(long?)); + long? 
ifSequenceNumberLessThanOrEqual = (long?)info.GetValue(IfSequenceNumberLessThanOrEqualName, typeof(long?)); + string leaseId = info.GetString(LeaseIdName); + + if (!string.IsNullOrEmpty(ifMatchETag) + || null != ifModifiedSinceTime + || !string.IsNullOrEmpty(ifNoneMatchETag) + || null != ifNotModifiedSinceTime + || null != ifSequenceNumberEqual + || null != ifSequenceNumberLessThan + || null != ifSequenceNumberLessThanOrEqual + || !string.IsNullOrEmpty(leaseId)) + { + this.accessCondition = new AccessCondition() + { + IfMatchETag = ifMatchETag, + IfModifiedSinceTime = ifModifiedSinceTime, + IfNoneMatchETag = ifNoneMatchETag, + IfNotModifiedSinceTime = ifNotModifiedSinceTime, + IfSequenceNumberEqual = ifSequenceNumberEqual, + IfSequenceNumberLessThan = ifSequenceNumberLessThan, + IfSequenceNumberLessThanOrEqual = ifSequenceNumberLessThanOrEqual, + LeaseId = leaseId + }; + } + else + { + this.accessCondition = null; + } + } + + internal AccessCondition AccessCondition + { + get + { + return this.accessCondition; + } + + set + { + this.accessCondition = value; + } + } + + /// + /// Serializes the object. + /// + /// Serialization info object. + /// Streaming context. 
+ public void GetObjectData(SerializationInfo info, StreamingContext context) + { + if (info == null) + { + throw new ArgumentNullException("info"); + } + + if (null == this.accessCondition) + { + info.AddValue(IfMatchETagName, null); + info.AddValue(IfModifiedSinceTimeName, null); + info.AddValue(IfNoneMatchETagName, null); + info.AddValue(IfNotModifiedSinceTimeName, null); + info.AddValue(IfSequenceNumberEqualName, null); + info.AddValue(IfSequenceNumberLessThanName, null); + info.AddValue(IfSequenceNumberLessThanOrEqualName, null); + info.AddValue(LeaseIdName, null); + } + else + { + + info.AddValue(IfMatchETagName, this.accessCondition.IfMatchETag); + info.AddValue(IfModifiedSinceTimeName, this.accessCondition.IfModifiedSinceTime); + info.AddValue(IfNoneMatchETagName, this.accessCondition.IfNoneMatchETag); + info.AddValue(IfNotModifiedSinceTimeName, this.accessCondition.IfNotModifiedSinceTime); + info.AddValue(IfSequenceNumberEqualName, this.accessCondition.IfSequenceNumberEqual); + info.AddValue(IfSequenceNumberLessThanName, this.accessCondition.IfSequenceNumberLessThan); + info.AddValue(IfSequenceNumberLessThanOrEqualName, this.accessCondition.IfSequenceNumberLessThanOrEqual); + info.AddValue(LeaseIdName, this.accessCondition.LeaseId); + } + } + + internal static AccessCondition GetAccessCondition(SerializableAccessCondition serialization) + { + if (null == serialization) + { + return null; + } + + return serialization.AccessCondition; + } + + internal static void SetAccessCondition( + ref SerializableAccessCondition serialization, + AccessCondition value) + { + if ((null == serialization) + && (null == value)) + { + return; + } + + if (null != serialization) + { + serialization.AccessCondition = value; + } + else + { + serialization = new SerializableAccessCondition() + { + AccessCondition = value + }; + } + } + } +} diff --git a/lib/SerializationHelper/SerializableBlobRequestOptions.cs b/lib/SerializationHelper/SerializableBlobRequestOptions.cs new file 
mode 100644 index 00000000..c886d84e --- /dev/null +++ b/lib/SerializationHelper/SerializableBlobRequestOptions.cs @@ -0,0 +1,105 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement.SerializationHelper +{ + using System; + using System.Diagnostics; + using System.Runtime.Serialization; + using Microsoft.WindowsAzure.Storage.Blob; + + [Serializable] + internal sealed class SerializableBlobRequestOptions : SerializableRequestOptions + { + private const string DisableContentMD5ValidationName = "DisableContentMD5Validation"; + private const string MaximumExecutionTimeName = "MaximumExecutionTime"; + private const string ServerTimeoutName = "ServerTimeout"; + private const string StoreBlobContentMD5Name = "StoreBlobContentMD5"; + private const string UseTransactionalMD5Name = "UseTransactionalMD5"; + + private BlobRequestOptions blobRequestOptions; + + public SerializableBlobRequestOptions() + { + } + + /// + /// Initializes a new instance of the class. + /// + /// Serialization information. + /// Streaming context. + private SerializableBlobRequestOptions(SerializationInfo info, StreamingContext context) + : base(info, context) + { + bool? disableContentMD5Validation = (bool?)info.GetValue(DisableContentMD5ValidationName, typeof(bool?)); + TimeSpan? maximumExecutionTime = (TimeSpan?)info.GetValue(MaximumExecutionTimeName, typeof(TimeSpan?)); + TimeSpan? serverTimeout = (TimeSpan?)info.GetValue(ServerTimeoutName, typeof(TimeSpan?)); + bool? storeBlobContentMD5 = (bool?)info.GetValue(StoreBlobContentMD5Name, typeof(bool?)); + bool? 
useTransactionalMD5 = (bool?)info.GetValue(UseTransactionalMD5Name, typeof(bool?)); + + if (null != disableContentMD5Validation + || null != maximumExecutionTime + || null != serverTimeout + || null != storeBlobContentMD5 + || null != useTransactionalMD5) + { + this.blobRequestOptions = Transfer_RequestOptions.DefaultBlobRequestOptions; + + this.blobRequestOptions.DisableContentMD5Validation = disableContentMD5Validation; + this.blobRequestOptions.MaximumExecutionTime = maximumExecutionTime; + this.blobRequestOptions.ServerTimeout = serverTimeout; + this.blobRequestOptions.StoreBlobContentMD5 = storeBlobContentMD5; + this.blobRequestOptions.UseTransactionalMD5 = useTransactionalMD5; + } + else + { + this.blobRequestOptions = null; + } + } + + protected override IRequestOptions RequestOptions + { + get + { + return this.blobRequestOptions; + } + + set + { + BlobRequestOptions requestOptions = value as BlobRequestOptions; + Debug.Assert(null != requestOptions, "Setting RequestOptions in BlobRequestOptionsSerializer, but the value is not a BlobRequestOptions instance."); + this.blobRequestOptions = requestOptions; + } + } + + /// + /// Serializes the object. + /// + /// Serialization info object. + /// Streaming context. 
+ public override void GetObjectData(SerializationInfo info, StreamingContext context) + { + base.GetObjectData(info, context); + + if (null == this.blobRequestOptions) + { + info.AddValue(DisableContentMD5ValidationName, null); + info.AddValue(MaximumExecutionTimeName, null, typeof(TimeSpan?)); + info.AddValue(ServerTimeoutName, null, typeof(TimeSpan?)); + info.AddValue(StoreBlobContentMD5Name, null); + info.AddValue(UseTransactionalMD5Name, null); + } + else + { + info.AddValue(DisableContentMD5ValidationName, this.blobRequestOptions.DisableContentMD5Validation); + info.AddValue(MaximumExecutionTimeName, this.blobRequestOptions.MaximumExecutionTime, typeof(TimeSpan?)); + info.AddValue(ServerTimeoutName, this.blobRequestOptions.ServerTimeout, typeof(TimeSpan?)); + info.AddValue(StoreBlobContentMD5Name, this.blobRequestOptions.StoreBlobContentMD5); + info.AddValue(UseTransactionalMD5Name, this.blobRequestOptions.UseTransactionalMD5); + } + } + } +} diff --git a/lib/SerializationHelper/SerializableCloudBlob.cs b/lib/SerializationHelper/SerializableCloudBlob.cs new file mode 100644 index 00000000..45ece249 --- /dev/null +++ b/lib/SerializationHelper/SerializableCloudBlob.cs @@ -0,0 +1,134 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement.SerializationHelper +{ + using System; + using System.Globalization; + using System.Runtime.Serialization; + using Microsoft.WindowsAzure.Storage.Auth; + using Microsoft.WindowsAzure.Storage.Blob; + + [Serializable] + internal class SerializableCloudBlob : ISerializable + { + private const string BlobUriName = "BlobUri"; + private const string BlobTypeName = "BlobType"; + + private Uri blobUri; + + private BlobType blobType; + + private CloudBlob blob; + + public SerializableCloudBlob() + { + } + + private 
SerializableCloudBlob(SerializationInfo info, StreamingContext context) + { + if (info == null) + { + throw new ArgumentNullException("info"); + } + + this.blobUri = (Uri)info.GetValue(BlobUriName, typeof(Uri)); + this.blobType = (BlobType)info.GetValue(BlobTypeName, typeof(BlobType)); + this.CreateCloudBlobInstance(null); + } + + internal CloudBlob Blob + { + get + { + return this.blob; + } + + set + { + this.blob = value; + + if (null == this.blob) + { + this.blobUri = null; + this.blobType = BlobType.Unspecified; + } + else + { + this.blobUri = this.blob.SnapshotQualifiedUri; + this.blobType = this.blob.BlobType; + } + } + } + + public void GetObjectData(SerializationInfo info, StreamingContext context) + { + if (info == null) + { + throw new ArgumentNullException("info"); + } + + info.AddValue(BlobUriName, this.blobUri, typeof(Uri)); + info.AddValue(BlobTypeName, this.blobType); + } + + internal static CloudBlob GetBlob(SerializableCloudBlob blobSerialization) + { + if (null == blobSerialization) + { + return null; + } + + return blobSerialization.Blob; + } + + internal static void SetBlob(ref SerializableCloudBlob blobSerialization, CloudBlob value) + { + if ((null == blobSerialization) + && (null == value)) + { + return; + } + + if (null != blobSerialization) + { + blobSerialization.Blob = value; + } + else + { + blobSerialization = new SerializableCloudBlob() + { + Blob = value + }; + } + } + + internal void UpdateStorageCredentials(StorageCredentials credentials) + { + this.CreateCloudBlobInstance(credentials); + } + + private void CreateCloudBlobInstance(StorageCredentials credentials) + { + if ((null != this.blob) + && this.blob.ServiceClient.Credentials == credentials) + { + return; + } + + if (null == this.blobUri) + { + throw new InvalidOperationException( + string.Format( + CultureInfo.CurrentCulture, + Resources.ParameterCannotBeNullException, + "blobUri")); + } + + this.blob = Utils.GetBlobReference(this.blobUri, credentials, this.blobType); + } + } 
+} diff --git a/lib/SerializationHelper/SerializableCloudFile.cs b/lib/SerializationHelper/SerializableCloudFile.cs new file mode 100644 index 00000000..cbc2773f --- /dev/null +++ b/lib/SerializationHelper/SerializableCloudFile.cs @@ -0,0 +1,127 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement.SerializationHelper +{ + using System; + using System.Diagnostics; + using System.Globalization; + using System.Runtime.Serialization; + using Microsoft.WindowsAzure.Storage.Auth; + using Microsoft.WindowsAzure.Storage.File; + + [Serializable] + internal class SerializableCloudFile : ISerializable + { + private const string FileUriName = "FileUri"; + + private Uri fileUri; + + private CloudFile file; + + public SerializableCloudFile() + { + } + + private SerializableCloudFile(SerializationInfo info, StreamingContext context) + { + if (info == null) + { + throw new ArgumentNullException("info"); + } + + this.fileUri = (Uri)info.GetValue(FileUriName, typeof(Uri)); + this.CreateCloudFileInstance(null); + } + + internal CloudFile File + { + get + { + return this.file; + } + + set + { + this.file = value; + + if (null == this.file) + { + this.fileUri = null; + } + else + { + this.fileUri = this.file.Uri; + } + } + } + + public void GetObjectData(SerializationInfo info, StreamingContext context) + { + if (info == null) + { + throw new ArgumentNullException("info"); + } + + info.AddValue(FileUriName, this.fileUri, typeof(Uri)); + } + + internal static CloudFile GetFile(SerializableCloudFile fileSerialization) + { + if (null == fileSerialization) + { + return null; + } + + return fileSerialization.File; + } + + internal static void SetFile(ref SerializableCloudFile fileSerialization, CloudFile value) + { + if (null == fileSerialization + && null == value) + { + 
return; + } + + if (null != fileSerialization) + { + fileSerialization.File = value; + } + else + { + fileSerialization = new SerializableCloudFile() + { + File = value + }; + } + } + + internal void UpdateStorageCredentials(StorageCredentials credentials) + { + this.CreateCloudFileInstance(credentials); + } + + private void CreateCloudFileInstance(StorageCredentials credentials) + { + if (null != this.file + && this.file.ServiceClient.Credentials == credentials) + { + return; + } + + if (null == this.fileUri) + { + throw new InvalidOperationException( + string.Format( + CultureInfo.CurrentCulture, + Resources.ParameterCannotBeNullException, + "fileUri")); + } + + this.file = new CloudFile(this.fileUri, credentials); + } + } +} diff --git a/lib/SerializationHelper/SerializableFileRequestOptions.cs b/lib/SerializationHelper/SerializableFileRequestOptions.cs new file mode 100644 index 00000000..03014954 --- /dev/null +++ b/lib/SerializationHelper/SerializableFileRequestOptions.cs @@ -0,0 +1,111 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement.SerializationHelper +{ + using System; + using System.Diagnostics; + using System.Runtime.Serialization; + using Microsoft.WindowsAzure.Storage.File; + + /// + /// Define class to serialize FileRequestOptions instance. 
+ /// + [Serializable] + internal sealed class SerializableFileRequestOptions : SerializableRequestOptions, ISerializable + { + private const string DisableContentMD5ValidationName = "DisableContentMD5Validation"; + private const string MaximumExecutionTimeName = "MaximumExecutionTime"; + private const string ServerTimeoutName = "ServerTimeout"; + private const string StoreFileContentMD5Name = "StoreFileContentMD5"; + private const string UseTransactionalMD5Name = "UseTransactionalMD5"; + + private FileRequestOptions fileRequestOptions; + + /// + /// Initializes a new instance of the class. + /// + public SerializableFileRequestOptions() + { + } + + /// + /// Initializes a new instance of the class. + /// + /// Serialization information. + /// Streaming context. + private SerializableFileRequestOptions(SerializationInfo info, StreamingContext context) + : base (info, context) + { + bool? disableContentMD5Validation = (bool?)info.GetValue(DisableContentMD5ValidationName, typeof(bool?)); + TimeSpan? maximumExecutionTime = (TimeSpan?)info.GetValue(MaximumExecutionTimeName, typeof(TimeSpan?)); + TimeSpan? serverTimeout = (TimeSpan?)info.GetValue(ServerTimeoutName, typeof(TimeSpan?)); + bool? storeFileContentMD5 = (bool?)info.GetValue(StoreFileContentMD5Name, typeof(bool?)); + bool? 
useTransactionalMD5 = (bool?)info.GetValue(UseTransactionalMD5Name, typeof(bool?)); + + if (null != disableContentMD5Validation + || null != maximumExecutionTime + || null != serverTimeout + || null != storeFileContentMD5 + || null != useTransactionalMD5) + { + this.fileRequestOptions = Transfer_RequestOptions.DefaultFileRequestOptions; + + this.fileRequestOptions.DisableContentMD5Validation = disableContentMD5Validation; + this.fileRequestOptions.MaximumExecutionTime = maximumExecutionTime; + this.fileRequestOptions.ServerTimeout = serverTimeout; + this.fileRequestOptions.StoreFileContentMD5 = storeFileContentMD5; + this.fileRequestOptions.UseTransactionalMD5 = useTransactionalMD5; + } + else + { + this.fileRequestOptions = null; + } + } + + protected override IRequestOptions RequestOptions + { + get + { + return this.fileRequestOptions; + } + + set + { + FileRequestOptions requestOptions = value as FileRequestOptions; + Debug.Assert(null != requestOptions, "Setting RequestOptions in FlobRequestOptionsSerializer, but the value is not a FileRequestOptions instance."); + + this.fileRequestOptions = requestOptions; + } + } + + /// + /// Serializes the object. + /// + /// Serialization info object. + /// Streaming context. 
+ public override void GetObjectData(SerializationInfo info, StreamingContext context) + { + base.GetObjectData(info, context); + + if (null == this.fileRequestOptions) + { + info.AddValue(DisableContentMD5ValidationName, null); + info.AddValue(MaximumExecutionTimeName, null, typeof(TimeSpan?)); + info.AddValue(ServerTimeoutName, null, typeof(TimeSpan?)); + info.AddValue(StoreFileContentMD5Name, null); + info.AddValue(UseTransactionalMD5Name, null); + } + else + { + info.AddValue(DisableContentMD5ValidationName, this.fileRequestOptions.DisableContentMD5Validation); + info.AddValue(MaximumExecutionTimeName, this.fileRequestOptions.MaximumExecutionTime, typeof(TimeSpan?)); + info.AddValue(ServerTimeoutName, this.fileRequestOptions.ServerTimeout, typeof(TimeSpan?)); + info.AddValue(StoreFileContentMD5Name, this.fileRequestOptions.StoreFileContentMD5); + info.AddValue(UseTransactionalMD5Name, this.fileRequestOptions.UseTransactionalMD5); + } + } + } +} diff --git a/lib/SerializationHelper/SerializableRequestOptions.cs b/lib/SerializationHelper/SerializableRequestOptions.cs new file mode 100644 index 00000000..5424c771 --- /dev/null +++ b/lib/SerializationHelper/SerializableRequestOptions.cs @@ -0,0 +1,111 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement.SerializationHelper +{ + using System; + using System.Diagnostics; + using System.Runtime.Serialization; + using Microsoft.WindowsAzure.Storage.Blob; + using Microsoft.WindowsAzure.Storage.File; + + [Serializable] + internal abstract class SerializableRequestOptions : ISerializable + { + protected SerializableRequestOptions() + { + } + + /// + /// Initializes a new instance of the class. + /// + /// Serialization information. + /// Streaming context. 
+ protected SerializableRequestOptions(SerializationInfo info, StreamingContext context) + { + if (info == null) + { + throw new System.ArgumentNullException("info"); + } + } + + abstract protected IRequestOptions RequestOptions + { + get; + set; + } + + /// + /// Serializes the object. + /// + /// Serialization info object. + /// Streaming context. + public virtual void GetObjectData(SerializationInfo info, StreamingContext context) + { + if (info == null) + { + throw new System.ArgumentNullException("info"); + } + } + + internal static IRequestOptions GetRequestOptions(SerializableRequestOptions serializer) + { + if (null == serializer) + { + return null; + } + + return serializer.RequestOptions; + } + + internal static void SetRequestOptions(ref SerializableRequestOptions serializer, IRequestOptions requestOptions) + { + if (null == serializer && null == requestOptions) + { + return; + } + + if (null == serializer) + { + serializer = CreateSerializableRequestOptions(requestOptions); + } + else + { + if ((requestOptions is FileRequestOptions) + && (serializer is SerializableBlobRequestOptions)) + { + serializer = new SerializableFileRequestOptions(); + } + else if ((requestOptions is BlobRequestOptions) + && (serializer is SerializableFileRequestOptions)) + { + serializer = new SerializableBlobRequestOptions(); + } + + serializer.RequestOptions = requestOptions; + } + } + + private static SerializableRequestOptions CreateSerializableRequestOptions(IRequestOptions requestOptions) + { + if (requestOptions is FileRequestOptions) + { + return new SerializableFileRequestOptions() + { + RequestOptions = requestOptions + }; + } + else + { + Debug.Assert(requestOptions is BlobRequestOptions, "Request options should be an instance of BlobRequestOptions when code reach here."); + return new SerializableBlobRequestOptions() + { + RequestOptions = requestOptions + }; + } + } + } +} diff --git a/lib/TransferCheckpoint.cs b/lib/TransferCheckpoint.cs new file mode 100644 index 
00000000..faf9e7c9 --- /dev/null +++ b/lib/TransferCheckpoint.cs @@ -0,0 +1,139 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.Collections.Concurrent; + using System.Collections.Generic; + using System.Runtime.Serialization; + using TransferKey = System.Tuple; + + /// + /// Represents a checkpoint from which a transfer may be resumed and continue. + /// + [Serializable] + public class TransferCheckpoint : ISerializable + { + private const string SingleObjectTransfersName = "SingleObjectTransfers"; + + /// + /// Transfers associated with this transfer checkpoint. + /// + private ConcurrentDictionary transfers = new ConcurrentDictionary(); + + /// + /// Initializes a new instance of the class. + /// + internal TransferCheckpoint() + { + } + + /// + /// Initializes a new instance of the class. + /// + /// Serialization information. + /// Streaming context. + protected TransferCheckpoint(SerializationInfo info, StreamingContext context) + { + if (info == null) + { + throw new System.ArgumentNullException("info"); + } + + var singleObjectTransfers = (List)info.GetValue(SingleObjectTransfersName, typeof(List)); + foreach(var transfer in singleObjectTransfers) + { + this.AddTransfer(transfer); + } + } + + + /// + /// Gets a list of all transfers + /// + internal ICollection AllTransfers + { + get + { + return this.transfers.Values; + } + } + + /// + /// Serializes the checkpoint. + /// + /// Serialization info object. + /// Streaming context. 
+ public virtual void GetObjectData(SerializationInfo info, StreamingContext context) + { + if (info == null) + { + throw new ArgumentNullException("info"); + } + + List singleObjectTransfers = new List(); + foreach(var kvPair in this.transfers) + { + SingleObjectTransfer transfer = kvPair.Value as SingleObjectTransfer; + if (transfer != null) + { + singleObjectTransfers.Add(transfer); + } + } + + info.AddValue(SingleObjectTransfersName, singleObjectTransfers, typeof(List)); + } + + /// + /// Adds a transfer to the transfer checkpoint. + /// + /// The transfer to be kept track of. + internal void AddTransfer(Transfer transfer) + { + this.transfers.TryAdd(new TransferKey(transfer.Source, transfer.Destination), transfer); + } + + /// + /// Gets a transfer with the specified source location, destination location and transfer method. + /// + /// Source location of the transfer. + /// Destination location of the transfer. + /// Transfer method. + /// A transfer that matches the specified source location, destination location and transfer method; Or null if no matches. 
+ internal Transfer GetTransfer(TransferLocation sourceLocation, TransferLocation destLocation, TransferMethod transferMethod) + { + Transfer transfer = null; + if (this.transfers.TryGetValue(new TransferKey(sourceLocation, destLocation), out transfer)) + { + if (transfer.TransferMethod == transferMethod) + { + return transfer; + } + } + + return null; + } + + /// + /// Gets a static snapshot of this transfer checkpoint + /// + /// A snapshot of current transfer checkpoint + internal TransferCheckpoint Copy() + { + TransferCheckpoint copyObj = new TransferCheckpoint(); + foreach (var kvPair in this.transfers) + { + SingleObjectTransfer transfer = kvPair.Value as SingleObjectTransfer; + if (transfer != null) + { + copyObj.AddTransfer(transfer.Copy()); + } + } + + return copyObj; + } + } +} diff --git a/lib/TransferConfigurations.cs b/lib/TransferConfigurations.cs new file mode 100644 index 00000000..ce3284e4 --- /dev/null +++ b/lib/TransferConfigurations.cs @@ -0,0 +1,156 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.Globalization; + using System.Reflection; + using ClientLibraryConstants = Microsoft.WindowsAzure.Storage.Shared.Protocol.Constants; + + /// + /// TransferConfigurations class. + /// + public class TransferConfigurations + { + /// + /// Stores the BlockSize to use for Windows Azure Storage transfers. + /// + private int blockSize; + + /// + /// How many work items to process in parallel. + /// + private int parallelOperations; + + /// + /// Maximum amount of cache memory to use in bytes. + /// + private long maximumCacheSize; + + /// + /// Instance to call native methods to get current memory status. 
+ /// + private GlobalMemoryStatusNativeMethods memStatus = new GlobalMemoryStatusNativeMethods(); + + /// + /// Initializes a new instance of the + /// class. + /// + public TransferConfigurations() + { + // setup default values. + this.ParallelOperations = Environment.ProcessorCount * 8; + this.BlockSize = Constants.DefaultBlockSize; + } + + /// + /// Gets or sets a value indicating how many work items to process + /// concurrently. Downloading or uploading a single blob can consist + /// of a large number of work items. + /// + /// How many work items to process concurrently. + public int ParallelOperations + { + get + { + return this.parallelOperations; + } + + set + { + if (value <= 0) + { + throw new ArgumentException(string.Format( + CultureInfo.CurrentCulture, + Resources.ParallelCountNotPositiveException)); + } + + this.parallelOperations = value; + this.SetMaxMemoryCacheSize(); + } + } + + /// + /// Gets or sets the user agent suffix + /// + public string UserAgentSuffix + { + get; + set; + } + + /// + /// Gets or sets a value indicating how much memory we can cache + /// during upload/download. + /// + /// Maximum amount of cache memory to use in bytes. + internal long MaximumCacheSize + { + get + { + return this.maximumCacheSize; + } + + set + { + if (value < Constants.MaxBlockSize) + { + throw new ArgumentException(string.Format( + CultureInfo.CurrentCulture, + Resources.SmallMemoryCacheSizeLimitationException, + Utils.BytesToHumanReadableSize(Constants.MaxBlockSize))); + } + + this.maximumCacheSize = value; + } + } + + /// + /// Gets or sets the BlockSize to use for Windows Azure Storage transfers. + /// + /// BlockSize to use for Windows Azure Storage transfers. 
+ internal int BlockSize + { + get + { + return this.blockSize; + } + + set + { + if (Constants.MinBlockSize > value || value > Constants.MaxBlockSize) + { + string errorMessage = string.Format( + CultureInfo.CurrentCulture, + Resources.BlockSizeOutOfRangeException, + Utils.BytesToHumanReadableSize(Constants.MinBlockSize), + Utils.BytesToHumanReadableSize(Constants.MaxBlockSize)); + + throw new ArgumentOutOfRangeException("value", value, errorMessage); + } + + this.blockSize = value; + } + } + + private void SetMaxMemoryCacheSize() + { + if (0 == this.memStatus.AvailablePhysicalMemory) + { + this.MaximumCacheSize = Constants.CacheSizeMultiplierInByte * this.ParallelOperations; + } + else + { + this.MaximumCacheSize = + Math.Min( + Constants.CacheSizeMultiplierInByte * this.ParallelOperations, + Math.Min( + (long)(this.memStatus.AvailablePhysicalMemory * Constants.MemoryCacheMultiplier), + Constants.MemoryCacheMaximum)); + } + } + } +} diff --git a/lib/TransferContext.cs b/lib/TransferContext.cs new file mode 100644 index 00000000..ec35df8c --- /dev/null +++ b/lib/TransferContext.cs @@ -0,0 +1,124 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + + /// + /// Represents the context for a transfer, and provides additional runtime information about its execution. + /// + public class TransferContext + { + /// + /// Initializes a new instance of the class. + /// + public TransferContext() + : this(null) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// An object representing the last checkpoint from which the transfer continues on. 
+ public TransferContext(TransferCheckpoint checkpoint) + { + if (checkpoint == null) + { + this.Checkpoint = new TransferCheckpoint(); + } + else + { + this.Checkpoint = checkpoint.Copy(); + } + + this.OverallProgressTracker = new TransferProgressTracker(); + foreach(Transfer transfer in this.Checkpoint.AllTransfers) + { + this.OverallProgressTracker.AddBytesTransferred(transfer.ProgressTracker.BytesTransferred); + this.OverallProgressTracker.AddNumberOfFilesTransferred(transfer.ProgressTracker.NumberOfFilesTransferred); + this.OverallProgressTracker.AddNumberOfFilesSkipped(transfer.ProgressTracker.NumberOfFilesSkipped); + this.OverallProgressTracker.AddNumberOfFilesFailed(transfer.ProgressTracker.NumberOfFilesFailed); + } + } + + /// + /// Gets or sets the client request id. + /// + /// A string containing the client request id. + /// + /// Setting this property modifies all the requests involved in the related transfer operation to include the the HTTP x-ms-client-request-id header. + /// + public string ClientRequestId + { + get; + set; + } + + /// + /// Gets or sets the logging level to be used for the related tranfer operation. + /// + /// A value of type that specifies which events are logged for the related transfer operation. + public LogLevel LogLevel + { + get; + set; + } + + /// + /// Gets the last checkpoint of the transfer. + /// + public TransferCheckpoint LastCheckpoint + { + get + { + return this.Checkpoint.Copy(); + } + } + + /// + /// Callback invoked to tell whether to overwrite an existing destination. + /// + public OverwriteCallback OverwriteCallback + { + get; + set; + } + + /// + /// Gets or sets the progress update handler. + /// + public IProgress ProgressHandler + { + get + { + return this.OverallProgressTracker.ProgressHandler; + } + set + { + this.OverallProgressTracker.ProgressHandler = value; + } + } + + /// + /// Gets the overall transfer progress. 
+ /// + internal TransferProgressTracker OverallProgressTracker + { + get; + set; + } + + /// + /// Gets the transfer checkpoint that tracks all transfers related to this transfer context. + /// + internal TransferCheckpoint Checkpoint + { + get; + private set; + } + } +} diff --git a/lib/TransferControllers/AsyncCopyControllers/AsyncCopyController.cs b/lib/TransferControllers/AsyncCopyControllers/AsyncCopyController.cs new file mode 100644 index 00000000..ed5a80a0 --- /dev/null +++ b/lib/TransferControllers/AsyncCopyControllers/AsyncCopyController.cs @@ -0,0 +1,678 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers +{ + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.Globalization; + using System.Net; + using System.Threading; + using System.Threading.Tasks; + using Microsoft.WindowsAzure.Storage.Blob; + using Microsoft.WindowsAzure.Storage.Blob.Protocol; + using Microsoft.WindowsAzure.Storage.File; + + internal abstract class AsyncCopyController : TransferControllerBase + { + /// + /// Timer to signal refresh status. + /// + private Timer statusRefreshTimer; + + /// + /// Lock to protect statusRefreshTimer. + /// + private object statusRefreshTimerLock = new object(); + + /// + /// Keeps track of the internal state-machine state. + /// + private volatile State state; + + /// + /// Indicates whether the controller has work available + /// or not for the calling code. + /// + private bool hasWork; + + /// + /// Indicates the BytesCopied value of last CopyState + /// + private long lastBytesCopied; + + /// + /// Initializes a new instance of the class. + /// + /// Scheduler object which creates this object. + /// Instance of job to start async copy. 
+ /// Token user input to notify about cancellation. + internal AsyncCopyController( + TransferScheduler scheduler, + TransferJob transferJob, + CancellationToken userCancellationToken) + : base(scheduler, transferJob, userCancellationToken) + { + if (null == transferJob.Destination) + { + throw new ArgumentException( + string.Format( + CultureInfo.CurrentCulture, + Resources.ParameterCannotBeNullException, + "Dest"), + "transferJob"); + } + + if ((null == transferJob.Source.SourceUri && null == transferJob.Source.Blob && null == transferJob.Source.AzureFile) + || (null != transferJob.Source.SourceUri && null != transferJob.Source.Blob) + || (null != transferJob.Source.Blob && null != transferJob.Source.AzureFile) + || (null != transferJob.Source.SourceUri && null != transferJob.Source.AzureFile)) + { + throw new ArgumentException( + string.Format( + CultureInfo.CurrentCulture, + Resources.ProvideExactlyOneOfThreeParameters, + "Source.SourceUri", + "Source.Blob", + "Source.AzureFile"), + "transferJob"); + } + + this.SourceUri = this.TransferJob.Source.SourceUri; + this.SourceBlob = this.TransferJob.Source.Blob; + this.SourceFile = this.TransferJob.Source.AzureFile; + + // initialize the status refresh timer + this.statusRefreshTimer = new Timer( + new TimerCallback( + delegate(object timerState) + { + this.hasWork = true; + })); + + this.SetInitialStatus(); + } + + /// + /// Internal state values. 
+ /// + private enum State + { + FetchSourceAttributes, + GetDestination, + StartCopy, + GetCopyState, + Finished, + Error, + } + + public override bool HasWork + { + get + { + return this.hasWork; + } + } + + protected CloudBlob SourceBlob + { + get; + private set; + } + + protected CloudFile SourceFile + { + get; + private set; + } + + protected Uri SourceUri + { + get; + private set; + } + + protected abstract Uri DestUri + { + get; + } + + public static AsyncCopyController CreateAsyncCopyController(TransferScheduler transferScheduler, TransferJob transferJob, CancellationToken cancellationToken) + { + if (transferJob.Destination.TransferLocationType == TransferLocationType.AzureFile) + { + return new FileAsyncCopyController(transferScheduler, transferJob, cancellationToken); + } + + if (transferJob.Destination.TransferLocationType == TransferLocationType.AzureBlob) + { + return new BlobAsyncCopyController(transferScheduler, transferJob, cancellationToken); + } + + throw new InvalidOperationException(Resources.CanOnlyCopyToFileOrBlobException); + } + + /// + /// Do work in the controller. + /// A controller controls the whole transfer from source to destination, + /// which could be split into several work items. This method is to let controller to do one of those work items. + /// There could be several work items to do at the same time in the controller. + /// + /// Whether the controller has completed. This is to tell TransferScheduler + /// whether the controller can be disposed. 
protected override async Task DoWorkInternalAsync()
{
    // Dispatch exactly one work item according to the current state.
    switch (this.state)
    {
        case State.FetchSourceAttributes:
            await this.FetchSourceAttributesAsync();
            break;
        case State.GetDestination:
            await this.GetDestinationAsync();
            break;
        case State.StartCopy:
            await this.StartCopyAsync();
            break;
        case State.GetCopyState:
            await this.GetCopyStateAsync();
            break;
        case State.Finished:
        case State.Error:
        default:
            break;
    }

    // Finished/Error tells the scheduler this controller can be disposed.
    return (State.Error == this.state || State.Finished == this.state);
}

/// <summary>
/// Sets the state of the controller to Error, while recording
/// the last occurred exception and setting the HasWork and
/// IsFinished fields.
/// </summary>
/// <param name="ex">Exception to record.</param>
protected override void SetErrorState(Exception ex)
{
    Debug.Assert(
        this.state != State.Finished,
        "SetErrorState called, while controller already in Finished state");

    this.state = State.Error;
    this.hasWork = false;
}

/// <summary>
/// Taken from Microsoft.WindowsAzure.Storage.Core.Util.HttpUtility: Parse the http query string.
/// </summary>
/// <param name="query">Http query string.</param>
/// <returns>A dictionary of query pairs.</returns>
protected static Dictionary<string, string> ParseQueryString(string query)
{
    Dictionary<string, string> retVal = new Dictionary<string, string>();
    if (string.IsNullOrEmpty(query))
    {
        return retVal;
    }

    // remove ? if present
    if (query.StartsWith("?", StringComparison.OrdinalIgnoreCase))
    {
        query = query.Substring(1);
    }

    string[] valuePairs = query.Split(new string[] { "&" }, StringSplitOptions.RemoveEmptyEntries);

    foreach (string vp in valuePairs)
    {
        int equalDex = vp.IndexOf("=", StringComparison.OrdinalIgnoreCase);
        if (equalDex < 0)
        {
            // Key without a value. Use the indexer rather than Add so a
            // duplicated key in the query cannot throw ArgumentException.
            retVal[Uri.UnescapeDataString(vp)] = null;
            continue;
        }

        string key = vp.Substring(0, equalDex);
        string value = vp.Substring(equalDex + 1);

        // Last occurrence wins for duplicate keys (indexer, not Add).
        retVal[Uri.UnescapeDataString(key)] = Uri.UnescapeDataString(value);
    }

    return retVal;
}

// Maps the job's persisted status to the initial state-machine state; any
// status other than NotStarted/Transfer/Monitor is invalid for this controller.
private void SetInitialStatus()
{
    switch (this.TransferJob.Status)
    {
        case TransferJobStatus.NotStarted:
            this.TransferJob.Status = TransferJobStatus.Transfer;
            break;
        case TransferJobStatus.Transfer:
            break;
        case TransferJobStatus.Monitor:
            break;
        case TransferJobStatus.Finished:
        default:
            throw new ArgumentException(string.Format(
                CultureInfo.CurrentCulture,
                Resources.InvalidInitialEntryStatusForControllerException,
                this.TransferJob.Status,
                this.GetType().Name));
    }

    this.SetHasWorkAfterStatusChanged();
}

private void SetHasWorkAfterStatusChanged()
{
    if (TransferJobStatus.Transfer == this.TransferJob.Status)
    {
        // An external source URI has no attributes to fetch; skip straight
        // to checking the destination.
        if (null != this.SourceUri)
        {
            this.state = State.GetDestination;
        }
        else
        {
            this.state = State.FetchSourceAttributes;
        }
    }
    else if (TransferJobStatus.Monitor == this.TransferJob.Status)
    {
        // Resuming a job whose copy was already started: go poll its status.
        this.state = State.GetCopyState;
    }
    else
    {
        Debug.Fail("We should never be here");
    }

    this.hasWork = true;
}

private async Task FetchSourceAttributesAsync()
{
    Debug.Assert(
        this.state == State.FetchSourceAttributes,
        "FetchSourceAttributesAsync called, but state isn't FetchSourceAttributes");

    this.hasWork = false;
    this.StartCallbackHandler();

    try
    {
        await this.DoFetchSourceAttributesAsync();
    }
    catch (StorageException e)
    {
        HandleFetchSourceAttributesException(e);
        throw;
    }

    this.TransferJob.Source.CheckedAccessCondition = true;

    this.state = State.GetDestination;
    this.hasWork = true;
}

private static void HandleFetchSourceAttributesException(StorageException e)
{
    // Getting a storage exception is expected if the source doesn't
    // exist. For those cases that indicate the source doesn't exist
    // we will set a specific error state.
    if (null != e.RequestInformation &&
        e.RequestInformation.HttpStatusCode == (int)HttpStatusCode.NotFound)
    {
        throw new InvalidOperationException(Resources.SourceDoesNotExistException);
    }
}

private async Task GetDestinationAsync()
{
    Debug.Assert(
        this.state == State.GetDestination,
        "GetDestinationAsync called, but state isn't GetDestination");

    this.hasWork = false;
    this.StartCallbackHandler();

    try
    {
        await this.DoFetchDestAttributesAsync();
    }
    catch (StorageException se)
    {
        if (!this.HandleGetDestinationResult(se))
        {
            // FIX: rethrow with 'throw;' (was 'throw se;') so the original
            // stack trace of the storage exception is preserved.
            throw;
        }

        return;
    }

    this.HandleGetDestinationResult(null);
}

private bool HandleGetDestinationResult(Exception e)
{
    bool destExist = true;

    if (null != e)
    {
        StorageException se = e as StorageException;

        // Getting a storage exception is expected if the destination doesn't
        // exist. In this case we won't error out, but set the
        // destExist flag to false to indicate we will copy to
        // a new blob/file instead of overwriting an existing one.
        if (null != se &&
            null != se.RequestInformation &&
            se.RequestInformation.HttpStatusCode == (int)HttpStatusCode.NotFound)
        {
            destExist = false;
        }
        else
        {
            this.DoHandleGetDestinationException(se);
            return false;
        }
    }

    this.TransferJob.Destination.CheckedAccessCondition = true;

    // A resumed Monitor-state job must carry the copy id it is monitoring.
    if ((TransferJobStatus.Monitor == this.TransferJob.Status)
        && string.IsNullOrEmpty(this.TransferJob.CopyId))
    {
        throw new InvalidOperationException(Resources.RestartableInfoCorruptedException);
    }

    // If destination file exists, query user whether to overwrite it.
    Uri sourceUri = this.GetSourceUri();
    this.CheckOverwrite(
        destExist,
        sourceUri.ToString(),
        this.DestUri.ToString());

    this.UpdateProgressAddBytesTransferred(0);

    this.state = State.StartCopy;

    this.hasWork = true;
    return true;
}

private async Task StartCopyAsync()
{
    Debug.Assert(
        this.state == State.StartCopy,
        "StartCopyAsync called, but state isn't StartCopy");

    this.hasWork = false;

    try
    {
        this.TransferJob.CopyId = await this.DoStartCopyAsync();
    }
    catch (StorageException se)
    {
        if (!this.HandleStartCopyResult(se))
        {
            throw;
        }

        return;
    }

    this.HandleStartCopyResult(null);
}

// Returns true when the start-copy outcome lets us advance to monitoring;
// false when the caller should surface the exception / stop.
private bool HandleStartCopyResult(StorageException se)
{
    if (null != se)
    {
        if (null != se.RequestInformation
            && null != se.RequestInformation.ExtendedErrorInformation
            && BlobErrorCodeStrings.PendingCopyOperation == se.RequestInformation.ExtendedErrorInformation.ErrorCode)
        {
            // A copy is already pending on the destination. Check whether it is
            // ours (same base URI and same source snapshot) before adopting it.
            // NOTE(review): blocking on .Result in this synchronous helper risks
            // deadlock when a synchronization context is present and wraps
            // failures in AggregateException; consider making this path async.
            CopyState copyState = this.FetchCopyStateAsync().Result;

            if (null == copyState)
            {
                return false;
            }

            string baseUriString = copyState.Source.GetComponents(
                UriComponents.Host | UriComponents.Port | UriComponents.Path, UriFormat.UriEscaped);

            Uri sourceUri = this.GetSourceUri();

            string ourBaseUriString = sourceUri.GetComponents(UriComponents.Host | UriComponents.Port | UriComponents.Path, UriFormat.UriEscaped);

            DateTimeOffset? baseSnapshot = null;
            DateTimeOffset? ourSnapshot = null == this.SourceBlob ? null : this.SourceBlob.SnapshotTime;

            string snapshotString;
            if (ParseQueryString(copyState.Source.Query).TryGetValue("snapshot", out snapshotString))
            {
                if (!string.IsNullOrEmpty(snapshotString))
                {
                    DateTimeOffset snapshotTime;
                    if (DateTimeOffset.TryParse(
                        snapshotString,
                        CultureInfo.CurrentCulture,
                        DateTimeStyles.AdjustToUniversal,
                        out snapshotTime))
                    {
                        baseSnapshot = snapshotTime;
                    }
                }
            }

            // A pending copy from a different source (or snapshot) is not ours.
            if (!baseUriString.Equals(ourBaseUriString) ||
                !baseSnapshot.Equals(ourSnapshot))
            {
                return false;
            }

            if (string.IsNullOrEmpty(this.TransferJob.CopyId))
            {
                this.TransferJob.CopyId = copyState.CopyId;
            }
        }
        else
        {
            return false;
        }
    }

    this.state = State.GetCopyState;
    this.hasWork = true;
    return true;
}

private async Task GetCopyStateAsync()
{
    Debug.Assert(
        this.state == State.GetCopyState,
        "GetCopyStateAsync called, but state isn't GetCopyState");

    this.hasWork = false;
    this.StartCallbackHandler();

    CopyState copyState = null;

    try
    {
        copyState = await this.FetchCopyStateAsync();
    }
    catch (StorageException se)
    {
        if (null != se.RequestInformation &&
            se.RequestInformation.HttpStatusCode == (int)HttpStatusCode.NotFound)
        {
            // The reason of 404 (Not Found) may be that the destination blob has not been created yet.
            // FIX: return after re-arming the timer; previously control fell
            // through to HandleFetchCopyStateResult(null), which always throws,
            // making this retry path unreachable.
            this.RestartTimer();
            return;
        }
        else
        {
            throw;
        }
    }

    this.HandleFetchCopyStateResult(copyState);
}

private void HandleFetchCopyStateResult(CopyState copyState)
{
    if (null == copyState)
    {
        // Reach here, the destination should already exist.
        string exceptionMessage = string.Format(
            CultureInfo.CurrentCulture,
            Resources.FailedToRetrieveCopyStateForObjectException,
            this.DestUri.ToString());

        throw new TransferException(
            TransferErrorCode.FailToRetrieveCopyStateForObject,
            exceptionMessage);
    }
    else
    {
        // Verify we are monitoring the right blob copying process.
        if (!this.TransferJob.CopyId.Equals(copyState.CopyId))
        {
            throw new TransferException(
                TransferErrorCode.MismatchCopyId,
                Resources.MismatchFoundBetweenLocalAndServerCopyIdsException);
        }

        if (CopyStatus.Success == copyState.Status)
        {
            this.UpdateTransferProgress(copyState);

            this.DisposeStatusRefreshTimer();

            this.SetFinished();
        }
        else if (CopyStatus.Pending == copyState.Status)
        {
            this.UpdateTransferProgress(copyState);

            // Wait a period to restart refresh the status.
            this.RestartTimer();
        }
        else
        {
            string exceptionMessage = string.Format(
                CultureInfo.CurrentCulture,
                Resources.FailedToAsyncCopyObjectException,
                this.GetSourceUri().ToString(),
                this.DestUri.ToString(),
                copyState.Status.ToString(),
                copyState.StatusDescription);

            // CopyStatus.Invalid | Failed | Aborted
            throw new TransferException(
                TransferErrorCode.AsyncCopyFailed,
                exceptionMessage);
        }
    }
}

private void UpdateTransferProgress(CopyState copyState)
{
    if (null != copyState &&
        copyState.TotalBytes.HasValue)
    {
        Debug.Assert(
            copyState.BytesCopied.HasValue,
            "BytesCopied cannot be null as TotalBytes is not null.");

        if (this.TransferContext != null)
        {
            // Report only the delta since the last poll.
            long bytesTransferred = copyState.BytesCopied.Value;
            this.UpdateProgressAddBytesTransferred(bytesTransferred - this.lastBytesCopied);

            this.lastBytesCopied = bytesTransferred;
        }
    }
}

private void SetFinished()
{
    this.state = State.Finished;
    this.hasWork = false;

    this.FinishCallbackHandler(null);
}

private void RestartTimer()
{
    // Wait a period to restart refresh the status.
    this.statusRefreshTimer.Change(
        TimeSpan.FromMilliseconds(Constants.AsyncCopyStatusRefreshWaitTimeInMilliseconds),
        new TimeSpan(-1));
}

private void DisposeStatusRefreshTimer()
{
    // Double-checked locking so concurrent callers dispose the timer once.
    if (null != this.statusRefreshTimer)
    {
        lock (this.statusRefreshTimerLock)
        {
            if (null != this.statusRefreshTimer)
            {
                this.statusRefreshTimer.Dispose();
                this.statusRefreshTimer = null;
            }
        }
    }
}

private Uri GetSourceUri()
{
    if (null != this.SourceUri)
    {
        return this.SourceUri;
    }

    if (null != this.SourceBlob)
    {
        return this.SourceBlob.SnapshotQualifiedUri;
    }

    return this.SourceFile.Uri;
}

protected async Task DoFetchSourceAttributesAsync()
{
    AccessCondition accessCondition = Utils.GenerateConditionWithCustomerCondition(
        this.TransferJob.Source.AccessCondition,
        this.TransferJob.Source.CheckedAccessCondition);
    OperationContext operationContext = Utils.GenerateOperationContext(this.TransferContext);

    if (this.SourceBlob != null)
    {
        await this.SourceBlob.FetchAttributesAsync(
            accessCondition,
            Utils.GenerateBlobRequestOptions(this.TransferJob.Source.BlobRequestOptions),
            operationContext,
            this.CancellationToken);
    }
    else if (this.SourceFile != null)
    {
        await this.SourceFile.FetchAttributesAsync(
            accessCondition,
            Utils.GenerateFileRequestOptions(this.TransferJob.Source.FileRequestOptions),
            operationContext,
            this.CancellationToken);
    }
}

protected abstract Task DoFetchDestAttributesAsync();
protected abstract Task DoStartCopyAsync();
protected abstract void DoHandleGetDestinationException(StorageException se);
protected abstract Task FetchCopyStateAsync();
}
}
diff --git a/lib/TransferControllers/AsyncCopyControllers/BlobAsyncCopyController.cs b/lib/TransferControllers/AsyncCopyControllers/BlobAsyncCopyController.cs
new file mode 100644
index 00000000..848d7598
--- /dev/null
+++ b/lib/TransferControllers/AsyncCopyControllers/BlobAsyncCopyController.cs
@@ -0,0 +1,173 @@
//------------------------------------------------------------------------------
//
// Copyright (c) Microsoft Corporation
//
//------------------------------------------------------------------------------

namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers
{
    using System;
    using System.Globalization;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.WindowsAzure.Storage;
    using Microsoft.WindowsAzure.Storage.Blob;
    using Microsoft.WindowsAzure.Storage.DataMovement;

    /// <summary>
    /// Blob asynchronous copy.
    /// </summary>
    internal class BlobAsyncCopyController : AsyncCopyController
    {
        // Destination blob of the async copy; validated non-null, non-snapshot in the ctor.
        private CloudBlob destBlob;

        public BlobAsyncCopyController(
            TransferScheduler transferScheduler,
            TransferJob transferJob,
            CancellationToken cancellationToken)
            : base(transferScheduler, transferJob, cancellationToken)
        {
            CloudBlob transferDestBlob = transferJob.Destination.Blob;
            if (null == transferDestBlob)
            {
                throw new ArgumentException(
                    string.Format(
                        CultureInfo.CurrentCulture,
                        Resources.ParameterCannotBeNullException,
                        "Dest.Blob"),
                    "transferJob");
            }

            // Snapshots are read-only; a copy destination must be the base blob.
            if (transferDestBlob.IsSnapshot)
            {
                throw new ArgumentException(Resources.DestinationMustBeBaseBlob, "transferJob");
            }

            CloudBlob transferSourceBlob = transferJob.Source.Blob;

            // Blob-to-blob copy requires matching blob types (block/page/append).
            if (null != transferSourceBlob && transferDestBlob.BlobType != transferSourceBlob.BlobType)
            {
                throw new ArgumentException(Resources.SourceAndDestinationBlobTypeDifferent, "transferJob");
            }

            // Copying a blob onto itself is rejected.
            if ((null != transferSourceBlob)
                && (StorageExtensions.Equals(transferSourceBlob, transferDestBlob)))
            {
                throw new InvalidOperationException(Resources.SourceAndDestinationLocationCannotBeEqualException);
            }

            this.destBlob = transferDestBlob;
        }

        /// <summary>
        /// Gets the destination blob URI.
        /// </summary>
        protected override Uri DestUri
        {
            get
            {
                return this.destBlob.Uri;
            }
        }

        protected override Task DoFetchDestAttributesAsync()
        {
            AccessCondition accessCondition = Utils.GenerateConditionWithCustomerCondition(
                this.TransferJob.Destination.AccessCondition,
                this.TransferJob.Destination.CheckedAccessCondition);

            return this.destBlob.FetchAttributesAsync(
                accessCondition,
                Utils.GenerateBlobRequestOptions(this.TransferJob.Destination.BlobRequestOptions),
                Utils.GenerateOperationContext(this.TransferContext),
                this.CancellationToken);
        }

        // Starts the service-side copy; the branch taken depends on which of the
        // three mutually exclusive source kinds (URI / blob / Azure file) is set.
        protected override Task DoStartCopyAsync()
        {
            AccessCondition destAccessCondition = Utils.GenerateConditionWithCustomerCondition(this.TransferJob.Destination.AccessCondition);

            if (null != this.SourceUri)
            {
                return this.destBlob.StartCopyAsync(
                    this.SourceUri,
                    null,
                    destAccessCondition,
                    Utils.GenerateBlobRequestOptions(this.TransferJob.Destination.BlobRequestOptions),
                    Utils.GenerateOperationContext(this.TransferContext),
                    this.CancellationToken);
            }
            else if (null != this.SourceBlob)
            {
                // Pin the source to the ETag observed when attributes were fetched,
                // so a source change mid-copy fails rather than copying mixed data.
                AccessCondition sourceAccessCondition =
                    AccessCondition.GenerateIfMatchCondition(this.SourceBlob.Properties.ETag);

                return this.destBlob.StartCopyAsync(
                    this.SourceBlob.GenerateUriWithCredentials(),
                    sourceAccessCondition,
                    destAccessCondition,
                    Utils.GenerateBlobRequestOptions(this.TransferJob.Destination.BlobRequestOptions),
                    Utils.GenerateOperationContext(this.TransferContext),
                    this.CancellationToken);
            }
            else
            {
                // Azure-file source: only block blob destinations are supported.
                if (BlobType.BlockBlob == this.destBlob.BlobType)
                {
                    return (this.destBlob as CloudBlockBlob).StartCopyAsync(
                        this.SourceFile.GenerateCopySourceFile(),
                        null,
                        destAccessCondition,
                        Utils.GenerateBlobRequestOptions(this.TransferJob.Destination.BlobRequestOptions),
                        Utils.GenerateOperationContext(this.TransferContext),
                        this.CancellationToken);
                }
                else if (BlobType.PageBlob == this.destBlob.BlobType)
                {
                    throw new InvalidOperationException(Resources.AsyncCopyFromFileToPageBlobNotSupportException);
                }
                else if (BlobType.AppendBlob == this.destBlob.BlobType)
                {
                    throw new InvalidOperationException(Resources.AsyncCopyFromFileToAppendBlobNotSupportException);
                }
                else
                {
                    throw new InvalidOperationException(
                        string.Format(
                            CultureInfo.CurrentCulture,
                            Resources.NotSupportedBlobType,
                            this.destBlob.BlobType));
                }
            }
        }

        protected override void DoHandleGetDestinationException(StorageException se)
        {
            if (null != se)
            {
                if (0 == string.Compare(se.Message, Constants.BlobTypeMismatch, StringComparison.OrdinalIgnoreCase))
                {
                    // Current use error message to decide whether it caused by blob type mismatch,
                    // We should ask xscl to expose an error code for this.
                    // Opened workitem 1487579 to track this.
                    throw new InvalidOperationException(Resources.DestinationBlobTypeNotMatch);
                }
            }
            else
            {
                // No exception: destination exists, so its type must match the source's.
                if (null != this.SourceBlob && this.SourceBlob.Properties.BlobType != this.destBlob.Properties.BlobType)
                {
                    throw new InvalidOperationException(Resources.SourceAndDestinationBlobTypeDifferent);
                }
            }
        }

        protected override async Task FetchCopyStateAsync()
        {
            // Refresh destination attributes; CopyState is populated as a side effect.
            await this.destBlob.FetchAttributesAsync(
                Utils.GenerateConditionWithCustomerCondition(this.TransferJob.Destination.AccessCondition),
                Utils.GenerateBlobRequestOptions(this.TransferJob.Destination.BlobRequestOptions),
                Utils.GenerateOperationContext(this.TransferContext),
                this.CancellationToken);

            return this.destBlob.CopyState;
        }
    }
}
diff --git a/lib/TransferControllers/AsyncCopyControllers/FileAsyncCopyController.cs b/lib/TransferControllers/AsyncCopyControllers/FileAsyncCopyController.cs
new file mode 100644
index 00000000..e4f31628
--- /dev/null
+++ b/lib/TransferControllers/AsyncCopyControllers/FileAsyncCopyController.cs
@@ -0,0 +1,125 @@
//------------------------------------------------------------------------------
//
// Copyright (c) Microsoft Corporation
//
//------------------------------------------------------------------------------
namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers
{
    using
System;
    using System.Globalization;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.WindowsAzure.Storage;
    using Microsoft.WindowsAzure.Storage.Blob;
    using Microsoft.WindowsAzure.Storage.DataMovement;
    using Microsoft.WindowsAzure.Storage.File;

    /// <summary>
    /// Azure file asynchronous copy.
    /// </summary>
    internal class FileAsyncCopyController : AsyncCopyController
    {
        // Destination Azure file of the async copy; validated non-null in the ctor.
        private CloudFile destFile;

        public FileAsyncCopyController(
            TransferScheduler transferScheduler,
            TransferJob transferJob,
            CancellationToken cancellationToken)
            : base(transferScheduler, transferJob, cancellationToken)
        {
            if (null == transferJob.Destination.AzureFile)
            {
                throw new ArgumentException(
                    string.Format(
                        CultureInfo.CurrentCulture,
                        Resources.ParameterCannotBeNullException,
                        "Dest.AzureFile"),
                    "transferJob");
            }

            // Exactly one of SourceUri / Blob / AzureFile must be set
            // (same validation as in the AsyncCopyController base ctor).
            if ((null == transferJob.Source.SourceUri && null == transferJob.Source.Blob && null == transferJob.Source.AzureFile)
                || (null != transferJob.Source.SourceUri && null != transferJob.Source.Blob)
                || (null != transferJob.Source.Blob && null != transferJob.Source.AzureFile)
                || (null != transferJob.Source.SourceUri && null != transferJob.Source.AzureFile))
            {
                throw new ArgumentException(
                    string.Format(
                        CultureInfo.CurrentCulture,
                        Resources.ProvideExactlyOneOfThreeParameters,
                        "Source.SourceUri",
                        "Source.Blob",
                        "Source.AzureFile"),
                    "transferJob");
            }

            this.destFile = this.TransferJob.Destination.AzureFile;
        }

        /// <summary>
        /// Gets the destination file URI.
        /// </summary>
        protected override Uri DestUri
        {
            get
            {
                return this.destFile.Uri;
            }
        }

        protected override Task DoFetchDestAttributesAsync()
        {
            // NOTE(review): passes null access condition and null operation context,
            // unlike BlobAsyncCopyController's counterpart — confirm this is intentional.
            return this.destFile.FetchAttributesAsync(
                null,
                Utils.GenerateFileRequestOptions(this.TransferJob.Destination.FileRequestOptions),
                null,
                this.CancellationToken);
        }

        // Starts the service-side copy for whichever source kind is set.
        protected override Task DoStartCopyAsync()
        {
            OperationContext operationContext = Utils.GenerateOperationContext(this.TransferContext);
            if (null != this.SourceUri)
            {
                return this.destFile.StartCopyAsync(
                    this.SourceUri,
                    null,
                    null,
                    Utils.GenerateFileRequestOptions(this.TransferJob.Destination.FileRequestOptions),
                    operationContext,
                    this.CancellationToken);
            }
            else if (null != this.SourceBlob)
            {
                return this.destFile.StartCopyAsync(
                    this.SourceBlob.GenerateCopySourceBlob(),
                    null,
                    null,
                    Utils.GenerateFileRequestOptions(this.TransferJob.Destination.FileRequestOptions),
                    operationContext,
                    this.CancellationToken);
            }
            else
            {
                return this.destFile.StartCopyAsync(
                    this.SourceFile.GenerateCopySourceFile(),
                    null,
                    null,
                    Utils.GenerateFileRequestOptions(this.TransferJob.Destination.FileRequestOptions),
                    operationContext,
                    this.CancellationToken);
            }
        }

        protected override void DoHandleGetDestinationException(StorageException se)
        {
            // No file-specific translation needed; base-class handling suffices.
        }

        protected override async Task FetchCopyStateAsync()
        {
            // Refresh destination attributes; CopyState is populated as a side effect.
            await this.destFile.FetchAttributesAsync(
                Utils.GenerateConditionWithCustomerCondition(this.TransferJob.Destination.AccessCondition),
                Utils.GenerateFileRequestOptions(this.TransferJob.Destination.FileRequestOptions),
                Utils.GenerateOperationContext(this.TransferContext),
                this.CancellationToken);

            return this.destFile.CopyState;
        }
    }
}
diff --git a/lib/TransferControllers/ITransferController.cs b/lib/TransferControllers/ITransferController.cs
new file mode 100644
index 00000000..12f94fbf
--- /dev/null
+++ b/lib/TransferControllers/ITransferController.cs
@@ -0,0 +1,27 @@
//------------------------------------------------------------------------------
//
// Copyright (c) Microsoft Corporation
//
//------------------------------------------------------------------------------
namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers
{
    using System;
    using System.Threading.Tasks;

    /// <summary>
    /// Contract between the TransferScheduler and a transfer controller:
    /// the scheduler polls HasWork, invokes DoWorkAsync, and disposes the
    /// controller once IsFinished is true.
    /// </summary>
    internal interface ITransferController
    {
        // Whether the controller currently has a work item ready to run.
        bool HasWork
        {
            get;
        }

        // Whether the controller has completed (successfully or with error).
        bool IsFinished
        {
            get;
        }

        // Executes one work item.
        Task DoWorkAsync();

        // Cancels all outstanding work in the controller.
        void CancelWork();
    }
}
diff --git
a/lib/TransferControllers/SyncTransferController.cs b/lib/TransferControllers/SyncTransferController.cs
new file mode 100644
index 00000000..a6c7bfdf
--- /dev/null
+++ b/lib/TransferControllers/SyncTransferController.cs
@@ -0,0 +1,201 @@
//------------------------------------------------------------------------------
//
// Copyright (c) Microsoft Corporation
//
//------------------------------------------------------------------------------

namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers
{
    using System;
    using System.Collections.Concurrent;
    using System.Globalization;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.WindowsAzure.Storage.Blob;

    /// <summary>
    /// Controller for synchronous (download-then-upload) transfers: pairs a
    /// reader for the source location with a writer for the destination and
    /// alternates their work items until the writer finishes.
    /// </summary>
    internal class SyncTransferController : TransferControllerBase
    {
        // Reader producing data from the source location.
        private TransferReaderWriterBase reader;
        // Writer consuming data into the destination location.
        private TransferReaderWriterBase writer;

        public SyncTransferController(
            TransferScheduler transferScheduler,
            TransferJob transferJob,
            CancellationToken userCancellationToken)
            : base(transferScheduler, transferJob, userCancellationToken)
        {
            // NOTE(review): these null checks duplicate the base ctor's; harmless but redundant.
            if (null == transferScheduler)
            {
                throw new ArgumentNullException("transferScheduler");
            }

            if (null == transferJob)
            {
                throw new ArgumentNullException("transferJob");
            }

            this.SharedTransferData = new SharedTransferData()
            {
                TransferJob = this.TransferJob,
                AvailableData = new ConcurrentDictionary(),
            };

            if (null == transferJob.CheckPoint)
            {
                transferJob.CheckPoint = new SingleObjectCheckpoint();
            }

            reader = this.GetReader(transferJob.Source);
            writer = this.GetWriter(transferJob.Destination);
        }

        // Data buffers shared between the reader and the writer.
        public SharedTransferData SharedTransferData
        {
            get;
            private set;
        }

        // Set once an error occurs; stops further work scheduling.
        public bool ErrorOccurred
        {
            get;
            private set;
        }

        /// <summary>
        /// Gets a value indicating whether there is work for the scheduler:
        /// the reader's pre-processing, then either side once its counterpart
        /// has pre-processed. Always false after an error.
        /// </summary>
        public override bool HasWork
        {
            get
            {
                var hasWork = (!this.reader.PreProcessed && this.reader.HasWork) || (this.reader.PreProcessed && this.writer.HasWork) || (this.writer.PreProcessed && this.reader.HasWork);
                return !this.ErrorOccurred && hasWork;
            }
        }

        protected override async Task DoWorkInternalAsync()
        {
            // Mirror of the HasWork conditions: run exactly one side's work item.
            if (!this.reader.PreProcessed && this.reader.HasWork)
            {
                await this.reader.DoWorkInternalAsync();
            }
            else if (this.reader.PreProcessed && this.writer.HasWork)
            {
                await this.writer.DoWorkInternalAsync();
            }
            else if (this.writer.PreProcessed && this.reader.HasWork)
            {
                await this.reader.DoWorkInternalAsync();
            }

            // Transfer completes when the writer finishes (or an error occurred).
            return this.ErrorOccurred || this.writer.IsFinished;
        }

        protected override void SetErrorState(Exception ex)
        {
            this.ErrorOccurred = true;
        }

        // Selects a reader implementation for the source location type.
        private TransferReaderWriterBase GetReader(TransferLocation sourceLocation)
        {
            switch (sourceLocation.TransferLocationType)
            {
                case TransferLocationType.Stream:
                    return new StreamedReader(this.Scheduler, this, this.CancellationToken);
                case TransferLocationType.FilePath:
                    return new StreamedReader(this.Scheduler, this, this.CancellationToken);
                case TransferLocationType.AzureBlob:
                    if (sourceLocation.Blob is CloudPageBlob)
                    {
                        return new PageBlobReader(this.Scheduler, this, this.CancellationToken);
                    }
                    else if (sourceLocation.Blob is CloudBlockBlob)
                    {
                        return new BlockBasedBlobReader(this.Scheduler, this, this.CancellationToken);
                    }
                    else if (sourceLocation.Blob is CloudAppendBlob)
                    {
                        // Append blobs are read with the same block-based reader.
                        return new BlockBasedBlobReader(this.Scheduler, this, this.CancellationToken);
                    }
                    else
                    {
                        throw new InvalidOperationException(
                            string.Format(
                                CultureInfo.CurrentCulture,
                                Resources.UnsupportedBlobTypeException,
                                sourceLocation.Blob.BlobType));
                    }
                case TransferLocationType.AzureFile:
                    return new CloudFileReader(this.Scheduler, this, this.CancellationToken);
                default:
                    throw new InvalidOperationException(
                        string.Format(
                            CultureInfo.CurrentCulture,
                            Resources.UnsupportedTransferLocationException,
                            sourceLocation.TransferLocationType));
            }
        }

        // Selects a writer implementation for the destination location type.
        private TransferReaderWriterBase GetWriter(TransferLocation destLocation)
        {
            switch (destLocation.TransferLocationType)
            {
                case TransferLocationType.Stream:
                    return new StreamedWriter(this.Scheduler, this, this.CancellationToken);
                case TransferLocationType.FilePath:
                    return new StreamedWriter(this.Scheduler, this, this.CancellationToken);
                case TransferLocationType.AzureBlob:
                    if (destLocation.Blob is CloudPageBlob)
                    {
                        return new PageBlobWriter(this.Scheduler, this, this.CancellationToken);
                    }
                    else if (destLocation.Blob is CloudBlockBlob)
                    {
                        return new BlockBlobWriter(this.Scheduler, this, this.CancellationToken);
                    }
                    else if (destLocation.Blob is CloudAppendBlob)
                    {
                        return new AppendBlobWriter(this.Scheduler, this, this.CancellationToken);
                    }
                    else
                    {
                        throw new InvalidOperationException(
                            string.Format(
                                CultureInfo.CurrentCulture,
                                Resources.UnsupportedBlobTypeException,
                                destLocation.Blob.BlobType));
                    }
                case TransferLocationType.AzureFile:
                    return new CloudFileWriter(this.Scheduler, this, this.CancellationToken);
                default:
                    throw new InvalidOperationException(
                        string.Format(
                            CultureInfo.CurrentCulture,
                            Resources.UnsupportedTransferLocationException,
                            destLocation.TransferLocationType));
            }
        }

        protected override void Dispose(bool disposing)
        {
            base.Dispose(disposing);

            if (disposing)
            {
                if (null != this.reader)
                {
                    this.reader.Dispose();
                }

                if (null != this.writer)
                {
                    this.writer.Dispose();
                }

                // Release any data buffers still queued between reader and writer.
                foreach(var transferData in this.SharedTransferData.AvailableData.Values)
                {
                    transferData.Dispose();
                }

                this.SharedTransferData.AvailableData.Clear();
            }
        }
    }
}
diff --git a/lib/TransferControllers/TransferControllerBase.cs b/lib/TransferControllers/TransferControllerBase.cs
new file mode 100644
index 00000000..fe8ee5f0
--- /dev/null
+++ b/lib/TransferControllers/TransferControllerBase.cs
@@ -0,0 +1,336 @@
//------------------------------------------------------------------------------
//
// Copyright (c) Microsoft Corporation
//
//------------------------------------------------------------------------------

namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers
{
    using System;
    using System.Globalization;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.WindowsAzure.Storage.DataMovement;

    /// <summary>
    /// Common base for transfer controllers: wires up cancellation, tracks the
    /// count of in-flight work items, and signals overall completion through a
    /// task completion source.
    /// </summary>
    internal abstract class TransferControllerBase : ITransferController, IDisposable
    {
        /// <summary>
        /// Count of active tasks in this controller.
        /// </summary>
        private int activeTasks;

        // Volatile: read by the scheduler thread, written under lockOnFinished.
        private volatile bool isFinished = false;

        // Guards the isFinished / activeTasks finish handshake.
        private object lockOnFinished = new object();

        // 0/1 flag ensuring FinishCallbackHandler fires exactly once.
        private int notifiedFinish;

        // Linked to both the scheduler's and the user's cancellation tokens.
        private CancellationTokenSource cancellationTokenSource = new CancellationTokenSource();

        private CancellationTokenRegistration transferSchedulerCancellationTokenRegistration;

        private CancellationTokenRegistration userCancellationTokenRegistration;

        protected TransferControllerBase(TransferScheduler transferScheduler, TransferJob transferJob, CancellationToken userCancellationToken)
        {
            if (null == transferScheduler)
            {
                throw new ArgumentNullException("transferScheduler");
            }

            if (null == transferJob)
            {
                throw new ArgumentNullException("transferJob");
            }

            this.Scheduler = transferScheduler;
            this.TransferJob = transferJob;

            // Cancel this controller when either the scheduler or the user cancels.
            this.transferSchedulerCancellationTokenRegistration =
                this.Scheduler.CancellationTokenSource.Token.Register(this.CancelWork);

            this.userCancellationTokenRegistration = userCancellationToken.Register(this.CancelWork);
            this.TaskCompletionSource = new TaskCompletionSource();
        }

        ~TransferControllerBase()
        {
            this.Dispose(false);
        }

        /// <summary>
        /// Gets or sets the transfer context for the controller.
        /// </summary>
        public TransferContext TransferContext
        {
            get
            {
                return this.TransferJob.Transfer.Context;
            }
        }

        /// <summary>
        /// Gets or sets a value indicating whether the controller has work available
        /// or not for the calling code. If HasWork is false, while IsFinished
        /// is also false this indicates that there are currently still active
        /// async tasks running. The caller should continue checking if this
        /// controller HasWork available later; once the currently active
        /// async tasks are done HasWork will change to True, or IsFinished
        /// will be set to True.
        /// </summary>
        public abstract bool HasWork
        {
            get;
        }

        /// <summary>
        /// Gets a value indicating whether this controller is finished with
        /// its transferring task.
        /// </summary>
        public bool IsFinished
        {
            get
            {
                return this.isFinished;
            }
        }

        // Completed (or faulted) when the whole transfer finishes; observed by the caller.
        public TaskCompletionSource TaskCompletionSource
        {
            get;
            set;
        }

        /// <summary>
        /// Gets scheduler object which creates this object.
        /// </summary>
        protected TransferScheduler Scheduler
        {
            get;
            private set;
        }

        /// <summary>
        /// Gets TransferJob related to this controller.
        /// </summary>
        protected TransferJob TransferJob
        {
            get;
            private set;
        }

        // Token cancelled by scheduler shutdown, user cancellation, or CancelWork().
        protected CancellationToken CancellationToken
        {
            get
            {
                return cancellationTokenSource.Token;
            }
        }

        /// <summary>
        /// Do work in the controller.
        /// A controller controls the whole transfer from source to destination,
        /// which could be split into several work items. This method is to let controller to do one of those work items.
        /// There could be several work items to do at the same time in the controller.
        /// </summary>
        /// <returns>Whether the controller has completed. This is to tell TransferScheduler
        /// whether the controller can be disposed.</returns>
        public async Task DoWorkAsync()
        {
            if (!this.HasWork)
            {
                return false;
            }

            bool setFinish = false;
            Exception exception = null;
            this.PreWork();

            try
            {
                setFinish = await this.DoWorkInternalAsync();
            }
            catch (Exception ex)
            {
                // Record the error; the controller finishes with this exception.
                this.SetErrorState(ex);
                setFinish = true;
                exception = ex;
            }

            if (setFinish)
            {
                var postWork = this.SetFinishedAndPostWork();
                if (exception != null)
                {
                    // There might be still some active tasks running while error occurs, and
                    // those tasks shouldn't take long time to complete, so just spin until they are done.
                    var spin = new SpinWait();
                    while (this.activeTasks != 0)
                    {
                        spin.SpinOnce();
                    }

                    this.FinishCallbackHandler(exception);
                    return true;
                }
                else
                {
                    return postWork;
                }
            }
            else
            {
                return this.PostWork();
            }
        }

        /// <summary>
        /// Cancels all work in the controller.
        /// </summary>
        public void CancelWork()
        {
            this.cancellationTokenSource.Cancel();
        }

        /// <summary>
        /// Public dispose method to release all resources owned.
        /// </summary>
        public void Dispose()
        {
            this.Dispose(true);
            GC.SuppressFinalize(this);
        }

        // Throws if cancellation has been requested.
        public void CheckCancellation()
        {
            Utils.CheckCancellation(this.cancellationTokenSource);
        }

        public void UpdateProgressAddBytesTransferred(long bytesTransferredToAdd)
        {
            this.TransferJob.Transfer.ProgressTracker.AddBytesTransferred(bytesTransferredToAdd);
        }

        // Moves a not-yet-started job into the Transfer status on first work item.
        public void StartCallbackHandler()
        {
            if (this.TransferJob.Status == TransferJobStatus.NotStarted)
            {
                this.TransferJob.Status = TransferJobStatus.Transfer;
            }
        }

        // Completes the transfer's task exactly once, with the given exception (or success).
        public void FinishCallbackHandler(Exception exception)
        {
            if (Interlocked.CompareExchange(ref this.notifiedFinish, 1, 0) == 0)
            {
                if (null != exception)
                {
                    this.TaskCompletionSource.SetException(exception);
                }
                else
                {
                    this.TaskCompletionSource.SetResult(null);
                }
            }
        }

        protected abstract Task DoWorkInternalAsync();

        /// <summary>
        /// Pre work action.
        /// </summary>
        protected void PreWork()
        {
            Interlocked.Increment(ref this.activeTasks);
        }

        /// <summary>
        /// Post work action.
        /// </summary>
        /// <returns>
        /// Count of current active task in the controller.
        /// A Controller can only be destroyed after this count of active tasks is 0.
        /// </returns>
        protected bool PostWork()
        {
            lock (this.lockOnFinished)
            {
                return 0 == Interlocked.Decrement(ref this.activeTasks) && this.isFinished;
            }
        }

        // Marks the controller finished and performs the final PostWork atomically.
        protected bool SetFinishedAndPostWork()
        {
            lock (this.lockOnFinished)
            {
                this.isFinished = true;
                return 0 == Interlocked.Decrement(ref this.activeTasks) && this.isFinished;
            }
        }

        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                try
                {
                    this.transferSchedulerCancellationTokenRegistration.Dispose();
                }
                catch (ObjectDisposedException)
                {
                    // Object has been disposed before, just catch this exception, do nothing else.
                }

                try
                {
                    this.userCancellationTokenRegistration.Dispose();
                }
                catch (ObjectDisposedException)
                {
                    // Object has been disposed before, just catch this exception, do nothing else.
                }

                try
                {
                    this.cancellationTokenSource.Dispose();
                }
                catch (ObjectDisposedException)
                {
                    // Object has been disposed before, just catch this exception, do nothing else.
                }
            }
        }

        /// <summary>
        /// Sets the state of the controller to Error, while recording
        /// the last occurred exception and setting the HasWork and
        /// IsFinished fields.
        /// </summary>
        /// <param name="ex">Exception to record.</param>
+ protected abstract void SetErrorState(Exception ex); + + public void CheckOverwrite( + bool exist, + string sourceFileName, + string destFileName) + { + if (null == this.TransferJob.Overwrite) + { + this.TransferJob.Overwrite = true; + if (exist) + { + if (null == this.TransferContext || null == this.TransferContext.OverwriteCallback || !this.TransferContext.OverwriteCallback(sourceFileName, destFileName)) + { + this.TransferJob.Overwrite = false; + } + } + } + + if (exist && !this.TransferJob.Overwrite.Value) + { + string exceptionMessage = string.Format(CultureInfo.InvariantCulture, Resources.OverwriteCallbackCancelTransferException, sourceFileName, destFileName); + throw new TransferException(TransferErrorCode.NotOverwriteExistingDestination, exceptionMessage); + } + } + } +} diff --git a/lib/TransferControllers/TransferReaderWriterBase.cs b/lib/TransferControllers/TransferReaderWriterBase.cs new file mode 100644 index 00000000..820a2900 --- /dev/null +++ b/lib/TransferControllers/TransferReaderWriterBase.cs @@ -0,0 +1,111 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers +{ + using System; + using System.Linq; + using System.Threading; + using System.Threading.Tasks; + + internal abstract class TransferReaderWriterBase : IDisposable + { + protected TransferReaderWriterBase( + TransferScheduler scheduler, + SyncTransferController controller, + CancellationToken cancellationToken) + { + this.Scheduler = scheduler; + this.Controller = controller; + this.CancellationToken = cancellationToken; + } + + /// + /// Gets a value indicating whether it finished preprocess. + /// For producer, preprocess is to validate source and fetch block list/page ranges; + /// For consumer, preprocess is to open or create destination. 
+ /// + public virtual bool PreProcessed + { + get; + protected set; + } + + public abstract bool HasWork + { + get; + } + + public abstract bool IsFinished + { + get; + } + + protected TransferScheduler Scheduler + { + get; + private set; + } + + protected SyncTransferController Controller + { + get; + private set; + } + + protected SharedTransferData SharedTransferData + { + get + { + return this.Controller.SharedTransferData; + } + } + + protected CancellationToken CancellationToken + { + get; + private set; + } + + public void Dispose() + { + this.Dispose(true); + GC.SuppressFinalize(this); + } + + protected virtual void Dispose(bool disposing) + { + } + + protected void NotifyStarting() + { + this.Controller.StartCallbackHandler(); + } + + protected void NotifyFinished(Exception ex) + { + this.Controller.FinishCallbackHandler(ex); + } + + public abstract Task DoWorkInternalAsync(); + + public TransferData GetFirstAvailable() + { + TransferData transferData = null; + var transferDatas = this.SharedTransferData.AvailableData.Values; + + if (transferDatas.Any()) + { + transferData = transferDatas.First(); + TransferData tempData; + this.SharedTransferData.AvailableData.TryRemove(transferData.StartOffset, out tempData); + return transferData; + } + + return null; + } + } +} diff --git a/lib/TransferControllers/TransferReaders/BlockBasedBlobReader.cs b/lib/TransferControllers/TransferReaders/BlockBasedBlobReader.cs new file mode 100644 index 00000000..ca4a07cc --- /dev/null +++ b/lib/TransferControllers/TransferReaders/BlockBasedBlobReader.cs @@ -0,0 +1,357 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers +{ + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.IO; + using System.Linq; 
+ using System.Net; + using System.Threading; + using System.Threading.Tasks; + using Microsoft.WindowsAzure.Storage.Blob; + + internal sealed class BlockBasedBlobReader : TransferReaderWriterBase + { + /// + /// Block/append blob instance to be downloaded from. + /// + private CloudBlob blob; + + /// + /// Window to record unfinished chunks to be retransferred again. + /// + private Queue lastTransferWindow; + + /// + /// Instance to represent source location. + /// + private TransferLocation transferLocation; + + private TransferJob transferJob; + + /// + /// Value to indicate whether the transfer is finished. + /// This is to tell the caller that the reader can be disposed, + /// Both error happened or completed will be treated to be finished. + /// + private volatile bool isFinished = false; + + private volatile bool hasWork; + + private CountdownEvent downloadCountdownEvent; + + public BlockBasedBlobReader( + TransferScheduler scheduler, + SyncTransferController controller, + CancellationToken cancellationToken) + : base(scheduler, controller, cancellationToken) + { + this.transferLocation = this.SharedTransferData.TransferJob.Source; + this.transferJob = this.SharedTransferData.TransferJob; + this.blob = this.transferLocation.Blob; + + Debug.Assert( + (this.blob is CloudBlockBlob) ||(this.blob is CloudAppendBlob), + "Initializing BlockBlobReader while source location is not a block blob or an append blob."); + + this.hasWork = true; + } + + public override bool IsFinished + { + get + { + return this.isFinished; + } + } + + public override bool HasWork + { + get + { + return this.hasWork; + } + } + + public override async Task DoWorkInternalAsync() + { + try + { + if (!this.PreProcessed) + { + await this.FetchAttributeAsync(); + } + else + { + await this.DownloadBlockBlobAsync(); + } + } + catch (Exception) + { + this.isFinished = true; + throw; + } + } + + protected override void Dispose(bool disposing) + { + base.Dispose(disposing); + + if (disposing) + { + 
if (null != this.downloadCountdownEvent) + { + this.downloadCountdownEvent.Dispose(); + this.downloadCountdownEvent = null; + } + } + } + + private async Task FetchAttributeAsync() + { + this.hasWork = false; + this.NotifyStarting(); + + AccessCondition accessCondition = Utils.GenerateIfMatchConditionWithCustomerCondition( + this.transferLocation.ETag, + this.transferLocation.AccessCondition, + this.transferLocation.CheckedAccessCondition); + + try + { + await this.blob.FetchAttributesAsync( + accessCondition, + Utils.GenerateBlobRequestOptions(this.transferLocation.BlobRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + this.CancellationToken); + } + catch (StorageException e) + { + if (null != e.RequestInformation && + e.RequestInformation.HttpStatusCode == (int)HttpStatusCode.NotFound) + { + throw new InvalidOperationException(Resources.SourceBlobDoesNotExistException); + } + else + { + throw; + } + } + + this.transferLocation.CheckedAccessCondition = true; + + if (this.blob.Properties.BlobType == BlobType.Unspecified) + { + throw new InvalidOperationException(Resources.FailedToGetBlobTypeException); + } + + if (string.IsNullOrEmpty(this.transferLocation.ETag)) + { + if (0 != this.SharedTransferData.TransferJob.CheckPoint.EntryTransferOffset) + { + throw new InvalidOperationException(Resources.RestartableInfoCorruptedException); + } + + this.transferLocation.ETag = this.blob.Properties.ETag; + } + else if ((this.SharedTransferData.TransferJob.CheckPoint.EntryTransferOffset > this.blob.Properties.Length) + || (this.SharedTransferData.TransferJob.CheckPoint.EntryTransferOffset < 0)) + { + throw new InvalidOperationException(Resources.RestartableInfoCorruptedException); + } + + this.SharedTransferData.SourceLocation = this.blob.Uri.ToString(); + + this.SharedTransferData.DisableContentMD5Validation = + null != this.transferLocation.BlobRequestOptions ? 
+ this.transferLocation.BlobRequestOptions.DisableContentMD5Validation.HasValue ? + this.transferLocation.BlobRequestOptions.DisableContentMD5Validation.Value : false : false; + + this.SharedTransferData.TotalLength = this.blob.Properties.Length; + this.SharedTransferData.Attributes = Utils.GenerateAttributes(this.blob); + + if ((0 == this.SharedTransferData.TransferJob.CheckPoint.EntryTransferOffset) + && (null != this.SharedTransferData.TransferJob.CheckPoint.TransferWindow) + && (0 != this.SharedTransferData.TransferJob.CheckPoint.TransferWindow.Count)) + { + throw new InvalidOperationException(Resources.RestartableInfoCorruptedException); + } + + this.lastTransferWindow = new Queue(this.SharedTransferData.TransferJob.CheckPoint.TransferWindow); + + int downloadCount = this.lastTransferWindow.Count + + (int)Math.Ceiling((double)(this.blob.Properties.Length - this.SharedTransferData.TransferJob.CheckPoint.EntryTransferOffset) / this.Scheduler.TransferOptions.BlockSize); + + if (0 == downloadCount) + { + this.isFinished = true; + this.PreProcessed = true; + this.hasWork = true; + } + else + { + this.downloadCountdownEvent = new CountdownEvent(downloadCount); + + this.PreProcessed = true; + this.hasWork = true; + } + } + + private async Task DownloadBlockBlobAsync() + { + this.hasWork = false; + + byte[] memoryBuffer = this.Scheduler.MemoryManager.RequireBuffer(); + + if (null != memoryBuffer) + { + long startOffset = 0; + + if (!this.IsTransferWindowEmpty()) + { + startOffset = this.lastTransferWindow.Dequeue(); + } + else + { + bool canUpload = false; + + lock (this.transferJob.CheckPoint.TransferWindowLock) + { + if (this.transferJob.CheckPoint.TransferWindow.Count < Constants.MaxCountInTransferWindow) + { + startOffset = this.transferJob.CheckPoint.EntryTransferOffset; + + if (this.transferJob.CheckPoint.EntryTransferOffset < this.SharedTransferData.TotalLength) + { + this.transferJob.CheckPoint.TransferWindow.Add(startOffset); + 
this.transferJob.CheckPoint.EntryTransferOffset = Math.Min( + this.transferJob.CheckPoint.EntryTransferOffset + this.Scheduler.TransferOptions.BlockSize, + this.SharedTransferData.TotalLength); + + canUpload = true; + } + } + } + + if (!canUpload) + { + this.hasWork = true; + this.Scheduler.MemoryManager.ReleaseBuffer(memoryBuffer); + return; + } + } + + if ((startOffset > this.SharedTransferData.TotalLength) + || (startOffset < 0)) + { + this.Scheduler.MemoryManager.ReleaseBuffer(memoryBuffer); + throw new InvalidOperationException(Resources.RestartableInfoCorruptedException); + } + + this.SetBlockDownloadHasWork(); + + ReadDataState asyncState = new ReadDataState + { + MemoryBuffer = memoryBuffer, + BytesRead = 0, + StartOffset = startOffset, + Length = (int)Math.Min(this.Scheduler.TransferOptions.BlockSize, this.SharedTransferData.TotalLength - startOffset), + MemoryManager = this.Scheduler.MemoryManager, + }; + + using (asyncState) + { + await this.DownloadChunkAsync(asyncState); + } + + return; + } + + this.SetBlockDownloadHasWork(); + } + + private async Task DownloadChunkAsync(ReadDataState asyncState) + { + Debug.Assert(null != asyncState, "asyncState object expected"); + + // If a parallel operation caused the controller to be placed in + // error state exit early to avoid unnecessary I/O. + if (this.Controller.ErrorOccurred) + { + return; + } + + AccessCondition accessCondition = Utils.GenerateIfMatchConditionWithCustomerCondition( + this.blob.Properties.ETag, + this.transferLocation.AccessCondition); + + // We're to download this block. 
+ asyncState.MemoryStream = + new MemoryStream( + asyncState.MemoryBuffer, + 0, + asyncState.Length); + + await this.blob.DownloadRangeToStreamAsync( + asyncState.MemoryStream, + asyncState.StartOffset, + asyncState.Length, + accessCondition, + Utils.GenerateBlobRequestOptions(this.transferLocation.BlobRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + this.CancellationToken); + + TransferData transferData = new TransferData(this.Scheduler.MemoryManager) + { + StartOffset = asyncState.StartOffset, + Length = asyncState.Length, + MemoryBuffer = asyncState.MemoryBuffer + }; + + this.SharedTransferData.AvailableData.TryAdd(transferData.StartOffset, transferData); + + // Set memory buffer to null. We don't want its dispose method to + // be called once our asyncState is disposed. The memory should + // not be reused yet, we still need to write it to disk. + asyncState.MemoryBuffer = null; + + this.SetFinish(); + this.SetBlockDownloadHasWork(); + } + + private void SetFinish() + { + if (this.downloadCountdownEvent.Signal()) + { + this.isFinished = true; + } + } + + private void SetBlockDownloadHasWork() + { + if (this.HasWork) + { + return; + } + + // Check if we have blocks available to download. 
+ if (!this.IsTransferWindowEmpty() + || this.transferJob.CheckPoint.EntryTransferOffset < this.SharedTransferData.TotalLength) + { + this.hasWork = true; + return; + } + } + + private bool IsTransferWindowEmpty() + { + return null == this.lastTransferWindow || this.lastTransferWindow.Count == 0; + } + } +} diff --git a/lib/TransferControllers/TransferReaders/CloudFileReader.cs b/lib/TransferControllers/TransferReaders/CloudFileReader.cs new file mode 100644 index 00000000..5948a809 --- /dev/null +++ b/lib/TransferControllers/TransferReaders/CloudFileReader.cs @@ -0,0 +1,100 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers +{ + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.Linq; + using System.Threading; + using System.Threading.Tasks; + using Microsoft.WindowsAzure.Storage.File; + + class CloudFileReader : RangeBasedReader + { + private CloudFile file; + + public CloudFileReader( + TransferScheduler scheduler, + SyncTransferController controller, + CancellationToken cancellationToken) + :base(scheduler, controller, cancellationToken) + { + this.file = this.SharedTransferData.TransferJob.Source.AzureFile; + Debug.Assert(null != this.file, "Initializing a CloudFileReader, the source location should be a CloudFile instance."); + } + + protected override async Task DoFetchAttributesAsync() + { + await this.file.FetchAttributesAsync( + null, + Utils.GenerateFileRequestOptions(this.Location.FileRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + this.CancellationToken); + + if (string.IsNullOrEmpty(this.Location.ETag)) + { + if ((0 != this.SharedTransferData.TransferJob.CheckPoint.EntryTransferOffset) + || 
(this.SharedTransferData.TransferJob.CheckPoint.TransferWindow.Any())) + { + throw new InvalidOperationException(Resources.RestartableInfoCorruptedException); + } + + this.Location.ETag = this.Location.AzureFile.Properties.ETag; + } + else if ((this.SharedTransferData.TransferJob.CheckPoint.EntryTransferOffset > this.Location.AzureFile.Properties.Length) + || (this.SharedTransferData.TransferJob.CheckPoint.EntryTransferOffset < 0)) + { + throw new InvalidOperationException(Resources.RestartableInfoCorruptedException); + } + + this.SharedTransferData.DisableContentMD5Validation = + null != this.Location.FileRequestOptions ? + this.Location.FileRequestOptions.DisableContentMD5Validation.HasValue ? + this.Location.FileRequestOptions.DisableContentMD5Validation.Value : false : false; + + this.SharedTransferData.Attributes = Utils.GenerateAttributes(this.file); + this.SharedTransferData.TotalLength = this.file.Properties.Length; + this.SharedTransferData.SourceLocation = this.file.Uri.ToString(); + } + + protected override async Task> DoGetRangesAsync(RangesSpan rangesSpan) + { + List rangeList = new List(); + + foreach (var fileRange in await this.file.ListRangesAsync( + rangesSpan.StartOffset, + rangesSpan.EndOffset - rangesSpan.StartOffset + 1, + null, + Utils.GenerateFileRequestOptions(this.Location.FileRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + this.CancellationToken)) + { + rangeList.Add(new Range() + { + StartOffset = fileRange.StartOffset, + EndOffset = fileRange.EndOffset, + HasData = true + }); + } + + return rangeList; + } + + protected override async Task DoDownloadRangeToStreamAsync(RangeBasedDownloadState asyncState) + { + await this.Location.AzureFile.DownloadRangeToStreamAsync( + asyncState.DownloadStream, + asyncState.StartOffset, + asyncState.Length, + null, + Utils.GenerateFileRequestOptions(this.Location.FileRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + 
this.CancellationToken); + } + } +} diff --git a/lib/TransferControllers/TransferReaders/PageBlobReader.cs b/lib/TransferControllers/TransferReaders/PageBlobReader.cs new file mode 100644 index 00000000..9d2d7142 --- /dev/null +++ b/lib/TransferControllers/TransferReaders/PageBlobReader.cs @@ -0,0 +1,113 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers +{ + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.Linq; + using System.Threading; + using System.Threading.Tasks; + using Microsoft.WindowsAzure.Storage.Blob; + + internal sealed class PageBlobReader : RangeBasedReader + { + private CloudPageBlob pageBlob; + + public PageBlobReader( + TransferScheduler scheduler, + SyncTransferController controller, + CancellationToken cancellationToken) + :base(scheduler, controller, cancellationToken) + { + pageBlob = this.SharedTransferData.TransferJob.Source.Blob as CloudPageBlob; + Debug.Assert(null != this.pageBlob, "Initializing a PageBlobReader, the source location should be a CloudPageBlob instance."); + } + + protected override async Task DoFetchAttributesAsync() + { + AccessCondition accessCondition = Utils.GenerateIfMatchConditionWithCustomerCondition( + this.Location.ETag, + this.Location.AccessCondition, + this.Location.CheckedAccessCondition); + + await this.pageBlob.FetchAttributesAsync( + accessCondition, + Utils.GenerateBlobRequestOptions(this.Location.BlobRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + this.CancellationToken); + + if (string.IsNullOrEmpty(this.Location.ETag)) + { + if ((0 != this.SharedTransferData.TransferJob.CheckPoint.EntryTransferOffset) + || (this.SharedTransferData.TransferJob.CheckPoint.TransferWindow.Any())) + { + 
throw new InvalidOperationException(Resources.RestartableInfoCorruptedException); + } + + this.Location.ETag = this.Location.Blob.Properties.ETag; + } + else if ((this.SharedTransferData.TransferJob.CheckPoint.EntryTransferOffset > this.Location.Blob.Properties.Length) + || (this.SharedTransferData.TransferJob.CheckPoint.EntryTransferOffset < 0)) + { + throw new InvalidOperationException(Resources.RestartableInfoCorruptedException); + } + + this.SharedTransferData.DisableContentMD5Validation = + null != this.Location.BlobRequestOptions ? + this.Location.BlobRequestOptions.DisableContentMD5Validation.HasValue ? + this.Location.BlobRequestOptions.DisableContentMD5Validation.Value : false : false; + + this.SharedTransferData.Attributes = Utils.GenerateAttributes(this.pageBlob); + this.SharedTransferData.TotalLength = this.pageBlob.Properties.Length; + this.SharedTransferData.SourceLocation = this.pageBlob.Uri.ToString(); + } + + protected override async Task> DoGetRangesAsync(RangesSpan rangesSpan) + { + AccessCondition accessCondition = Utils.GenerateIfMatchConditionWithCustomerCondition( + this.Location.Blob.Properties.ETag, + this.Location.AccessCondition); + + List rangeList = new List(); + + foreach (var pageRange in await this.pageBlob.GetPageRangesAsync( + rangesSpan.StartOffset, + rangesSpan.EndOffset - rangesSpan.StartOffset + 1, + accessCondition, + Utils.GenerateBlobRequestOptions(this.Location.BlobRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + this.CancellationToken)) + { + rangeList.Add(new Range() + { + StartOffset = pageRange.StartOffset, + EndOffset = pageRange.EndOffset, + HasData = true + }); + } + + return rangeList; + } + + protected override async Task DoDownloadRangeToStreamAsync(RangeBasedDownloadState asyncState) + { + AccessCondition accessCondition = Utils.GenerateIfMatchConditionWithCustomerCondition( + this.Location.Blob.Properties.ETag, + this.Location.AccessCondition); + + await 
this.Location.Blob.DownloadRangeToStreamAsync( + asyncState.DownloadStream, + asyncState.StartOffset, + asyncState.Length, + accessCondition, + Utils.GenerateBlobRequestOptions(this.Location.BlobRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + this.CancellationToken); + } + } +} diff --git a/lib/TransferControllers/TransferReaders/RangeBasedReader.cs b/lib/TransferControllers/TransferReaders/RangeBasedReader.cs new file mode 100644 index 00000000..10e0be00 --- /dev/null +++ b/lib/TransferControllers/TransferReaders/RangeBasedReader.cs @@ -0,0 +1,890 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers +{ + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.IO; + using System.Linq; + using System.Net; + using System.Threading; + using System.Threading.Tasks; + + internal abstract class RangeBasedReader : TransferReaderWriterBase + { + /// + /// Minimum size of empty range, the empty ranges which is smaller than this size will be merged to the adjacent range with data. 
+ /// + const int MinimumNoDataRangeSize = 8 * 1024; + + private volatile State state; + private TransferJob transferJob; + private CountdownEvent getRangesCountDownEvent; + private CountdownEvent toDownloadItemsCountdownEvent; + private int getRangesSpanIndex = 0; + private List rangesSpanList; + private List rangeList; + private int nextDownloadIndex = 0; + private long lastTransferOffset; + private TransferDownloadBuffer currentDownloadBuffer = null; + + private volatile bool hasWork; + + public RangeBasedReader( + TransferScheduler scheduler, + SyncTransferController controller, + CancellationToken cancellationToken) + : base(scheduler, controller, cancellationToken) + { + this.transferJob = this.SharedTransferData.TransferJob; + this.Location = this.transferJob.Source; + this.hasWork = true; + } + + private enum State + { + FetchAttributes, + GetRanges, + Download, + Error, + Finished + }; + + public override async Task DoWorkInternalAsync() + { + try + { + switch (this.state) + { + case State.FetchAttributes: + await this.FetchAttributesAsync(); + break; + case State.GetRanges: + await this.GetRangesAsync(); + break; + case State.Download: + await this.DownloadRangeAsync(); + break; + default: + break; + } + } + catch + { + this.state = State.Error; + throw; + } + } + + public override bool HasWork + { + get + { + return this.hasWork; + } + } + + public override bool IsFinished + { + get + { + return State.Error == this.state || State.Finished == this.state; + } + } + + protected TransferLocation Location + { + get; + private set; + } + + protected override void Dispose(bool disposing) + { + base.Dispose(disposing); + + if (disposing) + { + if (null != this.getRangesCountDownEvent) + { + this.getRangesCountDownEvent.Dispose(); + this.getRangesCountDownEvent = null; + } + + if (null != this.toDownloadItemsCountdownEvent) + { + this.toDownloadItemsCountdownEvent.Dispose(); + this.toDownloadItemsCountdownEvent = null; + } + } + } + + private async Task 
FetchAttributesAsync() + { + Debug.Assert( + this.state == State.FetchAttributes, + "FetchAttributesAsync called, but state isn't FetchAttributes"); + + this.hasWork = false; + this.NotifyStarting(); + + try + { + await this.DoFetchAttributesAsync(); + } + catch (StorageException e) + { + // Getting a storage exception is expected if the blob doesn't + // exist. For those cases that indicate the blob doesn't exist + // we will set a specific error state. + if (null != e.RequestInformation && + e.RequestInformation.HttpStatusCode == (int)HttpStatusCode.NotFound) + { + throw new InvalidOperationException(Resources.SourceBlobDoesNotExistException); + } + else + { + throw; + } + } + + this.Location.CheckedAccessCondition = true; + + this.Controller.CheckCancellation(); + + this.state = State.GetRanges; + this.PrepareToGetRanges(); + + if (!this.rangesSpanList.Any()) + { + // InitDownloadInfo will set hasWork. + this.InitDownloadInfo(); + this.PreProcessed = true; + return; + } + + this.PreProcessed = true; + this.hasWork = true; + } + + private async Task GetRangesAsync() + { + Debug.Assert( + (this.state == State.GetRanges) || (this.state == State.Error), + "GetRangesAsync called, but state isn't GetRanges or Error"); + + this.hasWork = false; + + this.lastTransferOffset = this.SharedTransferData.TransferJob.CheckPoint.EntryTransferOffset; + + int spanIndex = Interlocked.Increment(ref this.getRangesSpanIndex); + + this.hasWork = spanIndex < (this.rangesSpanList.Count - 1); + + RangesSpan rangesSpan = this.rangesSpanList[spanIndex]; + + rangesSpan.Ranges = await this.DoGetRangesAsync(rangesSpan); + + List ranges = new List(); + Range currentRange = null; + long currentStartOffset = rangesSpan.StartOffset; + + foreach (var range in rangesSpan.Ranges) + { + long emptySize = range.StartOffset - currentStartOffset; + if (emptySize > 0 && emptySize < MinimumNoDataRangeSize) + { + // There is empty range which size is smaller than MinimumNoDataRangeSize + // merge it to the 
adjacent data range. + if (null == currentRange) + { + currentRange = new Range() + { + StartOffset = currentStartOffset, + EndOffset = range.EndOffset, + HasData = range.HasData + }; + } + else + { + currentRange.EndOffset = range.EndOffset; + } + } + else + { + // Empty range size is larger than MinimumNoDataRangeSize + // put current data range in list and start to deal with the next data range. + if (null != currentRange) + { + ranges.Add(currentRange); + } + + currentRange = new Range + { + StartOffset = range.StartOffset, + EndOffset = range.EndOffset, + HasData = range.HasData + }; + } + + currentStartOffset = range.EndOffset + 1; + } + + if (null != currentRange) + { + ranges.Add(currentRange); + } + + rangesSpan.Ranges = ranges; + + if (this.getRangesCountDownEvent.Signal()) + { + this.ArrangeRanges(); + + // Don't call CallFinish here, InitDownloadInfo will call it. + this.InitDownloadInfo(); + } + } + + private async Task DownloadRangeAsync() + { + Debug.Assert( + this.state == State.Error || this.state == State.Download, + "DownloadRangeAsync called, but state isn't Download or Error"); + + this.hasWork = false; + + if (State.Error == this.state) + { + // Some thread has set error message, just return here. 
+ return; + } + + if (this.nextDownloadIndex < this.rangeList.Count) + { + Range rangeData = this.rangeList[this.nextDownloadIndex]; + + int blockSize = this.Scheduler.TransferOptions.BlockSize; + long blockStartOffset = (rangeData.StartOffset / blockSize) * blockSize; + long nextBlockStartOffset = Math.Min(blockStartOffset + blockSize, this.SharedTransferData.TotalLength); + + TransferDownloadStream downloadStream = null; + + if ((rangeData.StartOffset > blockStartOffset) && (rangeData.EndOffset < nextBlockStartOffset)) + { + Debug.Assert(null != this.currentDownloadBuffer, "Download buffer should have been allocated when range start offset is not block size aligned"); + downloadStream = new TransferDownloadStream(this.Scheduler.MemoryManager, this.currentDownloadBuffer, (int)(rangeData.StartOffset - blockStartOffset), (int)(rangeData.EndOffset + 1 - rangeData.StartOffset)); + } + else + { + // Attempt to reserve memory. If none available we'll + // retry some time later. + byte[] memoryBuffer = this.Scheduler.MemoryManager.RequireBuffer(); + + if (null == memoryBuffer) + { + this.SetRangeDownloadHasWork(); + return; + } + + if (rangeData.EndOffset >= this.lastTransferOffset) + { + bool canRead = true; + lock (this.transferJob.CheckPoint.TransferWindowLock) + { + if (this.transferJob.CheckPoint.TransferWindow.Count >= Constants.MaxCountInTransferWindow) + { + canRead = false; + } + else + { + if (this.transferJob.CheckPoint.EntryTransferOffset < this.SharedTransferData.TotalLength) + { + this.transferJob.CheckPoint.TransferWindow.Add(this.transferJob.CheckPoint.EntryTransferOffset); + this.transferJob.CheckPoint.EntryTransferOffset = Math.Min(this.transferJob.CheckPoint.EntryTransferOffset + this.Scheduler.TransferOptions.BlockSize, this.SharedTransferData.TotalLength); + } + } + } + + if (!canRead) + { + this.Scheduler.MemoryManager.ReleaseBuffer(memoryBuffer); + this.SetRangeDownloadHasWork(); + return; + } + } + + if (rangeData.StartOffset == blockStartOffset) 
+ { + this.currentDownloadBuffer = new TransferDownloadBuffer(blockStartOffset, (int)Math.Min(blockSize, this.SharedTransferData.TotalLength - blockStartOffset), memoryBuffer); + downloadStream = new TransferDownloadStream(this.Scheduler.MemoryManager, this.currentDownloadBuffer, 0, (int)(rangeData.EndOffset + 1 - rangeData.StartOffset)); + } + else + { + Debug.Assert(null != this.currentDownloadBuffer, "Download buffer should have been allocated when range start offset is not block size aligned"); + + TransferDownloadBuffer nextBuffer = new TransferDownloadBuffer(nextBlockStartOffset, (int)Math.Min(blockSize, this.SharedTransferData.TotalLength - nextBlockStartOffset), memoryBuffer); + + downloadStream = new TransferDownloadStream( + this.Scheduler.MemoryManager, + this.currentDownloadBuffer, + (int)(rangeData.StartOffset - blockStartOffset), + (int)(nextBlockStartOffset - rangeData.StartOffset), + nextBuffer, + 0, + (int)(rangeData.EndOffset + 1 - nextBlockStartOffset)); + + this.currentDownloadBuffer = nextBuffer; + } + } + + using (downloadStream) + { + this.nextDownloadIndex++; + this.SetRangeDownloadHasWork(); + + RangeBasedDownloadState rangeBasedDownloadState = new RangeBasedDownloadState + { + Range = rangeData, + DownloadStream = downloadStream + }; + + await this.DownloadRangeAsync(rangeBasedDownloadState); + } + + this.SetChunkFinish(); + return; + } + + this.SetRangeDownloadHasWork(); + } + + private void SetRangeDownloadHasWork() + { + if (this.HasWork) + { + return; + } + + // Check if we have ranges available to download. 
+ if (this.nextDownloadIndex < this.rangeList.Count) + { + this.hasWork = true; + return; + } + } + + private async Task DownloadRangeAsync(RangeBasedDownloadState asyncState) + { + Debug.Assert(null != asyncState, "asyncState object expected"); + Debug.Assert( + this.state == State.Download || this.state == State.Error, + "DownloadRangeAsync called, but state isn't Download or Error"); + + // If a parallel operation caused the controller to be placed in + // error state exit early to avoid unnecessary I/O. + if (this.state == State.Error) + { + return; + } + + if (asyncState.Range.HasData) + { + await this.DoDownloadRangeToStreamAsync(asyncState); + } + else + { + // Zero memory buffer. + asyncState.DownloadStream.SetAllZero(); + } + + asyncState.DownloadStream.FinishWrite(); + asyncState.DownloadStream.ReserveBuffer = true; + + foreach (var buffer in asyncState.DownloadStream.GetBuffers()) + { + // Two download streams can refer to the same download buffer instance. It may cause the download + // buffer be added into shared transfer data twice if only buffer.Finished is checked here: + // Thread A: FinishedWrite() + // Thread B: FinishedWrite(), buffer.Finished is true now + // Thread A: Check buffer.Finished + // Thread B: Check buffer.Finished + // Thread A: Add buffer into sharedTransferData + // Thread C: Writer remove buffer from sharedTransferData + // Thread B: Add buffer into sharedTransferData again + // So call MarkAsProcessed to make sure buffer is added exactly once. + if (buffer.Finished && buffer.MarkAsProcessed()) + { + TransferData transferData = new TransferData(this.Scheduler.MemoryManager) + { + StartOffset = buffer.StartOffset, + Length = buffer.Length, + MemoryBuffer = buffer.MemoryBuffer + }; + + this.SharedTransferData.AvailableData.TryAdd(buffer.StartOffset, transferData); + } + } + } + + /// + /// It might fail to get large ranges list from storage. This method is to split the whole file to spans of 148MB to get ranges. 
+ /// In restartable, we only need to get ranges for chunks in TransferWindow and after TransferEntryOffset in check point. + /// In TransferWindow, there might be some chunks adjacent to TransferEntryOffset, so this method will first merge these chunks into TransferEntryOffset; + /// Then in remained chunks in the TransferWindow, it's very possible that ranges of several chunks can be got in one 148MB span. + /// To avoid sending too many get ranges requests, this method will merge the chunks to 148MB spans. + /// + private void PrepareToGetRanges() + { + this.getRangesSpanIndex = -1; + this.rangesSpanList = new List(); + this.rangeList = new List(); + + this.nextDownloadIndex = 0; + + SingleObjectCheckpoint checkpoint = this.transferJob.CheckPoint; + int blockSize = this.Scheduler.TransferOptions.BlockSize; + + RangesSpan rangesSpan = null; + + if ((null != checkpoint.TransferWindow) + && (checkpoint.TransferWindow.Any())) + { + checkpoint.TransferWindow.Sort(); + + long lastOffset = 0; + if (checkpoint.EntryTransferOffset == this.SharedTransferData.TotalLength) + { + long lengthBeforeLastChunk = checkpoint.EntryTransferOffset % blockSize; + lastOffset = 0 == lengthBeforeLastChunk ? 
+ checkpoint.EntryTransferOffset - blockSize : + checkpoint.EntryTransferOffset - lengthBeforeLastChunk; + } + else + { + lastOffset = checkpoint.EntryTransferOffset - blockSize; + } + + for (int i = checkpoint.TransferWindow.Count - 1; i >= 0; i--) + { + if (lastOffset == checkpoint.TransferWindow[i]) + { + checkpoint.TransferWindow.RemoveAt(i); + checkpoint.EntryTransferOffset = lastOffset; + } + else if (lastOffset < checkpoint.TransferWindow[i]) + { + throw new FormatException(Resources.RestartableInfoCorruptedException); + } + else + { + break; + } + + lastOffset = checkpoint.EntryTransferOffset - blockSize; + } + + if (this.transferJob.CheckPoint.TransferWindow.Any()) + { + rangesSpan = new RangesSpan(); + rangesSpan.StartOffset = checkpoint.TransferWindow[0]; + rangesSpan.EndOffset = Math.Min(rangesSpan.StartOffset + Constants.PageRangesSpanSize, this.SharedTransferData.TotalLength) - 1; + + for (int i = 1; i < checkpoint.TransferWindow.Count; ++i ) + { + if (checkpoint.TransferWindow[i] + blockSize > rangesSpan.EndOffset) + { + long lastEndOffset = rangesSpan.EndOffset; + this.rangesSpanList.Add(rangesSpan); + rangesSpan = new RangesSpan(); + rangesSpan.StartOffset = checkpoint.TransferWindow[i] > lastEndOffset ? checkpoint.TransferWindow[i] : lastEndOffset + 1; + rangesSpan.EndOffset = Math.Min(rangesSpan.StartOffset + Constants.PageRangesSpanSize, this.SharedTransferData.TotalLength) - 1; + } + } + + this.rangesSpanList.Add(rangesSpan); + } + } + + long offset = null != rangesSpan ? + rangesSpan.EndOffset > checkpoint.EntryTransferOffset ? 
+ rangesSpan.EndOffset + 1 : + checkpoint.EntryTransferOffset : + checkpoint.EntryTransferOffset; + + while (offset < this.SharedTransferData.TotalLength) + { + rangesSpan = new RangesSpan() + { + StartOffset = offset, + EndOffset = Math.Min(offset + Constants.PageRangesSpanSize, this.SharedTransferData.TotalLength) - 1 + }; + + this.rangesSpanList.Add(rangesSpan); + offset = rangesSpan.EndOffset + 1; + } + + if (this.rangesSpanList.Any()) + { + this.getRangesCountDownEvent = new CountdownEvent(this.rangesSpanList.Count); + } + } + + private void ClearForGetRanges() + { + this.rangesSpanList = null; + + if (null != this.getRangesCountDownEvent) + { + this.getRangesCountDownEvent.Dispose(); + this.getRangesCountDownEvent = null; + } + } + + /// + /// Turn raw ranges get from Azure Storage in rangesSpanList + /// into list of Range. + /// + private void ArrangeRanges() + { + long currentEndOffset = -1; + + IEnumerator enumerator = this.rangesSpanList.GetEnumerator(); + bool hasValue = enumerator.MoveNext(); + bool reachLastTransferOffset = false; + int lastTransferWindowIndex = 0; + + RangesSpan current; + RangesSpan next; + + if (hasValue) + { + current = enumerator.Current; + + while (hasValue) + { + hasValue = enumerator.MoveNext(); + + if (!current.Ranges.Any()) + { + current = enumerator.Current; + continue; + } + + if (hasValue) + { + next = enumerator.Current; + + Debug.Assert( + current.EndOffset < this.transferJob.CheckPoint.EntryTransferOffset + || ((current.EndOffset + 1) == next.StartOffset), + "Something wrong with ranges list."); + + if (next.Ranges.Any()) + { + if ((current.Ranges.Last().EndOffset + 1) == next.Ranges.First().StartOffset) + { + Range mergedRange = new Range() + { + StartOffset = current.Ranges.Last().StartOffset, + EndOffset = next.Ranges.First().EndOffset, + HasData = true + }; + + current.Ranges.RemoveAt(current.Ranges.Count - 1); + next.Ranges.RemoveAt(0); + current.Ranges.Add(mergedRange); + current.EndOffset = 
mergedRange.EndOffset; + next.StartOffset = mergedRange.EndOffset + 1; + + if (next.EndOffset == mergedRange.EndOffset) + { + continue; + } + } + } + } + + foreach (Range range in current.Ranges) + { + // Check if we have a gap before the current range. + // If so we'll generate a range with HasData = false. + if (currentEndOffset != range.StartOffset - 1) + { + this.AddRangesByCheckPoint( + currentEndOffset + 1, + range.StartOffset - 1, + false, + ref reachLastTransferOffset, + ref lastTransferWindowIndex); + } + + this.AddRangesByCheckPoint( + range.StartOffset, + range.EndOffset, + true, + ref reachLastTransferOffset, + ref lastTransferWindowIndex); + + currentEndOffset = range.EndOffset; + } + + current = enumerator.Current; + } + } + + if (currentEndOffset < this.SharedTransferData.TotalLength - 1) + { + this.AddRangesByCheckPoint( + currentEndOffset + 1, + this.SharedTransferData.TotalLength - 1, + false, + ref reachLastTransferOffset, + ref lastTransferWindowIndex); + } + } + + private void AddRangesByCheckPoint(long startOffset, long endOffset, bool hasData, ref bool reachLastTransferOffset, ref int lastTransferWindowIndex) + { + SingleObjectCheckpoint checkpoint = this.transferJob.CheckPoint; + if (reachLastTransferOffset) + { + this.rangeList.AddRange( + new Range + { + StartOffset = startOffset, + EndOffset = endOffset, + HasData = hasData, + }.SplitRanges(this.Scheduler.TransferOptions.BlockSize)); + } + else + { + Range range = new Range() + { + StartOffset = -1, + HasData = hasData + }; + + while (lastTransferWindowIndex < checkpoint.TransferWindow.Count) + { + long lastTransferWindowStart = checkpoint.TransferWindow[lastTransferWindowIndex]; + long lastTransferWindowEnd = Math.Min(checkpoint.TransferWindow[lastTransferWindowIndex] + this.Scheduler.TransferOptions.BlockSize - 1, this.SharedTransferData.TotalLength); + + if (lastTransferWindowStart <= endOffset) + { + if (-1 == range.StartOffset) + { + // New range + range.StartOffset = 
Math.Max(lastTransferWindowStart, startOffset); + range.EndOffset = Math.Min(lastTransferWindowEnd, endOffset); + } + else + { + if (range.EndOffset != lastTransferWindowStart - 1) + { + // Store the previous range and create a new one + this.rangeList.AddRange(range.SplitRanges(this.Scheduler.TransferOptions.BlockSize)); + range = new Range() + { + StartOffset = Math.Max(lastTransferWindowStart, startOffset), + HasData = hasData + }; + } + + range.EndOffset = Math.Min(lastTransferWindowEnd, endOffset); + } + + if (range.EndOffset == lastTransferWindowEnd) + { + // Reach the end of transfer window, move to next + ++lastTransferWindowIndex; + continue; + } + } + + break; + } + + if (-1 != range.StartOffset) + { + this.rangeList.AddRange(range.SplitRanges(this.Scheduler.TransferOptions.BlockSize)); + } + + if (checkpoint.EntryTransferOffset <= endOffset + 1) + { + reachLastTransferOffset = true; + + if (checkpoint.EntryTransferOffset <= endOffset) + { + this.rangeList.AddRange(new Range() + { + StartOffset = checkpoint.EntryTransferOffset, + EndOffset = endOffset, + HasData = hasData, + }.SplitRanges(this.Scheduler.TransferOptions.BlockSize)); + } + } + } + } + + /// + /// To initialize range based object download related information in the controller. + /// This method will call CallFinish. 
+ /// + private void InitDownloadInfo() + { + this.ClearForGetRanges(); + + this.state = State.Download; + + if (this.rangeList.Count == this.nextDownloadIndex) + { + this.toDownloadItemsCountdownEvent = new CountdownEvent(1); + this.SetChunkFinish(); + } + else + { + this.toDownloadItemsCountdownEvent = new CountdownEvent(this.rangeList.Count); + this.hasWork = true; + } + } + + private void SetChunkFinish() + { + if (this.toDownloadItemsCountdownEvent.Signal()) + { + this.state = State.Finished; + this.hasWork = false; + } + } + + protected class RangesSpan + { + public long StartOffset + { + get; + set; + } + + public long EndOffset + { + get; + set; + } + + public List Ranges + { + get; + set; + } + } + + protected class Range + { + public long StartOffset + { + get; + set; + } + + public long EndOffset + { + get; + set; + } + + public bool HasData + { + get; + set; + } + + /// + /// Split a Range into multiple Range objects, each at most maxRangeSize long. + /// + /// Maximum length for each piece. + /// List of Range objects. + public IEnumerable SplitRanges(long maxRangeSize) + { + long startOffset = this.StartOffset; + long rangeSize = this.EndOffset - this.StartOffset + 1; + + do + { + long singleRangeSize = Math.Min(rangeSize, maxRangeSize); + Range subRange = new Range + { + StartOffset = startOffset, + EndOffset = startOffset + singleRangeSize - 1, + HasData = this.HasData, + }; + + startOffset += singleRangeSize; + rangeSize -= singleRangeSize; + + yield return subRange; + } + while (rangeSize > 0); + } + } + + protected class RangeBasedDownloadState + { + private Range range; + + public Range Range + { + get + { + return this.range; + } + + set + { + this.range = value; + + this.StartOffset = value.StartOffset; + this.Length = (int)(value.EndOffset - value.StartOffset + 1); + } + } + + /// + /// Gets or sets a handle to the memory buffer to ensure the + /// memory buffer remains in memory during the entire operation. 
+ /// + public TransferDownloadStream DownloadStream + { + get; + set; + } + + /// + /// Gets or sets the starting offset of this part of data. + /// + public long StartOffset + { + get; + set; + } + + /// + /// Gets or sets the length of this part of data. + /// + public int Length + { + get; + set; + } + } + + protected abstract Task DoFetchAttributesAsync(); + + protected abstract Task DoDownloadRangeToStreamAsync(RangeBasedDownloadState asyncState); + + protected abstract Task> DoGetRangesAsync(RangesSpan rangesSpan); + } +} diff --git a/lib/TransferControllers/TransferReaders/StreamedReader.cs b/lib/TransferControllers/TransferReaders/StreamedReader.cs new file mode 100644 index 00000000..f84f5704 --- /dev/null +++ b/lib/TransferControllers/TransferReaders/StreamedReader.cs @@ -0,0 +1,426 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers +{ + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.Globalization; + using System.IO; + using System.Linq; + using System.Security; + using System.Threading; + using System.Threading.Tasks; + + internal sealed class StreamedReader : TransferReaderWriterBase + { + /// + /// Source stream to be read from. + /// It's a user input stream or a FileStream with the user input FilePath in source location. + /// + private Stream inputStream; + + /// + /// Value to indicate whether the input stream is a file stream owned by this reader or input by user. + /// If it's a file stream owned by this reader, we should close it when reading is finished. + /// + private bool ownsStream; + + /// + /// Transfer job instance. + /// + private TransferJob transferJob; + + /// + /// Countdown event to track the download status. 
+ /// Its count should be the same with count of chunks to be read. + /// + private CountdownEvent countdownEvent; + + /// + /// Transfer window in check point. + /// + private Queue lastTransferWindow; + + private volatile State state; + + private volatile bool hasWork; + + /// + /// Stream to read from source and calculate md5 hash of source. + /// + private MD5HashStream md5HashStream; + + public StreamedReader( + TransferScheduler scheduler, + SyncTransferController controller, + CancellationToken cancellationToken) + : base(scheduler, controller, cancellationToken) + { + this.transferJob = this.SharedTransferData.TransferJob; + this.hasWork = true; + } + + private enum State + { + OpenInputStream, + ReadStream, + Error, + Finished + } + + public override bool IsFinished + { + get + { + return this.state == State.Error || this.state == State.Finished; + } + } + + public override bool HasWork + { + get + { + return this.hasWork; + } + } + + public override async Task DoWorkInternalAsync() + { + switch (this.state) + { + case State.OpenInputStream: + await this.OpenInputStreamAsync(); + break; + case State.ReadStream: + await this.ReadStreamAsync(); + break; + case State.Error: + case State.Finished: + break; + } + } + + protected override void Dispose(bool disposing) + { + base.Dispose(disposing); + + if (disposing) + { + this.CloseOwnStream(); + + if (null != this.md5HashStream) + { + this.md5HashStream.Dispose(); + this.md5HashStream = null; + } + + if (null != this.countdownEvent) + { + this.countdownEvent.Dispose(); + } + } + } + + private async Task OpenInputStreamAsync() + { + Debug.Assert( + State.OpenInputStream == this.state, + "OpenInputStreamAsync called, but state is not OpenInputStream."); + + this.hasWork = false; + + await Task.Run(() => + { + this.NotifyStarting(); + this.Controller.CheckCancellation(); + + if (this.transferJob.Source.Stream != null) + { + this.inputStream = this.transferJob.Source.Stream; + this.ownsStream = false; + + if 
(!this.inputStream.CanRead) + { + throw new NotSupportedException(string.Format( + CultureInfo.CurrentCulture, + Resources.StreamMustSupportReadException, + "inputStream")); + } + + if (!this.inputStream.CanSeek) + { + throw new NotSupportedException(string.Format( + CultureInfo.CurrentCulture, + Resources.StreamMustSupportSeekException, + "inputStream")); + } + } + else + { + Debug.Assert( + !string.IsNullOrEmpty(this.transferJob.Source.FilePath), + "Initializing StreamedReader instance, but source is neither a stream nor a file"); + this.SharedTransferData.SourceLocation = this.transferJob.Source.FilePath; + + try + { + // Attempt to open the file first so that we throw an exception before getting into the async work + this.inputStream = new FileStream( + this.transferJob.Source.FilePath, + FileMode.Open, + FileAccess.Read, + FileShare.Read); + + this.ownsStream = true; + } + catch (Exception ex) + { + if ((ex is NotSupportedException) || + (ex is IOException) || + (ex is UnauthorizedAccessException) || + (ex is SecurityException) || + (ex is ArgumentException && !(ex is ArgumentNullException))) + { + string exceptionMessage = string.Format( + CultureInfo.CurrentCulture, + Resources.FailedToOpenFileException, + this.transferJob.Source.FilePath, + ex.Message); + + throw new TransferException( + TransferErrorCode.OpenFileFailed, + exceptionMessage, + ex); + } + else + { + throw; + } + } + } + }); + + this.SharedTransferData.TotalLength = this.inputStream.Length; + + int count = (int)Math.Ceiling((double)(this.SharedTransferData.TotalLength - this.transferJob.CheckPoint.EntryTransferOffset) / this.Scheduler.TransferOptions.BlockSize); + + if (null != this.transferJob.CheckPoint.TransferWindow) + { + count += this.transferJob.CheckPoint.TransferWindow.Count; + } + + this.lastTransferWindow = new Queue(this.transferJob.CheckPoint.TransferWindow); + + this.md5HashStream = new MD5HashStream( + this.inputStream, + this.transferJob.CheckPoint.EntryTransferOffset, + true); 
+ + this.PreProcessed = true; + + if (!this.md5HashStream.FinishedSeparateMd5Calculator) + { + await Task.Run(() => + { + this.md5HashStream.CalculateMd5(this.Scheduler.MemoryManager, this.Controller.CheckCancellation); + }); + } + + if (0 == count) + { + this.countdownEvent = new CountdownEvent(1); + this.SetChunkFinish(); + } + else + { + this.countdownEvent = new CountdownEvent(count); + + this.state = State.ReadStream; + this.hasWork = true; + } + } + + private async Task ReadStreamAsync() + { + Debug.Assert( + this.state == State.ReadStream || this.state == State.Error, + "ReadChunks called, but state isn't ReadStream or Error"); + + this.hasWork = false; + + byte[] memoryBuffer = this.Scheduler.MemoryManager.RequireBuffer(); + + if (null != memoryBuffer) + { + long startOffset = 0; + + if (0 != this.lastTransferWindow.Count) + { + startOffset = this.lastTransferWindow.Dequeue(); + } + else + { + bool canRead = false; + + lock (this.transferJob.CheckPoint.TransferWindowLock) + { + if (this.transferJob.CheckPoint.TransferWindow.Count < Constants.MaxCountInTransferWindow) + { + startOffset = this.transferJob.CheckPoint.EntryTransferOffset; + + if (this.transferJob.CheckPoint.EntryTransferOffset < this.SharedTransferData.TotalLength) + { + this.transferJob.CheckPoint.TransferWindow.Add(startOffset); + this.transferJob.CheckPoint.EntryTransferOffset = Math.Min( + this.transferJob.CheckPoint.EntryTransferOffset + this.Scheduler.TransferOptions.BlockSize, + this.SharedTransferData.TotalLength); + + canRead = true; + } + } + } + + if (!canRead) + { + this.Scheduler.MemoryManager.ReleaseBuffer(memoryBuffer); + this.hasWork = true; + return; + } + } + + if ((startOffset > this.SharedTransferData.TotalLength) + || (startOffset < 0)) + { + this.Scheduler.MemoryManager.ReleaseBuffer(memoryBuffer); + throw new InvalidOperationException(Resources.RestartableInfoCorruptedException); + } + + ReadDataState asyncState = new ReadDataState + { + MemoryBuffer = memoryBuffer, + 
BytesRead = 0, + StartOffset = startOffset, + Length = (int)Math.Min(this.Scheduler.TransferOptions.BlockSize, this.SharedTransferData.TotalLength - startOffset), + MemoryManager = this.Scheduler.MemoryManager, + }; + + using (asyncState) + { + await this.ReadChunkAsync(asyncState); + } + } + + this.SetHasWork(); + } + + private async Task ReadChunkAsync(ReadDataState asyncState) + { + Debug.Assert(null != asyncState, "asyncState object expected"); + Debug.Assert( + this.state == State.ReadStream || this.state == State.Error, + "ReadChunkAsync called, but state isn't Upload or Error"); + + int readBytes = await this.md5HashStream.ReadAsync( + asyncState.StartOffset + asyncState.BytesRead, + asyncState.MemoryBuffer, + asyncState.BytesRead, + asyncState.Length - asyncState.BytesRead, + this.CancellationToken); + + // If a parallel operation caused the controller to be placed in + // error state exit early to avoid unnecessary I/O. + // Note that this check needs to be after the EndRead operation + // above to avoid leaking resources. + if (this.state == State.Error) + { + return; + } + + asyncState.BytesRead += readBytes; + + if (asyncState.BytesRead < asyncState.Length) + { + await this.ReadChunkAsync(asyncState); + } + else + { + this.Controller.CheckCancellation(); + + if (!this.md5HashStream.MD5HashTransformBlock(asyncState.StartOffset, asyncState.MemoryBuffer, 0, asyncState.Length, null, 0)) + { + // Error info has been set in Calculate MD5 action, just return + return; + } + + TransferData transferData = new TransferData(this.Scheduler.MemoryManager) + { + StartOffset = asyncState.StartOffset, + Length = asyncState.Length, + MemoryBuffer = asyncState.MemoryBuffer + }; + + asyncState.MemoryBuffer = null; + + this.SharedTransferData.AvailableData.TryAdd(transferData.StartOffset, transferData); + + this.SetChunkFinish(); + } + } + + private void SetHasWork() + { + if (this.HasWork) + { + return; + } + + // Check if we have blocks available to download. 
+ if ((null != this.lastTransferWindow && this.lastTransferWindow.Any()) + || this.transferJob.CheckPoint.EntryTransferOffset < this.SharedTransferData.TotalLength) + { + this.hasWork = true; + return; + } + } + + private void SetChunkFinish() + { + if (this.countdownEvent.Signal()) + { + this.state = State.Finished; + this.CloseOwnStream(); + + if (!this.md5HashStream.SucceededSeparateMd5Calculator) + { + return; + } + + this.md5HashStream.MD5HashTransformFinalBlock(new byte[0], 0, 0); + this.SharedTransferData.Attributes = new Attributes() + { + ContentMD5 = Convert.ToBase64String(this.md5HashStream.Hash), + OverWriteAll = false + }; + + this.SharedTransferData.Attributes.ContentType = this.transferJob.ContentType; + } + } + + private void CloseOwnStream() + { + if (this.ownsStream) + { + if (null != this.inputStream) + { + this.inputStream.Close(); + this.inputStream = null; + } + } + } + } +} diff --git a/lib/TransferControllers/TransferWriters/AppendBlobWriter.cs b/lib/TransferControllers/TransferWriters/AppendBlobWriter.cs new file mode 100644 index 00000000..c56a6d25 --- /dev/null +++ b/lib/TransferControllers/TransferWriters/AppendBlobWriter.cs @@ -0,0 +1,424 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers +{ + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.Globalization; + using System.IO; + using System.Linq; + using System.Net; + using System.Threading; + using System.Threading.Tasks; + using Microsoft.WindowsAzure.Storage.Blob; + using Microsoft.WindowsAzure.Storage.Blob.Protocol; + + internal sealed class AppendBlobWriter : TransferReaderWriterBase + { + private volatile State state; + private volatile bool hasWork; + private TransferLocation location; + private 
CloudAppendBlob appendBlob; + private long expectedOffset = 0; + + /// + /// To indicate whether the destination already exist before this writing. + /// If no, when try to set destination's attribute, should get its attributes first. + /// + private bool destExist = false; + + public AppendBlobWriter( + TransferScheduler scheduler, + SyncTransferController controller, + CancellationToken cancellationToken) + : base(scheduler, controller, cancellationToken) + { + this.location = this.SharedTransferData.TransferJob.Destination; + this.appendBlob = this.location.Blob as CloudAppendBlob; + + Debug.Assert(null != this.appendBlob, "The destination is not an append blob while initializing a AppendBlobWriter instance."); + + this.state = State.FetchAttributes; + this.hasWork = true; + } + + public override bool HasWork + { + get + { + return this.hasWork && + ((State.FetchAttributes == this.state) || + (State.Create == this.state) || + (State.UploadBlob == this.state && this.SharedTransferData.AvailableData.ContainsKey(this.expectedOffset)) || + (State.Commit == this.state && null != this.SharedTransferData.Attributes)); + } + } + + public override bool IsFinished + { + get + { + return State.Error == this.state || State.Finished == this.state; + } + } + + private enum State + { + FetchAttributes, + Create, + UploadBlob, + Commit, + Error, + Finished + }; + + public override async Task DoWorkInternalAsync() + { + switch (this.state) + { + case State.FetchAttributes: + await this.FetchAttributesAsync(); + break; + case State.Create: + await this.CreateAsync(); + break; + case State.UploadBlob: + await this.UploadBlobAsync(); + break; + case State.Commit: + await this.CommitAsync(); + break; + case State.Error: + default: + break; + } + } + + private async Task FetchAttributesAsync() + { + Debug.Assert( + this.state == State.FetchAttributes, + "FetchAttributesAsync called, but state isn't FetchAttributes", + "Current state is {0}", + this.state); + + this.hasWork = false; + 
+ if (this.SharedTransferData.TotalLength > Constants.MaxBlockBlobFileSize) + { + string exceptionMessage = string.Format( + CultureInfo.CurrentCulture, + Resources.BlobFileSizeTooLargeException, + Utils.BytesToHumanReadableSize(this.SharedTransferData.TotalLength), + Resources.AppendBlob, + Utils.BytesToHumanReadableSize(Constants.MaxBlockBlobFileSize)); + + throw new TransferException( + TransferErrorCode.UploadSourceFileSizeTooLarge, + exceptionMessage); + } + + bool existingBlob = true; + + AccessCondition accessCondition = Utils.GenerateConditionWithCustomerCondition( + this.location.AccessCondition, + this.location.CheckedAccessCondition); + + try + { + await this.appendBlob.FetchAttributesAsync( + accessCondition, + Utils.GenerateBlobRequestOptions(this.location.BlobRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + this.CancellationToken); + + this.destExist = true; + } + catch (StorageException se) + { + // Getting a storage exception is expected if the blob doesn't + // exist. In this case we won't error out, but set the + // existingBlob flag to false to indicate we're uploading + // a new blob instead of overwriting an existing blob. + if (null != se.RequestInformation && + se.RequestInformation.HttpStatusCode == (int)HttpStatusCode.NotFound) + { + existingBlob = false; + } + else if (null != se && + (0 == string.Compare(se.Message, Constants.BlobTypeMismatch, StringComparison.OrdinalIgnoreCase))) + { + throw new InvalidOperationException(Resources.DestinationBlobTypeNotMatch); + } + else + { + throw; + } + } + + this.HandleFetchAttributesResult(existingBlob); + } + + private void HandleFetchAttributesResult(bool existingBlob) + { + this.location.CheckedAccessCondition = true; + + // If destination file exists, query user whether to overwrite it. 
+ this.Controller.CheckOverwrite( + existingBlob, + this.SharedTransferData.SourceLocation, + this.appendBlob.Uri.ToString()); + + this.Controller.UpdateProgressAddBytesTransferred(0); + + if (existingBlob) + { + if (this.appendBlob.Properties.BlobType == BlobType.Unspecified) + { + throw new InvalidOperationException(Resources.FailedToGetBlobTypeException); + } + + if (this.appendBlob.Properties.BlobType != BlobType.AppendBlob) + { + throw new InvalidOperationException(Resources.DestinationBlobTypeNotMatch); + } + } + + // We do check point consistency validation in reader, so directly use it here. + SingleObjectCheckpoint checkpoint = this.SharedTransferData.TransferJob.CheckPoint; + + if ((null != checkpoint.TransferWindow) + && (checkpoint.TransferWindow.Any())) + { + checkpoint.TransferWindow.Sort(); + this.expectedOffset = checkpoint.TransferWindow[0]; + } + else + { + this.expectedOffset = checkpoint.EntryTransferOffset; + } + + if (0 == this.expectedOffset) + { + this.state = State.Create; + } + else + { + if (!existingBlob) + { + throw new TransferException(Resources.DestinationChangedException); + } + + this.PreProcessed = true; + + if (this.expectedOffset == this.SharedTransferData.TotalLength) + { + this.state = State.Commit; + } + else + { + this.state = State.UploadBlob; + } + } + + this.hasWork = true; + } + + private async Task CreateAsync() + { + Debug.Assert(State.Create == this.state, "Calling CreateAsync, state should be Create"); + + this.hasWork = false; + + AccessCondition accessCondition = Utils.GenerateConditionWithCustomerCondition( + this.location.AccessCondition, + true); + + await this.appendBlob.CreateOrReplaceAsync( + accessCondition, + Utils.GenerateBlobRequestOptions(this.location.BlobRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + this.CancellationToken); + + this.PreProcessed = true; + + if (this.expectedOffset == this.SharedTransferData.TotalLength) + { + this.state = State.Commit; + } + else 
+ { + this.state = State.UploadBlob; + } + + this.hasWork = true; + } + + private async Task UploadBlobAsync() + { + Debug.Assert(State.UploadBlob == this.state, "Calling UploadBlobAsync, state should be UploadBlob"); + + this.hasWork = false; + + TransferData transferData = null; + if (!this.SharedTransferData.AvailableData.TryRemove(this.expectedOffset, out transferData)) + { + this.hasWork = true; + return; + } + + if (null != transferData) + { + using (transferData) + { + long currentOffset = this.expectedOffset; + this.expectedOffset += transferData.Length; + + transferData.Stream = new MemoryStream(transferData.MemoryBuffer, 0, transferData.Length); + + AccessCondition accessCondition = Utils.GenerateConditionWithCustomerCondition(this.location.AccessCondition, true) ?? new AccessCondition(); + accessCondition.IfAppendPositionEqual = currentOffset; + + bool needToCheckContent = false; + + try + { + await this.appendBlob.AppendBlockAsync(transferData.Stream, + null, + accessCondition, + Utils.GenerateBlobRequestOptions(this.location.BlobRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + this.CancellationToken); + } + catch (StorageException se) + { + if ((null != se.RequestInformation) && + ((int)HttpStatusCode.PreconditionFailed == se.RequestInformation.HttpStatusCode) && + (null != se.RequestInformation.ExtendedErrorInformation) && + (se.RequestInformation.ExtendedErrorInformation.ErrorCode == BlobErrorCodeStrings.InvalidAppendCondition)) + { + needToCheckContent = true; + } + else + { + throw; + } + } + + if (needToCheckContent && + (!await this.ValidateUploadedChunkAsync(transferData.MemoryBuffer, currentOffset, (long)transferData.Length))) + { + throw new InvalidOperationException(Resources.DestinationChangedException); + } + + lock(this.SharedTransferData.TransferJob.CheckPoint.TransferWindowLock) + { + this.SharedTransferData.TransferJob.CheckPoint.TransferWindow.Remove(currentOffset); + } + + // update progress + 
this.Controller.UpdateProgressAddBytesTransferred(transferData.Length); + + if (this.expectedOffset == this.SharedTransferData.TotalLength) + { + this.state = State.Commit; + } + + this.hasWork = true; + } + } + } + + private async Task CommitAsync() + { + Debug.Assert(State.Commit == this.state, "Calling CommitAsync, state should be Commit"); + + this.hasWork = false; + + BlobRequestOptions blobRequestOptions = Utils.GenerateBlobRequestOptions(this.location.BlobRequestOptions); + OperationContext operationContext = Utils.GenerateOperationContext(this.Controller.TransferContext); + + if (!this.destExist) + { + await this.appendBlob.FetchAttributesAsync( + Utils.GenerateConditionWithCustomerCondition(this.location.AccessCondition), + blobRequestOptions, + operationContext, + this.CancellationToken); + } + + var originalMetadata = new Dictionary(this.appendBlob.Metadata); + Utils.SetAttributes(this.appendBlob, this.SharedTransferData.Attributes); + + await this.appendBlob.SetPropertiesAsync( + Utils.GenerateConditionWithCustomerCondition(this.location.AccessCondition), + blobRequestOptions, + operationContext, + this.CancellationToken); + + if (!originalMetadata.DictionaryEquals(this.appendBlob.Metadata)) + { + await this.appendBlob.SetMetadataAsync( + Utils.GenerateConditionWithCustomerCondition(this.location.AccessCondition), + blobRequestOptions, + operationContext, + this.CancellationToken); + } + + this.SetFinish(); + } + + private async Task ValidateUploadedChunkAsync(byte[] currentData, long startOffset, long length) + { + AccessCondition accessCondition = Utils.GenerateConditionWithCustomerCondition(this.location.AccessCondition, true); + OperationContext operationContext = Utils.GenerateOperationContext(this.Controller.TransferContext); + await this.appendBlob.FetchAttributesAsync( + accessCondition, + Utils.GenerateBlobRequestOptions(this.location.BlobRequestOptions), + operationContext, + this.CancellationToken); + + this.destExist = true; + + if 
(this.appendBlob.Properties.Length != (startOffset + length)) + { + return false; + } + + byte[] buffer = new byte[length]; + + // Do not expect any exception here. + await this.appendBlob.DownloadRangeToByteArrayAsync( + buffer, + 0, + startOffset, + length, + accessCondition, + Utils.GenerateBlobRequestOptions(this.location.BlobRequestOptions), + operationContext, + this.CancellationToken); + + for (int i = 0; i < length; ++i) + { + if (currentData[i] != buffer[i]) + { + return false; + } + } + + return true; + } + + private void SetFinish() + { + this.state = State.Finished; + this.NotifyFinished(null); + this.hasWork = false; + } + } +} diff --git a/lib/TransferControllers/TransferWriters/BlockBlobWriter.cs b/lib/TransferControllers/TransferWriters/BlockBlobWriter.cs new file mode 100644 index 00000000..f59d8579 --- /dev/null +++ b/lib/TransferControllers/TransferWriters/BlockBlobWriter.cs @@ -0,0 +1,377 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers +{ + using System; + using System.Collections.Concurrent; + using System.Diagnostics; + using System.Globalization; + using System.IO; + using System.Linq; + using System.Net; + using System.Threading; + using System.Threading.Tasks; + using Microsoft.WindowsAzure.Storage.Blob; + + internal sealed class BlockBlobWriter : TransferReaderWriterBase + { + private volatile bool hasWork; + private volatile State state; + private CountdownEvent countdownEvent; + private TransferLocation location; + private string[] blockIdSequence; + private CloudBlockBlob blockBlob; + + public BlockBlobWriter( + TransferScheduler scheduler, + SyncTransferController controller, + CancellationToken cancellationToken) + : base(scheduler, controller, cancellationToken) + { + this.location = 
this.SharedTransferData.TransferJob.Destination; + this.blockBlob = this.location.Blob as CloudBlockBlob; + + Debug.Assert(null != this.blockBlob, "The destination is not a block blob while initializing a BlockBlobWriter instance."); + + this.state = State.FetchAttributes; + this.hasWork = true; + } + + private enum State + { + FetchAttributes, + UploadBlob, + Commit, + Error, + Finished + }; + + public override bool PreProcessed + { + get; + protected set; + } + + public override bool HasWork + { + get + { + return this.hasWork && + (!this.PreProcessed + || ((this.state == State.UploadBlob) && this.SharedTransferData.AvailableData.Any()) + || ((this.state == State.Commit) && (null != this.SharedTransferData.Attributes))); + } + } + + public override bool IsFinished + { + get + { + return State.Error == this.state || State.Finished == this.state; + } + } + + public override async Task DoWorkInternalAsync() + { + switch (this.state) + { + case State.FetchAttributes: + await this.FetchAttributesAsync(); + break; + case State.UploadBlob: + await this.UploadBlobAsync(); + break; + case State.Commit: + await this.CommitAsync(); + break; + case State.Error: + default: + break; + } + } + + protected override void Dispose(bool disposing) + { + base.Dispose(disposing); + + if (disposing) + { + if (null != this.countdownEvent) + { + this.countdownEvent.Dispose(); + this.countdownEvent = null; + } + } + } + + private async Task FetchAttributesAsync() + { + Debug.Assert( + this.state == State.FetchAttributes, + "FetchAttributesAsync called, but state isn't FetchAttributes", + "Current state is {0}", + this.state); + + this.hasWork = false; + + if (this.SharedTransferData.TotalLength > Constants.MaxBlockBlobFileSize) + { + string exceptionMessage = string.Format( + CultureInfo.CurrentCulture, + Resources.BlobFileSizeTooLargeException, + Utils.BytesToHumanReadableSize(this.SharedTransferData.TotalLength), + Resources.BlockBlob, + 
Utils.BytesToHumanReadableSize(Constants.MaxBlockBlobFileSize)); + + throw new TransferException( + TransferErrorCode.UploadSourceFileSizeTooLarge, + exceptionMessage); + } + + AccessCondition accessCondition = Utils.GenerateConditionWithCustomerCondition( + this.location.AccessCondition, + this.location.CheckedAccessCondition); + + try + { + await this.location.Blob.FetchAttributesAsync( + accessCondition, + Utils.GenerateBlobRequestOptions(this.location.BlobRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + this.CancellationToken); + } + catch (Exception e) + { + this.HandleFetchAttributesResult(e); + return; + } + + this.HandleFetchAttributesResult(null); + } + + private void HandleFetchAttributesResult(Exception e) + { + bool existingBlob = true; + + if (null != e) + { + StorageException se = e as StorageException; + + if (null != se) + { + // Getting a storage exception is expected if the blob doesn't + // exist. In this case we won't error out, but set the + // existingBlob flag to false to indicate we're uploading + // a new blob instead of overwriting an existing blob. + if (null != se.RequestInformation && + se.RequestInformation.HttpStatusCode == (int)HttpStatusCode.NotFound) + { + existingBlob = false; + } + else if (null != se && + (0 == string.Compare(se.Message, Constants.BlobTypeMismatch, StringComparison.OrdinalIgnoreCase))) + { + throw new InvalidOperationException(Resources.DestinationBlobTypeNotMatch); + } + else + { + throw se; + } + } + } + + this.location.CheckedAccessCondition = true; + + if (string.IsNullOrEmpty(this.location.BlockIdPrefix)) + { + // BlockIdPrefix is never set before that this is the first time to transfer this file. + // In block blob upload, it stores uploaded but not committed blocks on Azure Storage. + // In DM, we use block id to identify the blocks uploaded so we only need to upload it once. + // Keep BlockIdPrefix in upload job object for restarting the transfer if anything happens. 
+ this.location.BlockIdPrefix = Guid.NewGuid().ToString("N") + "-"; + } + + // If destination file exists, query user whether to overwrite it. + this.Controller.CheckOverwrite( + existingBlob, + this.SharedTransferData.SourceLocation, + this.location.Blob.Uri.ToString()); + + this.Controller.UpdateProgressAddBytesTransferred(0); + + if (existingBlob) + { + if (this.location.Blob.Properties.BlobType == BlobType.Unspecified) + { + throw new InvalidOperationException(Resources.FailedToGetBlobTypeException); + } + if (this.location.Blob.Properties.BlobType != BlobType.BlockBlob) + { + throw new InvalidOperationException(Resources.DestinationBlobTypeNotMatch); + } + + Debug.Assert( + this.location.Blob.Properties.BlobType == BlobType.BlockBlob, + "BlobType should be BlockBlob if we reach here."); + } + + // Calculate number of blocks. + int numBlocks = (int)Math.Ceiling( + this.SharedTransferData.TotalLength / (double)this.Scheduler.TransferOptions.BlockSize); + + // Create sequence array. + this.blockIdSequence = new string[numBlocks]; + + for (int i = 0; i < numBlocks; ++i) + { + string blockIdSuffix = i.ToString("D6", CultureInfo.InvariantCulture); + byte[] blockIdInBytes = System.Text.Encoding.UTF8.GetBytes(this.location.BlockIdPrefix + blockIdSuffix); + string blockId = Convert.ToBase64String(blockIdInBytes); + this.blockIdSequence[i] = blockId; + } + + SingleObjectCheckpoint checkpoint = this.SharedTransferData.TransferJob.CheckPoint; + + int leftBlockCount = (int)Math.Ceiling( + (this.SharedTransferData.TotalLength - checkpoint.EntryTransferOffset) / (double)this.Scheduler.TransferOptions.BlockSize) + checkpoint.TransferWindow.Count; + + if (0 == leftBlockCount) + { + this.state = State.Commit; + } + else + { + this.countdownEvent = new CountdownEvent(leftBlockCount); + + this.state = State.UploadBlob; + } + + this.PreProcessed = true; + this.hasWork = true; + } + + private async Task UploadBlobAsync() + { + Debug.Assert( + State.UploadBlob == this.state || 
State.Error == this.state, + "UploadBlobAsync called but state is not UploadBlob nor Error.", + "Current state is {0}", + this.state); + + TransferData transferData = this.GetFirstAvailable(); + + if (null != transferData) + { + using (transferData) + { + transferData.Stream = new MemoryStream(transferData.MemoryBuffer, 0, transferData.Length); + + await this.blockBlob.PutBlockAsync( + this.GetBlockId(transferData.StartOffset), + transferData.Stream, + null, + Utils.GenerateConditionWithCustomerCondition(this.location.AccessCondition, true), + Utils.GenerateBlobRequestOptions(this.location.BlobRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + this.CancellationToken); + } + + lock (this.SharedTransferData.TransferJob.CheckPoint.TransferWindowLock) + { + this.SharedTransferData.TransferJob.CheckPoint.TransferWindow.Remove(transferData.StartOffset); + } + + this.FinishBlock(transferData.Length); + } + + // Do not set hasWork to true because it's always true in State.UploadBlob + // Otherwise it may cause CommitAsync be called multiple times: + // 1. UploadBlobAsync downloads all content, but doesn't set hasWork to true yet + // 2. Call CommitAysnc, set hasWork to false + // 3. UploadBlobAsync set hasWork to true. + // 4. Call CommitAsync again since hasWork is true. 
+ } + + private async Task CommitAsync() + { + Debug.Assert( + this.state == State.Commit, + "CommitAsync called, but state isn't Commit", + "Current state is {0}", + this.state); + + this.hasWork = false; + + Utils.SetAttributes(this.blockBlob, this.SharedTransferData.Attributes); + + BlobRequestOptions blobRequestOptions = Utils.GenerateBlobRequestOptions(this.location.BlobRequestOptions); + OperationContext operationContext = Utils.GenerateOperationContext(this.Controller.TransferContext); + + await this.blockBlob.PutBlockListAsync( + this.blockIdSequence, + Utils.GenerateConditionWithCustomerCondition(this.location.AccessCondition), + blobRequestOptions, + operationContext, + this.CancellationToken); + + // REST API PutBlockList cannot clear existing Content-Type of block blob, so if it's needed to clear existing + // Content-Type, REST API SetBlobProperties must be called explicitly: + // 1. The attributes are inherited from others and Content-Type is null or empty. + // 2. User specifies Content-Type to string.Empty while uploading. 
+ if (this.SharedTransferData.Attributes.OverWriteAll && string.IsNullOrEmpty(this.SharedTransferData.Attributes.ContentType) + || (!this.SharedTransferData.Attributes.OverWriteAll && this.SharedTransferData.Attributes.ContentType == string.Empty)) + { + await this.blockBlob.SetPropertiesAsync( + Utils.GenerateConditionWithCustomerCondition(this.location.AccessCondition), + blobRequestOptions, + operationContext, + this.CancellationToken); + } + + this.SetFinish(); + } + + private void SetFinish() + { + this.state = State.Finished; + this.NotifyFinished(null); + this.hasWork = false; + } + + private void FinishBlock(long length) + { + Debug.Assert( + this.state == State.UploadBlob || this.state == State.Error, + "FinishBlock called, but state isn't Upload or Error", + "Current state is {0}", + this.state); + + // If a parallel operation caused the controller to be placed in + // error state exit, make sure not to accidentally change it to + // the Commit state. + if (this.state == State.Error) + { + return; + } + + this.Controller.UpdateProgressAddBytesTransferred(length); + + if (this.countdownEvent.Signal()) + { + this.state = State.Commit; + } + } + + private string GetBlockId(long startOffset) + { + Debug.Assert(startOffset % this.Scheduler.TransferOptions.BlockSize == 0, "Block startOffset should be multiples of block size."); + + int count = (int)(startOffset / this.Scheduler.TransferOptions.BlockSize); + return this.blockIdSequence[count]; + } + } +} diff --git a/lib/TransferControllers/TransferWriters/CloudFileWriter.cs b/lib/TransferControllers/TransferWriters/CloudFileWriter.cs new file mode 100644 index 00000000..c9fd93d4 --- /dev/null +++ b/lib/TransferControllers/TransferWriters/CloudFileWriter.cs @@ -0,0 +1,141 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace 
Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers +{ + using System; + using System.Collections.Generic; + using System.Globalization; + using System.Threading; + using System.Threading.Tasks; + using Microsoft.WindowsAzure.Storage.File; + + internal sealed class CloudFileWriter : RangeBasedWriter + { + private CloudFile cloudFile; + + /// + /// To indicate whether the destination already exist before this writing. + /// If no, when try to set destination's attribute, should get its attributes first. + /// + private bool destExist = false; + + internal CloudFileWriter( + TransferScheduler scheduler, + SyncTransferController controller, + CancellationToken cancellationToken) + : base(scheduler, controller, cancellationToken) + { + this.cloudFile = this.TransferJob.Destination.AzureFile; + } + + protected override Uri DestUri + { + get + { + return this.cloudFile.Uri; + } + } + + protected override void CheckInputStreamLength(long inputStreamLength) + { + if (inputStreamLength > Constants.MaxCloudFileSize) + { + string exceptionMessage = string.Format( + CultureInfo.CurrentCulture, + Resources.CloudFileSizeTooLargeException, + Utils.BytesToHumanReadableSize(inputStreamLength), + Utils.BytesToHumanReadableSize(Constants.MaxCloudFileSize)); + + throw new TransferException( + TransferErrorCode.UploadSourceFileSizeTooLarge, + exceptionMessage); + } + + return; + } + + protected override async Task DoFetchAttributesAsync() + { + await this.cloudFile.FetchAttributesAsync( + null, + Utils.GenerateFileRequestOptions(this.TransferJob.Destination.FileRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + this.CancellationToken); + this.destExist = true; + } + + protected override void HandleFetchAttributesResult(Exception e) + { + // Do nothing here. 
+ } + + protected override async Task DoCreateAsync(long size) + { + await this.cloudFile.CreateAsync( + size, + null, + Utils.GenerateFileRequestOptions(this.TransferJob.Destination.FileRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + this.CancellationToken); + } + + protected override async Task DoResizeAsync(long size) + { + await this.cloudFile.ResizeAsync( + size, + null, + Utils.GenerateFileRequestOptions(this.TransferJob.Destination.FileRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + this.CancellationToken); + } + + protected override async Task WriteRangeAsync(TransferData transferData) + { + await this.cloudFile.WriteRangeAsync( + transferData.Stream, + transferData.StartOffset, + null, + null, + Utils.GenerateFileRequestOptions(this.TransferJob.Destination.FileRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + this.CancellationToken); + } + + protected override async Task DoCommitAsync() + { + FileRequestOptions fileRequestOptions = Utils.GenerateFileRequestOptions(this.TransferJob.Destination.FileRequestOptions); + OperationContext operationContext = Utils.GenerateOperationContext(this.Controller.TransferContext); + + if (!this.destExist) + { + await this.cloudFile.FetchAttributesAsync( + null, + fileRequestOptions, + operationContext, + this.CancellationToken); + } + + var originalMetadata = new Dictionary(this.cloudFile.Metadata); + Utils.SetAttributes(this.cloudFile, this.SharedTransferData.Attributes); + + await this.cloudFile.SetPropertiesAsync( + null, + fileRequestOptions, + operationContext, + this.CancellationToken); + + if (!originalMetadata.DictionaryEquals(this.cloudFile.Metadata)) + { + await this.cloudFile.SetMetadataAsync( + null, + fileRequestOptions, + operationContext, + this.CancellationToken); + } + } + } +} diff --git a/lib/TransferControllers/TransferWriters/PageBlobWriter.cs 
b/lib/TransferControllers/TransferWriters/PageBlobWriter.cs new file mode 100644 index 00000000..4e668af9 --- /dev/null +++ b/lib/TransferControllers/TransferWriters/PageBlobWriter.cs @@ -0,0 +1,169 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers +{ + using System; + using System.Collections.Generic; + using System.Globalization; + using System.Linq; + using System.Text; + using System.Threading; + using System.Threading.Tasks; + using Microsoft.WindowsAzure.Storage.Blob; + + internal sealed class PageBlobWriter : RangeBasedWriter + { + private CloudPageBlob pageBlob; + + /// + /// Size of all files transferred to page blob must be exactly + /// divided by this constant. + /// + private const long PageBlobPageSize = (long)512; + + /// + /// To indicate whether the destination already exist before this writing. + /// If no, when try to set destination's attribute, should get its attributes first. 
+ /// + private bool destExist = false; + + internal PageBlobWriter( + TransferScheduler scheduler, + SyncTransferController controller, + CancellationToken cancellationToken) + : base(scheduler, controller, cancellationToken) + { + this.pageBlob = this.TransferJob.Destination.Blob as CloudPageBlob; + } + + protected override Uri DestUri + { + get + { + return this.pageBlob.Uri; + } + } + + protected override void CheckInputStreamLength(long inputStreamLength) + { + if (inputStreamLength > Constants.MaxPageBlobFileSize) + { + string exceptionMessage = string.Format( + CultureInfo.CurrentCulture, + Resources.BlobFileSizeTooLargeException, + Utils.BytesToHumanReadableSize(inputStreamLength), + Resources.PageBlob, + Utils.BytesToHumanReadableSize(Constants.MaxPageBlobFileSize)); + + throw new TransferException( + TransferErrorCode.UploadSourceFileSizeTooLarge, + exceptionMessage); + } + + if (0 != inputStreamLength % PageBlobPageSize) + { + string exceptionMessage = string.Format( + CultureInfo.CurrentCulture, + Resources.BlobFileSizeInvalidException, + Utils.BytesToHumanReadableSize(inputStreamLength), + Resources.PageBlob, + Utils.BytesToHumanReadableSize(PageBlobPageSize)); + + throw new TransferException( + TransferErrorCode.UploadBlobSourceFileSizeInvalid, + exceptionMessage); + } + + return; + } + + protected override async Task DoFetchAttributesAsync() + { + await this.pageBlob.FetchAttributesAsync( + this.TransferJob.Destination.AccessCondition, + Utils.GenerateBlobRequestOptions(this.TransferJob.Destination.BlobRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + this.CancellationToken); + this.destExist = true; + } + + protected override void HandleFetchAttributesResult(Exception e) + { + StorageException se = e as StorageException; + if (null != se && + (0 == string.Compare(se.Message, Constants.BlobTypeMismatch, StringComparison.OrdinalIgnoreCase))) + { + throw new 
InvalidOperationException(Resources.DestinationBlobTypeNotMatch); + } + } + + protected override async Task DoCreateAsync(long size) + { + await this.pageBlob.CreateAsync( + size, + Utils.GenerateConditionWithCustomerCondition(this.TransferJob.Destination.AccessCondition), + Utils.GenerateBlobRequestOptions(this.TransferJob.Destination.BlobRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + this.CancellationToken); + } + + protected override async Task DoResizeAsync(long size) + { + await this.pageBlob.ResizeAsync( + size, + Utils.GenerateConditionWithCustomerCondition(this.TransferJob.Destination.AccessCondition), + Utils.GenerateBlobRequestOptions(this.TransferJob.Destination.BlobRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + this.CancellationToken); + } + + protected override async Task WriteRangeAsync(TransferData transferData) + { + await this.pageBlob.WritePagesAsync( + transferData.Stream, + transferData.StartOffset, + null, + Utils.GenerateConditionWithCustomerCondition(this.TransferJob.Destination.AccessCondition), + Utils.GenerateBlobRequestOptions(this.TransferJob.Destination.BlobRequestOptions), + Utils.GenerateOperationContext(this.Controller.TransferContext), + this.CancellationToken); + } + + protected override async Task DoCommitAsync() + { + BlobRequestOptions blobRequestOptions = Utils.GenerateBlobRequestOptions(this.TransferJob.Destination.BlobRequestOptions); + OperationContext operationContext = Utils.GenerateOperationContext(this.Controller.TransferContext); + + if (!this.destExist) + { + await this.pageBlob.FetchAttributesAsync( + Utils.GenerateConditionWithCustomerCondition(this.TransferJob.Destination.AccessCondition), + blobRequestOptions, + operationContext, + this.CancellationToken); + } + + var originalMetadata = new Dictionary(this.pageBlob.Metadata); + Utils.SetAttributes(this.pageBlob, this.SharedTransferData.Attributes); + + await 
this.pageBlob.SetPropertiesAsync( + Utils.GenerateConditionWithCustomerCondition(this.TransferJob.Destination.AccessCondition), + blobRequestOptions, + null, + this.CancellationToken); + + if (!originalMetadata.DictionaryEquals(this.pageBlob.Metadata)) + { + await this.pageBlob.SetMetadataAsync( + Utils.GenerateConditionWithCustomerCondition(this.TransferJob.Destination.AccessCondition), + blobRequestOptions, + operationContext, + this.CancellationToken); + } + } + } +} diff --git a/lib/TransferControllers/TransferWriters/RangeBasedWriter.cs b/lib/TransferControllers/TransferWriters/RangeBasedWriter.cs new file mode 100644 index 00000000..9375f9df --- /dev/null +++ b/lib/TransferControllers/TransferWriters/RangeBasedWriter.cs @@ -0,0 +1,412 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers +{ + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.IO; + using System.Linq; + using System.Net; + using System.Threading; + using System.Threading.Tasks; + + abstract class RangeBasedWriter : TransferReaderWriterBase + { + /// + /// Keeps track of the internal state-machine state. + /// + private volatile State state; + + /// + /// Countdown event to track number of chunks that still need to be + /// uploaded/are in progress of being uploaded. Used to detect when + /// all blocks have finished uploading and change state to Commit + /// state. 
+ /// + private CountdownEvent toUploadChunksCountdownEvent; + + private volatile bool hasWork; + + protected RangeBasedWriter( + TransferScheduler scheduler, + SyncTransferController controller, + CancellationToken cancellationToken) + : base(scheduler, controller, cancellationToken) + { + this.hasWork = true; + } + + private enum State + { + FetchAttributes, + Create, + Resize, + Upload, + Commit, + Error, + Finished + }; + + public override bool IsFinished + { + get + { + return State.Error == this.state || State.Finished == this.state; + } + } + + public override bool HasWork + { + get + { + return this.hasWork && + (!this.PreProcessed + || ((State.Upload == this.state) && this.SharedTransferData.AvailableData.Any()) + || ((State.Commit == this.state) && (null != this.SharedTransferData.Attributes))); + } + } + + protected TransferJob TransferJob + { + get + { + return this.SharedTransferData.TransferJob; + } + } + + protected abstract Uri DestUri + { + get; + } + + public override async Task DoWorkInternalAsync() + { + switch (this.state) + { + case State.FetchAttributes: + await this.FetchAttributesAsync(); + break; + case State.Create: + await this.CreateAsync(); + break; + case State.Resize: + await this.ResizeAsync(); + break; + case State.Upload: + await this.UploadAsync(); + break; + case State.Commit: + await this.CommitAsync(); + break; + case State.Error: + case State.Finished: + break; + } + } + + protected override void Dispose(bool disposing) + { + base.Dispose(disposing); + + if (disposing) + { + if (null != this.toUploadChunksCountdownEvent) + { + this.toUploadChunksCountdownEvent.Dispose(); + this.toUploadChunksCountdownEvent = null; + } + } + } + + private async Task FetchAttributesAsync() + { + Debug.Assert( + this.state == State.FetchAttributes, + "FetchAttributesAsync called, but state isn't FetchAttributes", + "Current state is {0}", + this.state); + + this.hasWork = false; + + 
this.CheckInputStreamLength(this.SharedTransferData.TotalLength); + + bool exist = true; + + try + { + await this.DoFetchAttributesAsync(); + } + catch (StorageException se) + { + // Getting a storage exception is expected if the file doesn't + // exist. In this case we won't error out, but set the + // exist flag to false to indicate we're uploading + // a new file instead of overwriting an existing one. + if (null != se.RequestInformation && + se.RequestInformation.HttpStatusCode == (int)HttpStatusCode.NotFound) + { + exist = false; + } + else + { + this.HandleFetchAttributesResult(se); + throw; + } + } + catch (Exception e) + { + this.HandleFetchAttributesResult(e); + throw; + } + + this.TransferJob.Destination.CheckedAccessCondition = true; + + this.Controller.CheckOverwrite( + exist, + this.SharedTransferData.SourceLocation, + this.DestUri.ToString()); + + this.Controller.UpdateProgressAddBytesTransferred(0); + + if (exist) + { + // If the destination has already existed, + // and if we haven't uploaded anything to it, try to resize it to the expected length. + // Or if we have uploaded something, the destination should be created by the last transferring, + // don't do resize again. 
+ SingleObjectCheckpoint checkpoint = this.TransferJob.CheckPoint; + bool shouldResize = (checkpoint.EntryTransferOffset == 0) && (!checkpoint.TransferWindow.Any()); + + if (shouldResize) + { + this.state = State.Resize; + } + else + { + this.InitUpload(); + } + } + else + { + this.state = State.Create; + } + + this.hasWork = true; + } + + private async Task CreateAsync() + { + Debug.Assert( + this.state == State.Create, + "CreateAsync called, but state isn't Create", + "Current state is {0}", + this.state); + + this.hasWork = false; + + await this.DoCreateAsync(this.SharedTransferData.TotalLength); + + this.InitUpload(); + } + + private async Task ResizeAsync() + { + Debug.Assert( + this.state == State.Resize, + "ResizeAsync called, but state isn't Resize", + "Current state is {0}", + this.state); + + this.hasWork = false; + + // Resize destination to 0 to clear all exist page ranges, + // then in uploading, we don't need to clear them if source data is all zero.. + await this.DoResizeAsync(0); + + await this.DoResizeAsync(this.SharedTransferData.TotalLength); + + this.InitUpload(); + } + + private void InitUpload() + { + Debug.Assert( + null == this.toUploadChunksCountdownEvent, + "toUploadChunksCountdownEvent expected to be null"); + + if ((this.TransferJob.CheckPoint.EntryTransferOffset != this.SharedTransferData.TotalLength) + && (0 != this.TransferJob.CheckPoint.EntryTransferOffset % this.Scheduler.TransferOptions.BlockSize)) + { + throw new FormatException(Resources.RestartableInfoCorruptedException); + } + + // Calculate number of chunks. 
+ int numChunks = (int)Math.Ceiling( + (this.SharedTransferData.TotalLength - this.TransferJob.CheckPoint.EntryTransferOffset) / (double)this.Scheduler.TransferOptions.BlockSize) + + this.TransferJob.CheckPoint.TransferWindow.Count; + + if (0 == numChunks) + { + this.PreProcessed = true; + this.SetCommit(); + } + else + { + this.toUploadChunksCountdownEvent = new CountdownEvent(numChunks); + + this.state = State.Upload; + this.PreProcessed = true; + this.hasWork = true; + } + } + + private async Task UploadAsync() + { + Debug.Assert( + State.Upload == this.state || State.Error == this.state, + "UploadAsync called, but state isn't Upload", + "Current state is {0}", + this.state); + + this.hasWork = false; + + Debug.Assert( + null != this.toUploadChunksCountdownEvent, + "toUploadChunksCountdownEvent not expected to be null"); + + if (State.Error == this.state) + { + // Some thread has set the error message, just return here. + return; + } + + TransferData transferData = this.GetFirstAvailable(); + + this.hasWork = true; + + if (null != transferData) + { + using (transferData) + { + await this.UploadChunkAsync(transferData); + } + } + } + + private async Task UploadChunkAsync(TransferData transferData) + { + Debug.Assert(null != transferData, "transferData object expected"); + Debug.Assert( + this.state == State.Upload || this.state == State.Error, + "UploadChunkAsync called, but state isn't Upload or Error", + "Current state is {0}", + this.state); + + // If a parallel operation caused the controller to be placed in + // error state exit early to avoid unnecessary I/O. 
+ if (this.state == State.Error) + { + return; + } + + bool allZero = true; + + for (int i = 0; i < transferData.MemoryBuffer.Length; ++i) + { + if (0 != transferData.MemoryBuffer[i]) + { + allZero = false; + break; + } + } + + this.Controller.CheckCancellation(); + + if (!allZero) + { + transferData.Stream = new MemoryStream(transferData.MemoryBuffer, 0, transferData.Length); + await this.WriteRangeAsync(transferData); + } + + this.FinishChunk(transferData); + } + + private void FinishChunk(TransferData transferData) + { + Debug.Assert(null != transferData, "transferData object expected"); + Debug.Assert( + this.state == State.Upload || this.state == State.Error, + "FinishChunk called, but state isn't Upload or Error", + "Current state is {0}", + this.state); + + // If a parallel operation caused the controller to be placed in + // error state exit, make sure not to accidentally change it to + // the Commit state. + if (this.state == State.Error) + { + return; + } + + lock (this.TransferJob.CheckPoint.TransferWindowLock) + { + this.TransferJob.CheckPoint.TransferWindow.Remove(transferData.StartOffset); + } + + this.Controller.UpdateProgressAddBytesTransferred(transferData.Length); + + if (this.toUploadChunksCountdownEvent.Signal()) + { + this.SetCommit(); + } + } + + private void SetCommit() + { + this.state = State.Commit; + this.hasWork = true; + } + + private async Task CommitAsync() + { + Debug.Assert( + this.state == State.Commit, + "CommitAsync called, but state isn't Commit", + "Current state is {0}", + this.state); + + this.hasWork = false; + + await this.DoCommitAsync(); + + this.SetFinished(); + } + + private void SetFinished() + { + this.state = State.Finished; + this.hasWork = false; + + this.NotifyFinished(null); + } + + protected abstract void CheckInputStreamLength(long streamLength); + + protected abstract Task DoFetchAttributesAsync(); + + protected abstract void HandleFetchAttributesResult(Exception e); + + protected abstract Task 
DoCreateAsync(long size); + + protected abstract Task DoResizeAsync(long size); + + protected abstract Task WriteRangeAsync(TransferData transferData); + + protected abstract Task DoCommitAsync(); + } +} diff --git a/lib/TransferControllers/TransferWriters/StreamedWriter.cs b/lib/TransferControllers/TransferWriters/StreamedWriter.cs new file mode 100644 index 00000000..f3182f85 --- /dev/null +++ b/lib/TransferControllers/TransferWriters/StreamedWriter.cs @@ -0,0 +1,356 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers +{ + using System; + using System.Diagnostics; + using System.Globalization; + using System.Linq; + using System.IO; + using System.Threading; + using System.Threading.Tasks; + + internal sealed class StreamedWriter : TransferReaderWriterBase, IDisposable + { + /// + /// Streamed destination is written sequentially. + /// This variable records offset of next chunk to be written. + /// + private long expectOffset = 0; + + /// + /// Value to indicate whether there's work to do in the writer. + /// + private volatile bool hasWork; + + /// + /// Stream to calculation destination's content MD5. + /// + private MD5HashStream md5HashStream; + + private Stream outputStream; + + /// + /// Value to indicate whether the stream is a file stream opened by the writer or input by user. + /// If it's a file stream opened by the writer, we should closed it after transferring finished. 
+ /// + private bool ownsStream; + + private volatile State state; + + public StreamedWriter( + TransferScheduler scheduler, + SyncTransferController controller, + CancellationToken cancellationToken) + : base(scheduler, controller, cancellationToken) + { + this.hasWork = true; + this.state = State.OpenOutputStream; + } + + private enum State + { + OpenOutputStream, + CalculateMD5, + Write, + Error, + Finished + }; + + private TransferJob TransferJob + { + get + { + return this.SharedTransferData.TransferJob; + } + } + + public override bool HasWork + { + get + { + return this.hasWork && + ((State.OpenOutputStream == this.state) + || (State.CalculateMD5 == this.state) + || ((State.Write == this.state) + && ((this.SharedTransferData.TotalLength == this.expectOffset) || this.SharedTransferData.AvailableData.ContainsKey(this.expectOffset)))); + } + } + + public override bool IsFinished + { + get + { + return State.Error == this.state || State.Finished == this.state; + } + } + + public override async Task DoWorkInternalAsync() + { + switch (this.state) + { + case State.OpenOutputStream: + await HandleOutputStreamAsync(); + break; + case State.CalculateMD5: + await CalculateMD5Async(); + break; + case State.Write: + await this.WriteChunkDataAsync(); + break; + default: + break; + } + } + + protected override void Dispose(bool disposing) + { + base.Dispose(disposing); + if (disposing) + { + this.CloseOwnedOutputStream(); + } + } + + private async Task HandleOutputStreamAsync() + { + this.hasWork = false; + + await Task.Run(() => + { + if (TransferLocationType.Stream == this.TransferJob.Destination.TransferLocationType) + { + Stream streamInDestination = this.TransferJob.Destination.Stream; + if (!streamInDestination.CanWrite) + { + throw new NotSupportedException(string.Format( + CultureInfo.CurrentCulture, + Resources.StreamMustSupportWriteException, + "outputStream")); + } + + if (!streamInDestination.CanSeek) + { + throw new NotSupportedException(string.Format( + 
CultureInfo.CurrentCulture, + Resources.StreamMustSupportSeekException, + "outputStream")); + } + + this.outputStream = this.TransferJob.Destination.Stream; + } + else + { + this.Controller.CheckOverwrite( + File.Exists(this.TransferJob.Destination.FilePath), + this.SharedTransferData.SourceLocation, + this.TransferJob.Destination.FilePath); + + this.Controller.UpdateProgressAddBytesTransferred(0); + + this.Controller.CheckCancellation(); + + // We do check point consistancy validation in reader, and directly use it in writer. + if ((null != this.TransferJob.CheckPoint.TransferWindow) + && (this.TransferJob.CheckPoint.TransferWindow.Any())) + { + this.TransferJob.CheckPoint.TransferWindow.Sort(); + this.expectOffset = this.TransferJob.CheckPoint.TransferWindow[0]; + } + else + { + this.expectOffset = this.TransferJob.CheckPoint.EntryTransferOffset; + } + + try + { + FileMode fileMode = 0 == this.expectOffset ? FileMode.OpenOrCreate : FileMode.Open; + + // Attempt to open the file first so that we throw an exception before getting into the async work + this.outputStream = new FileStream( + this.TransferJob.Destination.FilePath, + fileMode, + FileAccess.ReadWrite, + FileShare.None); + + this.ownsStream = true; + } + catch (Exception ex) + { + string exceptionMessage = string.Format( + CultureInfo.CurrentCulture, + Resources.FailedToOpenFileException, + this.TransferJob.Destination.FilePath, + ex.Message); + + throw new TransferException( + TransferErrorCode.OpenFileFailed, + exceptionMessage, + ex); + } + } + + this.outputStream.SetLength(this.SharedTransferData.TotalLength); + + this.Controller.UpdateProgressAddBytesTransferred(0); + + this.md5HashStream = new MD5HashStream( + this.outputStream, + this.expectOffset, + !this.SharedTransferData.DisableContentMD5Validation); + + if (this.md5HashStream.FinishedSeparateMd5Calculator) + { + this.state = State.Write; + } + else + { + this.state = State.CalculateMD5; + } + + this.PreProcessed = true; + this.hasWork = true; 
+ }); + } + + private Task CalculateMD5Async() + { + Debug.Assert( + this.state == State.CalculateMD5, + "GetCalculateMD5Action called, but state isn't CalculateMD5", + "Current state is {0}", + this.state); + + this.state = State.Write; + this.hasWork = true; + + return Task.Run( + delegate + { + this.md5HashStream.CalculateMd5(this.Scheduler.MemoryManager, this.Controller.CheckCancellation); + }); + } + + private async Task WriteChunkDataAsync() + { + Debug.Assert( + this.state == State.Write || this.state == State.Error, + "WriteChunkDataAsync called, but state isn't Write or Error", + "Current state is {0}", + this.state); + + this.hasWork = false; + long currentWriteOffset = this.expectOffset; + TransferData transferData; + if (this.SharedTransferData.AvailableData.TryRemove(this.expectOffset, out transferData)) + { + this.expectOffset = Math.Min(this.expectOffset + transferData.Length, this.SharedTransferData.TotalLength); + } + else + { + this.SetHasWorkOrFinished(); + return; + } + + Debug.Assert(null != transferData, "TransferData in available data should not be null"); + Debug.Assert(currentWriteOffset == transferData.StartOffset, "StartOffset of TransferData in available data should be the same with the key."); + + try + { + await this.md5HashStream.WriteAsync( + currentWriteOffset, + transferData.MemoryBuffer, + 0, + transferData.Length, + this.CancellationToken); + + // If MD5HashTransformBlock returns false, it means some error happened in md5HashStream to calculate MD5. + // then exception was already thrown out there, don't do anything more here. 
+ if (!this.md5HashStream.MD5HashTransformBlock( + transferData.StartOffset, + transferData.MemoryBuffer, + 0, + transferData.Length, + null, + 0)) + { + return; + } + } + finally + { + this.Scheduler.MemoryManager.ReleaseBuffer(transferData.MemoryBuffer); + } + + int blockSize = this.Scheduler.TransferOptions.BlockSize; + long chunkStartOffset = (currentWriteOffset / blockSize) * blockSize; + + if ((currentWriteOffset + transferData.Length) >= Math.Min(chunkStartOffset + blockSize, this.SharedTransferData.TotalLength)) + { + lock (this.TransferJob.CheckPoint.TransferWindowLock) + { + this.TransferJob.CheckPoint.TransferWindow.Remove(chunkStartOffset); + } + } + + this.Controller.UpdateProgressAddBytesTransferred(transferData.Length); + this.SetHasWorkOrFinished(); + } + + private void SetHasWorkOrFinished() + { + if (this.expectOffset == this.SharedTransferData.TotalLength) + { + Exception ex = null; + if (this.md5HashStream.CheckMd5Hash && this.md5HashStream.SucceededSeparateMd5Calculator) + { + this.md5HashStream.MD5HashTransformFinalBlock(new byte[0], 0, 0); + + string calculatedMd5 = Convert.ToBase64String(this.md5HashStream.Hash); + string storedMd5 = this.SharedTransferData.Attributes.ContentMD5; + + if (!calculatedMd5.Equals(storedMd5)) + { + ex = new InvalidOperationException( + string.Format( + CultureInfo.CurrentCulture, + Resources.DownloadedMd5MismatchException, + this.SharedTransferData.SourceLocation, + calculatedMd5, + storedMd5)); + } + } + + this.CloseOwnedOutputStream(); + this.NotifyFinished(ex); + this.state = State.Finished; + } + else + { + this.hasWork = true; + } + } + + private void CloseOwnedOutputStream() + { + if (null != this.md5HashStream) + { + this.md5HashStream.Dispose(); + this.md5HashStream = null; + } + + if (this.ownsStream) + { + if (null != this.outputStream) + { + this.outputStream.Close(); + this.outputStream = null; + } + } + } + } +} diff --git a/lib/TransferJobs/SingleObjectCheckpoint.cs 
b/lib/TransferJobs/SingleObjectCheckpoint.cs new file mode 100644 index 00000000..0b5274ba --- /dev/null +++ b/lib/TransferJobs/SingleObjectCheckpoint.cs @@ -0,0 +1,81 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.Collections.Generic; + + /// + /// Represents checkpoint of a single transfer job, + /// includes position of transferred bytes and transfer window. + /// + [Serializable] + internal sealed class SingleObjectCheckpoint + { + /// + /// Initializes a new instance of the class. + /// + /// Transferred offset of this transfer entry. + /// Transfer window of this transfer entry. + public SingleObjectCheckpoint(long entryTransferOffset, IEnumerable transferWindow) + { + this.EntryTransferOffset = entryTransferOffset; + if (null != transferWindow) + { + this.TransferWindow = new List(transferWindow); + } + else + { + this.TransferWindow = new List(Constants.MaxCountInTransferWindow); + } + + this.TransferWindowLock = new object(); + } + + public SingleObjectCheckpoint() + : this(0, null) + { + } + + /// + /// Gets or sets transferred offset of this transfer entry. + /// + /// Transferred offset of this transfer entry. + public long EntryTransferOffset + { + get; + set; + } + + /// + /// Gets or sets transfer window of this transfer entry. + /// + /// Transfer window of this transfer entry. 
+ public List TransferWindow + { + get; + set; + } + + public object TransferWindowLock + { + get; + private set; + } + + public SingleObjectCheckpoint Copy() + { + SingleObjectCheckpoint copyObj = new SingleObjectCheckpoint(); + lock (this.TransferWindowLock) + { + copyObj.EntryTransferOffset = this.EntryTransferOffset; + copyObj.TransferWindow = new List(this.TransferWindow); + } + + return copyObj; + } + } +} diff --git a/lib/TransferJobs/SingleObjectTransfer.cs b/lib/TransferJobs/SingleObjectTransfer.cs new file mode 100644 index 00000000..d13461d8 --- /dev/null +++ b/lib/TransferJobs/SingleObjectTransfer.cs @@ -0,0 +1,160 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.Runtime.Serialization; + using System.Threading; + using System.Threading.Tasks; + + /// + /// Represents a single object transfer operation. + /// + [Serializable] + internal class SingleObjectTransfer : Transfer + { + private const string TransferJobName = "TransferJob"; + + /// + /// Internal transfer jobs. + /// + private TransferJob transferJob; + + /// + /// Initializes a new instance of the class. + /// This constructor will check whether source and destination is valid for the operation: + /// Uri is only valid for non-staging copy. + /// cannot copy from local file/stream to local file/stream + /// + /// Transfer source. + /// Transfer destination. + /// Transfer method, see for detail available methods. 
+ public SingleObjectTransfer(TransferLocation source, TransferLocation dest, TransferMethod transferMethod) + : base(source, dest, transferMethod) + { + if (null == source) + { + throw new ArgumentNullException("source"); + } + + if (null == dest) + { + throw new ArgumentNullException("dest"); + } + + if ((null != source.FilePath || null != source.Stream) + && (null != dest.FilePath || null != dest.Stream)) + { + throw new InvalidOperationException(Resources.LocalToLocalTransferUnsupportedException); + } + + if ((null != source.Blob) + && (null != dest.Blob)) + { + if (source.Blob.BlobType != dest.Blob.BlobType) + { + throw new InvalidOperationException(Resources.SourceAndDestinationBlobTypeDifferent); + } + + if (StorageExtensions.Equals(source.Blob, dest.Blob)) + { + throw new InvalidOperationException(Resources.SourceAndDestinationLocationCannotBeEqualException); + } + } + + if ((null != source.AzureFile) + && (null != dest.AzureFile) + && string.Equals(source.AzureFile.Uri.Host, dest.AzureFile.Uri.Host, StringComparison.OrdinalIgnoreCase) + && string.Equals(source.AzureFile.Uri.AbsolutePath, dest.AzureFile.Uri.AbsolutePath, StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException(Resources.SourceAndDestinationLocationCannotBeEqualException); + } + + this.transferJob = new TransferJob(this.Source, this.Destination); + this.transferJob.Transfer = this; + } + + protected SingleObjectTransfer(SerializationInfo info, StreamingContext context) + : base(info, context) + { + this.transferJob = (TransferJob)info.GetValue(TransferJobName, typeof(TransferJob)); + this.transferJob.Transfer = this; + } + + private SingleObjectTransfer(SingleObjectTransfer other) + : base(other) + { + this.transferJob = other.transferJob.Copy(); + this.transferJob.Transfer = this; + } + + /// + /// Serializes the object. + /// + /// Serialization info object. + /// Streaming context. 
+ public override void GetObjectData(SerializationInfo info, StreamingContext context) + { + base.GetObjectData(info, context); + info.AddValue(TransferJobName, this.transferJob, typeof(TransferJob)); + } + + /// + /// Gets a copy of this transfer object. + /// + /// A copy of current transfer object + public SingleObjectTransfer Copy() + { + lock (this.ProgressTracker) + { + return new SingleObjectTransfer(this); + } + } + + /// + /// Execute the transfer asynchronously. + /// + /// Transfer scheduler + /// Token that can be used to cancel the transfer. + /// A task representing the transfer operation. + public override async Task ExecuteAsync(TransferScheduler scheduler, CancellationToken cancellationToken) + { + if (this.transferJob.Status == TransferJobStatus.Finished || + this.transferJob.Status == TransferJobStatus.Skipped) + { + return; + } + + if (transferJob.Status == TransferJobStatus.Failed) + { + // Resuming a failed transfer job + this.UpdateTransferJobStatus(transferJob, TransferJobStatus.Transfer); + } + + try + { + await scheduler.ExecuteJobAsync(transferJob, cancellationToken); + this.UpdateTransferJobStatus(transferJob, TransferJobStatus.Finished); + } + catch (TransferException exception) + { + if (exception.ErrorCode == TransferErrorCode.NotOverwriteExistingDestination) + { + // transfer skipped + this.UpdateTransferJobStatus(transferJob, TransferJobStatus.Skipped); + } + else + { + // transfer failed + this.UpdateTransferJobStatus(transferJob, TransferJobStatus.Failed); + } + + throw; + } + } + } +} diff --git a/lib/TransferJobs/Transfer.cs b/lib/TransferJobs/Transfer.cs new file mode 100644 index 00000000..e80abf10 --- /dev/null +++ b/lib/TransferJobs/Transfer.cs @@ -0,0 +1,199 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace 
Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.Globalization; + using System.Runtime.Serialization; + using System.Threading; + using System.Threading.Tasks; + + /// + /// Base class for transfer operation. + /// + internal abstract class Transfer : ISerializable + { + private const string FormatVersionName = "Version"; + private const string SourceName = "Source"; + private const string DestName = "Dest"; + private const string TransferMethodName = "TransferMethod"; + private const string TransferProgressName = "Progress"; + + /// + /// Initializes a new instance of the class. + /// + /// Transfer source. + /// Transfer destination. + /// Transfer method, see for detail available methods. + public Transfer(TransferLocation source, TransferLocation dest, TransferMethod transferMethod) + { + this.Source = source; + this.Destination = dest; + this.TransferMethod = transferMethod; + this.ProgressTracker = new TransferProgressTracker(); + } + + /// + /// Initializes a new instance of the class. + /// + /// Serialization information. + /// Streaming context. 
+ protected Transfer(SerializationInfo info, StreamingContext context) + { + if (info == null) + { + throw new System.ArgumentNullException("info"); + } + + string version = info.GetString(FormatVersionName); + if (!string.Equals(Constants.FormatVersion, version, StringComparison.Ordinal)) + { + throw new System.InvalidOperationException( + string.Format( + CultureInfo.CurrentCulture, + Resources.DeserializationVersionNotMatchException, + "TransferJob", + version, + Constants.FormatVersion)); + } + + this.Source = (TransferLocation)info.GetValue(SourceName, typeof(TransferLocation)); + this.Destination = (TransferLocation)info.GetValue(DestName, typeof(TransferLocation)); + this.TransferMethod = (TransferMethod)info.GetValue(TransferMethodName, typeof(TransferMethod)); + this.ProgressTracker = (TransferProgressTracker)info.GetValue(TransferProgressName, typeof(TransferProgressTracker)); + } + + /// + /// Initializes a new instance of the class. + /// + protected Transfer(Transfer other) + { + this.Source = other.Source; + this.Destination = other.Destination; + this.TransferMethod = other.TransferMethod; + this.ContentType = other.ContentType; + this.ProgressTracker = other.ProgressTracker.Copy(); + } + + /// + /// Gets source location for this transfer. + /// + public TransferLocation Source + { + get; + private set; + } + + /// + /// Gets destination location for this transfer. + /// + public TransferLocation Destination + { + get; + private set; + } + + /// + /// Gets the transfer method used in this transfer. + /// + public TransferMethod TransferMethod + { + get; + private set; + } + + /// + /// Gets or sets the transfer context of this transfer. + /// + public TransferContext Context + { + get; + set; + } + + /// + /// Gets or sets content type to set to destination in uploading. + /// + public string ContentType + { + get; + set; + } + + /// + /// Gets the progress tracker for this transfer. 
+ /// + public TransferProgressTracker ProgressTracker + { + get; + private set; + } + + /// + /// Serializes the object. + /// + /// Serialization info object. + /// Streaming context. + public virtual void GetObjectData(SerializationInfo info, StreamingContext context) + { + if (info == null) + { + throw new ArgumentNullException("info"); + } + + info.AddValue(FormatVersionName, Constants.FormatVersion, typeof(string)); + info.AddValue(SourceName, this.Source, typeof(TransferLocation)); + info.AddValue(DestName, this.Destination, typeof(TransferLocation)); + info.AddValue(TransferMethodName, this.TransferMethod); + info.AddValue(TransferProgressName, this.ProgressTracker); + } + + /// + /// Execute the transfer asynchronously. + /// + /// Transfer scheduler + /// Token that can be used to cancel the transfer. + /// A task representing the transfer operation. + public abstract Task ExecuteAsync(TransferScheduler scheduler, CancellationToken cancellationToken); + + public void UpdateTransferJobStatus(TransferJob transferJob, TransferJobStatus targetStatus) + { + lock (this.ProgressTracker) + { + switch (targetStatus) + { + case TransferJobStatus.Transfer: + if (transferJob.Status == TransferJobStatus.Failed) + { + this.ProgressTracker.AddNumberOfFilesFailed(-1); + } + + break; + + case TransferJobStatus.Skipped: + this.ProgressTracker.AddNumberOfFilesSkipped(1); + break; + + case TransferJobStatus.Finished: + this.ProgressTracker.AddNumberOfFilesTransferred(1); + break; + + case TransferJobStatus.Failed: + this.ProgressTracker.AddNumberOfFilesFailed(1); + break; + + case TransferJobStatus.NotStarted: + case TransferJobStatus.Monitor: + default: + break; + } + + transferJob.Status = targetStatus; + } + } + } +} diff --git a/lib/TransferJobs/TransferJob.cs b/lib/TransferJobs/TransferJob.cs new file mode 100644 index 00000000..85db8c1e --- /dev/null +++ b/lib/TransferJobs/TransferJob.cs @@ -0,0 +1,178 @@ 
+//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.Globalization; + using System.Runtime.Serialization; + + /// + /// Represents transfer of a single file/blob. + /// + [Serializable] + internal class TransferJob : ISerializable + { + private const string SourceName = "Source"; + private const string DestName = "Dest"; + private const string CheckedOverwriteName = "CheckedOverwrite"; + private const string OverwriteName = "Overwrite"; + private const string CopyIdName = "CopyId"; + private const string CheckpointName = "Checkpoint"; + + /// + /// Initializes a new instance of the class. + /// + /// Source location. + /// Destination location. + public TransferJob(TransferLocation source, TransferLocation dest) + { + this.Source = source; + this.Destination = dest; + + this.CheckPoint = new SingleObjectCheckpoint(); + } + + /// + /// Initializes a new instance of the class. + /// + /// Serialization information. + /// Streaming context. + protected TransferJob(SerializationInfo info, StreamingContext context) + { + this.Source = (TransferLocation)info.GetValue(SourceName, typeof(TransferLocation)); + this.Destination = (TransferLocation)info.GetValue(DestName, typeof(TransferLocation)); + + if (info.GetBoolean(CheckedOverwriteName)) + { + this.Overwrite = info.GetBoolean(OverwriteName); + } + else + { + this.Overwrite = null; + } + + this.CopyId = info.GetString(CopyIdName); + this.CheckPoint = (SingleObjectCheckpoint)info.GetValue(CheckpointName, typeof(SingleObjectCheckpoint)); + } + + /// + /// Initializes a new instance of the class. 
+ /// + private TransferJob(TransferJob other) + { + this.Source = other.Source; + this.Destination = other.Destination; + this.Overwrite = other.Overwrite; + this.CopyId = other.CopyId; + this.CheckPoint = other.CheckPoint.Copy(); + this.Status = other.Status; + } + + /// + /// Gets source location for this transfer job. + /// + public TransferLocation Source + { + get; + private set; + } + + /// + /// Gets destination location for this transfer job. + /// + public TransferLocation Destination + { + get; + private set; + } + + /// + /// Gets or sets the overwrite flag. + /// + public bool? Overwrite + { + get; + set; + } + + /// + /// Gets ID for the asynchronous copy operation. + /// + /// ID for the asynchronous copy operation. + public string CopyId + { + get; + set; + } + + public TransferJobStatus Status + { + get; + set; + } + + public SingleObjectCheckpoint CheckPoint + { + get; + set; + } + + /// + /// Gets or sets the parent transfer of this transfer job + /// + public Transfer Transfer + { + get; + set; + } + + /// + /// Gets or sets content type to set to destination in uploading. + /// + public string ContentType + { + get + { + return this.Transfer.ContentType; + } + } + + /// + /// Serializes the object. + /// + /// Serialization info object. + /// Streaming context. + public virtual void GetObjectData(SerializationInfo info, StreamingContext context) + { + if (info == null) + { + throw new ArgumentNullException("info"); + } + + info.AddValue(SourceName, this.Source, typeof(TransferLocation)); + info.AddValue(DestName, this.Destination, typeof(TransferLocation)); + + info.AddValue(CheckedOverwriteName, this.Overwrite.HasValue); + if (this.Overwrite.HasValue) + { + info.AddValue(OverwriteName, this.Overwrite.Value); + } + + info.AddValue(CopyIdName, this.CopyId, typeof(string)); + info.AddValue(CheckpointName, this.CheckPoint, typeof(SingleObjectCheckpoint)); + } + + /// + /// Gets a copy of this transfer job. 
+ /// + /// A copy of current transfer job + public TransferJob Copy() + { + return new TransferJob(this); + } + } +} diff --git a/lib/TransferJobs/TransferJobStatus.cs b/lib/TransferJobs/TransferJobStatus.cs new file mode 100644 index 00000000..4c1bc937 --- /dev/null +++ b/lib/TransferJobs/TransferJobStatus.cs @@ -0,0 +1,46 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + /// + /// Status for TransferEntry. + /// NotStarted -> Skipped + /// -> Transfer -> [Monitor ->] Finished. + /// Failed. + /// + internal enum TransferJobStatus + { + /// + /// Transfer is not started. + /// + NotStarted, + + /// + /// Transfer is skipped + /// + Skipped, + + /// + /// Transfer file. + /// + Transfer, + + /// + /// Monitor transfer process. + /// + Monitor, + + /// + /// Transfer is finished successfully. + /// + Finished, + + /// + /// Transfer is failed. 
+ /// + Failed, + } +} diff --git a/lib/TransferJobs/TransferLocation.cs b/lib/TransferJobs/TransferLocation.cs new file mode 100644 index 00000000..14822d12 --- /dev/null +++ b/lib/TransferJobs/TransferLocation.cs @@ -0,0 +1,458 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.Globalization; + using System.IO; + using System.Runtime.Serialization; + using Microsoft.WindowsAzure.Storage.Auth; + using Microsoft.WindowsAzure.Storage.Blob; + using Microsoft.WindowsAzure.Storage.DataMovement.SerializationHelper; + using Microsoft.WindowsAzure.Storage.File; + + [Serializable] + internal sealed class TransferLocation : ISerializable + { + private const string TransferLocationTypeName = "LocationType"; + private const string FilePathName = "FilePath"; + private const string SourceUriName = "SourceUri"; + private const string BlobName = "Blob"; + private const string AzureFileName = "AzureFile"; + private const string AccessConditionName = "AccessCondition"; + private const string CheckedAccessConditionName = "CheckedAccessCondition"; + private const string RequestOptionsName = "RequestOptions"; + private const string ETagName = "ETag"; + private const string BlockIDPrefixName = "BlockIDPrefix"; + + private SerializableAccessCondition accessCondition; + private SerializableRequestOptions requestOptions; + private SerializableCloudBlob blobSerializer; + private SerializableCloudFile fileSerializer; + + private TransferLocation(SerializationInfo info, StreamingContext context) + { + if (info == null) + { + throw new System.ArgumentNullException("info"); + } + + this.TransferLocationType = (TransferLocationType)info.GetValue(TransferLocationTypeName, typeof(TransferLocationType)); + + switch (this.TransferLocationType) + { 
+ case TransferLocationType.FilePath: + this.FilePath = info.GetString(FilePathName); + break; + case TransferLocationType.Stream: + throw new InvalidOperationException(Resources.CannotSerializeStreamLocation); + case TransferLocationType.SourceUri: + this.SourceUri = (Uri)info.GetValue(SourceUriName, typeof(Uri)); + break; + case TransferLocationType.AzureBlob: + this.blobSerializer = (SerializableCloudBlob)info.GetValue(BlobName, typeof(SerializableCloudBlob)); + break; + case TransferLocationType.AzureFile: + this.fileSerializer = (SerializableCloudFile)info.GetValue(AzureFileName, typeof(SerializableCloudFile)); + break; + default: + break; + } + + this.accessCondition = (SerializableAccessCondition)info.GetValue(AccessConditionName, typeof(SerializableAccessCondition)); + this.CheckedAccessCondition = info.GetBoolean(CheckedAccessConditionName); + this.requestOptions = (SerializableRequestOptions)info.GetValue(RequestOptionsName, typeof(SerializableRequestOptions)); + this.ETag = info.GetString(ETagName); + this.BlockIdPrefix = info.GetString(BlockIDPrefixName); + } + + /// + /// Initializes a new instance of the class. + /// + /// Path to the local file as a source/destination to be read from/written to in a transfer. + [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1057:StringUriOverloadsCallSystemUriOverloads", Justification="We need to distinct from local file with URI")] + public TransferLocation(string filePath) + { + if (null == filePath) + { + throw new ArgumentNullException("filePath"); + } + + if (string.IsNullOrWhiteSpace(filePath)) + { + throw new ArgumentException("message, should not be an empty string", "filePath"); + } + this.FilePath = filePath; + this.TransferLocationType = TransferLocationType.FilePath; + } + + /// + /// Initializes a new instance of the class. + /// + /// Stream instance as a source/destination to be read from/written to in a transfer. 
+ public TransferLocation(Stream stream) + { + if (null == stream) + { + throw new ArgumentNullException("stream"); + } + + this.Stream = stream; + this.TransferLocationType = TransferLocationType.Stream; + } + + /// + /// Initializes a new instance of the class. + /// + /// Blob instance as a location in a transfer job. + /// It could be a source, a destination. + public TransferLocation(CloudBlob blob) + { + if (null == blob) + { + throw new ArgumentNullException("blob"); + } + + this.Blob = blob; + this.TransferLocationType = TransferLocationType.AzureBlob; + } + + /// + /// Initializes a new instance of the class. + /// + /// CloudFile instance as a location in a transfer job. + /// It could be a source, a destination. + public TransferLocation(CloudFile azureFile) + { + if (null == azureFile) + { + throw new ArgumentNullException("azureFile"); + } + + this.AzureFile = azureFile; + this.TransferLocationType = TransferLocationType.AzureFile; + } + + /// + /// Initializes a new instance of the class. + /// + /// Uri to the source in an asynchronously copying job. + public TransferLocation(Uri sourceUri) + { + if (null == sourceUri) + { + throw new ArgumentNullException("sourceUri"); + } + + this.SourceUri = sourceUri; + this.TransferLocationType = TransferLocationType.SourceUri; + } + + /// + /// Gets or sets access condition for this location. + /// This property only takes effact when the location is a blob or an azure file. + /// + public AccessCondition AccessCondition + { + get + { + return SerializableAccessCondition.GetAccessCondition(this.accessCondition); + } + + set + { + SerializableAccessCondition.SetAccessCondition(ref this.accessCondition, value); + } + } + + /// + /// Gets or sets request options when send request to this location. + /// Only a FileRequestOptions instance takes effact when the location is an azure file; + /// Only a BlobRequestOptions instance takes effact when the locaiton is a blob. 
+ /// + public IRequestOptions RequestOptions + { + get + { + return SerializableRequestOptions.GetRequestOptions(this.requestOptions); + } + + set + { + SerializableRequestOptions.SetRequestOptions(ref this.requestOptions, value); + } + } + + /// + /// Gets the type for this location. + /// + public TransferLocationType TransferLocationType + { + get; + private set; + } + + /// + /// Gets path to the local file location. + /// + public string FilePath + { + get; + private set; + } + + /// + /// Gets an stream instance representing the location for this instance. + /// + public Stream Stream + { + get; + private set; + } + + /// + /// Gets Uri to the source location in asynchronously copying job. + /// + public Uri SourceUri + { + get; + private set; + } + + /// + /// Gets blob location in this instance. + /// + public CloudBlob Blob + { + get + { + return SerializableCloudBlob.GetBlob(this.blobSerializer); + } + + private set + { + SerializableCloudBlob.SetBlob(ref this.blobSerializer, value); + } + } + + /// + /// Gets azure file location in this instance. 
+ /// + public CloudFile AzureFile + { + get + { + return SerializableCloudFile.GetFile(this.fileSerializer); + } + + private set + { + SerializableCloudFile.SetFile(ref this.fileSerializer, value); + } + } + + internal string ETag + { + get; + set; + } + + internal bool CheckedAccessCondition + { + get; + set; + } + + internal BlobRequestOptions BlobRequestOptions + { + get + { + return this.RequestOptions as BlobRequestOptions; + } + } + + internal FileRequestOptions FileRequestOptions + { + get + { + return this.RequestOptions as FileRequestOptions; + } + } + + internal string BlockIdPrefix + { + get; + set; + } + + [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1057:StringUriOverloadsCallSystemUriOverloads", Justification="We need to distinct from local file with URI")] + public static implicit operator TransferLocation(string filePath) + { + return new TransferLocation(filePath); + } + + public static implicit operator TransferLocation(Stream stream) + { + return new TransferLocation(stream); + } + + public static implicit operator TransferLocation(CloudBlockBlob blob) + { + return new TransferLocation(blob); + } + + public static implicit operator TransferLocation(CloudPageBlob blob) + { + return new TransferLocation(blob); + } + + public static implicit operator TransferLocation(CloudFile azureFile) + { + return new TransferLocation(azureFile); + } + + public static implicit operator TransferLocation(Uri sourceUri) + { + return ToTransferLocation(sourceUri); + } + + public static TransferLocation ToTransferLocation(Uri sourceUri) + { + return new TransferLocation(sourceUri); + } + + /// + /// Serializes the object. + /// + /// Serialization info object. + /// Streaming context. 
+ public void GetObjectData(SerializationInfo info, StreamingContext context) + { + if (info == null) + { + throw new System.ArgumentNullException("info"); + } + + info.AddValue(TransferLocationTypeName, this.TransferLocationType); + + switch (this.TransferLocationType) + { + case TransferLocationType.FilePath: + info.AddValue(FilePathName, this.FilePath); + break; + case TransferLocationType.SourceUri: + info.AddValue(SourceUriName, this.SourceUri, typeof(Uri)); + break; + case TransferLocationType.AzureBlob: + info.AddValue(BlobName, this.blobSerializer, typeof(SerializableCloudBlob)); + break; + case TransferLocationType.AzureFile: + info.AddValue(AzureFileName, this.fileSerializer, typeof(SerializableCloudFile)); + break; + case TransferLocationType.Stream: + default: + throw new InvalidOperationException( + string.Format( + CultureInfo.CurrentCulture, + Resources.CannotDeserializeLocationType, + this.TransferLocationType)); + } + + info.AddValue(AccessConditionName, this.accessCondition, typeof(SerializableAccessCondition)); + info.AddValue(CheckedAccessConditionName, this.CheckedAccessCondition); + info.AddValue(RequestOptionsName, this.requestOptions, typeof(SerializableRequestOptions)); + info.AddValue(ETagName, this.ETag); + info.AddValue(BlockIDPrefixName, this.BlockIdPrefix); + } + + /// + /// Update credentials of blob or azure file location. + /// + /// Storage credentials to be updated in blob or azure file location. + public void UpdateCredentials(StorageCredentials credentials) + { + if (null != this.blobSerializer) + { + this.blobSerializer.UpdateStorageCredentials(credentials); + } + else if (null != this.fileSerializer) + { + this.fileSerializer.UpdateStorageCredentials(credentials); + } + } + + // + // Summary: + // Returns a string that represents the transfer location. + // + // Returns: + // A string that represents the transfer location. 
+ public override string ToString() + { + switch(this.TransferLocationType) + { + case TransferLocationType.FilePath: + return this.FilePath; + + case TransferLocationType.AzureBlob: + return this.Blob.SnapshotQualifiedUri.ToString(); + + case TransferLocationType.AzureFile: + return this.AzureFile.Uri.ToString(); + + case TransferLocationType.SourceUri: + return this.SourceUri.ToString(); + + case TransferLocationType.Stream: + return this.Stream.ToString(); + + default: + throw new ArgumentException("TransferLocationType"); + } + } + + // Summary: + // Determines whether the specified transfer location is equal to the current transfer location. + // + // Parameters: + // obj: + // The transfer location to compare with the current transfer location. + // + // Returns: + // true if the specified transfer location is equal to the current transfer location; otherwise, false. + public override bool Equals(object obj) + { + TransferLocation location = obj as TransferLocation; + if (location == null || this.TransferLocationType != location.TransferLocationType) + return false; + + switch (this.TransferLocationType) + { + case TransferLocationType.AzureBlob: + case TransferLocationType.AzureFile: + case TransferLocationType.FilePath: + case TransferLocationType.SourceUri: + return this.ToString() == location.ToString(); + + case TransferLocationType.Stream: + default: + return false; + } + } + + // + // Summary: + // Returns the hash code for the transfer location. + // + // Returns: + // A 32-bit signed integer hash code. 
+ public override int GetHashCode() + { + return this.ToString().GetHashCode(); + } + } +} diff --git a/lib/TransferJobs/TransferLocationType.cs b/lib/TransferJobs/TransferLocationType.cs new file mode 100644 index 00000000..12f2a25a --- /dev/null +++ b/lib/TransferJobs/TransferLocationType.cs @@ -0,0 +1,17 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + internal enum TransferLocationType + { + FilePath, + Stream, + AzureBlob, + AzureFile, + SourceUri + } +} diff --git a/lib/TransferJobs/TransferMethod.cs b/lib/TransferJobs/TransferMethod.cs new file mode 100644 index 00000000..f8d5b94a --- /dev/null +++ b/lib/TransferJobs/TransferMethod.cs @@ -0,0 +1,22 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + internal enum TransferMethod + { + /// + /// To read data from source to memory and then write the data in memory to destination. + /// + SyncCopy, + + /// + /// To send a start copy request to azure storage to let it do the copying, + /// and monitor the copying progress until the copy finished. 
+ /// + AsyncCopy, + } +} diff --git a/lib/TransferManager.cs b/lib/TransferManager.cs new file mode 100644 index 00000000..f9df09e3 --- /dev/null +++ b/lib/TransferManager.cs @@ -0,0 +1,845 @@ +//----------------------------------------------------------------------------- +// +// Copyright (c) Microsoft Corporation +// +//----------------------------------------------------------------------------- +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.Collections.Concurrent; + using System.IO; + using System.Threading; + using System.Threading.Tasks; + using Microsoft.WindowsAzure.Storage.Blob; + using Microsoft.WindowsAzure.Storage.File; + using TransferKey = System.Tuple; + + /// + /// TransferManager class + /// + public static class TransferManager + { + /// + /// Transfer scheduler that schedules execution of transfer jobs + /// + private static TransferScheduler scheduler = new TransferScheduler(); + + /// + /// Transfer configurations associated with the transfer manager + /// + private static TransferConfigurations configurations = new TransferConfigurations(); + + /// + /// Stores all running transfers + /// + private static ConcurrentDictionary allTransfers = new ConcurrentDictionary(); + + [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1810:InitializeReferenceTypeStaticFieldsInline", Justification = "Performance")] + static TransferManager() + { + OperationContext.GlobalSendingRequest += (sender, args) => + { + string userAgent = Constants.UserAgent + ";" + Microsoft.WindowsAzure.Storage.Shared.Protocol.Constants.HeaderConstants.UserAgent; + + if (!string.IsNullOrEmpty(configurations.UserAgentSuffix)) + { + userAgent += ";" + configurations.UserAgentSuffix; + } + + args.Request.UserAgent = userAgent; + }; + } + + /// + /// Gets or sets the transfer configurations associated with the transfer manager + /// + public static TransferConfigurations Configurations + { + get + { + return 
configurations; + } + } + + /// + /// Upload a file to Azure Blob Storage. + /// + /// Path to the source file. + /// The that is the destination Azure blob. + /// A object that represents the asynchronous operation. + public static Task UploadAsync(string sourcePath, CloudBlob destBlob) + { + return UploadAsync(sourcePath, destBlob, null, null); + } + + /// + /// Upload a file to Azure Blob Storage. + /// + /// Path to the source file. + /// The that is the destination Azure blob. + /// An object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object that represents the asynchronous operation. + public static Task UploadAsync(string sourcePath, CloudBlob destBlob, UploadOptions options, TransferContext context) + { + return UploadAsync(sourcePath, destBlob, options, context, CancellationToken.None); + } + + /// + /// Upload a file to Azure Blob Storage. + /// + /// Path to the source file. + /// The that is the destination Azure blob. + /// An object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object that represents the asynchronous operation. + /// A object to observe while waiting for a task to complete. + public static Task UploadAsync(string sourcePath, CloudBlob destBlob, UploadOptions options, TransferContext context, CancellationToken cancellationToken) + { + TransferLocation sourceLocation = new TransferLocation(sourcePath); + TransferLocation destLocation = new TransferLocation(destBlob); + return UploadInternalAsync(sourceLocation, destLocation, options, context, cancellationToken); + } + + /// + /// Upload a file to Azure Blob Storage. + /// + /// A object providing the file content. + /// The that is the destination Azure blob. + /// A object that represents the asynchronous operation. 
+ public static Task UploadAsync(Stream sourceStream, CloudBlob destBlob) + { + return UploadAsync(sourceStream, destBlob, null, null); + } + + /// + /// Upload a file to Azure Blob Storage. + /// + /// A object providing the file content. + /// The that is the destination Azure blob. + /// An object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object that represents the asynchronous operation. + public static Task UploadAsync(Stream sourceStream, CloudBlob destBlob, UploadOptions options, TransferContext context) + { + return UploadAsync(sourceStream, destBlob, options, context, CancellationToken.None); + } + + /// + /// Upload a file to Azure Blob Storage. + /// + /// A object providing the file content. + /// The that is the destination Azure blob. + /// An object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object that represents the asynchronous operation. + /// A object to observe while waiting for a task to complete. + public static Task UploadAsync(Stream sourceStream, CloudBlob destBlob, UploadOptions options, TransferContext context, CancellationToken cancellationToken) + { + TransferLocation sourceLocation = new TransferLocation(sourceStream); + TransferLocation destLocation = new TransferLocation(destBlob); + return UploadInternalAsync(sourceLocation, destLocation, options, context, cancellationToken); + } + + /// + /// Upload a file to Azure File Storage. + /// + /// Path to the source file. + /// The that is the destination Azure file. + /// A object that represents the asynchronous operation. + public static Task UploadAsync(string sourcePath, CloudFile destFile) + { + return UploadAsync(sourcePath, destFile, null, null); + } + + /// + /// Upload a file to Azure File Storage. + /// + /// Path to the source file. + /// The that is the destination Azure file. 
+ /// An object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object that represents the asynchronous operation. + public static Task UploadAsync(string sourcePath, CloudFile destFile, UploadOptions options, TransferContext context) + { + return UploadAsync(sourcePath, destFile, options, context, CancellationToken.None); + } + + /// + /// Upload a file to Azure File Storage. + /// + /// Path to the source file. + /// The that is the destination Azure file. + /// An object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object to observe while waiting for a task to complete. + /// A object that represents the asynchronous operation. + public static Task UploadAsync(string sourcePath, CloudFile destFile, UploadOptions options, TransferContext context, CancellationToken cancellationToken) + { + TransferLocation sourceLocation = new TransferLocation(sourcePath); + TransferLocation destLocation = new TransferLocation(destFile); + return UploadInternalAsync(sourceLocation, destLocation, options, context, cancellationToken); + } + + /// + /// Upload a file to Azure File Storage. + /// + /// A object providing the file content. + /// The that is the destination Azure file. + /// A object that represents the asynchronous operation. + public static Task UploadAsync(Stream sourceStream, CloudFile destFile) + { + return UploadAsync(sourceStream, destFile, null, null); + } + + /// + /// Upload a file to Azure File Storage. + /// + /// A object providing the file content. + /// The that is the destination Azure file. + /// An object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object that represents the asynchronous operation. 
+ public static Task UploadAsync(Stream sourceStream, CloudFile destFile, UploadOptions options, TransferContext context) + { + return UploadAsync(sourceStream, destFile, options, context, CancellationToken.None); + } + + /// + /// Upload a file to Azure File Storage. + /// + /// A object providing the file content. + /// The that is the destination Azure file. + /// An object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object to observe while waiting for a task to complete. + /// A object that represents the asynchronous operation. + public static Task UploadAsync(Stream sourceStream, CloudFile destFile, UploadOptions options, TransferContext context, CancellationToken cancellationToken) + { + TransferLocation sourceLocation = new TransferLocation(sourceStream); + TransferLocation destLocation = new TransferLocation(destFile); + return UploadInternalAsync(sourceLocation, destLocation, options, context, cancellationToken); + } + + /// + /// Download an Azure blob from Azure Blob Storage. + /// + /// The that is the source Azure blob. + /// Path to the destination file. + /// A object that represents the asynchronous operation. + public static Task DownloadAsync(CloudBlob sourceBlob, string destPath) + { + return DownloadAsync(sourceBlob, destPath, null, null); + } + + /// + /// Download an Azure blob from Azure Blob Storage. + /// + /// The that is the source Azure blob. + /// Path to the destination file. + /// A object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object that represents the asynchronous operation. + public static Task DownloadAsync(CloudBlob sourceBlob, string destPath, DownloadOptions options, TransferContext context) + { + return DownloadAsync(sourceBlob, destPath, options, context, CancellationToken.None); + } + + /// + /// Download an Azure blob from Azure Blob Storage. 
+ /// + /// The that is the source Azure blob. + /// Path to the destination file. + /// A object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object to observe while waiting for a task to complete. + /// A object that represents the asynchronous operation. + public static Task DownloadAsync(CloudBlob sourceBlob, string destPath, DownloadOptions options, TransferContext context, CancellationToken cancellationToken) + { + TransferLocation sourceLocation = new TransferLocation(sourceBlob); + TransferLocation destLocation = new TransferLocation(destPath); + + if (options != null) + { + BlobRequestOptions requestOptions = Transfer_RequestOptions.DefaultBlobRequestOptions; + requestOptions.DisableContentMD5Validation = options.DisableContentMD5Validation; + sourceLocation.RequestOptions = requestOptions; + } + + return DownloadInternalAsync(sourceLocation, destLocation, options, context, cancellationToken); + } + + /// + /// Download an Azure blob from Azure Blob Storage. + /// + /// The that is the source Azure blob. + /// A object representing the destination stream. + /// A object that represents the asynchronous operation. + public static Task DownloadAsync(CloudBlob sourceBlob, Stream destStream) + { + return DownloadAsync(sourceBlob, destStream, null, null); + } + + /// + /// Download an Azure blob from Azure Blob Storage. + /// + /// The that is the source Azure blob. + /// A object representing the destination stream. + /// A object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object that represents the asynchronous operation. 
+ public static Task DownloadAsync(CloudBlob sourceBlob, Stream destStream, DownloadOptions options, TransferContext context) + { + return DownloadAsync(sourceBlob, destStream, options, context, CancellationToken.None); + } + + /// + /// Download an Azure blob from Azure Blob Storage. + /// + /// The that is the source Azure blob. + /// A object representing the destination stream. + /// A object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object to observe while waiting for a task to complete. + /// A object that represents the asynchronous operation. + public static Task DownloadAsync(CloudBlob sourceBlob, Stream destStream, DownloadOptions options, TransferContext context, CancellationToken cancellationToken) + { + TransferLocation sourceLocation = new TransferLocation(sourceBlob); + TransferLocation destLocation = new TransferLocation(destStream); + + if (options != null) + { + BlobRequestOptions requestOptions = Transfer_RequestOptions.DefaultBlobRequestOptions; + requestOptions.DisableContentMD5Validation = options.DisableContentMD5Validation; + sourceLocation.RequestOptions = requestOptions; + } + + return DownloadInternalAsync(sourceLocation, destLocation, options, context, cancellationToken); + } + + /// + /// Download an Azure file from Azure File Storage. + /// + /// The that is the source Azure file. + /// Path to the destination file. + /// A object that represents the asynchronous operation. + public static Task DownloadAsync(CloudFile sourceFile, string destPath) + { + return DownloadAsync(sourceFile, destPath, null, null); + } + + /// + /// Download an Azure file from Azure File Storage. + /// + /// The that is the source Azure file. + /// Path to the destination file. + /// A object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object that represents the asynchronous operation. 
+ public static Task DownloadAsync(CloudFile sourceFile, string destPath, DownloadOptions options, TransferContext context) + { + return DownloadAsync(sourceFile, destPath, options, context, CancellationToken.None); + } + + /// + /// Download an Azure file from Azure File Storage. + /// + /// The that is the source Azure file. + /// Path to the destination file. + /// A object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object to observe while waiting for a task to complete. + /// A object that represents the asynchronous operation. + public static Task DownloadAsync(CloudFile sourceFile, string destPath, DownloadOptions options, TransferContext context, CancellationToken cancellationToken) + { + TransferLocation sourceLocation = new TransferLocation(sourceFile); + TransferLocation destLocation = new TransferLocation(destPath); + + if (options != null) + { + FileRequestOptions requestOptions = Transfer_RequestOptions.DefaultFileRequestOptions; + requestOptions.DisableContentMD5Validation = options.DisableContentMD5Validation; + sourceLocation.RequestOptions = requestOptions; + } + + return DownloadInternalAsync(sourceLocation, destLocation, options, context, cancellationToken); + } + + /// + /// Download an Azure file from Azure File Storage. + /// + /// The that is the source Azure file. + /// A object representing the destination stream. + /// A object that represents the asynchronous operation. + public static Task DownloadAsync(CloudFile sourceFile, Stream destStream) + { + return DownloadAsync(sourceFile, destStream, null, null); + } + + /// + /// Download an Azure file from Azure File Storage. + /// + /// The that is the source Azure file. + /// A object representing the destination stream. + /// A object that specifies additional options for the operation. + /// A object that represents the context for the current operation. 
+ /// A object that represents the asynchronous operation. + public static Task DownloadAsync(CloudFile sourceFile, Stream destStream, DownloadOptions options, TransferContext context) + { + return DownloadAsync(sourceFile, destStream, options, context, CancellationToken.None); + } + + /// + /// Download an Azure file from Azure File Storage. + /// + /// The that is the source Azure file. + /// A object representing the destination stream. + /// A object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object to observe while waiting for a task to complete. + /// A object that represents the asynchronous operation. + public static Task DownloadAsync(CloudFile sourceFile, Stream destStream, DownloadOptions options, TransferContext context, CancellationToken cancellationToken) + { + TransferLocation sourceLocation = new TransferLocation(sourceFile); + TransferLocation destLocation = new TransferLocation(destStream); + + if (options != null) + { + FileRequestOptions requestOptions = Transfer_RequestOptions.DefaultFileRequestOptions; + requestOptions.DisableContentMD5Validation = options.DisableContentMD5Validation; + sourceLocation.RequestOptions = requestOptions; + } + + return DownloadInternalAsync(sourceLocation, destLocation, options, context, cancellationToken); + } + + /// + /// Copy content, properties and metadata of one Azure blob to another. + /// + /// The that is the source Azure blob. + /// The that is the destination Azure blob. + /// A flag indicating whether the copy is service-side asynchronous copy or not. + /// If this flag is set to true, service-side asychronous copy will be used; if this flag is set to false, + /// file is downloaded from source first, then uploaded to destination. + /// A object that represents the asynchronous operation. 
+ public static Task CopyAsync(CloudBlob sourceBlob, CloudBlob destBlob, bool isServiceCopy) + { + return CopyAsync(sourceBlob, destBlob, isServiceCopy, null, null); + } + + /// + /// Copy content, properties and metadata of one Azure blob to another. + /// + /// The that is the source Azure blob. + /// The that is the destination Azure blob. + /// A flag indicating whether the copy is service-side asynchronous copy or not. + /// If this flag is set to true, service-side asychronous copy will be used; if this flag is set to false, + /// file is downloaded from source first, then uploaded to destination. + /// A object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object that represents the asynchronous operation. + public static Task CopyAsync(CloudBlob sourceBlob, CloudBlob destBlob, bool isServiceCopy, CopyOptions options, TransferContext context) + { + return CopyAsync(sourceBlob, destBlob, isServiceCopy, options, context, CancellationToken.None); + } + + /// + /// Copy content, properties and metadata of one Azure blob to another. + /// + /// The that is the source Azure blob. + /// The that is the destination Azure blob. + /// A flag indicating whether the copy is service-side asynchronous copy or not. + /// If this flag is set to true, service-side asychronous copy will be used; if this flag is set to false, + /// file is downloaded from source first, then uploaded to destination. + /// A object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object to observe while waiting for a task to complete. + /// A object that represents the asynchronous operation. 
+ public static Task CopyAsync(CloudBlob sourceBlob, CloudBlob destBlob, bool isServiceCopy, CopyOptions options, TransferContext context, CancellationToken cancellationToken) + { + TransferLocation sourceLocation = new TransferLocation(sourceBlob); + TransferLocation destLocation = new TransferLocation(destBlob); + return CopyInternalAsync(sourceLocation, destLocation, isServiceCopy, options, context, cancellationToken); + } + + /// + /// Copy content, properties and metadata of an Azure blob to an Azure file. + /// + /// The that is the source Azure blob. + /// The that is the destination Azure file. + /// A flag indicating whether the copy is service-side asynchronous copy or not. + /// If this flag is set to true, service-side asychronous copy will be used; if this flag is set to false, + /// file is downloaded from source first, then uploaded to destination. + /// A object that represents the asynchronous operation. + public static Task CopyAsync(CloudBlob sourceBlob, CloudFile destFile, bool isServiceCopy) + { + return CopyAsync(sourceBlob, destFile, isServiceCopy, null, null); + } + + /// + /// Copy content, properties and metadata of an Azure blob to an Azure file. + /// + /// The that is the source Azure blob. + /// The that is the destination Azure file. + /// A flag indicating whether the copy is service-side asynchronous copy or not. + /// If this flag is set to true, service-side asychronous copy will be used; if this flag is set to false, + /// file is downloaded from source first, then uploaded to destination. + /// A object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object that represents the asynchronous operation. 
+ public static Task CopyAsync(CloudBlob sourceBlob, CloudFile destFile, bool isServiceCopy, CopyOptions options, TransferContext context) + { + return CopyAsync(sourceBlob, destFile, isServiceCopy, options, context, CancellationToken.None); + } + + /// + /// Copy content, properties and metadata of an Azure blob to an Azure file. + /// + /// The that is the source Azure blob. + /// The that is the destination Azure file. + /// A flag indicating whether the copy is service-side asynchronous copy or not. + /// If this flag is set to true, service-side asychronous copy will be used; if this flag is set to false, + /// file is downloaded from source first, then uploaded to destination. + /// A object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object to observe while waiting for a task to complete. + /// A object that represents the asynchronous operation. + public static Task CopyAsync(CloudBlob sourceBlob, CloudFile destFile, bool isServiceCopy, CopyOptions options, TransferContext context, CancellationToken cancellationToken) + { + TransferLocation sourceLocation = new TransferLocation(sourceBlob); + TransferLocation destLocation = new TransferLocation(destFile); + return CopyInternalAsync(sourceLocation, destLocation, isServiceCopy, options, context, cancellationToken); + } + + /// + /// Copy content, properties and metadata of an Azure file to an Azure blob. + /// + /// The that is the source Azure file. + /// The that is the destination Azure blob. + /// A flag indicating whether the copy is service-side asynchronous copy or not. + /// If this flag is set to true, service-side asychronous copy will be used; if this flag is set to false, + /// file is downloaded from source first, then uploaded to destination. + /// A object that represents the asynchronous operation. 
+ public static Task CopyAsync(CloudFile sourceFile, CloudBlob destBlob, bool isServiceCopy) + { + return CopyAsync(sourceFile, destBlob, isServiceCopy, null, null); + } + + /// + /// Copy content, properties and metadata of an Azure file to an Azure blob. + /// + /// The that is the source Azure file. + /// The that is the destination Azure blob. + /// A flag indicating whether the copy is service-side asynchronous copy or not. + /// If this flag is set to true, service-side asychronous copy will be used; if this flag is set to false, + /// file is downloaded from source first, then uploaded to destination. + /// A object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object that represents the asynchronous operation. + public static Task CopyAsync(CloudFile sourceFile, CloudBlob destBlob, bool isServiceCopy, CopyOptions options, TransferContext context) + { + return CopyAsync(sourceFile, destBlob, isServiceCopy, options, context, CancellationToken.None); + } + + /// + /// Copy content, properties and metadata of an Azure file to an Azure blob. + /// + /// The that is the source Azure file. + /// The that is the destination Azure blob. + /// A flag indicating whether the copy is service-side asynchronous copy or not. + /// If this flag is set to true, service-side asychronous copy will be used; if this flag is set to false, + /// file is downloaded from source first, then uploaded to destination. + /// A object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object to observe while waiting for a task to complete. + /// A object that represents the asynchronous operation. 
+ public static Task CopyAsync(CloudFile sourceFile, CloudBlob destBlob, bool isServiceCopy, CopyOptions options, TransferContext context, CancellationToken cancellationToken) + { + TransferLocation sourceLocation = new TransferLocation(sourceFile); + TransferLocation destLocation = new TransferLocation(destBlob); + return CopyInternalAsync(sourceLocation, destLocation, isServiceCopy, options, context, cancellationToken); + } + + + /// + /// Copy content, properties and metadata of an Azure file to another. + /// + /// The that is the source Azure file. + /// The that is the destination Azure file. + /// A flag indicating whether the copy is service-side asynchronous copy or not. + /// If this flag is set to true, service-side asychronous copy will be used; if this flag is set to false, + /// file is downloaded from source first, then uploaded to destination. + /// A object that represents the asynchronous operation. + public static Task CopyAsync(CloudFile sourceFile, CloudFile destFile, bool isServiceCopy) + { + return CopyAsync(sourceFile, destFile, isServiceCopy, null, null); + } + + /// + /// Copy content, properties and metadata of an Azure file to another. + /// + /// The that is the source Azure file. + /// The that is the destination Azure file. + /// A flag indicating whether the copy is service-side asynchronous copy or not. + /// If this flag is set to true, service-side asychronous copy will be used; if this flag is set to false, + /// file is downloaded from source first, then uploaded to destination. + /// A object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object that represents the asynchronous operation. 
+ public static Task CopyAsync(CloudFile sourceFile, CloudFile destFile, bool isServiceCopy, CopyOptions options, TransferContext context) + { + return CopyAsync(sourceFile, destFile, isServiceCopy, options, context, CancellationToken.None); + } + + /// + /// Copy content, properties and metadata of an Azure file to another. + /// + /// The that is the source Azure file. + /// The that is the destination Azure file. + /// A flag indicating whether the copy is service-side asynchronous copy or not. + /// If this flag is set to true, service-side asychronous copy will be used; if this flag is set to false, + /// file is downloaded from source first, then uploaded to destination. + /// A object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object to observe while waiting for a task to complete. + /// A object that represents the asynchronous operation. + public static Task CopyAsync(CloudFile sourceFile, CloudFile destFile, bool isServiceCopy, CopyOptions options, TransferContext context, CancellationToken cancellationToken) + { + TransferLocation sourceLocation = new TransferLocation(sourceFile); + TransferLocation destLocation = new TransferLocation(destFile); + return CopyInternalAsync(sourceLocation, destLocation, isServiceCopy, options, context, cancellationToken); + } + + /// + /// Copy file from an specified URI to an Azure blob. + /// + /// The of the source file. + /// The that is the destination Azure blob. + /// A flag indicating whether the copy is service-side asynchronous copy or not. + /// If this flag is set to true, service-side asychronous copy will be used; if this flag is set to false, + /// file is downloaded from source first, then uploaded to destination. + /// A object that represents the asynchronous operation. + /// Copying from an URI to Azure blob synchronously is not supported yet. 
public static Task CopyAsync(Uri sourceUri, CloudBlob destBlob, bool isServiceCopy)
{
    // No options, no context: defer to the fully-parameterized overload.
    return CopyAsync(sourceUri, destBlob, isServiceCopy, null, null);
}

/// <summary>
/// Copy a file from a specified URI to an Azure blob.
/// </summary>
/// <param name="sourceUri">The <see cref="Uri"/> of the copy source.</param>
/// <param name="destBlob">The <see cref="CloudBlob"/> that is the copy destination.</param>
/// <param name="isServiceCopy">A flag indicating whether the copy is a service-side asynchronous copy or not.
/// If this flag is set to true, service-side asynchronous copy will be used; if this flag is set to false,
/// the file is downloaded from the source first, then uploaded to the destination.</param>
/// <param name="options">A <see cref="CopyOptions"/> object that specifies additional options for the operation.</param>
/// <param name="context">A <see cref="TransferContext"/> object that represents the context for the current operation.</param>
/// <returns>A <see cref="Task"/> object that represents the asynchronous operation.</returns>
/// <remarks>Copying from a URI to an Azure blob synchronously is not supported yet.</remarks>
public static Task CopyAsync(Uri sourceUri, CloudBlob destBlob, bool isServiceCopy, CopyOptions options, TransferContext context)
{
    // Non-cancellable convenience overload.
    return CopyAsync(sourceUri, destBlob, isServiceCopy, options, context, CancellationToken.None);
}

/// <summary>
/// Copy a file from a specified URI to an Azure blob.
/// </summary>
/// <param name="sourceUri">The <see cref="Uri"/> of the copy source.</param>
/// <param name="destBlob">The <see cref="CloudBlob"/> that is the copy destination.</param>
/// <param name="isServiceCopy">A flag indicating whether the copy is a service-side asynchronous copy or not.
/// If this flag is set to true, service-side asynchronous copy will be used; if this flag is set to false,
/// the file is downloaded from the source first, then uploaded to the destination.</param>
/// <param name="options">A <see cref="CopyOptions"/> object that specifies additional options for the operation.</param>
/// <param name="context">A <see cref="TransferContext"/> object that represents the context for the current operation.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> object to observe while waiting for the task to complete.</param>
/// <returns>A <see cref="Task"/> object that represents the asynchronous operation.</returns>
/// <remarks>Copying from a URI to an Azure blob synchronously is not supported yet.</remarks>
public static Task CopyAsync(Uri sourceUri, CloudBlob destBlob, bool isServiceCopy, CopyOptions options, TransferContext context, CancellationToken cancellationToken)
{
    // A URI source has no readable stream on the client, so only service-side copy is possible.
    if (!isServiceCopy)
    {
        throw new NotSupportedException(Resources.SyncCopyFromUriToAzureBlobNotSupportedException);
    }

    return CopyInternalAsync(
        new TransferLocation(sourceUri),
        new TransferLocation(destBlob),
        isServiceCopy,
        options,
        context,
        cancellationToken);
}

/// <summary>
/// Copy a file from a specified URI to an Azure file.
/// </summary>
/// <param name="sourceUri">The <see cref="Uri"/> of the copy source.</param>
/// <param name="destFile">The <see cref="CloudFile"/> that is the copy destination.</param>
/// <param name="isServiceCopy">A flag indicating whether the copy is a service-side asynchronous copy or not.
/// If this flag is set to true, service-side asynchronous copy will be used; if this flag is set to false,
/// the file is downloaded from the source first, then uploaded to the destination.</param>
/// <returns>A <see cref="Task"/> object that represents the asynchronous operation.</returns>
/// <remarks>Copying from a URI to an Azure file synchronously is not supported yet.</remarks>
public static Task CopyAsync(Uri sourceUri, CloudFile destFile, bool isServiceCopy)
{
    // No options, no context: defer to the fully-parameterized overload.
    return CopyAsync(sourceUri, destFile, isServiceCopy, null, null);
}

/// <summary>
/// Copy a file from a specified URI to an Azure file.
/// </summary>
/// <param name="sourceUri">The <see cref="Uri"/> of the copy source.</param>
/// <param name="destFile">The <see cref="CloudFile"/> that is the copy destination.</param>
/// <param name="isServiceCopy">A flag indicating whether the copy is a service-side asynchronous copy or not.
/// If this flag is set to true, service-side asynchronous copy will be used; if this flag is set to false,
/// the file is downloaded from the source first, then uploaded to the destination.</param>
/// <param name="options">A <see cref="CopyOptions"/> object that specifies additional options for the operation.</param>
/// <param name="context">A <see cref="TransferContext"/> object that represents the context for the current operation.</param>
/// <returns>A <see cref="Task"/> object that represents the asynchronous operation.</returns>
/// <remarks>Copying from a URI to an Azure file synchronously is not supported yet.</remarks>
+ public static Task CopyAsync(Uri sourceUri, CloudFile destFile, bool isServiceCopy, CopyOptions options, TransferContext context) + { + return CopyAsync(sourceUri, destFile, isServiceCopy, options, context, CancellationToken.None); + } + + /// + /// Copy file from an specified URI to an Azure file. + /// + /// The of the source file. + /// The that is the destination Azure file. + /// A flag indicating whether the copy is service-side asynchronous copy or not. + /// If this flag is set to true, service-side asychronous copy will be used; if this flag is set to false, + /// file is downloaded from source first, then uploaded to destination. + /// A object that specifies additional options for the operation. + /// A object that represents the context for the current operation. + /// A object to observe while waiting for a task to complete. + /// A object that represents the asynchronous operation. + /// Copying from an URI to Azure file synchronously is not supported yet. + public static Task CopyAsync(Uri sourceUri, CloudFile destFile, bool isServiceCopy, CopyOptions options, TransferContext context, CancellationToken cancellationToken) + { + if (!isServiceCopy) + { + throw new NotSupportedException(Resources.SyncCopyFromUriToAzureFileNotSupportedException); + } + + TransferLocation sourceLocation = new TransferLocation(sourceUri); + TransferLocation destLocation = new TransferLocation(destFile); + return CopyInternalAsync(sourceLocation, destLocation, isServiceCopy, options, context, cancellationToken); + } + + private static Task UploadInternalAsync(TransferLocation sourceLocation, TransferLocation destLocation, UploadOptions options, TransferContext context, CancellationToken cancellationToken) + { + if (options != null) + { + destLocation.AccessCondition = options.DestinationAccessCondition; + } + + Transfer transfer = CreateSingleObjectTransfer(sourceLocation, destLocation, TransferMethod.SyncCopy, context); + if (options != null) + { + transfer.ContentType = 
options.ContentType; + } + + return DoTransfer(transfer, cancellationToken); + } + + private static Task DownloadInternalAsync(TransferLocation sourceLocation, TransferLocation destLocation, DownloadOptions options, TransferContext context, CancellationToken cancellationToken) + { + if (options != null) + { + sourceLocation.AccessCondition = options.SourceAccessCondition; + } + + Transfer transfer = CreateSingleObjectTransfer(sourceLocation, destLocation, TransferMethod.SyncCopy, context); + return DoTransfer(transfer, cancellationToken); + } + + private static Task CopyInternalAsync(TransferLocation sourceLocation, TransferLocation destLocation, bool isServiceCopy, CopyOptions options, TransferContext context, CancellationToken cancellationToken) + { + if (options != null) + { + sourceLocation.AccessCondition = options.SourceAccessCondition; + destLocation.AccessCondition = options.DestinationAccessCondition; + } + + Transfer transfer = CreateSingleObjectTransfer(sourceLocation, destLocation, isServiceCopy ? 
TransferMethod.AsyncCopy : TransferMethod.SyncCopy, context); + return DoTransfer(transfer, cancellationToken); + } + + private static async Task DoTransfer(Transfer transfer, CancellationToken cancellationToken) + { + if (!TryAddTransfer(transfer)) + { + throw new TransferException(TransferErrorCode.TransferAlreadyExists, Resources.TransferAlreadyExists); + } + + try + { + await transfer.ExecuteAsync(scheduler, cancellationToken); + } + finally + { + RemoveTransfer(transfer); + } + } + + private static Transfer CreateSingleObjectTransfer(TransferLocation sourceLocation, TransferLocation destLocation, TransferMethod transferMethod, TransferContext transferContext) + { + Transfer transfer = GetTransfer(sourceLocation, destLocation, transferMethod, transferContext); + if (transfer == null) + { + transfer = new SingleObjectTransfer(sourceLocation, destLocation, transferMethod); + if (transferContext != null) + { + transferContext.Checkpoint.AddTransfer(transfer); + } + } + + if (transferContext != null) + { + transfer.ProgressTracker.Parent = transferContext.OverallProgressTracker; + transfer.Context = transferContext; + } + + return transfer; + } + + private static Transfer GetTransfer(TransferLocation sourceLocation, TransferLocation destLocation, TransferMethod transferMethod, TransferContext transferContext) + { + Transfer transfer = null; + if (transferContext != null) + { + transfer = transferContext.Checkpoint.GetTransfer(sourceLocation, destLocation, transferMethod); + if (transfer != null) + { + // update transfer location information + UpdateTransferLocation(transfer.Source, sourceLocation); + UpdateTransferLocation(transfer.Destination, destLocation); + } + } + + return transfer; + } + + private static bool TryAddTransfer(Transfer transfer) + { + return allTransfers.TryAdd(new TransferKey(transfer.Source, transfer.Destination), transfer); + } + + private static void RemoveTransfer(Transfer transfer) + { + Transfer unused = null; + allTransfers.TryRemove(new 
TransferKey(transfer.Source, transfer.Destination), out unused); + } + + private static void UpdateTransferLocation(TransferLocation targetLocation, TransferLocation location) + { + // update storage credentials + if (targetLocation.TransferLocationType == TransferLocationType.AzureBlob) + { + targetLocation.UpdateCredentials(location.Blob.ServiceClient.Credentials); + } + else if (targetLocation.TransferLocationType == TransferLocationType.AzureFile) + { + targetLocation.UpdateCredentials(location.AzureFile.ServiceClient.Credentials); + } + } + } +} diff --git a/lib/TransferOptions/CopyOptions.cs b/lib/TransferOptions/CopyOptions.cs new file mode 100644 index 00000000..f088fd9d --- /dev/null +++ b/lib/TransferOptions/CopyOptions.cs @@ -0,0 +1,23 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + /// + /// Represents a set of options that may be specified for copy operation + /// + public sealed class CopyOptions + { + /// + /// Gets or sets an object that represents the access conditions for the source object. If null, no condition is used. + /// + public AccessCondition SourceAccessCondition { get; set; } + + /// + /// Gets or sets an object that represents the access conditions for the destination object. If null, no condition is used. 
+ /// + public AccessCondition DestinationAccessCondition { get; set; } + } +} diff --git a/lib/TransferOptions/DownloadOptions.cs b/lib/TransferOptions/DownloadOptions.cs new file mode 100644 index 00000000..74e5146b --- /dev/null +++ b/lib/TransferOptions/DownloadOptions.cs @@ -0,0 +1,25 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + /// + /// Represents a set of options that may be specified for download operation + /// + public sealed class DownloadOptions + { + /// + /// Gets or sets an object that represents the access conditions for the source object. If null, no condition is used. + /// + public AccessCondition SourceAccessCondition { get; set; } + + /// + /// Gets or sets a flag that indicates whether to validate content MD5 or not when reading data from the source object. + /// If set to true, source object content MD5 will be validated; otherwise, source object content MD5 will not be validated. + /// If not specified, it defaults to false. + /// + public bool DisableContentMD5Validation { get; set; } + } +} diff --git a/lib/TransferOptions/UploadOptions.cs b/lib/TransferOptions/UploadOptions.cs new file mode 100644 index 00000000..ef481a5b --- /dev/null +++ b/lib/TransferOptions/UploadOptions.cs @@ -0,0 +1,23 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + /// + /// Represents a set of options that may be specified for upload operation + /// + public sealed class UploadOptions + { + /// + /// Gets or sets an object that represents the access conditions for the destination object. 
If null, no condition is used. + /// + public AccessCondition DestinationAccessCondition { get; set; } + + /// + /// Gets or sets a string that indicates the content-type of the destination Azure blob or Azure file. + /// + public string ContentType { get; set; } + } +} diff --git a/lib/TransferProgress.cs b/lib/TransferProgress.cs new file mode 100644 index 00000000..77ccd046 --- /dev/null +++ b/lib/TransferProgress.cs @@ -0,0 +1,49 @@ +//----------------------------------------------------------------------------- +// +// Copyright (c) Microsoft Corporation +// +//----------------------------------------------------------------------------- +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + /// + /// Transfer progress + /// + public sealed class TransferProgress + { + /// + /// Gets the number of bytes that have been transferred. + /// + public long BytesTransferred + { + get; + internal set; + } + + /// + /// Gets the number of files that have been transferred. + /// + public long NumberOfFilesTransferred + { + get; + internal set; + } + + /// + /// Gets the number of files that are skipped to be transferred. + /// + public long NumberOfFilesSkipped + { + get; + internal set; + } + + /// + /// Gets the number of files that are failed to be transferred. 
+ /// + public long NumberOfFilesFailed + { + get; + internal set; + } + } +} diff --git a/lib/TransferScheduler.cs b/lib/TransferScheduler.cs new file mode 100644 index 00000000..bac76c27 --- /dev/null +++ b/lib/TransferScheduler.cs @@ -0,0 +1,424 @@ +//----------------------------------------------------------------------------- +// +// Copyright (c) Microsoft Corporation +// +//----------------------------------------------------------------------------- +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.Collections.Concurrent; + using System.Collections.Generic; + using System.Linq; + using System.Threading; + using System.Threading.Tasks; + using Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers; + using System.Diagnostics; + + /// + /// TransferScheduler class, used for transferring Microsoft Azure + /// Storage objects. + /// + internal sealed class TransferScheduler : IDisposable + { + /// + /// Main collection of transfer controllers. + /// + private BlockingCollection controllerQueue; + + /// + /// Internal queue for the main controllers collection. + /// + private ConcurrentQueue internalControllerQueue; + + /// + /// A buffer from which we select a transfer controller and add it into + /// active tasks when the bucket of active tasks is not full. + /// + private ConcurrentDictionary activeControllerItems = + new ConcurrentDictionary(); + + /// + /// CancellationToken source. + /// + private CancellationTokenSource cancellationTokenSource = + new CancellationTokenSource(); + + /// + /// Transfer options that this manager will pass to transfer controllers. + /// + private TransferConfigurations transferOptions; + + /// + /// Wait handle event for completion. + /// + private ManualResetEventSlim controllerResetEvent = + new ManualResetEventSlim(); + + /// + /// A pool of memory buffer objects, used to limit total consumed memory. 
+ /// + private MemoryManager memoryManager; + + /// + /// Random object to generate random numbers. + /// + private Random randomGenerator; + + /// + /// Used to lock disposing to avoid race condition between different disposing and other method calls. + /// + private object disposeLock = new object(); + + private SemaphoreSlim scheduleSemaphore; + + /// + /// Indicate whether the instance has been disposed. + /// + private bool isDisposed = false; + + /// + /// Initializes a new instance of the + /// class. + /// + public TransferScheduler() + : this(null) + { + } + + /// + /// Initializes a new instance of the + /// class. + /// + /// BlobTransfer options. + public TransferScheduler(TransferConfigurations options) + { + // If no options specified create a default one. + this.transferOptions = options ?? new TransferConfigurations(); + + this.internalControllerQueue = new ConcurrentQueue(); + this.controllerQueue = new BlockingCollection( + this.internalControllerQueue); + this.memoryManager = new MemoryManager( + this.transferOptions.MaximumCacheSize, + this.transferOptions.BlockSize); + + this.randomGenerator = new Random(); + + this.scheduleSemaphore = new SemaphoreSlim( + this.transferOptions.ParallelOperations, + this.transferOptions.ParallelOperations); + + this.StartSchedule(); + } + + /// + /// Finalizes an instance of the + /// class. + /// + ~TransferScheduler() + { + this.Dispose(false); + } + + /// + /// Gets the transfer options that this manager will pass to + /// transfer controllers. + /// + internal TransferConfigurations TransferOptions + { + get + { + return this.transferOptions; + } + } + + internal CancellationTokenSource CancellationTokenSource + { + get + { + return this.cancellationTokenSource; + } + } + + internal MemoryManager MemoryManager + { + get + { + return this.memoryManager; + } + } + + /// + /// Public dispose method to release all resources owned. 
+ /// + public void Dispose() + { + this.Dispose(true); + GC.SuppressFinalize(this); + } + + /// + /// Execute a transfer job asynchronously. + /// + /// Transfer job to be executed. + /// Token used to notify the job that it should stop. + public Task ExecuteJobAsync( + TransferJob job, + CancellationToken cancellationToken) + { + if (null == job) + { + throw new ArgumentNullException("job"); + } + + lock (this.disposeLock) + { + this.CheckDisposed(); + + return ExecuteJobInternalAsync(job, cancellationToken); + } + } + + [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Reliability", "CA2000:Dispose objects before losing scope", Justification = "Instances will be disposed in other place.")] + private async Task ExecuteJobInternalAsync( + TransferJob job, + CancellationToken cancellationToken) + { + Debug.Assert( + job.Status == TransferJobStatus.NotStarted || + job.Status == TransferJobStatus.Monitor || + job.Status == TransferJobStatus.Transfer); + + TransferControllerBase controller = null; + switch (job.Transfer.TransferMethod) + { + case TransferMethod.SyncCopy: + controller = new SyncTransferController(this, job, cancellationToken); + break; + + case TransferMethod.AsyncCopy: + controller = AsyncCopyController.CreateAsyncCopyController(this, job, cancellationToken); + break; + } + + Utils.CheckCancellation(this.cancellationTokenSource); + this.controllerQueue.Add(controller, this.cancellationTokenSource.Token); + + try + { + await controller.TaskCompletionSource.Task; + } + catch(StorageException sex) + { + throw new TransferException(TransferErrorCode.Unknown, Resources.UncategorizedException, sex); + } + finally + { + controller.Dispose(); + } + } + + private void FillInQueue( + ConcurrentDictionary activeItems, + BlockingCollection collection, + CancellationToken token) + { + while (!token.IsCancellationRequested + && activeItems.Count < this.transferOptions.ParallelOperations) + { + if (activeItems.Count >= 
this.transferOptions.ParallelOperations) + { + return; + } + + ITransferController transferItem = null; + + try + { + if (!collection.TryTake(out transferItem) + || null == transferItem) + { + return; + } + } + catch (ObjectDisposedException) + { + return; + } + + activeItems.TryAdd(transferItem, null); + } + } + + /// + /// Blocks until the queue is empty and all transfers have been + /// completed. + /// + private void WaitForCompletion() + { + this.controllerResetEvent.Wait(); + } + + /// + /// Cancels any remaining queued work. + /// + private void CancelWork() + { + this.cancellationTokenSource.Cancel(); + this.controllerQueue.CompleteAdding(); + + // Move following to Cancel method. + // there might be running "work" when the transfer is cancelled. + // wait until all running "work" is done. + SpinWait sw = new SpinWait(); + while (this.scheduleSemaphore.CurrentCount != this.transferOptions.ParallelOperations) + { + sw.SpinOnce(); + } + + this.controllerResetEvent.Set(); + } + + /// + /// Private dispose method to release managed/unmanaged objects. + /// If disposing is true clean up managed resources as well as + /// unmanaged resources. + /// If disposing is false only clean up unmanaged resources. + /// + /// Indicates whether or not to dispose + /// managed resources. + private void Dispose(bool disposing) + { + if (!this.isDisposed) + { + lock (this.disposeLock) + { + // We got the lock, isDisposed is true, means that the disposing has been finished. 
+ if (this.isDisposed) + { + return; + } + + this.isDisposed = true; + + this.CancelWork(); + this.WaitForCompletion(); + + if (disposing) + { + if (null != this.controllerQueue) + { + this.controllerQueue.Dispose(); + this.controllerQueue = null; + } + + if (null != this.cancellationTokenSource) + { + this.cancellationTokenSource.Dispose(); + this.cancellationTokenSource = null; + } + + if (null != this.controllerResetEvent) + { + this.controllerResetEvent.Dispose(); + this.controllerResetEvent = null; + } + + if (null != this.scheduleSemaphore) + { + this.scheduleSemaphore.Dispose(); + this.scheduleSemaphore = null; + } + + this.memoryManager = null; + } + } + } + else + { + this.WaitForCompletion(); + } + } + + private void CheckDisposed() + { + if (this.isDisposed) + { + throw new ObjectDisposedException("TransferScheduler"); + } + } + + private void StartSchedule() + { + Task.Run(() => + { + SpinWait sw = new SpinWait(); + while (!this.cancellationTokenSource.Token.IsCancellationRequested && + (!this.controllerQueue.IsCompleted || this.activeControllerItems.Any())) + { + FillInQueue( + this.activeControllerItems, + this.controllerQueue, + this.cancellationTokenSource.Token); + + if (!this.cancellationTokenSource.Token.IsCancellationRequested) + { + // If we don't have the requested amount of active tasks + // running, get a task item from any active transfer item + // that has work available. + if (!this.DoWorkFrom(this.activeControllerItems)) + { + sw.SpinOnce(); + } + else + { + sw.Reset(); + continue; + } + } + } + }); + } + + private void FinishedWorkItem( + ITransferController transferController) + { + object dummy; + this.activeControllerItems.TryRemove(transferController, out dummy); + } + + private bool DoWorkFrom( + ConcurrentDictionary activeItems) + { + // Filter items with work only. 
+ List> activeItemsWithWork = + new List>( + activeItems.Where(item => item.Key.HasWork && !item.Key.IsFinished)); + + if (0 != activeItemsWithWork.Count) + { + // Select random item and get work delegate. + int idx = this.randomGenerator.Next(activeItemsWithWork.Count); + ITransferController transferController = activeItemsWithWork[idx].Key; + + DoControllerWork(transferController); + + return true; + } + + return false; + } + + private async void DoControllerWork(ITransferController controller) + { + this.scheduleSemaphore.Wait(); + bool finished = await controller.DoWorkAsync(); + this.scheduleSemaphore.Release(); + + if (finished) + { + this.FinishedWorkItem(controller); + } + } + } +} diff --git a/lib/TransferStatusHelpers/Attributes.cs b/lib/TransferStatusHelpers/Attributes.cs new file mode 100644 index 00000000..29c6b0e4 --- /dev/null +++ b/lib/TransferStatusHelpers/Attributes.cs @@ -0,0 +1,58 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.Collections.Generic; + using System.Linq; + using System.Text; + using System.Threading.Tasks; + + internal class Attributes + { + /// + /// Gets or sets the cache-control value stored for blob/azure file. + /// + public string CacheControl { get; set; } + + /// + /// Gets or sets the content-disposition value stored for blob/azure file. + /// + public string ContentDisposition { get; set; } + + /// + /// Gets or sets the content-encoding value stored for blob/azure file. + /// + public string ContentEncoding { get; set; } + + /// + /// Gets or sets the content-language value stored for blob/azure file. + /// + public string ContentLanguage { get; set; } + + /// + /// Gets or sets the content-MD5 value stored for blob/azure file. 
+ /// + public string ContentMD5 { get; set; } + + /// + /// Gets or sets the content-type value stored for blob/azure file. + /// + public string ContentType { get; set; } + + /// + /// Gets or sets the user-defined metadata for blob/azure file. + /// + public IDictionary Metadata { get; set; } + + /// + /// Gets or sets a value to indicate whether to overwrite all attribute on destination, + /// or keep its original value if it's not set. + /// + public bool OverWriteAll { get; set; } + } +} diff --git a/lib/TransferStatusHelpers/ReadDataState.cs b/lib/TransferStatusHelpers/ReadDataState.cs new file mode 100644 index 00000000..b384cce3 --- /dev/null +++ b/lib/TransferStatusHelpers/ReadDataState.cs @@ -0,0 +1,61 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System.IO; + + /// + /// Keep the state of reading a single block from the input stream. + /// + internal class ReadDataState : TransferDataState + { + /// + /// Gets or sets the memory stream used to encapsulate the memory + /// buffer for passing the methods such as PutBlock, WritePages, + /// DownloadToStream and DownloadRangeToStream, as these methods + /// requires a stream and doesn't allow for a byte array as input. + /// + public MemoryStream MemoryStream + { + get; + set; + } + + /// + /// Gets or sets the memory manager that controls global memory + /// allocation. + /// + public MemoryManager MemoryManager + { + get; + set; + } + + /// + /// Private dispose method to release managed/unmanaged objects. + /// If disposing = true clean up managed resources as well as unmanaged resources. + /// If disposing = false only clean up unmanaged resources. + /// + /// Indicates whether or not to dispose managed resources. 
+ protected override void Dispose(bool disposing) + { + if (disposing) + { + if (null != this.MemoryStream) + { + this.MemoryStream.Dispose(); + this.MemoryStream = null; + } + + if (null != this.MemoryBuffer) + { + this.MemoryManager.ReleaseBuffer(this.MemoryBuffer); + this.MemoryManager = null; + } + } + } + } +} diff --git a/lib/TransferStatusHelpers/SharedTransferData.cs b/lib/TransferStatusHelpers/SharedTransferData.cs new file mode 100644 index 00000000..2c3b4eba --- /dev/null +++ b/lib/TransferStatusHelpers/SharedTransferData.cs @@ -0,0 +1,45 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.Collections.Concurrent; + + internal class SharedTransferData + { + /// + /// Gets or sets length of source. + /// + public long TotalLength { get; set; } + + /// + /// Gets or sets the job instance representing the transfer. + /// + public TransferJob TransferJob { get; set; } + + /// + /// Gest or sets list of available transfer data from source. + /// + public ConcurrentDictionary AvailableData { get; set; } + + /// + /// Gets or sets a value indicating whether should disable validation of content md5. + /// The reader should get this value from source's RequestOptions, + /// the writer should do or not do validation on content md5 according to this value. + /// + public bool DisableContentMD5Validation { get; set; } + + /// + /// Gets or sets string which representing source location. + /// + public string SourceLocation { get; set; } + + /// + /// Gets or sets attribute for blob/azure file. 
+ /// + public Attributes Attributes { get; set; } + } +} diff --git a/lib/TransferStatusHelpers/TransferData.cs b/lib/TransferStatusHelpers/TransferData.cs new file mode 100644 index 00000000..d9102356 --- /dev/null +++ b/lib/TransferStatusHelpers/TransferData.cs @@ -0,0 +1,44 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System.IO; + + internal class TransferData : TransferDataState + { + private MemoryManager memoryManager; + + public TransferData(MemoryManager memoryManager) + { + this.memoryManager = memoryManager; + } + + public Stream Stream + { + get; + set; + } + + protected override void Dispose(bool disposing) + { + if (disposing) + { + if (null != this.Stream) + { + this.Stream.Close(); + this.Stream = null; + } + + if (null != this.MemoryBuffer) + { + this.memoryManager.ReleaseBuffer(this.MemoryBuffer); + this.MemoryBuffer = null; + } + } + } + } +} diff --git a/lib/TransferStatusHelpers/TransferDataState.cs b/lib/TransferStatusHelpers/TransferDataState.cs new file mode 100644 index 00000000..39a45291 --- /dev/null +++ b/lib/TransferStatusHelpers/TransferDataState.cs @@ -0,0 +1,69 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + + /// + /// Calculate and show transfer speed. + /// + internal abstract class TransferDataState : IDisposable + { + /// + /// Gets or sets a handle to the memory buffer to ensure the + /// memory buffer remains in memory during the entire operation. 
+ /// + public byte[] MemoryBuffer + { + get; + set; + } + + /// + /// Gets or sets the starting offset of this part of data. + /// + public long StartOffset + { + get; + set; + } + + /// + /// Gets or sets the length of this part of data. + /// + public int Length + { + get; + set; + } + + /// + /// Gets or sets how many bytes have been read. + /// + public int BytesRead + { + get; + set; + } + + /// + /// Public dispose method to release all resources owned. + /// + public void Dispose() + { + this.Dispose(true); + GC.SuppressFinalize(this); + } + + /// + /// Private dispose method to release managed/unmanaged objects. + /// If disposing = true clean up managed resources as well as unmanaged resources. + /// If disposing = false only clean up unmanaged resources. + /// + /// Indicates whether or not to dispose managed resources. + protected abstract void Dispose(bool disposing); + } +} diff --git a/lib/TransferStatusHelpers/TransferDownloadBuffer.cs b/lib/TransferStatusHelpers/TransferDownloadBuffer.cs new file mode 100644 index 00000000..0da5a318 --- /dev/null +++ b/lib/TransferStatusHelpers/TransferDownloadBuffer.cs @@ -0,0 +1,67 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System.Threading; + + class TransferDownloadBuffer + { + private int finishedLength = 0; + + private int processed = 0; + + public TransferDownloadBuffer(long startOffset, int expectedLength, byte[] buffer) + { + this.Length = expectedLength; + this.StartOffset = startOffset; + this.MemoryBuffer = buffer; + } + + public int Length + { + get; + private set; + } + + public long StartOffset + { + get; + private set; + } + + public byte[] MemoryBuffer + { + get; + private set; + } + + public bool Finished + { + get + { + return this.finishedLength == 
this.Length; + } + } + + /// + /// Mark this buffer as processed. The return value indicates whether the buffer + /// is marked as processed by invocation of this method. This method returns true + /// exactly once. The caller is supposed to invoke this method before processing + /// the buffer and proceed only if this method returns true. + /// + /// Whether this instance is marked as processed by invocation of this method. + public bool MarkAsProcessed() + { + return 0 == Interlocked.CompareExchange(ref this.processed, 1, 0); + } + + public void ReadFinish(int length) + { + Interlocked.Add(ref this.finishedLength, length); + } + } +} diff --git a/lib/TransferStatusHelpers/TransferDownloadStream.cs b/lib/TransferStatusHelpers/TransferDownloadStream.cs new file mode 100644 index 00000000..23b8cce0 --- /dev/null +++ b/lib/TransferStatusHelpers/TransferDownloadStream.cs @@ -0,0 +1,262 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.IO; + + class TransferDownloadStream : Stream + { + TransferDownloadBuffer firstBuffer; + MemoryStream firstStream; + int firstOffset; + + TransferDownloadBuffer secondBuffer; + MemoryStream secondStream; + int secondOffset; + + bool onSecondStream = false; + + MemoryManager memoryManager; + + public TransferDownloadStream(MemoryManager memoryManager, TransferDownloadBuffer buffer, int offset, int count) + :this(memoryManager, buffer, offset, count, null, 0, 0) + { + } + + public TransferDownloadStream( + MemoryManager memoryManager, + TransferDownloadBuffer firstBuffer, + int firstOffset, + int firstCount, + TransferDownloadBuffer secondBuffer, + int secondOffset, + int secondCount) + { + this.memoryManager = 
memoryManager; + this.firstBuffer = firstBuffer; + this.firstOffset = firstOffset; + this.firstStream = new MemoryStream(this.firstBuffer.MemoryBuffer, firstOffset, firstCount); + + if (null != secondBuffer) + { + this.secondBuffer = secondBuffer; + this.secondOffset = secondOffset; + this.secondStream = new MemoryStream(this.secondBuffer.MemoryBuffer, secondOffset, secondCount); + } + } + + public override bool CanRead + { + get + { + return false; + } + } + + public override bool CanWrite + { + get + { + return true; + } + } + + public override bool CanSeek + { + get + { + return true; + } + } + + public bool ReserveBuffer + { + get; + set; + } + + public override long Length + { + get + { + if (null == this.secondStream) + { + return this.firstStream.Length; + } + + return this.firstStream.Length + this.secondStream.Length; + } + } + + public override long Position + { + get + { + if (!this.onSecondStream) + { + return this.firstStream.Position; + } + else + { + Debug.Assert(null != this.secondStream, "Second stream should exist when position is on the second stream"); + return this.firstStream.Length + this.secondStream.Position; + } + } + + set + { + long position = value; + + if (position < this.firstStream.Length) + { + this.onSecondStream = false; + this.firstStream.Position = position; + } + else + { + position -= this.firstStream.Length; + this.onSecondStream = true; + this.secondStream.Position = position; + } + } + } + + public override long Seek(long offset, SeekOrigin origin) + { + long position = 0; + + switch (origin) + { + case SeekOrigin.End: + position = this.Length + offset; + break; + case SeekOrigin.Current: + position = this.Position + offset; + break; + default: + position = offset; + break; + } + + this.Position = position; + return position; + } + + public override void SetLength(long value) + { + throw new NotSupportedException(); + } + + public override void Flush() + { + // do nothing + } + + public override int Read(byte[] buffer, int 
offset, int count) + { + throw new NotSupportedException(); + } + + public override void Write(byte[] buffer, int offset, int count) + { + int length = count; + int firstLength = 0; + if (!this.onSecondStream) + { + firstLength = Math.Min(length, (int)(this.firstStream.Length - this.firstStream.Position)); + this.firstStream.Write(buffer, offset, firstLength); + length -= firstLength; + if (0 == length) + { + return; + } + else + { + if (null == this.secondStream) + { + throw new NotSupportedException(Resources.StreamNotExpandable); + } + + this.onSecondStream = true; + } + } + + Debug.Assert(null != this.secondStream, "Position is on the second stream, it should not be null"); + + this.secondStream.Write(buffer, offset + firstLength, length); + } + + public void SetAllZero() + { + Array.Clear(this.firstBuffer.MemoryBuffer, this.firstOffset, (int)this.firstStream.Length); + + if (null != this.secondBuffer) + { + Array.Clear(this.secondBuffer.MemoryBuffer, this.secondOffset, (int)this.secondStream.Length); + } + } + + public void FinishWrite() + { + this.firstBuffer.ReadFinish((int)this.firstStream.Length); + + if (null != this.secondBuffer) + { + this.secondBuffer.ReadFinish((int)this.secondStream.Length); + } + } + + public IEnumerable GetBuffers() + { + yield return this.firstBuffer; + + if (null != this.secondBuffer) + { + yield return this.secondBuffer; + } + } + + protected override void Dispose(bool disposing) + { + base.Dispose(disposing); + + if (disposing) + { + + if (null != this.firstStream) + { + this.firstStream.Dispose(); + this.firstStream = null; + } + + if (null != this.secondStream) + { + this.secondStream.Dispose(); + this.secondStream = null; + } + + if (!this.ReserveBuffer) + { + if (null != this.firstBuffer) + { + this.memoryManager.ReleaseBuffer(this.firstBuffer.MemoryBuffer); + this.firstBuffer = null; + } + + if (null != this.secondBuffer) + { + this.memoryManager.ReleaseBuffer(this.secondBuffer.MemoryBuffer); + this.secondBuffer = null; + 
} + } + } + } + } +} diff --git a/lib/TransferStatusHelpers/TransferProgressTracker.cs b/lib/TransferStatusHelpers/TransferProgressTracker.cs new file mode 100644 index 00000000..817204d0 --- /dev/null +++ b/lib/TransferStatusHelpers/TransferProgressTracker.cs @@ -0,0 +1,277 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.Runtime.Serialization; + using System.Threading; + + /// + /// Calculate transfer progress. + /// + [Serializable] + internal class TransferProgressTracker : ISerializable + { + private const string BytesTransferredName = "BytesTransferred"; + private const string FilesTransferredName = "FilesTransferred"; + private const string FilesSkippedName = "FilesSkipped"; + private const string FilesFailedName = "FilesFailed"; + + /// + /// Stores the number of bytes that have been transferred. + /// + private long bytesTransferred; + + /// + /// Stores the number of files that have been transferred. + /// + private long numberOfFilesTransferred; + + /// + /// Stores the number of files that are failed to be transferred. + /// + private long numberOfFilesSkipped; + + /// + /// Stores the number of files that are skipped. + /// + private long numberOfFilesFailed; + + /// + /// A flag indicating whether the progress handler is being invoked + /// + private int invokingProgressHandler; + + /// + /// Initializes a new instance of the class. + /// + public TransferProgressTracker() + { + this.bytesTransferred = 0; + this.numberOfFilesTransferred = 0; + this.numberOfFilesSkipped = 0; + this.numberOfFilesFailed = 0; + } + + /// + /// Initializes a new instance of the class. + /// + /// Serialization information. + /// Streaming context. 
+ protected TransferProgressTracker(SerializationInfo info, StreamingContext context) + { + if (info == null) + { + throw new System.ArgumentNullException("info"); + } + + this.bytesTransferred = info.GetInt64(BytesTransferredName); + this.numberOfFilesTransferred = info.GetInt64(FilesTransferredName); + this.numberOfFilesSkipped = info.GetInt64(FilesSkippedName); + this.numberOfFilesFailed = info.GetInt64(FilesFailedName); + } + + /// + /// Initializes a new instance of the class. + /// + private TransferProgressTracker(TransferProgressTracker other) + { + this.bytesTransferred = other.bytesTransferred; + this.numberOfFilesTransferred = other.numberOfFilesTransferred; + this.numberOfFilesSkipped = other.numberOfFilesSkipped; + this.numberOfFilesFailed = other.numberOfFilesFailed; + } + + /// + /// Gets or sets the parent progress tracker + /// + public TransferProgressTracker Parent + { + get; + set; + } + + /// + /// Gets or sets the progress handler + /// + public IProgress ProgressHandler + { + get; + set; + } + + /// + /// Gets the number of bytes that have been transferred. + /// + public long BytesTransferred + { + get + { + return Interlocked.Read(ref this.bytesTransferred); + } + } + + /// + /// Gets the number of files that have been transferred. + /// + public long NumberOfFilesTransferred + { + get + { + return Interlocked.Read(ref this.numberOfFilesTransferred); + } + + } + + /// + /// Gets the number of files that are skipped to be transferred. + /// + public long NumberOfFilesSkipped + { + get + { + return Interlocked.Read(ref this.numberOfFilesSkipped); + } + } + + /// + /// Gets the number of files that are failed to be transferred. + /// + public long NumberOfFilesFailed + { + get + { + return Interlocked.Read(ref this.numberOfFilesFailed); + } + } + + /// + /// Updates the current status by indicating the bytes transferred. + /// + /// Indicating by how much the bytes transferred increased. 
+ public void AddBytesTransferred(long bytesToIncrease) + { + if (bytesToIncrease != 0) + { + Interlocked.Add(ref this.bytesTransferred, bytesToIncrease); + + if (this.Parent != null) + { + this.Parent.AddBytesTransferred(bytesToIncrease); + } + } + + this.InvokeProgressHandler(); + } + + /// + /// Updates the number of files that have been transferred. + /// + /// Indicating by how much the number of file that have been transferred increased. + public void AddNumberOfFilesTransferred(long numberOfFilesToIncrease) + { + if (numberOfFilesToIncrease != 0) + { + Interlocked.Add(ref this.numberOfFilesTransferred, numberOfFilesToIncrease); + + if (this.Parent != null) + { + this.Parent.AddNumberOfFilesTransferred(numberOfFilesToIncrease); + } + } + + this.InvokeProgressHandler(); + } + + /// + /// Updates the number of files that are skipped. + /// + /// Indicating by how much the number of file that are skipped increased. + public void AddNumberOfFilesSkipped(long numberOfFilesToIncrease) + { + if (numberOfFilesToIncrease != 0) + { + Interlocked.Add(ref this.numberOfFilesSkipped, numberOfFilesToIncrease); + + if (this.Parent != null) + { + this.Parent.AddNumberOfFilesSkipped(numberOfFilesToIncrease); + } + } + + this.InvokeProgressHandler(); + } + + /// + /// Updates the number of files that are failed to be transferred. + /// + /// Indicating by how much the number of file that are failed to be transferred increased. + public void AddNumberOfFilesFailed(long numberOfFilesToIncrease) + { + if (numberOfFilesToIncrease != 0) + { + Interlocked.Add(ref this.numberOfFilesFailed, numberOfFilesToIncrease); + + if (this.Parent != null) + { + this.Parent.AddNumberOfFilesFailed(numberOfFilesToIncrease); + } + } + + this.InvokeProgressHandler(); + } + + /// + /// Gets a copy of this transfer progress tracker object. 
+ /// + /// A copy of current TransferProgressTracker object + public TransferProgressTracker Copy() + { + return new TransferProgressTracker(this); + } + + /// + /// Serializes transfer progress. + /// + /// Serialization info object. + /// Streaming context. + public virtual void GetObjectData(SerializationInfo info, StreamingContext context) + { + if (info == null) + { + throw new ArgumentNullException("info"); + } + + info.AddValue(BytesTransferredName, this.BytesTransferred); + info.AddValue(FilesTransferredName, this.NumberOfFilesTransferred); + info.AddValue(FilesSkippedName, this.NumberOfFilesSkipped); + info.AddValue(FilesFailedName, this.NumberOfFilesFailed); + } + + private void InvokeProgressHandler() + { + if (this.ProgressHandler != null) + { + if ( 0 == Interlocked.CompareExchange(ref this.invokingProgressHandler, 1, 0)) + { + lock (this.ProgressHandler) + { + Interlocked.Exchange(ref this.invokingProgressHandler, 0); + + this.ProgressHandler.Report( + new TransferProgress() + { + BytesTransferred = this.BytesTransferred, + NumberOfFilesTransferred = this.NumberOfFilesTransferred, + NumberOfFilesSkipped = this.NumberOfFilesSkipped, + NumberOfFilesFailed = this.NumberOfFilesFailed, + }); + } + } + } + } + } +} diff --git a/lib/Transfer_RequestOptions.cs b/lib/Transfer_RequestOptions.cs new file mode 100644 index 00000000..1a203cc5 --- /dev/null +++ b/lib/Transfer_RequestOptions.cs @@ -0,0 +1,305 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.Diagnostics; + using System.Net; + using Microsoft.WindowsAzure.Storage.Blob; + using Microsoft.WindowsAzure.Storage.File; + using Microsoft.WindowsAzure.Storage.RetryPolicies; + using Microsoft.WindowsAzure.Storage.Table; + + /// + /// Defines default 
RequestOptions for every type of transfer job. + /// + internal static class Transfer_RequestOptions + { + /// + /// Stores the default client retry count in x-ms error. + /// + private const int DefaultRetryCountXMsError = 10; + + /// + /// Stores the default client retry count in non x-ms error. + /// + private const int DefaultRetryCountOtherError = 3; + + /// + /// Stores the default maximum execution time across all potential retries. + /// + private static readonly TimeSpan DefaultMaximumExecutionTime = + TimeSpan.FromSeconds(900); + + /// + /// Stores the default server timeout. + /// + private static readonly TimeSpan DefaultServerTimeout = + TimeSpan.FromSeconds(300); + + /// + /// Stores the default back-off. + /// Increases exponentially used with ExponentialRetry: 3, 9, 21, 45, 93, 120, 120, 120, ... + /// + private static TimeSpan retryPoliciesDefaultBackoff = + TimeSpan.FromSeconds(3.0); + + /// + /// Gets the default . + /// + /// The default + public static BlobRequestOptions DefaultBlobRequestOptions + { + get + { + IRetryPolicy defaultRetryPolicy = new TransferRetryPolicy( + retryPoliciesDefaultBackoff, + DefaultRetryCountXMsError, + DefaultRetryCountOtherError); + + return new BlobRequestOptions() + { + MaximumExecutionTime = DefaultMaximumExecutionTime, + RetryPolicy = defaultRetryPolicy, + ServerTimeout = DefaultServerTimeout, + UseTransactionalMD5 = true + }; + } + } + + /// + /// Gets the default . + /// + /// The default + public static FileRequestOptions DefaultFileRequestOptions + { + get + { + IRetryPolicy defaultRetryPolicy = new TransferRetryPolicy( + retryPoliciesDefaultBackoff, + DefaultRetryCountXMsError, + DefaultRetryCountOtherError); + + return new FileRequestOptions() + { + MaximumExecutionTime = DefaultMaximumExecutionTime, + RetryPolicy = defaultRetryPolicy, + ServerTimeout = DefaultServerTimeout, + UseTransactionalMD5 = true + }; + } + } + + /// + /// Gets the default . 
+ /// + /// The default + [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode", Justification = "It will be called in TableDataMovement project.")] + public static TableRequestOptions DefaultTableRequestOptions + { + get + { + IRetryPolicy defaultRetryPolicy = new TransferRetryPolicy( + retryPoliciesDefaultBackoff, + DefaultRetryCountXMsError, + DefaultRetryCountOtherError); + + return new TableRequestOptions + { + MaximumExecutionTime = DefaultMaximumExecutionTime, + RetryPolicy = defaultRetryPolicy, + ServerTimeout = DefaultServerTimeout, + PayloadFormat = TablePayloadFormat.Json + }; + } + } + + /// + /// Define retry policy used in blob transfer. + /// + private class TransferRetryPolicy : IExtendedRetryPolicy + { + /// + /// Prefix of Azure Storage response keys. + /// + private const string XMsPrefix = "x-ms"; + + /// + /// Max retry count in non x-ms error. + /// + private int maxAttemptsOtherError; + + /// + /// ExponentialRetry retry policy object. + /// + private ExponentialRetry retryPolicy; + + /// + /// Indicate whether has met x-ms once or more. + /// + private bool gotXMsError = false; + + /// + /// Initializes a new instance of the class. + /// + /// Back-off in ExponentialRetry retry policy. + /// Max retry count when meets x-ms error. + /// Max retry count when meets non x-ms error. + public TransferRetryPolicy(TimeSpan deltaBackoff, int maxAttemptsXMsError, int maxAttemptsOtherError) + { + Debug.Assert( + maxAttemptsXMsError >= maxAttemptsOtherError, + "We should retry more times when meets x-ms errors than the other errors."); + + this.retryPolicy = new ExponentialRetry(deltaBackoff, maxAttemptsXMsError); + this.maxAttemptsOtherError = maxAttemptsOtherError; + } + + /// + /// Initializes a new instance of the class. + /// + /// ExponentialRetry object. + /// Max retry count when meets non x-ms error. 
+ private TransferRetryPolicy(ExponentialRetry retryPolicy, int maxAttemptsInOtherError) + { + this.retryPolicy = retryPolicy; + this.maxAttemptsOtherError = maxAttemptsInOtherError; + } + + /// + /// Generates a new retry policy for the current request attempt. + /// + /// An IRetryPolicy object that represents the retry policy for the current request attempt. + public IRetryPolicy CreateInstance() + { + return new TransferRetryPolicy( + this.retryPolicy.CreateInstance() as ExponentialRetry, + this.maxAttemptsOtherError); + } + + /// + /// Determines whether the operation should be retried and the interval until the next retry. + /// + /// + /// A RetryContext object that indicates the number of retries, the results of the last request, + /// and whether the next retry should happen in the primary or secondary location, and specifies the location mode. + /// An OperationContext object for tracking the current operation. + /// + /// A RetryInfo object that indicates the location mode, + /// and whether the next retry should happen in the primary or secondary location. + /// If null, the operation will not be retried. + public RetryInfo Evaluate(RetryContext retryContext, OperationContext operationContext) + { + if (null == retryContext) + { + throw new ArgumentNullException("retryContext"); + } + + if (null == operationContext) + { + throw new ArgumentNullException("operationContext"); + } + + RetryInfo retryInfo = this.retryPolicy.Evaluate(retryContext, operationContext); + + if (null != retryInfo) + { + if (this.ShouldRetry(retryContext.CurrentRetryCount, retryContext.LastRequestResult.Exception)) + { + return retryInfo; + } + } + + return null; + } + + /// + /// Determines if the operation should be retried and how long to wait until the next retry. + /// + /// The number of retries for the given operation. + /// The status code for the last operation. + /// An Exception object that represents the last exception encountered. 
+ /// The interval to wait until the next retry. + /// An OperationContext object for tracking the current operation. + /// True if the operation should be retried; otherwise, false. + public bool ShouldRetry( + int currentRetryCount, + int statusCode, + Exception lastException, + out TimeSpan retryInterval, + OperationContext operationContext) + { + if (!this.retryPolicy.ShouldRetry(currentRetryCount, statusCode, lastException, out retryInterval, operationContext)) + { + return false; + } + + return this.ShouldRetry(currentRetryCount, lastException); + } + + /// + /// Determines if the operation should be retried. + /// This function uses http header to determine whether the error is returned from Windows Azure. + /// If it's from Windows Azure (with x-ms in header), the request will retry 10 times at most. + /// Otherwise, the request will retry 3 times at most. + /// + /// The number of retries for the given operation. + /// An Exception object that represents the last exception encountered. + /// True if the operation should be retried; otherwise, false. 
+ private bool ShouldRetry( + int currentRetryCount, + Exception lastException) + { + if (this.gotXMsError) + { + return true; + } + + StorageException storageException = lastException as StorageException; + + if (null != storageException) + { + WebException webException = storageException.InnerException as WebException; + + if (null != webException) + { + if (WebExceptionStatus.ConnectionClosed == webException.Status) + { + return true; + } + + HttpWebResponse response = webException.Response as HttpWebResponse; + + if (null != response) + { + if (null != response.Headers) + { + if (null != response.Headers.AllKeys) + { + for (int i = 0; i < response.Headers.AllKeys.Length; ++i) + { + if (response.Headers.AllKeys[i].StartsWith(XMsPrefix, StringComparison.OrdinalIgnoreCase)) + { + this.gotXMsError = true; + return true; + } + } + } + } + } + } + } + + if (currentRetryCount < this.maxAttemptsOtherError) + { + return true; + } + + return false; + } + } + } +} diff --git a/lib/Utils.cs b/lib/Utils.cs new file mode 100644 index 00000000..0b8e9f64 --- /dev/null +++ b/lib/Utils.cs @@ -0,0 +1,393 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace Microsoft.WindowsAzure.Storage.DataMovement +{ + using System; + using System.Collections.Generic; + using System.Globalization; + using System.Threading; + using Microsoft.WindowsAzure.Storage.Auth; + using Microsoft.WindowsAzure.Storage.Blob; + using Microsoft.WindowsAzure.Storage.File; + + /// + /// Class for various utils. + /// + internal static class Utils + { + private const int RequireBufferMaxRetryCount = 10; + + /// + /// Define the various possible size postfixes. 
+ /// + private static readonly string[] SizeFormats = + { + Resources.ReadableSizeFormatBytes, + Resources.ReadableSizeFormatKiloBytes, + Resources.ReadableSizeFormatMegaBytes, + Resources.ReadableSizeFormatGigaBytes, + Resources.ReadableSizeFormatTeraBytes, + Resources.ReadableSizeFormatPetaBytes, + Resources.ReadableSizeFormatExaBytes + }; + + /// + /// Translate a size in bytes to human readable form. + /// + /// Size in bytes. + /// Human readable form string. + public static string BytesToHumanReadableSize(double size) + { + int order = 0; + + while (size >= 1024 && order + 1 < SizeFormats.Length) + { + ++order; + size /= 1024; + } + + return string.Format(CultureInfo.CurrentCulture, SizeFormats[order], size); + } + + public static void CheckCancellation(CancellationTokenSource cancellationTokenSource) + { + if (cancellationTokenSource.IsCancellationRequested) + { + throw new OperationCanceledException(Resources.BlobTransferCancelledException); + } + } + + /// + /// Generate an AccessCondition instance of IfMatchETag with customer condition. + /// For download/copy, if it succeeded at the first operation to fetching attribute with customer condition, + /// it means that the blob totally meet the condition. + /// Here, only need to keep LeaseId in the customer condition for the following operations. + /// + /// ETag string. + /// Condition customer input in TransferLocation. + /// To specify whether have already verified the custom access condition against the blob. + /// AccessCondition instance of IfMatchETag with customer condition's LeaseId. 
+ public static AccessCondition GenerateIfMatchConditionWithCustomerCondition( + string etag, + AccessCondition customCondition, + bool checkedCustomAC = true) + { + if (!checkedCustomAC) + { + return customCondition; + } + + AccessCondition accessCondition = AccessCondition.GenerateIfMatchCondition(etag); + + if (null != customCondition) + { + accessCondition.LeaseId = customCondition.LeaseId; + } + + return accessCondition; + } + + public static bool DictionaryEquals( + this IDictionary firstDic, IDictionary secondDic) + { + if (firstDic == secondDic) + { + return true; + } + + if (firstDic == null || secondDic == null) + { + return false; + } + + if (firstDic.Count != secondDic.Count) + { + return false; + } + + foreach (var pair in firstDic) + { + string secondValue; + if (!secondDic.TryGetValue(pair.Key, out secondValue)) + { + return false; + } + + if (!string.Equals(pair.Value, secondValue, StringComparison.Ordinal)) + { + return false; + } + } + + return true; + } + + public static Attributes GenerateAttributes(CloudBlob blob) + { + return new Attributes() + { + CacheControl = blob.Properties.CacheControl, + ContentDisposition = blob.Properties.ContentDisposition, + ContentEncoding = blob.Properties.ContentEncoding, + ContentLanguage = blob.Properties.ContentLanguage, + ContentMD5 = blob.Properties.ContentMD5, + ContentType = blob.Properties.ContentType, + Metadata = blob.Metadata, + OverWriteAll = true + }; + } + + public static Attributes GenerateAttributes(CloudFile file) + { + return new Attributes() + { + CacheControl = file.Properties.CacheControl, + ContentDisposition = file.Properties.ContentDisposition, + ContentEncoding = file.Properties.ContentEncoding, + ContentLanguage = file.Properties.ContentLanguage, + ContentMD5 = file.Properties.ContentMD5, + ContentType = file.Properties.ContentType, + Metadata = file.Metadata, + OverWriteAll = true + }; + } + + public static void SetAttributes(CloudBlob blob, Attributes attributes) + { + if 
(attributes.OverWriteAll) + { + blob.Properties.CacheControl = attributes.CacheControl; + blob.Properties.ContentDisposition = attributes.ContentDisposition; + blob.Properties.ContentEncoding = attributes.ContentEncoding; + blob.Properties.ContentLanguage = attributes.ContentLanguage; + blob.Properties.ContentMD5 = attributes.ContentMD5; + blob.Properties.ContentType = attributes.ContentType; + + blob.Metadata.Clear(); + + foreach (var metadataPair in attributes.Metadata) + { + blob.Metadata.Add(metadataPair); + } + } + else + { + blob.Properties.ContentMD5 = attributes.ContentMD5; + if (null != attributes.ContentType) + { + blob.Properties.ContentType = attributes.ContentType; + } + } + } + + public static void SetAttributes(CloudFile file, Attributes attributes) + { + if (attributes.OverWriteAll) + { + file.Properties.CacheControl = attributes.CacheControl; + file.Properties.ContentDisposition = attributes.ContentDisposition; + file.Properties.ContentEncoding = attributes.ContentEncoding; + file.Properties.ContentLanguage = attributes.ContentLanguage; + file.Properties.ContentMD5 = attributes.ContentMD5; + file.Properties.ContentType = attributes.ContentType; + + file.Metadata.Clear(); + + foreach (var metadataPair in attributes.Metadata) + { + file.Metadata.Add(metadataPair); + } + } + else + { + file.Properties.ContentMD5 = attributes.ContentMD5; + + if (null != attributes.ContentType) + { + file.Properties.ContentType = attributes.ContentType; + } + } + } + + /// + /// Generate an AccessCondition instance with lease id customer condition. + /// For upload/copy, if it succeeded at the first operation to fetching destination attribute with customer condition, + /// it means that the blob totally meet the condition. + /// Here, only need to keep LeaseId in the customer condition for the following operations. + /// + /// Condition customer input in TransferLocation. + /// To specify whether have already verified the custom access condition against the blob. 
+ /// AccessCondition instance with customer condition's LeaseId. + public static AccessCondition GenerateConditionWithCustomerCondition( + AccessCondition customCondition, + bool checkedCustomAC = true) + { + if (!checkedCustomAC) + { + return customCondition; + } + + if ((null != customCondition) + && !string.IsNullOrEmpty(customCondition.LeaseId)) + { + return AccessCondition.GenerateLeaseCondition(customCondition.LeaseId); + } + + return null; + } + + /// + /// Generate a BlobRequestOptions with custom BlobRequestOptions. + /// We have default MaximumExecutionTime, ServerTimeout and RetryPolicy. + /// If user doesn't set these properties, we should use the default ones. + /// Others, we should the custom ones. + /// + /// BlobRequestOptions customer input in TransferLocation. + /// BlobRequestOptions instance with custom BlobRequestOptions properties. + public static BlobRequestOptions GenerateBlobRequestOptions( + BlobRequestOptions customRequestOptions) + { + if (null == customRequestOptions) + { + return Transfer_RequestOptions.DefaultBlobRequestOptions; + } + else + { + BlobRequestOptions requestOptions = Transfer_RequestOptions.DefaultBlobRequestOptions; + + AssignToRequestOptions(requestOptions, customRequestOptions); + + if (null != customRequestOptions.UseTransactionalMD5) + { + requestOptions.UseTransactionalMD5 = customRequestOptions.UseTransactionalMD5; + } + + requestOptions.DisableContentMD5Validation = customRequestOptions.DisableContentMD5Validation; + return requestOptions; + } + } + + /// + /// Generate a FileRequestOptions with custom FileRequestOptions. + /// We have default MaximumExecutionTime, ServerTimeout and RetryPolicy. + /// If user doesn't set these properties, we should use the default ones. + /// Others, we should the custom ones. + /// + /// FileRequestOptions customer input in TransferLocation. + /// FileRequestOptions instance with custom FileRequestOptions properties. 
+ public static FileRequestOptions GenerateFileRequestOptions( + FileRequestOptions customRequestOptions) + { + if (null == customRequestOptions) + { + return Transfer_RequestOptions.DefaultFileRequestOptions; + } + else + { + FileRequestOptions requestOptions = Transfer_RequestOptions.DefaultFileRequestOptions; + + AssignToRequestOptions(requestOptions, customRequestOptions); + + if (null != customRequestOptions.UseTransactionalMD5) + { + requestOptions.UseTransactionalMD5 = customRequestOptions.UseTransactionalMD5; + } + + requestOptions.DisableContentMD5Validation = customRequestOptions.DisableContentMD5Validation; + return requestOptions; + } + } + + /// + /// Generate an OperationContext from the the specified TransferContext. + /// + /// Transfer context + /// An object. + public static OperationContext GenerateOperationContext( + TransferContext transferContext) + { + if (transferContext == null) + { + return null; + } + + return new OperationContext() + { + ClientRequestID = transferContext.ClientRequestId, + LogLevel = transferContext.LogLevel, + }; + } + + public static CloudBlob GetBlobReference(Uri blobUri, StorageCredentials credentials, BlobType blobType) + { + switch (blobType) + { + case BlobType.BlockBlob: + return new CloudBlockBlob(blobUri, credentials); + case BlobType.PageBlob: + return new CloudPageBlob(blobUri, credentials); + case BlobType.AppendBlob: + return new CloudAppendBlob(blobUri, credentials); + default: + throw new InvalidOperationException( + string.Format( + CultureInfo.CurrentCulture, + Resources.NotSupportedBlobType, + blobType)); + } + } + + public static byte[] RequireBuffer(MemoryManager memoryManager, Action checkCancellation) + { + byte[] buffer; + buffer = memoryManager.RequireBuffer(); + + if (null == buffer) + { + int retryCount = 0; + int retryInterval = 100; + while ((retryCount < RequireBufferMaxRetryCount) + && (null == buffer)) + { + checkCancellation(); + retryInterval <<= 1; + Thread.Sleep(retryInterval); + 
buffer = memoryManager.RequireBuffer(); + ++retryCount; + } + } + + if (null == buffer) + { + throw new TransferException( + TransferErrorCode.FailToAllocateMemory, + Resources.FailedToAllocateMemoryException); + } + + return buffer; + } + + private static void AssignToRequestOptions(IRequestOptions targetRequestOptions, IRequestOptions customRequestOptions) + { + if (null != customRequestOptions.MaximumExecutionTime) + { + targetRequestOptions.MaximumExecutionTime = customRequestOptions.MaximumExecutionTime; + } + + if (null != customRequestOptions.RetryPolicy) + { + targetRequestOptions.RetryPolicy = customRequestOptions.RetryPolicy; + } + + if (null != customRequestOptions.ServerTimeout) + { + targetRequestOptions.ServerTimeout = customRequestOptions.ServerTimeout; + } + + targetRequestOptions.LocationMode = customRequestOptions.LocationMode; + } + } +} diff --git a/lib/packages.config b/lib/packages.config new file mode 100644 index 00000000..f48ace35 --- /dev/null +++ b/lib/packages.config @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/samples/DataMovementSamples/DataMovementSamples.sln b/samples/DataMovementSamples/DataMovementSamples.sln new file mode 100644 index 00000000..bc55af7b --- /dev/null +++ b/samples/DataMovementSamples/DataMovementSamples.sln @@ -0,0 +1,22 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 2013 +VisualStudioVersion = 12.0.21005.1 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DataMovementSamples", "DataMovementSamples\DataMovementSamples.csproj", "{6004824E-4A84-463E-9094-451B253470CC}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {6004824E-4A84-463E-9094-451B253470CC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{6004824E-4A84-463E-9094-451B253470CC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6004824E-4A84-463E-9094-451B253470CC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6004824E-4A84-463E-9094-451B253470CC}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/samples/DataMovementSamples/DataMovementSamples/App.config b/samples/DataMovementSamples/DataMovementSamples/App.config new file mode 100644 index 00000000..9d612be5 --- /dev/null +++ b/samples/DataMovementSamples/DataMovementSamples/App.config @@ -0,0 +1,7 @@ + + + + + + + \ No newline at end of file diff --git a/samples/DataMovementSamples/DataMovementSamples/DataMovementSamples.csproj b/samples/DataMovementSamples/DataMovementSamples/DataMovementSamples.csproj new file mode 100644 index 00000000..877afa04 --- /dev/null +++ b/samples/DataMovementSamples/DataMovementSamples/DataMovementSamples.csproj @@ -0,0 +1,97 @@ + + + + + Debug + AnyCPU + {6004824E-4A84-463E-9094-451B253470CC} + Exe + Properties + DataMovementSamples + DataMovementSamples + v4.5 + 512 + + + AnyCPU + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + + + AnyCPU + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + + + + ..\packages\Microsoft.Data.Edm.5.6.4\lib\net40\Microsoft.Data.Edm.dll + True + + + ..\packages\Microsoft.Data.OData.5.6.4\lib\net40\Microsoft.Data.OData.dll + True + + + ..\packages\Microsoft.Data.Services.Client.5.6.4\lib\net40\Microsoft.Data.Services.Client.dll + True + + + ..\packages\Microsoft.WindowsAzure.ConfigurationManager.1.8.0.0\lib\net35-full\Microsoft.WindowsAzure.Configuration.dll + True + + + ..\packages\WindowsAzure.Storage.5.0.0\lib\net40\Microsoft.WindowsAzure.Storage.dll + True + + + ..\packages\Microsoft.Azure.Storage.DataMovement.0.0.76\lib\net45\Microsoft.WindowsAzure.Storage.DataMovement.dll + True + + + ..\packages\Newtonsoft.Json.6.0.8\lib\net45\Newtonsoft.Json.dll 
+ True + + + + + ..\packages\System.Spatial.5.6.4\lib\net40\System.Spatial.dll + True + + + + + + + + + + + + + + + + + + + PreserveNewest + + + + + \ No newline at end of file diff --git a/samples/DataMovementSamples/DataMovementSamples/Properties/AssemblyInfo.cs b/samples/DataMovementSamples/DataMovementSamples/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..81f9e3f3 --- /dev/null +++ b/samples/DataMovementSamples/DataMovementSamples/Properties/AssemblyInfo.cs @@ -0,0 +1,42 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("DataMovementSamples")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("DataMovementSamples")] +[assembly: AssemblyCopyright("Copyright © 2015")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. 
+[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("d4c65175-d41e-4b9e-a898-ff45d07ee90c")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/samples/DataMovementSamples/DataMovementSamples/Samples.cs b/samples/DataMovementSamples/DataMovementSamples/Samples.cs new file mode 100644 index 00000000..896621ab --- /dev/null +++ b/samples/DataMovementSamples/DataMovementSamples/Samples.cs @@ -0,0 +1,258 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DataMovementSamples +{ + using System; + using System.Collections.Generic; + using System.IO; + using System.Threading; + using System.Threading.Tasks; + using Microsoft.WindowsAzure.Storage.Blob; + using Microsoft.WindowsAzure.Storage.DataMovement; + using Microsoft.WindowsAzure.Storage.File; + + public class Samples + { + public static void Main(string[] args) + { + Console.WriteLine("Data movement upload sample."); + UploadSample().Wait(); + + Console.WriteLine(); + Console.WriteLine("Data movement copy sample."); + CopySample().Wait(); + + Console.WriteLine(); + Console.WriteLine("Data movement download sample."); + DownloadSample().Wait(); + + Console.WriteLine(); + Console.WriteLine("Cleanup generated data."); + Cleanup(); + } + + /// + /// Container name used in this sample. + /// + private const string ContainerName = "samplecontainer"; + + /// + /// Share name used in this sample. 
+ /// + private const string ShareName = "sampleshare"; + + /// + /// Upload a local picture to azure storage. + /// 1. Upload a local picture as a block blob. + /// 2. Set its content type to "image/png". + /// + private static async Task UploadSample() + { + string sourceFileName = "azure.png"; + string destinationBlobName = "azure_blockblob.png"; + + // Create the destination CloudBlob instance + CloudBlob destinationBlob = Util.GetCloudBlob(ContainerName, destinationBlobName, BlobType.BlockBlob); + + // Use UploadOptions to set ContentType of destination CloudBlob + UploadOptions options = new UploadOptions(); + options.ContentType = "image/png"; + + // Start the upload + await TransferManager.UploadAsync(sourceFileName, destinationBlob, options, null /* context */); + Console.WriteLine("File {0} is uploaded to {1} successfully.", sourceFileName, destinationBlob.Uri.ToString()); + } + + /// + /// Copy data between Azure storage. + /// 1. Copy a CloudBlob as a CloudFile. + /// 2. Cancel the transfer before it finishes with a CancellationToken + /// 3. Store the transfer checkpoint after transfer being cancelled + /// 4. 
Resume the transfer with the stored checkpoint + /// + private static async Task CopySample() + { + string sourceBlobName = "azure_blockblob.png"; + string destinationFileName = "azure_cloudfile.png"; + + // Create the source CloudBlob instance + CloudBlob sourceBlob = Util.GetCloudBlob(ContainerName, sourceBlobName, BlobType.BlockBlob); + + // Create the destination CloudFile instance + CloudFile destinationFile = Util.GetCloudFile(ShareName, destinationFileName); + + // Create CancellationTokenSource used to cancel the transfer + CancellationTokenSource cancellationSource = new CancellationTokenSource(); + + TransferCheckpoint checkpoint = null; + TransferContext context = new TransferContext(); + + // Cancel the transfer after there's any progress reported + Progress progress = new Progress( + (transferProgress) => { + if (!cancellationSource.IsCancellationRequested) + { + Console.WriteLine("Cancel the transfer."); + + // Cancel the transfer + cancellationSource.Cancel(); + + // Store the transfer checkpoint + checkpoint = context.LastCheckpoint; + } + }); + + context.ProgressHandler = progress; + + // Start the transfer + try + { + await TransferManager.CopyAsync(sourceBlob, destinationFile, false /* isServiceCopy */, null /* options */, context, cancellationSource.Token); + } + catch (TaskCanceledException e) + { + Console.WriteLine("The transfer is cancelled: {0}", e.Message); + } + + // Create a new TransferContext with the store checkpoint + TransferContext resumeContext = new TransferContext(checkpoint); + + // Resume transfer from the stored checkpoint + Console.WriteLine("Resume the cancelled transfer."); + await TransferManager.CopyAsync(sourceBlob, destinationFile, false /* isServiceCopy */, null /* options */, resumeContext); + Console.WriteLine("CloudBlob {0} is copied to {1} successfully.", sourceBlob.Uri.ToString(), destinationFile.Uri.ToString()); + } + + /// + /// Download data from Azure storage. + /// 1. 
Download a CloudBlob to an exsiting local file + /// 2. Query the user to overwrite the local file or not in the OverwriteCallback + /// 3. Download a CloudFile to local with content MD5 validation disabled + /// 4. Show the overall progress of both transfers + /// + private static async Task DownloadSample() + { + string sourceBlobName = "azure_blockblob.png"; + string sourceFileName = "azure_cloudfile.png"; + string destinationFileName1 = "azure.png"; + string destinationFileName2 = "azure_new.png"; + + // Create the source CloudBlob instance + CloudBlob sourceBlob = Util.GetCloudBlob(ContainerName, sourceBlobName, BlobType.BlockBlob); + + // Create the source CloudFile instance + CloudFile sourceFile = Util.GetCloudFile(ShareName, sourceFileName); + + // Create a TransferContext shared by both transfers + TransferContext sharedTransferContext = new TransferContext(); + + // Show overwrite prompt in console when OverwriteCallback is triggered + sharedTransferContext.OverwriteCallback = (source, destination) => + { + Console.WriteLine("{0} already exists. Do you want to overwrite it with {1}? (Y/N)", destination, source); + + while (true) + { + ConsoleKeyInfo keyInfo = Console.ReadKey(true); + char key = keyInfo.KeyChar; + + if (key == 'y' || key == 'Y') + { + Console.WriteLine("User choose to overwrite the destination."); + return true; + } + else if (key == 'n' || key == 'N') + { + Console.WriteLine("User choose NOT to overwrite the destination."); + return false; + } + + Console.WriteLine("Please press 'y' or 'n'."); + } + }; + + // Record the overall progress + ProgressRecorder recorder = new ProgressRecorder(); + sharedTransferContext.ProgressHandler = recorder; + + // Start the blob download + Task task1 = TransferManager.DownloadAsync(sourceBlob, destinationFileName1, null /* options */, sharedTransferContext); + + // Create a DownloadOptions to disable md5 check after data is downloaded. 
Otherwise, data movement + // library will check the md5 checksum stored in the ContentMD5 property of the source CloudFile/CloudBlob + // You can uncomment following codes, enable ContentMD5Validation and have a try. + // sourceFile.Properties.ContentMD5 = "WrongMD5"; + // sourceFile.SetProperties(); + DownloadOptions options = new DownloadOptions(); + options.DisableContentMD5Validation = true; + + // Start the download + Task task2 = TransferManager.DownloadAsync(sourceFile, destinationFileName2, options, sharedTransferContext); + + // Wait for both transfers to finish + try + { + await task1; + } + catch(TransferException e) + { + // Data movement library will throw a TransferException when user choose to not overwrite the existing destination + Console.WriteLine(e.Message); + } + + await task2; + + // Print out the final transfer state + Console.WriteLine("Final transfer state: {0}", recorder.ToString()); + } + + /// + /// Cleanup all data generated by this sample. + /// + private static void Cleanup() + { + Console.Write("Deleting container..."); + Util.DeleteContainer(ContainerName); + Console.WriteLine("Done"); + + Console.Write("Deleting share..."); + Util.DeleteShare(ShareName); + Console.WriteLine("Done"); + + Console.Write("Deleting local file..."); + File.Delete("azure_new.png"); + Console.WriteLine("Done"); + } + + /// + /// A helper class to record progress reported by data movement library in console. 
+ /// + class ProgressRecorder : IProgress + { + private long latestBytesTransferred; + private long latestNumberOfFilesTransferred; + private long latestNumberOfFilesSkipped; + private long latestNumberOfFilesFailed; + + public void Report(TransferProgress progress) + { + this.latestBytesTransferred = progress.BytesTransferred; + this.latestNumberOfFilesTransferred = progress.NumberOfFilesTransferred; + this.latestNumberOfFilesSkipped = progress.NumberOfFilesSkipped; + this.latestNumberOfFilesFailed = progress.NumberOfFilesFailed; + } + + public override string ToString() + { + return string.Format("Transferred bytes: {0}; Transfered: {1}; Skipped: {2}, Failed: {3}", + this.latestBytesTransferred, + this.latestNumberOfFilesTransferred, + this.latestNumberOfFilesSkipped, + this.latestNumberOfFilesFailed); + } + } + } +} diff --git a/samples/DataMovementSamples/DataMovementSamples/Util.cs b/samples/DataMovementSamples/DataMovementSamples/Util.cs new file mode 100644 index 00000000..728e14b7 --- /dev/null +++ b/samples/DataMovementSamples/DataMovementSamples/Util.cs @@ -0,0 +1,131 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DataMovementSamples +{ + using System; + using Microsoft.WindowsAzure; + using Microsoft.WindowsAzure.Storage; + using Microsoft.WindowsAzure.Storage.Blob; + using Microsoft.WindowsAzure.Storage.File; + + /// + /// A helper class provides convenient operations against storage account configured in the App.config. + /// + public class Util + { + private static CloudStorageAccount storageAccount; + private static CloudBlobClient blobClient; + private static CloudFileClient fileClient; + + /// + /// Get a CloudBlob instance with the specified name and type in the given container. + /// + /// Container name. + /// Blob name. + /// Type of blob. 
+ /// A CloudBlob instance with the specified name and type in the given container. + public static CloudBlob GetCloudBlob(string containerName, string blobName, BlobType blobType) + { + CloudBlobClient client = GetCloudBlobClient(); + CloudBlobContainer container = client.GetContainerReference(containerName); + container.CreateIfNotExists(); + + CloudBlob cloudBlob; + switch (blobType) + { + case BlobType.AppendBlob: + cloudBlob = container.GetAppendBlobReference(blobName); + break; + case BlobType.BlockBlob: + cloudBlob = container.GetBlockBlobReference(blobName); + break; + case BlobType.PageBlob: + cloudBlob = container.GetPageBlobReference(blobName); + break; + case BlobType.Unspecified: + default: + throw new ArgumentException(string.Format("Invalid blob type {0}", blobType.ToString()), "blobType"); + } + + return cloudBlob; + } + + /// + /// Get a CloudFile instance with the specified name in the given share. + /// + /// Share name. + /// File name. + /// A CloudFile instance with the specified name in the given share. + public static CloudFile GetCloudFile(string shareName, string fileName) + { + CloudFileClient client = GetCloudFileClient(); + CloudFileShare share = client.GetShareReference(shareName); + share.CreateIfNotExists(); + + CloudFileDirectory rootDirectory = share.GetRootDirectoryReference(); + return rootDirectory.GetFileReference(fileName); + } + + /// + /// Delete the share with the specified name if it exists. + /// + /// Name of share to delete. + public static void DeleteShare(string shareName) + { + CloudFileClient client = GetCloudFileClient(); + CloudFileShare share = client.GetShareReference(shareName); + share.DeleteIfExists(); + } + + /// + /// Delete the container with the specified name if it exists. + /// + /// Name of container to delete. 
+ public static void DeleteContainer(string containerName) + { + CloudBlobClient client = GetCloudBlobClient(); + CloudBlobContainer container = client.GetContainerReference(containerName); + container.DeleteIfExists(); + } + + private static CloudBlobClient GetCloudBlobClient() + { + if (Util.blobClient == null) + { + Util.blobClient = GetStorageAccount().CreateCloudBlobClient(); + } + + return Util.blobClient; + } + + private static CloudFileClient GetCloudFileClient() + { + if (Util.fileClient == null) + { + Util.fileClient = GetStorageAccount().CreateCloudFileClient(); + } + + return Util.fileClient; + } + + private static string LoadConnectionStringFromConfigration() + { + // How to create a storage connection string: http://msdn.microsoft.com/en-us/library/azure/ee758697.aspx + return CloudConfigurationManager.GetSetting("StorageConnectionString"); + } + + private static CloudStorageAccount GetStorageAccount() + { + if (Util.storageAccount == null) + { + string connectionString = LoadConnectionStringFromConfigration(); + Util.storageAccount = CloudStorageAccount.Parse(connectionString); + } + + return Util.storageAccount; + } + } +} diff --git a/samples/DataMovementSamples/DataMovementSamples/azure.png b/samples/DataMovementSamples/DataMovementSamples/azure.png new file mode 100644 index 00000000..f50ba5b7 Binary files /dev/null and b/samples/DataMovementSamples/DataMovementSamples/azure.png differ diff --git a/samples/DataMovementSamples/DataMovementSamples/packages.config b/samples/DataMovementSamples/DataMovementSamples/packages.config new file mode 100644 index 00000000..3c3755c4 --- /dev/null +++ b/samples/DataMovementSamples/DataMovementSamples/packages.config @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/test/DMLibTest/Cases/AccessConditionTest.cs b/test/DMLibTest/Cases/AccessConditionTest.cs new file mode 100644 index 00000000..4c8addfb --- /dev/null +++ b/test/DMLibTest/Cases/AccessConditionTest.cs @@ -0,0 +1,132 @@ 
+//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest.Cases +{ + using System; + using System.Net; + using DMLibTestCodeGen; + using Microsoft.VisualStudio.TestTools.UnitTesting; + using Microsoft.WindowsAzure.Storage; + using Microsoft.WindowsAzure.Storage.DataMovement; + using MS.Test.Common.MsTestLib; + + [MultiDirectionTestClass] + public class AccessConditionTest : DMLibTestBase + { + #region Additional test attributes + [ClassInitialize()] + public static void MyClassInitialize(TestContext testContext) + { + DMLibTestBase.BaseClassInitialize(testContext); + } + + [ClassCleanup()] + public static void MyClassCleanup() + { + DMLibTestBase.BaseClassCleanup(); + } + + [TestInitialize()] + public void MyTestInitialize() + { + base.BaseTestInitialize(); + } + + [TestCleanup()] + public void MyTestCleanup() + { + base.BaseTestCleanup(); + } + #endregion + + [TestCategory(Tag.Function)] + [DMLibTestMethodSet(DMLibTestMethodSet.CloudBlobSource)] + public void TestSourceAccessCondition() + { + this.TestAccessCondition(SourceOrDest.Source); + } + + [TestCategory(Tag.Function)] + [DMLibTestMethodSet(DMLibTestMethodSet.CloudBlobDest)] + public void TestDestAccessCondition() + { + this.TestAccessCondition(SourceOrDest.Dest); + } + + private void TestAccessCondition(SourceOrDest sourceOrDest) + { + string eTag = "notmatch"; + AccessCondition accessCondition = new AccessCondition() + { + IfMatchETag = eTag + }; + + DMLibDataInfo sourceDataInfo = new DMLibDataInfo(string.Empty); + DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, DMLibTestBase.FileName, 1024); + + DMLibDataInfo destDataInfo = new DMLibDataInfo(string.Empty); + DMLibDataHelper.AddOneFileInBytes(destDataInfo.RootNode, DMLibTestBase.FileName, 1024); + + var options = new TestExecutionOptions(); + + if (sourceOrDest == 
SourceOrDest.Dest) + { + options.DestTransferDataInfo = destDataInfo; + } + + options.TransferItemModifier = (fileNode, transferItem) => + { + dynamic transferOptions = DefaultTransferOptions; + + if (sourceOrDest == SourceOrDest.Source) + { + transferOptions.SourceAccessCondition = accessCondition; + } + else + { + transferOptions.DestinationAccessCondition = accessCondition; + } + + transferItem.Options = transferOptions; + }; + + var result = this.ExecuteTestCase(sourceDataInfo, options); + + if (sourceOrDest == SourceOrDest.Dest) + { + Test.Assert(DMLibDataHelper.Equals(destDataInfo, result.DataInfo), "Verify no file is transferred."); + } + else + { + if (DMLibTestContext.DestType != DMLibDataType.Stream) + { + Test.Assert(DMLibDataHelper.Equals(new DMLibDataInfo(string.Empty), result.DataInfo), "Verify no file is transferred."); + } + else + { + foreach(var fileNode in result.DataInfo.EnumerateFileNodes()) + { + Test.Assert(fileNode.SizeInByte == 0, "Verify file {0} is empty", fileNode.Name); + } + } + } + + // Verify TransferException + if (result.Exceptions.Count != 1) + { + Test.Error("There should be exactly one exceptions."); + return; + } + + Exception exception = result.Exceptions[0]; + VerificationHelper.VerifyTransferException(exception, TransferErrorCode.Unknown); + + // Verify innner StorageException + VerificationHelper.VerifyStorageException(exception.InnerException, (int)HttpStatusCode.PreconditionFailed, + "The condition specified using HTTP conditional header(s) is not met."); + } + } +} diff --git a/test/DMLibTest/Cases/AllTransferDirectionTest.cs b/test/DMLibTest/Cases/AllTransferDirectionTest.cs new file mode 100644 index 00000000..eb5c9d04 --- /dev/null +++ b/test/DMLibTest/Cases/AllTransferDirectionTest.cs @@ -0,0 +1,418 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + 
+namespace DMLibTest +{ + using System; + using System.Collections.Generic; + using System.Linq; + using System.Threading; + using DMLibTestCodeGen; + using Microsoft.VisualStudio.TestTools.UnitTesting; + using Microsoft.WindowsAzure.Storage.DataMovement; + using MS.Test.Common.MsTestLib; + + [TestClass] + public class AllTransferDirectionTest : DMLibTestBase + { + #region Additional test attributes + [ClassInitialize()] + public static void MyClassInitialize(TestContext testContext) + { + DMLibTestBase.BaseClassInitialize(testContext); + } + + [ClassCleanup()] + public static void MyClassCleanup() + { + DMLibTestBase.BaseClassCleanup(); + } + + [TestInitialize()] + public void MyTestInitialize() + { + base.BaseTestInitialize(); + } + + [TestCleanup()] + public void MyTestCleanup() + { + base.BaseTestCleanup(); + } + #endregion + + private Dictionary PrepareSourceData(long fileSizeInB) + { + var sourceFileNodes = new Dictionary(); + var sourceDataInfos = new Dictionary(); + + // Prepare source data info + foreach (DMLibTransferDirection direction in GetAllValidDirections()) + { + string fileName = GetTransferFileName(direction); + + DMLibDataInfo sourceDataInfo; + string sourceDataInfoKey; + if (direction.SourceType != DMLibDataType.URI) + { + sourceDataInfoKey = direction.SourceType.ToString(); + } + else + { + sourceDataInfoKey = GetTransferFileName(direction); + } + + if (sourceDataInfos.ContainsKey(sourceDataInfoKey)) + { + sourceDataInfo = sourceDataInfos[sourceDataInfoKey]; + } + else + { + sourceDataInfo = new DMLibDataInfo(string.Empty); + sourceDataInfos[sourceDataInfoKey] = sourceDataInfo; + } + + DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, fileName, fileSizeInB); + + FileNode sourceFileNode = sourceDataInfo.RootNode.GetFileNode(fileName); + sourceFileNodes.Add(fileName, sourceFileNode); + } + + // Generate source data + foreach (var pair in sourceDataInfos) + { + DMLibDataType sourceDataType; + if (Enum.TryParse(pair.Key, out 
sourceDataType)) + { + DataAdaptor sourceAdaptor = GetSourceAdaptor(sourceDataType); + sourceAdaptor.Cleanup(); + sourceAdaptor.CreateIfNotExists(); + + sourceAdaptor.GenerateData(pair.Value); + } + } + + // Generate source data for URI source separately since it's destination related + DataAdaptor uriSourceAdaptor = GetSourceAdaptor(DMLibDataType.URI); + uriSourceAdaptor.Cleanup(); + uriSourceAdaptor.CreateIfNotExists(); + + DMLibTestContext.SourceType = DMLibDataType.URI; + DMLibTestContext.IsAsync = true; + + DMLibDataType[] uriDestDataTypes = { DMLibDataType.CloudFile, DMLibDataType.BlockBlob, DMLibDataType.PageBlob, DMLibDataType.AppendBlob }; + foreach (DMLibDataType uriDestDataType in uriDestDataTypes) + { + DMLibTestContext.DestType = uriDestDataType; + string sourceDataInfoKey = GetTransferFileName(DMLibDataType.URI, uriDestDataType, true); + + uriSourceAdaptor.GenerateData(sourceDataInfos[sourceDataInfoKey]); + } + + // Clean up destination + foreach (DMLibDataType destDataType in DataTypes) + { + if (destDataType != DMLibDataType.URI) + { + DataAdaptor destAdaptor = GetDestAdaptor(destDataType); + destAdaptor.Cleanup(); + destAdaptor.CreateIfNotExists(); + } + } + + return sourceFileNodes; + } + + private List GetTransformItemsForAllDirections(Dictionary fileNodes) + { + List allItems = new List(); + foreach (DMLibTransferDirection direction in GetAllValidDirections()) + { + string fileName = GetTransferFileName(direction); + DataAdaptor sourceAdaptor = GetSourceAdaptor(direction.SourceType); + DataAdaptor destAdaptor = GetDestAdaptor(direction.DestType); + + FileNode fileNode = fileNodes[fileName]; + TransferItem item = new TransferItem() + { + SourceObject = sourceAdaptor.GetTransferObject(fileNode), + DestObject = destAdaptor.GetTransferObject(fileNode), + SourceType = direction.SourceType, + DestType = direction.DestType, + IsServiceCopy = direction.IsAsync, + }; + allItems.Add(item); + } + + return allItems; + } + + [TestMethod] + 
[TestCategory(Tag.Function)] + public void ResumeInAllDirections() + { + long fileSizeInByte = 10 * 1024 * 1024; + Dictionary sourceFileNodes = this.PrepareSourceData(fileSizeInByte); + List allItems = this.GetTransformItemsForAllDirections(sourceFileNodes); + + int fileCount = sourceFileNodes.Keys.Count; + + // Execution and store checkpoints + CancellationTokenSource tokenSource = new CancellationTokenSource(); + + var transferContext = new TransferContext(); + var progressChecker = new ProgressChecker(fileCount, fileSizeInByte * fileCount); + transferContext.ProgressHandler = progressChecker.GetProgressHandler(); + allItems.ForEach(item => + { + item.CancellationToken = tokenSource.Token; + item.TransferContext = transferContext; + }); + + var options = new TestExecutionOptions(); + options.DisableDestinationFetch = true; + + // Checkpoint names + const string PartialStarted = "PartialStarted", + AllStarted = "AllStarted", + AllStartedAndWait = "AllStartedAndWait", + BeforeCancel = "BeforeCancel", + AfterCancel = "AfterCancel"; + Dictionary checkpoints = new Dictionary(); + + TransferItem randomItem = allItems[random.Next(0, allItems.Count)]; + + randomItem.AfterStarted = () => + { + Test.Info("Store check point after transfer item: {0}.", randomItem.ToString()); + checkpoints.Add(PartialStarted, transferContext.LastCheckpoint); + }; + + options.AfterAllItemAdded = () => + { + progressChecker.DataTransferred.WaitOne(); + checkpoints.Add(AllStarted, transferContext.LastCheckpoint); + Thread.Sleep(1000); + checkpoints.Add(AllStartedAndWait, transferContext.LastCheckpoint); + Thread.Sleep(1000); + checkpoints.Add(BeforeCancel, transferContext.LastCheckpoint); + tokenSource.Cancel(); + checkpoints.Add(AfterCancel, transferContext.LastCheckpoint); + }; + + var result = this.RunTransferItems(allItems, options); + + // Resume with stored checkpoints in random order + var checkpointList = new List>(); + checkpointList.AddRange(checkpoints); + checkpointList.Shuffle(); + 
+ foreach(var pair in checkpointList) + { + Test.Info("===Resume with checkpoint '{0}'===", pair.Key); + options = new TestExecutionOptions(); + options.DisableDestinationFetch = true; + + progressChecker.Reset(); + transferContext = new TransferContext(pair.Value) + { + ProgressHandler = progressChecker.GetProgressHandler(), + + // The checkpoint can be stored when DMLib doesn't check overwrite callback yet. + // So it will case an skip file error if the desination file already exists and + // We don't have overwrite callback here. + OverwriteCallback = DMLibInputHelper.GetDefaultOverwiteCallbackY() + }; + + List itemsToResume = allItems.Select(item => + { + TransferItem itemToResume = item.Clone(); + itemToResume.TransferContext = transferContext; + return itemToResume; + }).ToList(); + + result = this.RunTransferItems(itemsToResume, options); + + int resumeFailCount = 0; + foreach (DMLibDataType destDataType in DataTypes) + { + DataAdaptor destAdaptor = GetSourceAdaptor(destDataType); + DMLibDataInfo destDataInfo = destAdaptor.GetTransferDataInfo(string.Empty); + + foreach (FileNode destFileNode in destDataInfo.EnumerateFileNodes()) + { + string fileName = destFileNode.Name; + if (!fileName.Contains(DMLibDataType.Stream.ToString())) + { + FileNode sourceFileNode = sourceFileNodes[fileName]; + Test.Assert(DMLibDataHelper.Equals(sourceFileNode, destFileNode), "Verify transfer result."); + } + else + { + resumeFailCount++; + } + } + } + + Test.Assert(result.Exceptions.Count == resumeFailCount, "Verify resume failure count: expected {0}, actual {1}.", resumeFailCount, result.Exceptions.Count); + + foreach (var resumeException in result.Exceptions) + { + Test.Assert(resumeException is NotSupportedException, "Verify resume exception is NotSupportedException."); + } + } + } + + [TestMethod] + [TestCategory(Tag.BVT)] + public void TransferInAllDirections() + { + // Prepare source data + Dictionary sourceFileNodes = this.PrepareSourceData(10 * 1024 * 1024); + List 
allItems = this.GetTransformItemsForAllDirections(sourceFileNodes); + + // Execution + var result = this.RunTransferItems(allItems, new TestExecutionOptions()); + + // Verify all files are transfered successfully + Test.Assert(result.Exceptions.Count == 0, "Verify no exception occurs."); + foreach (DMLibDataType destDataType in DataTypes) + { + DataAdaptor destAdaptor = GetSourceAdaptor(destDataType); + DMLibDataInfo destDataInfo = destAdaptor.GetTransferDataInfo(string.Empty); + + foreach (FileNode destFileNode in destDataInfo.EnumerateFileNodes()) + { + FileNode sourceFileNode = sourceFileNodes[destFileNode.Name]; + Test.Assert(DMLibDataHelper.Equals(sourceFileNode, destFileNode), "Verify transfer result."); + } + } + } + + private static string GetTransferFileName(DMLibTransferDirection direction) + { + return GetTransferFileName(direction.SourceType, direction.DestType, direction.IsAsync); + } + + private static string GetTransferFileName(DMLibDataType sourceType, DMLibDataType destType, bool isAsync) + { + return sourceType.ToString() + destType.ToString() + (isAsync ? 
"async" : ""); + } + + private static IEnumerable GetAllValidDirections() + { + for (int sourceIndex = 0; sourceIndex < DataTypes.Length; ++sourceIndex) + { + for (int destIndex = 0; destIndex < DataTypes.Length; ++destIndex) + { + DMLibDataType sourceDataType = DataTypes[sourceIndex]; + DMLibDataType destDataType = DataTypes[destIndex]; + + if (validSyncDirections[sourceIndex][destIndex]) + { + yield return new DMLibTransferDirection() + { + SourceType = sourceDataType, + DestType = destDataType, + IsAsync = false, + }; + } + + if (validAsyncDirections[sourceIndex][destIndex]) + { + yield return new DMLibTransferDirection() + { + SourceType = sourceDataType, + DestType = destDataType, + IsAsync = true, + }; + } + } + } + } + + // [SourceType][DestType] + private static bool[][] validSyncDirections = + { + // stream, uri, local, xsmb, block, page, append + new bool[] {false, false, false, true, true, true, true}, // stream + new bool[] {false, false, false, false, false, false, false}, // uri + new bool[] {false, false, false, true, true, true, true}, // local + new bool[] {true, false, true, true, true, true, true}, // xsmb + new bool[] {true, false, true, true, true, false, false}, // block + new bool[] {true, false, true, true, false, true, false}, // page + new bool[] {true, false, true, true, false, false, true}, // append + }; + + // [SourceType][DestType] + private static bool[][] validAsyncDirections = + { + // stream, uri, local, xsmb, block, page, append + new bool[] {false, false, false, false, false, false, false}, // stream + new bool[] {false, false, false, true, true, true, true}, // uri + new bool[] {false, false, false, false, false, false, false}, // local + new bool[] {false, false, false, true, true, false, false}, // xsmb + new bool[] {false, false, false, true, true, false, false}, // block + new bool[] {false, false, false, true, false, true, false}, // page + new bool[] {false, false, false, true, false, false, true}, // append + }; + + 
private static DMLibDataType[] DataTypes = + { + DMLibDataType.Stream, + DMLibDataType.URI, + DMLibDataType.Local, + DMLibDataType.CloudFile, + DMLibDataType.BlockBlob, + DMLibDataType.PageBlob, + DMLibDataType.AppendBlob + }; + + private static int GetValidDirectionsIndex(DMLibDataType dataType) + { + switch (dataType) + { + case DMLibDataType.Stream: + return 0; + case DMLibDataType.URI: + return 1; + case DMLibDataType.Local: + return 2; + case DMLibDataType.CloudFile: + return 3; + case DMLibDataType.BlockBlob: + return 4; + case DMLibDataType.PageBlob: + return 5; + case DMLibDataType.AppendBlob: + return 6; + default: + throw new ArgumentException(string.Format("Invalid data type {0}", dataType), "dataType");; + } + } + } + + internal class DMLibTransferDirection + { + public DMLibDataType SourceType + { + get; + set; + } + + public DMLibDataType DestType + { + get; + set; + } + + public bool IsAsync + { + get; + set; + } + } +} diff --git a/test/DMLibTest/Cases/BVT.cs b/test/DMLibTest/Cases/BVT.cs new file mode 100644 index 00000000..989402f5 --- /dev/null +++ b/test/DMLibTest/Cases/BVT.cs @@ -0,0 +1,179 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest +{ + using System; + using System.Collections.Generic; + using System.Linq; + using System.Threading.Tasks; + using DMLibTestCodeGen; + using Microsoft.VisualStudio.TestTools.UnitTesting; + using Microsoft.WindowsAzure.Storage.Blob; + using Microsoft.WindowsAzure.Storage.File; + using MS.Test.Common.MsTestLib; + + [MultiDirectionTestClass] + public class BVT : DMLibTestBase + { + #region Additional test attributes + [ClassInitialize()] + public static void MyClassInitialize(TestContext testContext) + { + DMLibTestBase.BaseClassInitialize(testContext); + BVT.UnicodeFileName = FileOp.NextString(random, random.Next(6, 10)); + 
Test.Info("Use file name {0} in BVT.", UnicodeFileName); + } + + [ClassCleanup()] + public static void MyClassCleanup() + { + DMLibTestBase.BaseClassCleanup(); + } + + [TestInitialize()] + public void MyTestInitialize() + { + base.BaseTestInitialize(); + } + + [TestCleanup()] + public void MyTestCleanup() + { + base.BaseTestCleanup(); + } + #endregion + + private static string UnicodeFileName; + + [TestCategory(Tag.BVT)] + [DMLibTestMethodSet(DMLibTestMethodSet.AllValidDirection)] + public void TransferDifferentSizeObject() + { + DMLibDataInfo sourceDataInfo = new DMLibDataInfo(string.Empty); + DMLibDataHelper.AddMultipleFilesNormalSize(sourceDataInfo.RootNode, BVT.UnicodeFileName); + + var options = new TestExecutionOptions(); + options.AfterDataPrepared = () => + { + if ((DMLibTestContext.SourceType == DMLibDataType.CloudFile || DMLibTestContext.SourceType == DMLibDataType.PageBlob) && + !DMLibTestContext.IsAsync) + { + string sparseFileName = "SparseFile"; + + DMLibDataHelper.AddOneFile(sourceDataInfo.RootNode, sparseFileName, 1); + FileNode sparseFileNode = sourceDataInfo.RootNode.GetFileNode(sparseFileName); + + if (DMLibTestContext.SourceType == DMLibDataType.CloudFile) + { + CloudFileDataAdaptor cloudFileDataAdaptor = SourceAdaptor as CloudFileDataAdaptor; + CloudFile sparseCloudFile = cloudFileDataAdaptor.GetCloudFileReference(sparseFileNode); + this.PrepareCloudFileWithDifferentSizeRange(sparseCloudFile); + sparseFileNode.MD5 = sparseCloudFile.Properties.ContentMD5; + sparseFileNode.Metadata = sparseCloudFile.Metadata; + } + else if (DMLibTestContext.SourceType == DMLibDataType.PageBlob) + { + CloudBlobDataAdaptor cloudBlobDataAdaptor = SourceAdaptor as CloudBlobDataAdaptor; + CloudPageBlob sparsePageBlob = cloudBlobDataAdaptor.GetCloudBlobReference(sparseFileNode) as CloudPageBlob; + this.PreparePageBlobWithDifferenSizePage(sparsePageBlob); + sparseFileNode.MD5 = sparsePageBlob.Properties.ContentMD5; + sparseFileNode.Metadata = sparsePageBlob.Metadata; + 
} + } + }; + + var result = this.ExecuteTestCase(sourceDataInfo, options); + + // For sync copy, recalculate md5 of destination by downloading the file to local. + if (IsCloudService(DMLibTestContext.DestType) && !DMLibTestContext.IsAsync) + { + DMLibDataHelper.SetCalculatedFileMD5(result.DataInfo, DestAdaptor); + } + + Test.Assert(result.Exceptions.Count == 0, "Verify no exception is thrown."); + Test.Assert(DMLibDataHelper.Equals(sourceDataInfo, result.DataInfo), "Verify transfer result."); + } + + private void PreparePageBlobWithDifferenSizePage(CloudPageBlob pageBlob) + { + List ranges = new List(); + List gaps = new List(); + + // Add one 4MB - 16MB page, align with 512 byte + ranges.Add(random.Next(4 * 2 * 1024, 16 * 2 * 1024) * 512); + + // Add one 512B page + ranges.Add(512); + + int remainingPageNumber = random.Next(10, 20); + + // Add ten - twenty 512B - 4MB page, align with 512 byte + for (int i = 0; i < remainingPageNumber; ++i) + { + ranges.Add(random.Next(1, 4 * 2 * 1024) * 512); + } + + // Add one 4M - 8M gap, align with 512 byte + gaps.Add(random.Next(4 * 2 * 1024, 8 * 2 * 1024) * 512); + + // Add 512B - 2048B gaps, align with 512 byte + for (int i = 1; i < ranges.Count - 1; ++i) + { + gaps.Add(random.Next(1, 5) * 512); + } + + ranges.Shuffle(); + gaps.Shuffle(); + + CloudBlobHelper.GeneratePageBlobWithRangedData(pageBlob, ranges, gaps); + } + + private void PrepareCloudFileWithDifferentSizeRange(CloudFile cloudFile) + { + List ranges = new List(); + List gaps = new List(); + + // Add one 4MB - 16MB range + ranges.Add(random.Next(4 * 1024 * 1024, 16 * 1024 * 1024)); + + // Add one 1B range + ranges.Add(1); + + int remainingPageNumber = random.Next(10, 20); + + // Add ten - twenty 1B - 4MB range + for (int i = 0; i < remainingPageNumber; ++i) + { + ranges.Add(random.Next(1, 4 * 1024 * 1024)); + } + + // Add one 4M - 8M gap + gaps.Add(random.Next(4 * 1024 * 1024, 8 * 1024 * 1024)); + + // Add 512B - 2048B gaps + for (int i = 1; i < ranges.Count - 1; 
++i) + { + gaps.Add(random.Next(1, 512 * 4)); + } + + if (DMLibTestContext.DestType == DMLibDataType.PageBlob) + { + int totalSize = ranges.Sum() + gaps.Sum(); + int remainder = totalSize % 512; + + if (remainder != 0) + { + ranges[ranges.Count - 1] += 512 - remainder; + } + } + + ranges.Shuffle(); + gaps.Shuffle(); + + CloudFileHelper.GenerateCloudFileWithRangedData(cloudFile, ranges, gaps); + } + } +} diff --git a/test/DMLibTest/Cases/BigFileTest.cs b/test/DMLibTest/Cases/BigFileTest.cs new file mode 100644 index 00000000..8e39fc9b --- /dev/null +++ b/test/DMLibTest/Cases/BigFileTest.cs @@ -0,0 +1,57 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest +{ + using System; + using System.Collections.Generic; + using System.Threading.Tasks; + using DMLibTestCodeGen; + using Microsoft.VisualStudio.TestTools.UnitTesting; + using MS.Test.Common.MsTestLib; + + [MultiDirectionTestClass] + public class BigFileTest : DMLibTestBase + { + #region Additional test attributes + [ClassInitialize()] + public static void MyClassInitialize(TestContext testContext) + { + DMLibTestBase.BaseClassInitialize(testContext); + } + + [ClassCleanup()] + public static void MyClassCleanup() + { + DMLibTestBase.BaseClassCleanup(); + } + + [TestInitialize()] + public void MyTestInitialize() + { + base.BaseTestInitialize(); + } + + [TestCleanup()] + public void MyTestCleanup() + { + base.BaseTestCleanup(); + } + #endregion + + [TestCategory(Tag.Function)] + [DMLibTestMethodSet(DMLibTestMethodSet.AllValidDirection)] + public void TransferBigSizeObject() + { + DMLibDataInfo sourceDataInfo = new DMLibDataInfo(string.Empty); + DMLibDataHelper.AddMultipleFilesBigSize(sourceDataInfo.RootNode, DMLibTestBase.FileName); + + var result = this.ExecuteTestCase(sourceDataInfo, new TestExecutionOptions()); + + 
Test.Assert(result.Exceptions.Count == 0, "Verify no exception is thrown."); + Test.Assert(DMLibDataHelper.Equals(sourceDataInfo, result.DataInfo), "Verify transfer result."); + } + } +} diff --git a/test/DMLibTest/Cases/CheckContentMD5Test.cs b/test/DMLibTest/Cases/CheckContentMD5Test.cs new file mode 100644 index 00000000..f3c08352 --- /dev/null +++ b/test/DMLibTest/Cases/CheckContentMD5Test.cs @@ -0,0 +1,93 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest.Cases +{ + using System; +using DMLibTestCodeGen; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Microsoft.WindowsAzure.Storage.DataMovement; +using MS.Test.Common.MsTestLib; + + [MultiDirectionTestClass] + public class CheckContentMD5Test : DMLibTestBase + { + #region Additional test attributes + [ClassInitialize()] + public static void MyClassInitialize(TestContext testContext) + { + DMLibTestBase.BaseClassInitialize(testContext); + } + + [ClassCleanup()] + public static void MyClassCleanup() + { + DMLibTestBase.BaseClassCleanup(); + } + + [TestInitialize()] + public void MyTestInitialize() + { + base.BaseTestInitialize(); + } + + [TestCleanup()] + public void MyTestCleanup() + { + base.BaseTestCleanup(); + } + #endregion + + [TestCategory(Tag.Function)] + [DMLibTestMethodSet(DMLibTestMethodSet.LocalDest)] + public void TestCheckContentMD5() + { + long fileSize = 10 * 1024 * 1024; + string wrongMD5 = "wrongMD5"; + + string checkWrongMD5File = "checkWrongMD5File"; + string notCheckWrongMD5File = "notCheckWrongMD5File"; + string checkCorrectMD5File = "checkCorrectMD5File"; + string notCheckCorrectMD5File = "notCheckCorrectMD5File"; + + DMLibDataInfo sourceDataInfo = new DMLibDataInfo(string.Empty); + DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, checkWrongMD5File, fileSize); + FileNode tmpFileNode = 
sourceDataInfo.RootNode.GetFileNode(checkWrongMD5File); + tmpFileNode.MD5 = wrongMD5; + + DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, notCheckWrongMD5File, fileSize); + tmpFileNode = sourceDataInfo.RootNode.GetFileNode(notCheckWrongMD5File); + tmpFileNode.MD5 = wrongMD5; + + DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, checkCorrectMD5File, fileSize); + DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, notCheckCorrectMD5File, fileSize); + + var options = new TestExecutionOptions(); + options.TransferItemModifier = (fileNode, transferItem) => + { + string fileName = fileNode.Name; + DownloadOptions downloadOptions = new DownloadOptions(); + if (fileName.Equals(checkWrongMD5File) || fileName.Equals(checkCorrectMD5File)) + { + downloadOptions.DisableContentMD5Validation = false; + } + else if (fileName.Equals(notCheckWrongMD5File) || fileName.Equals(notCheckCorrectMD5File)) + { + downloadOptions.DisableContentMD5Validation = true; + } + + transferItem.Options = downloadOptions; + }; + + var result = this.ExecuteTestCase(sourceDataInfo, options); + + Test.Assert(result.Exceptions.Count == 1, "Verify there's one exception."); + Exception exception = result.Exceptions[0]; + + Test.Assert(exception is InvalidOperationException, "Verify it's an invalid operation exception."); + VerificationHelper.VerifyExceptionErrorMessage(exception, "The MD5 hash calculated from the downloaded data does not match the MD5 hash stored", checkWrongMD5File); + } + } +} diff --git a/test/DMLibTest/Cases/MetadataTest.cs b/test/DMLibTest/Cases/MetadataTest.cs new file mode 100644 index 00000000..247e4bcb --- /dev/null +++ b/test/DMLibTest/Cases/MetadataTest.cs @@ -0,0 +1,71 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace DMLibTest +{ + using System.Collections.Generic; + using 
DMLibTestCodeGen; + using Microsoft.VisualStudio.TestTools.UnitTesting; + using MS.Test.Common.MsTestLib; + + [MultiDirectionTestClass] + public class MetadataTest : DMLibTestBase + { + #region Additional test attributes + [ClassInitialize()] + public static void MyClassInitialize(TestContext testContext) + { + DMLibTestBase.BaseClassInitialize(testContext); + } + + [ClassCleanup()] + public static void MyClassCleanup() + { + DMLibTestBase.BaseClassCleanup(); + } + + [TestInitialize()] + public void MyTestInitialize() + { + base.BaseTestInitialize(); + } + + [TestCleanup()] + public void MyTestCleanup() + { + base.BaseTestCleanup(); + } + #endregion + + [TestCategory(Tag.Function)] + [DMLibTestMethodSet(DMLibTestMethodSet.Cloud2Cloud)] + public void TestMetadata() + { + Dictionary metadata = new Dictionary(); + metadata.Add(FileOp.NextCIdentifierString(random), FileOp.NextNormalString(random)); + metadata.Add(FileOp.NextCIdentifierString(random), FileOp.NextNormalString(random)); + + Test.Info("Metadata is ====================="); + foreach (var keyValue in metadata) + { + Test.Info("name:{0} value:{1}", keyValue.Key, keyValue.Value); + } + + DMLibDataInfo sourceDataInfo = new DMLibDataInfo(string.Empty); + FileNode fileNode = new FileNode(DMLibTestBase.FileName) + { + SizeInByte = DMLibTestBase.FileSizeInKB * 1024L, + Metadata = metadata + }; + sourceDataInfo.RootNode.AddFileNode(fileNode); + + var result = this.ExecuteTestCase(sourceDataInfo, new TestExecutionOptions()); + + Test.Assert(result.Exceptions.Count == 0, "Verify no exception is thrown."); + Test.Assert(DMLibDataHelper.Equals(sourceDataInfo, result.DataInfo), "Verify transfer result."); + } + } +} diff --git a/test/DMLibTest/Cases/OverwriteTest.cs b/test/DMLibTest/Cases/OverwriteTest.cs new file mode 100644 index 00000000..a81d2e68 --- /dev/null +++ b/test/DMLibTest/Cases/OverwriteTest.cs @@ -0,0 +1,115 @@ +//------------------------------------------------------------------------------ +// +// 
Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest.Cases +{ + using DMLibTestCodeGen; + using Microsoft.VisualStudio.TestTools.UnitTesting; + using Microsoft.WindowsAzure.Storage.DataMovement; + using MS.Test.Common.MsTestLib; + + [MultiDirectionTestClass] + public class OverwriteTest : DMLibTestBase + { + #region Additional test attributes + [ClassInitialize()] + public static void MyClassInitialize(TestContext testContext) + { + DMLibTestBase.BaseClassInitialize(testContext); + } + + [ClassCleanup()] + public static void MyClassCleanup() + { + DMLibTestBase.BaseClassCleanup(); + } + + [TestInitialize()] + public void MyTestInitialize() + { + base.BaseTestInitialize(); + } + + [TestCleanup()] + public void MyTestCleanup() + { + base.BaseTestCleanup(); + } + #endregion + + [TestCategory(Tag.Function)] + [DMLibTestMethodSet(DMLibTestMethodSet.AllValidDirection)] + public void OverwriteDestination() + { + string destExistYName = "destExistY"; + string destExistNName = "destExistN"; + string destNotExistYName = "destNotExistY"; + + DMLibDataInfo sourceDataInfo = new DMLibDataInfo(string.Empty); + DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, destExistYName, 1024); + DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, destExistNName, 1024); + DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, destNotExistYName, 1024); + + DMLibDataInfo destDataInfo = new DMLibDataInfo(string.Empty); + DMLibDataHelper.AddOneFileInBytes(destDataInfo.RootNode, destExistYName, 1024); + DMLibDataHelper.AddOneFileInBytes(destDataInfo.RootNode, destExistNName, 1024); + + var options = new TestExecutionOptions(); + if (DMLibTestContext.DestType != DMLibDataType.Stream) + { + options.DestTransferDataInfo = destDataInfo; + } + + options.TransferItemModifier = (fileNode, transferItem) => + { + string fileName = fileNode.Name; + TransferContext transferContext = new 
TransferContext(); + + if (fileName.Equals(destExistYName)) + { + transferContext.OverwriteCallback = DMLibInputHelper.GetDefaultOverwiteCallbackY(); + } + else if (fileName.Equals(destExistNName)) + { + transferContext.OverwriteCallback = DMLibInputHelper.GetDefaultOverwiteCallbackN(); + } + else if (fileName.Equals(destNotExistYName)) + { + transferContext.OverwriteCallback = DMLibInputHelper.GetDefaultOverwiteCallbackY(); + } + + transferItem.TransferContext = transferContext; + }; + + var result = this.ExecuteTestCase(sourceDataInfo, options); + + DMLibDataInfo expectedDataInfo = new DMLibDataInfo(string.Empty); + if (DMLibTestContext.DestType != DMLibDataType.Stream) + { + expectedDataInfo.RootNode.AddFileNode(sourceDataInfo.RootNode.GetFileNode(destExistYName)); + expectedDataInfo.RootNode.AddFileNode(destDataInfo.RootNode.GetFileNode(destExistNName)); + expectedDataInfo.RootNode.AddFileNode(sourceDataInfo.RootNode.GetFileNode(destNotExistYName)); + } + else + { + expectedDataInfo = sourceDataInfo; + } + + // Verify transfer result + Test.Assert(DMLibDataHelper.Equals(expectedDataInfo, result.DataInfo), "Verify transfer result."); + + // Verify exception + if (DMLibTestContext.DestType != DMLibDataType.Stream) + { + Test.Assert(result.Exceptions.Count == 1, "Verify there's only one exceptions."); + TransferException transferException = result.Exceptions[0] as TransferException; + Test.Assert(transferException != null, "Verify the exception is a TransferException"); + + VerificationHelper.VerifyTransferException(transferException, TransferErrorCode.NotOverwriteExistingDestination, + "Skiped file", destExistNName); + } + } + } +} diff --git a/test/DMLibTest/Cases/ProgressHandlerTest.cs b/test/DMLibTest/Cases/ProgressHandlerTest.cs new file mode 100644 index 00000000..90da1272 --- /dev/null +++ b/test/DMLibTest/Cases/ProgressHandlerTest.cs @@ -0,0 +1,66 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) 
Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest.Cases +{ + using DMLibTestCodeGen; + using Microsoft.VisualStudio.TestTools.UnitTesting; + using Microsoft.WindowsAzure.Storage.DataMovement; + using MS.Test.Common.MsTestLib; + + [MultiDirectionTestClass] + public class ProgressHandlerTest : DMLibTestBase + { + #region Additional test attributes + [ClassInitialize()] + public static void MyClassInitialize(TestContext testContext) + { + DMLibTestBase.BaseClassInitialize(testContext); + } + + [ClassCleanup()] + public static void MyClassCleanup() + { + DMLibTestBase.BaseClassCleanup(); + } + + [TestInitialize()] + public void MyTestInitialize() + { + base.BaseTestInitialize(); + } + + [TestCleanup()] + public void MyTestCleanup() + { + base.BaseTestCleanup(); + } + #endregion + + [TestCategory(Tag.Function)] + [DMLibTestMethodSet(DMLibTestMethodSet.AllValidDirection)] + public void TestProgressHandlerTest() + { + long fileSize = 10 * 1024 * 1024; + + DMLibDataInfo sourceDataInfo = new DMLibDataInfo(string.Empty); + DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, DMLibTestBase.FileName, fileSize); + + var options = new TestExecutionOptions(); + options.TransferItemModifier = (fileNode, transferItem) => + { + TransferContext transferContext = new TransferContext(); + ProgressChecker progressChecker = new ProgressChecker(1, fileNode.SizeInByte); + transferContext.ProgressHandler = progressChecker.GetProgressHandler(); + transferItem.TransferContext = transferContext; + }; + + var result = this.ExecuteTestCase(sourceDataInfo, options); + + Test.Assert(result.Exceptions.Count == 0, "Verify no exception is thrown."); + Test.Assert(DMLibDataHelper.Equals(sourceDataInfo, result.DataInfo), "Verify transfer result."); + } + } +} diff --git a/test/DMLibTest/Cases/ResumeTest.cs b/test/DMLibTest/Cases/ResumeTest.cs new file mode 100644 index 00000000..bad569f3 --- /dev/null +++ 
b/test/DMLibTest/Cases/ResumeTest.cs @@ -0,0 +1,146 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ + +namespace DMLibTest +{ + using System; + using System.Collections.Generic; + using System.Threading; + using DMLibTestCodeGen; + using Microsoft.VisualStudio.TestTools.UnitTesting; + using Microsoft.WindowsAzure.Storage; + using Microsoft.WindowsAzure.Storage.DataMovement; + using MS.Test.Common.MsTestLib; + + [MultiDirectionTestClass] + public class ResumeTest : DMLibTestBase + { + #region Additional test attributes + [ClassInitialize()] + public static void MyClassInitialize(TestContext testContext) + { + DMLibTestBase.BaseClassInitialize(testContext); + } + + [ClassCleanup()] + public static void MyClassCleanup() + { + DMLibTestBase.BaseClassCleanup(); + } + + [TestInitialize()] + public void MyTestInitialize() + { + base.BaseTestInitialize(); + } + + [TestCleanup()] + public void MyTestCleanup() + { + base.BaseTestCleanup(); + } + #endregion + + [TestCategory(Tag.Function)] + [DMLibTestMethodSet(DMLibTestMethodSet.AllSync)] + public void TestResume() + { + int fileSizeInKB = 100 * 1024; + DMLibDataInfo sourceDataInfo = new DMLibDataInfo(string.Empty); + DMLibDataHelper.AddOneFile(sourceDataInfo.RootNode, DMLibTestBase.FileName, fileSizeInKB); + + CancellationTokenSource tokenSource = new CancellationTokenSource(); + + TransferItem transferItem = null; + var options = new TestExecutionOptions(); + options.LimitSpeed = true; + var transferContext = new TransferContext(); + var progressChecker = new ProgressChecker(1, fileSizeInKB * 1024); + transferContext.ProgressHandler = progressChecker.GetProgressHandler(); + options.TransferItemModifier = (fileName, item) => + { + item.CancellationToken = tokenSource.Token; + item.TransferContext = transferContext; + transferItem = item; + }; + + 
TransferCheckpoint firstCheckpoint = null, secondCheckpoint = null; + options.AfterAllItemAdded = () => + { + // Wait until there are data transferred + progressChecker.DataTransferred.WaitOne(); + + // Store the first checkpoint + firstCheckpoint = transferContext.LastCheckpoint; + Thread.Sleep(1000); + + // Cancel the transfer and store the second checkpoint + tokenSource.Cancel(); + secondCheckpoint = transferContext.LastCheckpoint; + }; + + // Cancel and store checkpoint for resume + var result = this.ExecuteTestCase(sourceDataInfo, options); + + Test.Assert(result.Exceptions.Count == 1, "Verify job is cancelled"); + Exception exception = result.Exceptions[0]; + VerificationHelper.VerifyExceptionErrorMessage(exception, "A task was canceled."); + + TransferCheckpoint firstResumeCheckpoint = null, secondResumeCheckpoint = null; + + // DMLib doesn't support to resume transfer from a checkpoint which is inconsistent with + // the actual transfer progress when the destination is an append blob. 
+ if (Helper.RandomBoolean() && DMLibTestContext.DestType != DMLibDataType.AppendBlob) + { + Test.Info("Resume with the first checkpoint first."); + firstResumeCheckpoint = firstCheckpoint; + secondResumeCheckpoint = secondCheckpoint; + } + else + { + Test.Info("Resume with the second checkpoint first."); + firstResumeCheckpoint = secondCheckpoint; + secondResumeCheckpoint = firstCheckpoint; + } + + // resume with firstResumeCheckpoint + TransferItem resumeItem = transferItem.Clone(); + progressChecker.Reset(); + TransferContext resumeContext = new TransferContext(firstResumeCheckpoint) + { + ProgressHandler = progressChecker.GetProgressHandler() + }; + resumeItem.TransferContext = resumeContext; + + result = this.RunTransferItems(new List() { resumeItem }, new TestExecutionOptions()); + + VerificationHelper.VerifySingleObjectResumeResult(result, sourceDataInfo); + + // resume with secondResumeCheckpoint + resumeItem = transferItem.Clone(); + progressChecker.Reset(); + resumeContext = new TransferContext(secondResumeCheckpoint) + { + ProgressHandler = progressChecker.GetProgressHandler() + }; + resumeItem.TransferContext = resumeContext; + + result = this.RunTransferItems(new List() { resumeItem }, new TestExecutionOptions()); + + if (DMLibTestContext.DestType != DMLibDataType.AppendBlob || DMLibTestContext.SourceType == DMLibDataType.Stream) + { + VerificationHelper.VerifySingleObjectResumeResult(result, sourceDataInfo); + } + else + { + Test.Assert(result.Exceptions.Count == 1, "Verify reumse fails when checkpoint is inconsistent with the actual progress when destination is append blob."); + exception = result.Exceptions[0]; + Test.Assert(exception is InvalidOperationException, "Verify reumse fails when checkpoint is inconsistent with the actual progress when destination is append blob."); + VerificationHelper.VerifyExceptionErrorMessage(exception, "Destination might be changed by other process or application."); + } + } + } +} diff --git 
a/test/DMLibTest/Cases/SetContentTypeTest.cs b/test/DMLibTest/Cases/SetContentTypeTest.cs new file mode 100644 index 00000000..4817ea72 --- /dev/null +++ b/test/DMLibTest/Cases/SetContentTypeTest.cs @@ -0,0 +1,68 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest.Cases +{ + using DMLibTestCodeGen; + using Microsoft.VisualStudio.TestTools.UnitTesting; + using Microsoft.WindowsAzure.Storage.DataMovement; + using MS.Test.Common.MsTestLib; + + [MultiDirectionTestClass] + public class SetContentTypeTest : DMLibTestBase + { + #region Additional test attributes + [ClassInitialize()] + public static void MyClassInitialize(TestContext testContext) + { + DMLibTestBase.BaseClassInitialize(testContext); + } + + [ClassCleanup()] + public static void MyClassCleanup() + { + DMLibTestBase.BaseClassCleanup(); + } + + [TestInitialize()] + public void MyTestInitialize() + { + base.BaseTestInitialize(); + } + + [TestCleanup()] + public void MyTestCleanup() + { + base.BaseTestCleanup(); + } + #endregion + + [TestCategory(Tag.Function)] + [DMLibTestMethodSet(DMLibTestMethodSet.LocalSource)] + public void TestSetContentType() + { + string contentType = "contenttype"; + DMLibDataInfo sourceDataInfo = new DMLibDataInfo(string.Empty); + DMLibDataHelper.AddOneFile(sourceDataInfo.RootNode, DMLibTestBase.FileName, 1024); + + var options = new TestExecutionOptions(); + options.TransferItemModifier = (fileNode, transferItem) => + { + UploadOptions uploadOptions = new UploadOptions(); + uploadOptions.ContentType = "contenttype"; + + transferItem.Options = uploadOptions; + }; + + var result = this.ExecuteTestCase(sourceDataInfo, options); + + Test.Assert(result.Exceptions.Count == 0, "Verify no exception is thrown."); + Test.Assert(DMLibDataHelper.Equals(sourceDataInfo, result.DataInfo), "Verify transfer result."); 
+ + FileNode destFileNode = result.DataInfo.RootNode.GetFileNode(DMLibTestBase.FileName); + Test.Assert(contentType.Equals(destFileNode.ContentType), "Verify content type: {0}, expected {1}", destFileNode.ContentType, contentType); + } + } +} diff --git a/test/DMLibTest/Cases/UnsupportedDirectionTest.cs b/test/DMLibTest/Cases/UnsupportedDirectionTest.cs new file mode 100644 index 00000000..7d82d231 --- /dev/null +++ b/test/DMLibTest/Cases/UnsupportedDirectionTest.cs @@ -0,0 +1,91 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest.Cases +{ + using System; + using DMLibTestCodeGen; + using Microsoft.VisualStudio.TestTools.UnitTesting; + using MS.Test.Common.MsTestLib; + + [MultiDirectionTestClass] + public class UnsupportedDirectionTest : DMLibTestBase + { + #region Additional test attributes + [ClassInitialize()] + public static void MyClassInitialize(TestContext testContext) + { + DMLibTestBase.BaseClassInitialize(testContext); + } + + [ClassCleanup()] + public static void MyClassCleanup() + { + DMLibTestBase.BaseClassCleanup(); + } + + [TestInitialize()] + public void MyTestInitialize() + { + base.BaseTestInitialize(); + } + + [TestCleanup()] + public void MyTestCleanup() + { + base.BaseTestCleanup(); + } + #endregion + + [TestCategory(Tag.Function)] + [DMLibTestMethod(DMLibDataType.BlockBlob, DMLibDataType.CloudBlob & ~DMLibDataType.BlockBlob)] + [DMLibTestMethod(DMLibDataType.AppendBlob, DMLibDataType.CloudBlob & ~DMLibDataType.AppendBlob)] + [DMLibTestMethod(DMLibDataType.PageBlob, DMLibDataType.CloudBlob & ~DMLibDataType.PageBlob)] + [DMLibTestMethod(DMLibDataType.BlockBlob, DMLibDataType.CloudBlob & ~DMLibDataType.BlockBlob, isAsync: true)] + [DMLibTestMethod(DMLibDataType.AppendBlob, DMLibDataType.CloudBlob & ~DMLibDataType.AppendBlob, isAsync: true)] + 
[DMLibTestMethod(DMLibDataType.PageBlob, DMLibDataType.CloudBlob & ~DMLibDataType.PageBlob, isAsync: true)] + [DMLibTestMethod(DMLibDataType.CloudFile, DMLibDataType.PageBlob, isAsync: true)] + [DMLibTestMethod(DMLibDataType.CloudFile, DMLibDataType.AppendBlob, isAsync: true)] + [DMLibTestMethod(DMLibDataType.URI, DMLibDataType.CloudFile)] + [DMLibTestMethod(DMLibDataType.URI, DMLibDataType.CloudBlob)] + public void TestUnsupportedDirection() + { + DMLibDataInfo sourceDataInfo = new DMLibDataInfo(string.Empty); + DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, DMLibTestBase.FileName, 1024); + + var result = this.ExecuteTestCase(sourceDataInfo, new TestExecutionOptions()); + + Test.Assert(result.Exceptions.Count == 1, "Verify no exception is thrown."); + + Exception exception = result.Exceptions[0]; + + if (DMLibTestContext.SourceType == DMLibDataType.URI) + { + Test.Assert(exception is NotSupportedException, "Verify exception is NotSupportedException."); + if (DMLibTestContext.DestType == DMLibDataType.CloudFile) + { + VerificationHelper.VerifyExceptionErrorMessage(exception, "Copying from uri to Azure File Storage synchronously is not supported"); + } + else + { + VerificationHelper.VerifyExceptionErrorMessage(exception, "Copying from uri to Azure Blob Storage synchronously is not supported"); + } + } + else if (DMLibTestBase.IsCloudBlob(DMLibTestContext.SourceType) && DMLibTestBase.IsCloudBlob(DMLibTestContext.DestType)) + { + Test.Assert(exception is InvalidOperationException, "Verify exception is InvalidOperationException."); + VerificationHelper.VerifyExceptionErrorMessage(exception, "Blob type of source and destination must be the same."); + } + else + { + Test.Assert(exception is InvalidOperationException, "Verify exception is InvalidOperationException."); + VerificationHelper.VerifyExceptionErrorMessage(exception, + string.Format("Copying from File Storage to {0} Blob Storage asynchronously is not supported.", 
MapBlobDataTypeToBlobType(DMLibTestContext.DestType))); + } + + Test.Assert(DMLibDataHelper.Equals(new DMLibDataInfo(string.Empty), result.DataInfo), "Verify no file is transfered."); + } + } +} diff --git a/test/DMLibTest/DMLibTest.csproj b/test/DMLibTest/DMLibTest.csproj new file mode 100644 index 00000000..8c671b24 --- /dev/null +++ b/test/DMLibTest/DMLibTest.csproj @@ -0,0 +1,209 @@ + + + + Debug + AnyCPU + {2A4656A4-F744-4653-A9D6-15112E9AB352} + Library + Properties + DMLibTest + DMLibTest + v4.5 + 512 + {3AC096D0-A1C2-E12C-1390-A8335801FDAB};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC} + 10.0 + $(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion) + $(ProgramFiles)\Common Files\microsoft shared\VSTT\$(VisualStudioVersion)\UITestExtensionPackages + False + UnitTest + ..\..\ + true + + + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + + + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + + + false + + + true + + + ..\DMLibTestCodeGen\ + $(CodeGenPath)DMLibTestCodeGen.csproj + Generated + + + ..\..\tools\strongnamekeys\fake\windows.snk + + + + + False + ..\..\packages\Microsoft.Data.Edm.5.6.4\lib\net40\Microsoft.Data.Edm.dll + + + False + ..\..\packages\Microsoft.Data.OData.5.6.4\lib\net40\Microsoft.Data.OData.dll + + + False + ..\..\packages\Microsoft.Data.Services.Client.5.6.4\lib\net40\Microsoft.Data.Services.Client.dll + + + False + ..\..\packages\Microsoft.WindowsAzure.ConfigurationManager.1.8.0.0\lib\net35-full\Microsoft.WindowsAzure.Configuration.dll + + + False + ..\..\packages\WindowsAzure.Storage.5.0.0\lib\net40\Microsoft.WindowsAzure.Storage.dll + + + False + ..\..\packages\Newtonsoft.Json.6.0.8\lib\net45\Newtonsoft.Json.dll + + + + + False + ..\..\packages\System.Spatial.5.6.4\lib\net40\System.Spatial.dll + + + + + + + + + + + + + + + + + + + + + SharedAssemblyInfo.cs + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + {b821e031-09cc-48f0-bdc6-2793228d4027} + DataMovement + 
+ + {7018ee4e-d389-424e-a8dd-f9b4ffda5194} + DMLibTestCodeGen + + + {ac39b50f-dc27-4411-9ed4-a4a137190acb} + MsTestLib + + + + + PreserveNewest + + + + + + + + + + False + + + False + + + False + + + False + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/test/DMLibTest/Framework/AssemblyInitCleanup.cs b/test/DMLibTest/Framework/AssemblyInitCleanup.cs new file mode 100644 index 00000000..aac1f483 --- /dev/null +++ b/test/DMLibTest/Framework/AssemblyInitCleanup.cs @@ -0,0 +1,31 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest +{ + using System; + using Microsoft.VisualStudio.TestTools.UnitTesting; + using MS.Test.Common.MsTestLib; + + [TestClass] + public class AssemblyInitCleanup + { + [AssemblyInitialize] + public static void TestInit(TestContext testContext) + { + // init loggers and load test config data + String config = testContext.Properties["config"] as string; + Test.Init(config); + // set the assertfail delegate to report failure in VS + Test.AssertFail = new AssertFailDelegate(Assert.Fail); + } + + [AssemblyCleanup] + public static void TestCleanup() + { + Test.Close(); + } + } +} diff --git a/test/DMLibTest/Framework/BlobDataAdaptorBase.cs b/test/DMLibTest/Framework/BlobDataAdaptorBase.cs new file mode 100644 index 00000000..ff9955e9 --- /dev/null +++ b/test/DMLibTest/Framework/BlobDataAdaptorBase.cs @@ -0,0 +1,174 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest +{ + using System; + using System.Text; + using Microsoft.WindowsAzure.Storage.Blob; + using BlobTypeConst = DMLibTest.BlobType; + + public abstract class BlobDataAdaptorBase : DataAdaptor where 
TDataInfo : IDataInfo + { + private string delimiter; + private readonly string defaultContainerName; + private string containerName; + + public override string StorageKey + { + get + { + return this.BlobHelper.Account.Credentials.ExportBase64EncodedKey(); + } + } + + public string ContainerName + { + get + { + return this.containerName; + } + + set + { + this.containerName = value; + } + } + + public CloudBlobHelper BlobHelper + { + get; + private set; + } + + protected TestAccount TestAccount + { + get; + private set; + } + + protected string TempFolder + { + get; + private set; + } + + protected virtual string BlobType + { + get; + private set; + } + + public BlobDataAdaptorBase(TestAccount testAccount, string containerName, string blobType, SourceOrDest sourceOrDest, string delimiter = "/") + { + if (BlobTypeConst.Block != blobType && BlobTypeConst.Page != blobType && BlobTypeConst.Append != blobType) + { + throw new ArgumentException("blobType"); + } + + this.TestAccount = testAccount; + this.BlobHelper = new CloudBlobHelper(testAccount.Account); + this.delimiter = delimiter; + this.containerName = containerName; + this.defaultContainerName = containerName; + this.TempFolder = Guid.NewGuid().ToString(); + this.BlobType = blobType; + this.SourceOrDest = sourceOrDest; + } + + public override string GetAddress(params string[] list) + { + return this.GetAddress(this.TestAccount.GetEndpointBaseUri(EndpointType.Blob), list); + } + + public override string GetSecondaryAddress(params string[] list) + { + return this.GetAddress(this.TestAccount.GetEndpointBaseUri(EndpointType.Blob, true), list); + } + + public override void CreateIfNotExists() + { + this.BlobHelper.CreateContainer(this.containerName); + } + + public override bool Exists() + { + return this.BlobHelper.Exists(this.containerName); + } + + private string GetAddress(string baseUri, params string[] list) + { + StringBuilder builder = new StringBuilder(); + builder.Append(baseUri + "/" + this.containerName + 
"/"); + + foreach (string token in list) + { + if (!string.IsNullOrEmpty(token)) + { + builder.Append(token); + builder.Append(this.delimiter); + } + } + + return builder.ToString(); + } + + public override void WaitForGEO() + { + CloudBlobContainer container = this.BlobHelper.GetGRSContainer(this.containerName); + Helper.WaitForTakingEffect(container.ServiceClient); + } + + public override void Cleanup() + { + this.BlobHelper.CleanupContainer(this.containerName); + } + + public override void DeleteLocation() + { + this.BlobHelper.DeleteContainer(this.containerName); + } + + public override void Reset() + { + if (this.Exists()) + { + this.BlobHelper.SetContainerAccessType(this.containerName, BlobContainerPublicAccessType.Off); + } + + this.containerName = this.defaultContainerName; + } + + public override string GenerateSAS(SharedAccessPermissions sap, int validatePeriod, string policySignedIdentifier = null) + { + if (null == policySignedIdentifier) + { + if (this.SourceOrDest == SourceOrDest.Dest) + { + this.BlobHelper.CreateContainer(this.containerName); + } + + return this.BlobHelper.GetSASofContainer(this.containerName, sap.ToBlobPermissions(), validatePeriod, false); + } + else + { + this.BlobHelper.CreateContainer(this.containerName); + return this.BlobHelper.GetSASofContainer(this.containerName, sap.ToBlobPermissions(), validatePeriod, true, policySignedIdentifier); + } + } + + public override void RevokeSAS() + { + this.BlobHelper.ClearSASPolicyofContainer(this.containerName); + } + + public override void MakePublic() + { + this.BlobHelper.SetContainerAccessType(this.containerName, BlobContainerPublicAccessType.Container); + + DMLibTestHelper.WaitForACLTakeEffect(); + } + } +} diff --git a/test/DMLibTest/Framework/CloudBlobDataAdaptor.cs b/test/DMLibTest/Framework/CloudBlobDataAdaptor.cs new file mode 100644 index 00000000..ba760bd3 --- /dev/null +++ b/test/DMLibTest/Framework/CloudBlobDataAdaptor.cs @@ -0,0 +1,243 @@ 
+//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest +{ + using System; + using System.Collections.Generic; + using System.IO; + using Microsoft.WindowsAzure.Storage; + using Microsoft.WindowsAzure.Storage.Blob; + using Microsoft.WindowsAzure.Storage.RetryPolicies; + using StorageBlob = Microsoft.WindowsAzure.Storage.Blob; + + internal class CloudBlobDataAdaptor : BlobDataAdaptorBase + { + public CloudBlobDataAdaptor(TestAccount testAccount, string containerName, string blobType, SourceOrDest sourceOrDest, string delimiter = "/") + : base(testAccount, containerName, blobType, sourceOrDest, delimiter) + { + } + + public override object GetTransferObject(FileNode fileNode) + { + return this.GetCloudBlobReference(fileNode); + } + + public override object GetTransferObject(DirNode dirNode) + { + return this.GetCloudBlobDirReference(dirNode); + } + + protected override void GenerateDataImp(DMLibDataInfo dataInfo) + { + this.BlobHelper.CreateContainer(this.ContainerName); + + using (TemporaryTestFolder localTemp = new TemporaryTestFolder(this.TempFolder)) + { + CloudBlobDirectory rootCloudBlobDir = this.BlobHelper.GetDirReference(this.ContainerName, dataInfo.RootPath); + this.GenerateDir(dataInfo.RootNode, rootCloudBlobDir); + } + } + + public override DMLibDataInfo GetTransferDataInfo(string rootDir) + { + CloudBlobDirectory blobDir = this.BlobHelper.QueryBlobDirectory(this.ContainerName, rootDir); + if (blobDir == null) + { + return null; + } + + DMLibDataInfo dataInfo = new DMLibDataInfo(rootDir); + + this.BuildDirNode(blobDir, dataInfo.RootNode); + return dataInfo; + } + + public string LeaseBlob(FileNode fileNode, TimeSpan? 
leaseTime) + { + var blob = this.GetCloudBlobReference(fileNode); + return blob.AcquireLease(leaseTime, null, options: HelperConst.DefaultBlobOptions); + } + + public void ReleaseLease(FileNode fileNode, string leaseId) + { + var blob = this.GetCloudBlobReference(fileNode); + blob.ReleaseLease(AccessCondition.GenerateLeaseCondition(leaseId), options: HelperConst.DefaultBlobOptions); + } + + public CloudBlob GetCloudBlobReference(FileNode fileNode) + { + var container = this.BlobHelper.BlobClient.GetContainerReference(this.ContainerName); + var blobName = fileNode.GetURLRelativePath(); + if (blobName.StartsWith("/")) + { + blobName = blobName.Substring(1, blobName.Length - 1); + } + + return CloudBlobHelper.GetCloudBlobReference(container, blobName, this.BlobType); + } + + public CloudBlobDirectory GetCloudBlobDirReference(DirNode dirNode) + { + var container = this.BlobHelper.BlobClient.GetContainerReference(this.ContainerName); + var dirName = dirNode.GetURLRelativePath(); + if (dirName.StartsWith("/")) + { + dirName = dirName.Substring(1, dirName.Length - 1); + } + + return container.GetDirectoryReference(dirName); + } + + private void GenerateDir(DirNode dirNode, CloudBlobDirectory cloudBlobDir) + { + DMLibDataHelper.CreateLocalDirIfNotExists(this.TempFolder); + + foreach (var subDir in dirNode.DirNodes) + { + CloudBlobDirectory subCloudBlobDir = cloudBlobDir.GetDirectoryReference(subDir.Name); + this.GenerateDir(subDir, subCloudBlobDir); + } + + List snapshotList = new List(); + + foreach (var file in dirNode.FileNodes) + { + CloudBlob cloudBlob = CloudBlobHelper.GetCloudBlobReference(cloudBlobDir, file.Name, this.BlobType); + this.GenerateFile(file, cloudBlob, snapshotList); + } + + foreach (var snapshot in snapshotList) + { + dirNode.AddFileNode(snapshot); + } + } + + private void CheckFileNode(FileNode fileNode) + { + if (fileNode.LastModifiedTime != null) + { + throw new InvalidOperationException("Can't set LastModifiedTime to cloud blob"); + } + + if 
(fileNode.FileAttr != null) + { + throw new InvalidOperationException("Can't set file attribute to cloud blob"); + } + } + + private void GenerateFile(FileNode fileNode, CloudBlob cloudBlob, List snapshotList) + { + this.CheckFileNode(fileNode); + + if ((StorageBlob.BlobType.PageBlob == cloudBlob.BlobType) && (fileNode.SizeInByte % 512 != 0)) + { + throw new InvalidOperationException(string.Format("Can only generate page blob which size is multiple of 512bytes. Expected size is {0}", fileNode.SizeInByte)); + } + + string tempFileName = Guid.NewGuid().ToString(); + string localFilePath = Path.Combine(this.TempFolder, tempFileName); + DMLibDataHelper.CreateLocalFile(fileNode, localFilePath); + + BlobRequestOptions storeMD5Options = new BlobRequestOptions() + { + RetryPolicy = new LinearRetry(TimeSpan.FromSeconds(90), 3), + StoreBlobContentMD5 = true, + }; + + cloudBlob.UploadFromFile(localFilePath, FileMode.Open, options: storeMD5Options); + + if (null != fileNode.MD5 || + null != fileNode.ContentType || + null != fileNode.CacheControl || + null != fileNode.ContentDisposition || + null != fileNode.ContentEncoding || + null != fileNode.ContentLanguage) + { + cloudBlob.Properties.ContentMD5 = fileNode.MD5; + cloudBlob.Properties.ContentType = fileNode.ContentType; + cloudBlob.Properties.CacheControl = fileNode.CacheControl; + cloudBlob.Properties.ContentDisposition = fileNode.ContentDisposition; + cloudBlob.Properties.ContentEncoding = fileNode.ContentEncoding; + cloudBlob.Properties.ContentLanguage = fileNode.ContentLanguage; + cloudBlob.SetProperties(options: HelperConst.DefaultBlobOptions); + } + + if (null != fileNode.Metadata && fileNode.Metadata.Count > 0) + { + cloudBlob.Metadata.Clear(); + foreach (var metaData in fileNode.Metadata) + { + cloudBlob.Metadata.Add(metaData); + } + + cloudBlob.SetMetadata(options: HelperConst.DefaultBlobOptions); + } + + cloudBlob.FetchAttributes(options: HelperConst.DefaultBlobOptions); + this.BuildFileNode(cloudBlob, fileNode); + 
+ for (int i = 0; i < fileNode.SnapshotsCount; ++i) + { + CloudBlob snapshot = cloudBlob.Snapshot(); + snapshotList.Add(this.BuildSnapshotFileNode(snapshot, fileNode.Name)); + } + } + + private void BuildDirNode(CloudBlobDirectory cloudDir, DirNode dirNode) + { + foreach (IListBlobItem item in cloudDir.ListBlobs(false, BlobListingDetails.Metadata, HelperConst.DefaultBlobOptions)) + { + CloudBlob cloudBlob = item as CloudBlob; + CloudBlobDirectory subCloudDir = item as CloudBlobDirectory; + + if (cloudBlob != null) + { + if (CloudBlobHelper.MapStorageBlobTypeToBlobType(cloudBlob.BlobType) == this.BlobType) + { + FileNode fileNode = new FileNode(cloudBlob.GetShortName()); + this.BuildFileNode(cloudBlob, fileNode); + dirNode.AddFileNode(fileNode); + } + } + else if (subCloudDir != null) + { + var subDirName = subCloudDir.GetShortName(); + DirNode subDirNode = dirNode.GetDirNode(subDirName); + + // A blob directory could be listed more than once if it's across table servers. + if (subDirNode == null) + { + subDirNode = new DirNode(subDirName); + this.BuildDirNode(subCloudDir, subDirNode); + dirNode.AddDirNode(subDirNode); + } + } + } + } + + private FileNode BuildSnapshotFileNode(CloudBlob cloudBlob, string fileName) + { + FileNode fileNode = new FileNode(DMLibTestHelper.AppendSnapShotTimeToFileName(fileName, cloudBlob.SnapshotTime)); + this.BuildFileNode(cloudBlob, fileNode); + return fileNode; + } + + private void BuildFileNode(CloudBlob cloudBlob, FileNode fileNode) + { + fileNode.SizeInByte = cloudBlob.Properties.Length; + fileNode.MD5 = cloudBlob.Properties.ContentMD5; + fileNode.ContentType = cloudBlob.Properties.ContentType; + fileNode.CacheControl = cloudBlob.Properties.CacheControl; + fileNode.ContentDisposition = cloudBlob.Properties.ContentDisposition; + fileNode.ContentEncoding = cloudBlob.Properties.ContentEncoding; + fileNode.ContentLanguage = cloudBlob.Properties.ContentLanguage; + fileNode.Metadata = cloudBlob.Metadata; + + DateTimeOffset dateTimeOffset 
= (DateTimeOffset)cloudBlob.Properties.LastModified; + fileNode.LastModifiedTime = dateTimeOffset.UtcDateTime; + } + } +} diff --git a/test/DMLibTest/Framework/CloudFileDataAdaptor.cs b/test/DMLibTest/Framework/CloudFileDataAdaptor.cs new file mode 100644 index 00000000..b8cf82f2 --- /dev/null +++ b/test/DMLibTest/Framework/CloudFileDataAdaptor.cs @@ -0,0 +1,352 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest +{ + using System; + using System.IO; + using System.Text; + using System.Text.RegularExpressions; + using Microsoft.WindowsAzure.Storage.File; + using Microsoft.WindowsAzure.Storage.RetryPolicies; + using MS.Test.Common.MsTestLib; + + internal class CloudFileDataAdaptor : DataAdaptor + { + private TestAccount testAccount; + private CloudFileHelper fileHelper; + private string tempFolder; + private readonly string defaultShareName; + private string shareName; + + public override string StorageKey + { + get + { + return this.fileHelper.Account.Credentials.ExportBase64EncodedKey(); + } + } + + public CloudFileDataAdaptor(TestAccount testAccount, string shareName, SourceOrDest sourceOrDest) + { + this.testAccount = testAccount; + this.fileHelper = new CloudFileHelper(testAccount.Account); + this.shareName = shareName; + this.defaultShareName = shareName; + this.tempFolder = Guid.NewGuid().ToString(); + this.SourceOrDest = sourceOrDest; + } + + public string ShareName + { + get + { + return this.shareName; + } + + set + { + this.shareName = value; + } + } + + public override object GetTransferObject(FileNode fileNode) + { + return this.GetCloudFileReference(fileNode); + } + + public override object GetTransferObject(DirNode dirNode) + { + return this.GetCloudFileDirReference(dirNode); + } + + public override string GetAddress(params string[] list) + { + StringBuilder builder = 
new StringBuilder(); + builder.Append(this.testAccount.GetEndpointBaseUri(EndpointType.File) + "/" + this.shareName + "/"); + + foreach (string token in list) + { + if (!string.IsNullOrEmpty(token)) + { + builder.Append(token); + builder.Append("/"); + } + } + + return builder.ToString(); + } + + public override string GetSecondaryAddress(params string[] list) + { + throw new NotSupportedException("GetSecondaryAddress is not supported in CloudFileDataAdaptor."); + } + + public override void CreateIfNotExists() + { + this.fileHelper.CreateShare(this.shareName); + } + + public override bool Exists() + { + return this.fileHelper.Exists(this.shareName); + } + + public override void WaitForGEO() + { + throw new NotSupportedException("WaitForGEO is not supported in CloudFileDataAdaptor."); + } + + public string MountFileShare() + { + this.fileHelper.CreateShare(this.shareName); + CloudFileShare share = this.fileHelper.FileClient.GetShareReference(this.shareName); + + string cmd = "net"; + string args = string.Format( + "use * {0} {1} /USER:{2}", + string.Format(@"\\{0}\{1}", share.Uri.Host, this.shareName), + this.fileHelper.Account.Credentials.ExportBase64EncodedKey(), + this.fileHelper.Account.Credentials.AccountName); + + string stdout, stderr; + int ret = TestHelper.RunCmd(cmd, args, out stdout, out stderr); + Test.Assert(0 == ret, "mounted to xsmb share successfully"); + Test.Info("stdout={0}, stderr={1}", stdout, stderr); + + Regex r = new Regex(@"Drive (\S+) is now connected to"); + Match m = r.Match(stdout); + if (m.Success) + { + return m.Groups[1].Value; + } + else + { + return null; + } + } + + public void UnmountFileShare(string deviceName) + { + string cmd = "net"; + string args = string.Format("use {0} /DELETE", deviceName); + string stdout, stderr; + int ret = TestHelper.RunCmd(cmd, args, out stdout, out stderr); + Test.Assert(0 == ret, "unmounted {0} successfully", deviceName); + Test.Info("stdout={0}, stderr={1}", stdout, stderr); + } + + public 
CloudFile GetCloudFileReference(FileNode fileNode) + { + var share = this.fileHelper.FileClient.GetShareReference(this.shareName); + string fileName = fileNode.GetURLRelativePath(); + if (fileName.StartsWith("/")) + { + fileName = fileName.Substring(1, fileName.Length - 1); + } + + return share.GetRootDirectoryReference().GetFileReference(fileName); + } + + public CloudFileDirectory GetCloudFileDirReference(DirNode dirNode) + { + var share = this.fileHelper.FileClient.GetShareReference(this.shareName); + string dirName = dirNode.GetURLRelativePath(); + if (dirName.StartsWith("/")) + { + dirName = dirName.Substring(1, dirName.Length - 1); + } + + return share.GetRootDirectoryReference().GetDirectoryReference(dirName); + } + + protected override void GenerateDataImp(DMLibDataInfo dataInfo) + { + fileHelper.CreateShare(this.shareName); + + using (TemporaryTestFolder localTemp = new TemporaryTestFolder(this.tempFolder)) + { + CloudFileDirectory rootCloudFileDir = this.fileHelper.GetDirReference(this.shareName, dataInfo.RootPath); + this.GenerateDir(dataInfo.RootNode, rootCloudFileDir, this.tempFolder); + } + } + + private void GenerateDir(DirNode dirNode, CloudFileDirectory cloudFileDir, string parentPath) + { + string dirPath = Path.Combine(parentPath, dirNode.Name); + DMLibDataHelper.CreateLocalDirIfNotExists(dirPath); + cloudFileDir.CreateIfNotExists(HelperConst.DefaultFileOptions); + + foreach (var subDir in dirNode.DirNodes) + { + CloudFileDirectory subCloudFileDir = cloudFileDir.GetDirectoryReference(subDir.Name); + this.GenerateDir(subDir, subCloudFileDir, dirPath); + } + + foreach (var file in dirNode.FileNodes) + { + CloudFile cloudFile = cloudFileDir.GetFileReference(file.Name); + this.GenerateFile(file, cloudFile, dirPath); + } + } + + private void CheckFileNode(FileNode fileNode) + { + if (fileNode.LastModifiedTime != null) + { + throw new InvalidOperationException("Can't set LastModifiedTime to cloud file"); + } + + if (fileNode.FileAttr != null) + { + 
throw new InvalidOperationException("Can't set file attribute to cloud file"); + } + } + + private void GenerateFile(FileNode fileNode, CloudFile cloudFile, string parentPath) + { + this.CheckFileNode(fileNode); + + string tempFileName = Guid.NewGuid().ToString(); + string localFilePath = Path.Combine(parentPath, tempFileName); + DMLibDataHelper.CreateLocalFile(fileNode, localFilePath); + + FileRequestOptions storeMD5Options = new FileRequestOptions() + { + RetryPolicy = new LinearRetry(TimeSpan.FromSeconds(90), 3), + StoreFileContentMD5 = true, + }; + + cloudFile.UploadFromFile(localFilePath, FileMode.Open, options: storeMD5Options); + + if (null != fileNode.MD5 || + null != fileNode.ContentType || + null != fileNode.CacheControl || + null != fileNode.ContentDisposition || + null != fileNode.ContentEncoding || + null != fileNode.ContentLanguage) + { + // set user defined MD5 to cloud file + cloudFile.Properties.ContentMD5 = fileNode.MD5; + cloudFile.Properties.ContentType = fileNode.ContentType; + cloudFile.Properties.CacheControl = fileNode.CacheControl; + cloudFile.Properties.ContentDisposition = fileNode.ContentDisposition; + cloudFile.Properties.ContentEncoding = fileNode.ContentEncoding; + cloudFile.Properties.ContentLanguage = fileNode.ContentLanguage; + cloudFile.SetProperties(options: HelperConst.DefaultFileOptions); + } + + if (null != fileNode.Metadata && fileNode.Metadata.Count > 0) + { + cloudFile.Metadata.Clear(); + foreach (var metaData in fileNode.Metadata) + { + cloudFile.Metadata.Add(metaData); + } + + cloudFile.SetMetadata(options: HelperConst.DefaultFileOptions); + } + + this.BuildFileNode(cloudFile, fileNode); + } + + public override DMLibDataInfo GetTransferDataInfo(string rootDir) + { + CloudFileDirectory fileDir = fileHelper.QueryFileDirectory(this.shareName, rootDir); + if (fileDir == null) + { + return null; + } + + DMLibDataInfo dataInfo = new DMLibDataInfo(rootDir); + + this.BuildDirNode(fileDir, dataInfo.RootNode); + return dataInfo; + 
} + + public override void Cleanup() + { + this.fileHelper.CleanupShare(this.shareName); + } + + public override void DeleteLocation() + { + this.fileHelper.DeleteShare(this.shareName); + } + + public override void MakePublic() + { + throw new NotSupportedException("MakePublic is not supported in CloudFileDataAdaptor."); + } + + public override void Reset() + { + this.shareName = defaultShareName; + } + + public override string GenerateSAS(SharedAccessPermissions sap, int validatePeriod, string policySignedIdentifier = null) + { + if (null == policySignedIdentifier) + { + if (this.SourceOrDest == SourceOrDest.Dest) + { + this.fileHelper.CreateShare(this.shareName); + } + + return this.fileHelper.GetSASofShare(this.shareName, sap.ToFilePermissions(), validatePeriod, false); + } + else + { + this.fileHelper.CreateShare(this.shareName); + return this.fileHelper.GetSASofShare(this.shareName, sap.ToFilePermissions(), validatePeriod, true, policySignedIdentifier); + } + } + + public override void RevokeSAS() + { + this.fileHelper.ClearSASPolicyofShare(this.shareName); + } + + private void BuildDirNode(CloudFileDirectory cloudDir, DirNode dirNode) + { + foreach (IListFileItem item in cloudDir.ListFilesAndDirectories(HelperConst.DefaultFileOptions)) + { + CloudFile cloudFile = item as CloudFile; + CloudFileDirectory subCloudDir = item as CloudFileDirectory; + + if (cloudFile != null) + { + // Cannot fetch attributes while listing, so do it for each cloud file. 
+ cloudFile.FetchAttributes(options: HelperConst.DefaultFileOptions); + + FileNode fileNode = new FileNode(cloudFile.Name); + this.BuildFileNode(cloudFile, fileNode); + dirNode.AddFileNode(fileNode); + } + else if (subCloudDir != null) + { + DirNode subDirNode = new DirNode(subCloudDir.Name); + this.BuildDirNode(subCloudDir, subDirNode); + dirNode.AddDirNode(subDirNode); + } + } + } + + private void BuildFileNode(CloudFile cloudFile, FileNode fileNode) + { + fileNode.SizeInByte = cloudFile.Properties.Length; + fileNode.MD5 = cloudFile.Properties.ContentMD5; + fileNode.ContentType = cloudFile.Properties.ContentType; + fileNode.CacheControl = cloudFile.Properties.CacheControl; + fileNode.ContentDisposition = cloudFile.Properties.ContentDisposition; + fileNode.ContentEncoding = cloudFile.Properties.ContentEncoding; + fileNode.ContentLanguage = cloudFile.Properties.ContentLanguage; + fileNode.Metadata = cloudFile.Metadata; + + DateTimeOffset dateTimeOffset = (DateTimeOffset)cloudFile.Properties.LastModified; + fileNode.LastModifiedTime = dateTimeOffset.UtcDateTime; + } + } +} diff --git a/test/DMLibTest/Framework/CloudObjectExtensions.cs b/test/DMLibTest/Framework/CloudObjectExtensions.cs new file mode 100644 index 00000000..372f6da6 --- /dev/null +++ b/test/DMLibTest/Framework/CloudObjectExtensions.cs @@ -0,0 +1,79 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest +{ + using System; + using Microsoft.WindowsAzure.Storage.Blob; + using Microsoft.WindowsAzure.Storage.File; + using Microsoft.WindowsAzure.Storage.RetryPolicies; + using Microsoft.WindowsAzure.Storage.Table; + + internal static class CloudObjectExtensions + { + public static string GetShortName(this CloudBlob cloudBlob) + { + CloudBlobDirectory parentDir = cloudBlob.Parent; + + if (null == parentDir) + { + // Root directory 
+ return cloudBlob.Name; + } + + return GetShortNameFromUri(cloudBlob.Uri.ToString(), parentDir.Uri.ToString()); + } + + public static string GetShortName(this CloudBlobDirectory cloudBlobDirectory) + { + CloudBlobDirectory parentDir = cloudBlobDirectory.Parent; + + if (null == parentDir) + { + // Root directory + return String.Empty; + } + + return GetShortNameFromUri(cloudBlobDirectory.Uri.ToString(), parentDir.Uri.ToString()); + } + + private static string GetShortNameFromUri(string uri, string parentUri) + { + string delimiter = "/"; + + if (!parentUri.EndsWith(delimiter, StringComparison.Ordinal)) + { + parentUri += delimiter; + } + + string shortName = uri.Substring(parentUri.Length); + + if (shortName.EndsWith(delimiter, StringComparison.Ordinal)) + { + shortName = shortName.Substring(0, shortName.Length - delimiter.Length); + } + + return Uri.UnescapeDataString(shortName); + } + } + + internal static class HelperConst + { + public static BlobRequestOptions DefaultBlobOptions = new BlobRequestOptions + { + RetryPolicy = new LinearRetry(TimeSpan.FromSeconds(90), 3), + }; + + public static FileRequestOptions DefaultFileOptions = new FileRequestOptions + { + RetryPolicy = new LinearRetry(TimeSpan.FromSeconds(90), 3), + }; + + public static TableRequestOptions DefaultTableOptions = new TableRequestOptions + { + RetryPolicy = new LinearRetry(TimeSpan.FromSeconds(90), 3), + }; + } +} diff --git a/test/DMLibTest/Framework/CopyWrapper.cs b/test/DMLibTest/Framework/CopyWrapper.cs new file mode 100644 index 00000000..fda81eee --- /dev/null +++ b/test/DMLibTest/Framework/CopyWrapper.cs @@ -0,0 +1,43 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest +{ + using System.Threading; + using System.Threading.Tasks; + using Microsoft.WindowsAzure.Storage.DataMovement; + + internal class 
CopyWrapper : DMLibWrapper + { + public CopyWrapper() + { + } + + protected override Task DoTransferImp(TransferItem item) + { + return this.Copy(item.SourceObject, item.DestObject, item); + } + + private Task Copy(dynamic sourceObject, dynamic destObject, TransferItem item) + { + CopyOptions copyOptions = item.Options as CopyOptions; + TransferContext transferContext = item.TransferContext; + CancellationToken cancellationToken = item.CancellationToken; + + if (cancellationToken != null && cancellationToken != CancellationToken.None) + { + return TransferManager.CopyAsync(sourceObject, destObject, item.IsServiceCopy, copyOptions, transferContext, cancellationToken); + } + else if (transferContext != null || copyOptions != null) + { + return TransferManager.CopyAsync(sourceObject, destObject, item.IsServiceCopy, copyOptions, transferContext); + } + else + { + return TransferManager.CopyAsync(sourceObject, destObject, item.IsServiceCopy); + } + } + } +} diff --git a/test/DMLibTest/Framework/DMLibDataHelper.cs b/test/DMLibTest/Framework/DMLibDataHelper.cs new file mode 100644 index 00000000..c94693d5 --- /dev/null +++ b/test/DMLibTest/Framework/DMLibDataHelper.cs @@ -0,0 +1,472 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest +{ + using System; + using System.Collections.Generic; + using System.IO; + using System.Linq; + using System.Text; + using DMLibTestCodeGen; + using Microsoft.WindowsAzure.Storage.Blob; + using Microsoft.WindowsAzure.Storage.File; + using MS.Test.Common.MsTestLib; + + internal static class DMLibDataHelper + { + public static void AddOneFile(DirNode dirNode, string fileName, long fileSizeInKB, FileAttributes? fa = null, DateTime? 
lmt = null) + { + AddOneFileInBytes(dirNode, fileName, 1024L * fileSizeInKB, fa, lmt); + } + + public static void AddOneFileInBytes(DirNode dirNode, string fileName, long fileSizeInB, FileAttributes? fa = null, DateTime? lmt = null) + { + FileNode fileNode = new FileNode(fileName) + { + SizeInByte = fileSizeInB, + FileAttr = fa, + LastModifiedTime = lmt, + }; + + dirNode.AddFileNode(fileNode); + } + + public static FileNode RemoveOneFile(DirNode dirNode, string fileName) + { + return dirNode.DeleteFileNode(fileName); + } + + public static DirNode RemoveOneDir(DirNode parentNode, string dirNodeToDelete) + { + return parentNode.DeleteDirNode(dirNodeToDelete); + } + + public static void AddMultipleFiles(DirNode dirNode, string filePrefix, int fileNumber, int fileSizeInKB, FileAttributes? fa = null, DateTime? lmt = null) + { + DMLibDataHelper.AddTree(dirNode, string.Empty, filePrefix, fileNumber, 0, fileSizeInKB, fa, lmt); + } + + public static void AddMultipleFilesNormalSize(DirNode dirNode, string filePrefix) + { + int[] fileSizes = new int[] { 0, 1, 4000, 4 * 1024, 10000 }; + AddMultipleFilesDifferentSize(dirNode, filePrefix, fileSizes); + } + + public static void AddMultipleFilesBigSize(DirNode dirNode, string filePrefix) + { + int[] fileSizes = new int[] { 32000, 64 * 1024 }; + AddMultipleFilesDifferentSize(dirNode, filePrefix, fileSizes); + } + + public static void AddMultipleFilesDifferentSize(DirNode dirNode, string filePrefix, int[] fileSizes) + { + for (int i = 0; i < fileSizes.Length; ++i) + { + FileNode fileNode = new FileNode(filePrefix + "_" + i) + { + SizeInByte = fileSizes[i] * 1024 + }; + + dirNode.AddFileNode(fileNode); + } + } + + public static void AddMultipleFilesTotalSize(DirNode dirNode, string filePrefix, int fileNumber, int totalSizeInKB, DateTime? lmt = null) + { + int fileSizeInKB = totalSizeInKB / fileNumber; + fileSizeInKB = fileSizeInKB == 0 ? 
1 : fileSizeInKB; + DMLibDataHelper.AddMultipleFiles(dirNode, filePrefix, fileNumber, fileSizeInKB, lmt: lmt); + } + + public static void AddTree(DirNode dirNode, string dirPrefix, string filePrefix, int width, int depth, int fileSizeInKB, FileAttributes? fa = null, DateTime? lmt = null) + { + for (int i = 0; i < width; ++i) + { + string fileName = i == 0 ? filePrefix : filePrefix + "_" + i; + FileNode fileNode = new FileNode(fileName) + { + SizeInByte = 1024L * fileSizeInKB, + FileAttr = fa, + LastModifiedTime = lmt, + }; + + dirNode.AddFileNode(fileNode); + } + + if (depth > 0) + { + for (int i = 0; i < width; ++i) + { + string dirName = i == 0 ? dirPrefix : dirPrefix + "_" + i; + DirNode subDirNode = dirNode.GetDirNode(dirName); + if (subDirNode == null) + { + subDirNode = new DirNode(dirName); + dirNode.AddDirNode(subDirNode); + } + + DMLibDataHelper.AddTree(subDirNode, dirPrefix, filePrefix, width, depth - 1, fileSizeInKB, fa, lmt: lmt); + } + } + } + + public static void AddTreeTotalSize(DirNode dirNode, string dirPrefix, string filePrefix, int width, int depth, int totalSizeInKB, DateTime? lmt = null) + { + int fileNumber; + if (width <= 1) + { + fileNumber = (depth + 1) * width; + } + else + { + int widthPowDepth = width; + for (int i = 0; i < depth; ++i) + { + widthPowDepth *= width; + } + + fileNumber = width * (widthPowDepth - 1) / (width - 1); + } + + int fileSizeInKB = totalSizeInKB / fileNumber; + fileSizeInKB = fileSizeInKB == 0 ? 
1 : fileSizeInKB; + + DMLibDataHelper.AddTree(dirNode, dirPrefix, filePrefix, width, depth, fileSizeInKB, lmt: lmt); + } + + public static void CreateLocalDirIfNotExists(string dirPath) + { + if (!String.Equals(string.Empty, dirPath) && !Directory.Exists(dirPath)) + { + Directory.CreateDirectory(dirPath); + } + } + + public static void CreateLocalFile(FileNode fileNode, string filePath) + { + Helper.GenerateFileInBytes(filePath, fileNode.SizeInByte); + fileNode.AbsolutePath = filePath; + + if (fileNode.LastModifiedTime != null) + { + // Set last modified time + FileInfo fileInfo = new FileInfo(filePath); + fileInfo.LastWriteTimeUtc = (DateTime)fileNode.LastModifiedTime; + } + + if (fileNode.FileAttr != null) + { + // remove default file attribute + FileOp.RemoveFileAttribute(filePath, FileAttributes.Archive); + + // Set file Attributes + FileOp.SetFileAttribute(filePath, (FileAttributes)fileNode.FileAttr); + Test.Info("{0} attr is {1}", filePath, File.GetAttributes(filePath).ToString()); + } + } + + public static FileNode GetFileNode(DirNode dirNode, params string[] tokens) + { + DirNode currentDirNode = dirNode; + + for (int i = 0; i < tokens.Length; ++i) + { + if (i == tokens.Length - 1) + { + FileNode fileNode = currentDirNode.GetFileNode(tokens[i]); + if (fileNode == null) + { + Test.Error("FileNode {0} doesn't exist.", tokens[i]); + return null; + } + + return fileNode; + } + else + { + currentDirNode = currentDirNode.GetDirNode(tokens[i]); + if (currentDirNode == null) + { + Test.Error("DirNode {0} doesn't exist.", tokens[i]); + return null; + } + } + } + + return null; + } + + public static void RemoveAllFileNodesExcept(DirNode rootNode, HashSet except) + { + List nodesToRemove = new List(); + foreach (FileNode fileNode in rootNode.EnumerateFileNodesRecursively()) + { + if (!except.Contains(fileNode)) + { + nodesToRemove.Add(fileNode); + } + } + + foreach(FileNode nodeToRemove in nodesToRemove) + { + nodeToRemove.Parent.DeleteFileNode(nodeToRemove.Name); + } 
+ } + + public static string DetailedInfo(this DMLibDataInfo dataInfo) + { + StringBuilder builder = new StringBuilder(); + builder.AppendLine(string.Format("TransferDataInfo root: {0}", dataInfo.RootPath)); + + foreach (FileNode fileNode in dataInfo.EnumerateFileNodes()) + { + builder.AppendLine(fileNode.DetailedInfo()); + } + + return builder.ToString(); + } + + public static string DetailedInfo(this FileNode fileNode) + { + StringBuilder builder = new StringBuilder(); + builder.AppendFormat("FileNode {0}: MD5 ({1}), LMT ({2})", fileNode.GetURLRelativePath(), fileNode.MD5, fileNode.LastModifiedTime); + + return builder.ToString(); + } + + public static bool Equals(DMLibDataInfo infoA, DMLibDataInfo infoB) + { + bool result; + + bool aIsEmpty = infoA == null || infoA.RootNode.IsEmpty; + bool bIsEmpty = infoB == null || infoB.RootNode.IsEmpty; + + if (aIsEmpty && bIsEmpty) + { + result = true; + } + else if(aIsEmpty || bIsEmpty) + { + result = false; + } + else + { + result = Equals(infoA.RootNode, infoB.RootNode); + } + + if (!result) + { + Test.Info("-----Data Info A-----"); + MultiDirectionTestHelper.PrintTransferDataInfo(infoA); + + Test.Info("-----Data Info B-----"); + MultiDirectionTestHelper.PrintTransferDataInfo(infoB); + } + + return result; + } + + public static bool Equals(DirNode dirNodeA, DirNode dirNodeB) + { + // The same node + if (dirNodeA == dirNodeB) + { + return true; + } + + // Empty node equals to null + if ((dirNodeA == null || dirNodeA.IsEmpty) && + (dirNodeB == null || dirNodeB.IsEmpty)) + { + return true; + } + + // Compare two nodes + if (null != dirNodeA && null != dirNodeB) + { + if (dirNodeA.FileNodeCount != dirNodeB.FileNodeCount || + dirNodeA.NonEmptyDirNodeCount != dirNodeB.NonEmptyDirNodeCount) + { + return false; + } + + foreach(FileNode fileNodeA in dirNodeA.FileNodes) + { + FileNode fileNodeB = dirNodeB.GetFileNode(fileNodeA.Name); + + if (!DMLibDataHelper.Equals(fileNodeA, fileNodeB)) + { + return false; + } + } + + 
foreach(DirNode subDirNodeA in dirNodeA.DirNodes) + { + DirNode subDirNodeB = dirNodeB.GetDirNode(subDirNodeA.Name); + if (!DMLibDataHelper.Equals(subDirNodeA, subDirNodeB)) + { + return false; + } + } + + return true; + } + + return false; + } + + public static bool Equals(FileNode fileNodeA, FileNode fileNodeB) + { + if (fileNodeA == fileNodeB) + { + return true; + } + + if (null != fileNodeA && null != fileNodeB) + { + Test.Info(string.Format("Verify file: ({0},{1}); ({2},{3})", fileNodeA.Name, fileNodeA.MD5, fileNodeB.Name, fileNodeB.MD5)); + + if (!string.Equals(fileNodeA.Name, fileNodeB.Name, StringComparison.Ordinal) || + !PropertiesStringEquals(fileNodeA.MD5, fileNodeB.MD5) || + !PropertiesStringEquals(fileNodeA.CacheControl, fileNodeB.CacheControl) || + !PropertiesStringEquals(fileNodeA.ContentDisposition, fileNodeB.ContentDisposition) || + !PropertiesStringEquals(fileNodeA.ContentEncoding, fileNodeB.ContentEncoding) || + !PropertiesStringEquals(fileNodeA.ContentLanguage, fileNodeB.ContentLanguage)) + { + return false; + } + + if (!MetadataEquals(fileNodeA.Metadata, fileNodeB.Metadata)) + { + return false; + } + + foreach (var keyValuePair in fileNodeA.Metadata) + { + if (!fileNodeB.Metadata.Contains(keyValuePair)) + { + return false; + } + } + + return true; + } + + string name; + if (fileNodeA != null) + { + name = fileNodeA.Name; + } + else + { + name = fileNodeB.Name; + } + + Test.Info("Fail to verify file: {0}", name); + return false; + } + + private static bool MetadataEquals(IDictionary metadataA, IDictionary metadataB) + { + if (metadataA == metadataB) + { + return true; + } + + if (metadataA == null || metadataB == null) + { + return false; + } + + if (metadataA.Count != metadataB.Count) + { + return false; + } + + foreach (var keyValuePair in metadataB) + { + if (!metadataA.Contains(keyValuePair)) + { + return false; + } + } + + return true; + } + + private static bool PropertiesStringEquals(string valueA, string ValueB) + { + if 
(string.IsNullOrEmpty(valueA)) + { + if (string.IsNullOrEmpty(ValueB)) + { + return true; + } + + return false; + } + + return string.Equals(valueA, ValueB, StringComparison.Ordinal); + } + + public static string GetLocalRelativePath(this DataInfoNode node) + { + return Path.Combine(node.PathComponents.ToArray()); + } + + public static string GetURLRelativePath(this DataInfoNode node) + { + return String.Join("/", node.PathComponents); + } + + public static string GetSourceRelativePath(this DataInfoNode node) + { + if (DMLibTestContext.SourceType == DMLibDataType.Local) + { + return node.GetLocalRelativePath(); + } + else + { + return node.GetURLRelativePath(); + } + } + + public static string GetDestRelativePath(this DataInfoNode node) + { + if (DMLibTestContext.DestType == DMLibDataType.Local) + { + return node.GetLocalRelativePath(); + } + else + { + return node.GetURLRelativePath(); + } + } + + public static void SetCalculatedFileMD5(DMLibDataInfo dataInfo, DataAdaptor destAdaptor, bool disableMD5Check = false) + { + foreach (FileNode fileNode in dataInfo.EnumerateFileNodes()) + { + if (DMLibTestBase.IsCloudBlob(DMLibTestContext.DestType)) + { + CloudBlobDataAdaptor cloudBlobDataAdaptor = destAdaptor as CloudBlobDataAdaptor; + CloudBlob cloudBlob = cloudBlobDataAdaptor.GetCloudBlobReference(fileNode); + + fileNode.MD5 = CloudBlobHelper.CalculateMD5ByDownloading(cloudBlob, disableMD5Check); + } + else if (DMLibTestContext.DestType == DMLibDataType.CloudFile) + { + CloudFileDataAdaptor cloudFileDataAdaptor = destAdaptor as CloudFileDataAdaptor; + CloudFile cloudFile = cloudFileDataAdaptor.GetCloudFileReference(fileNode); + + fileNode.MD5 = CloudFileHelper.CalculateMD5ByDownloading(cloudFile, disableMD5Check); + } + + // No need to set md5 for local destination + } + } + } +} diff --git a/test/DMLibTest/Framework/DMLibDataInfo.cs b/test/DMLibTest/Framework/DMLibDataInfo.cs new file mode 100644 index 00000000..7b339c6a --- /dev/null +++ 
b/test/DMLibTest/Framework/DMLibDataInfo.cs @@ -0,0 +1,441 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest +{ + using System; + using System.Collections.Generic; + using System.IO; + using System.Text; + + public class DMLibDataInfo : IDataInfo + { + public DMLibDataInfo(string rootPath) + { + this.RootPath = rootPath; + this.RootNode = new DirNode(string.Empty); + } + + public int FileCount + { + get + { + return this.RootNode.FileNodeCountRecursive; + } + } + + public string RootPath + { + get; + set; + } + + public DirNode RootNode + { + get; + set; + } + + public IEnumerable EnumerateFileNodes() + { + return this.RootNode.EnumerateFileNodesRecursively(); + } + + IDataInfo IDataInfo.Clone() + { + return this.Clone(); + } + + public DMLibDataInfo Clone() + { + return new DMLibDataInfo(this.RootPath) + { + RootNode = this.RootNode.Clone(), + }; + } + + public override string ToString() + { + return this.DetailedInfo(); + } + } + + public class DataInfoNode + { + public string Name + { + get; + set; + } + + public DirNode Parent + { + get; + set; + } + + public IEnumerable PathComponents + { + get + { + if (this.Parent != null) + { + foreach (string component in Parent.PathComponents) + { + yield return component; + } + + yield return this.Name; + } + } + } + } + + public class FileNode : DataInfoNode, IComparable + { + public FileNode(string name) + { + this.Name = name; + } + + public int SnapshotsCount + { + get; + set; + } + + public string MD5 + { + get; + set; + } + + public string CacheControl + { + get; + set; + } + + public string ContentDisposition + { + get; + set; + } + + public string ContentEncoding + { + get; + set; + } + + public string ContentLanguage + { + get; + set; + } + + public IDictionary Metadata + { + get; + set; + } + + public string ContentType + { + 
get; + set; + } + + public DateTime? LastModifiedTime + { + get; + set; + } + + public long SizeInByte + { + get; + set; + } + + public FileAttributes? FileAttr + { + get; + set; + } + + public string AbsolutePath + { + get; + set; + } + + public int CompareTo(FileNode other) + { + return string.Compare(this.Name, other.Name, StringComparison.OrdinalIgnoreCase); + } + + public FileNode Clone(string name = null) + { + // Clone metadata + Dictionary cloneMetaData = null; + if (this.Metadata != null) + { + cloneMetaData = new Dictionary(this.Metadata); + } + + return new FileNode(name ?? this.Name) + { + SnapshotsCount = this.SnapshotsCount, + CacheControl = this.CacheControl, + ContentDisposition = this.ContentDisposition, + ContentEncoding = this.ContentEncoding, + ContentLanguage = this.ContentLanguage, + ContentType = this.ContentType, + MD5 = this.MD5, + Metadata = cloneMetaData, + LastModifiedTime = this.LastModifiedTime, + SizeInByte = this.SizeInByte, + FileAttr = this.FileAttr, + AbsolutePath = this.AbsolutePath, + }; + } + } + + public class DirNode : DataInfoNode, IComparable + { + private Dictionary dirNodeMap; + private Dictionary fileNodeMap; + + public DirNode(string name) + { + this.Name = name; + this.dirNodeMap = new Dictionary(); + this.fileNodeMap = new Dictionary(); + } + + public int FileNodeCountRecursive + { + get + { + int totalCount = this.FileNodeCount; + foreach (DirNode subDirNode in this.DirNodes) + { + totalCount += subDirNode.FileNodeCountRecursive; + } + + return totalCount; + } + } + + public int FileNodeCount + { + get + { + return fileNodeMap.Count; + } + } + + public int DirNodeCount + { + get + { + return dirNodeMap.Count; + } + } + + public int NonEmptyDirNodeCount + { + get + { + int count = 0; + foreach(DirNode subDirNode in dirNodeMap.Values) + { + if (!subDirNode.IsEmpty) + { + count++; + } + } + + return count; + } + } + + public bool IsEmpty + { + get + { + if (this.FileNodeCount != 0) + { + return false; + } + + 
foreach(DirNode subDirNode in dirNodeMap.Values) + { + if (!subDirNode.IsEmpty) + { + return false; + } + } + + return true; + } + } + + public IEnumerable DirNodes + { + get + { + return dirNodeMap.Values; + } + } + + public IEnumerable FileNodes + { + get + { + return fileNodeMap.Values; + } + } + + public int CompareTo(DirNode other) + { + return string.Compare(this.Name, other.Name, StringComparison.OrdinalIgnoreCase); + } + + public FileNode GetFileNode(string name) + { + FileNode result = null; + if (this.fileNodeMap.TryGetValue(name, out result)) + { + return result; + } + + return null; + } + + public DirNode GetDirNode(string name) + { + DirNode result = null; + if (this.dirNodeMap.TryGetValue(name, out result)) + { + return result; + } + + return null; + } + + public void AddDirNode(DirNode dirNode) + { + dirNode.Parent = this; + this.dirNodeMap.Add(dirNode.Name, dirNode); + } + + public void AddFileNode(FileNode fileNode) + { + fileNode.Parent = this; + this.fileNodeMap.Add(fileNode.Name, fileNode); + } + + public FileNode DeleteFileNode(string name) + { + FileNode fn = null; + if (this.fileNodeMap.ContainsKey(name)) + { + fn = this.fileNodeMap[name]; + fn.Parent = null; + this.fileNodeMap.Remove(name); + } + + return fn; + } + + public DirNode DeleteDirNode(string name) + { + DirNode dn = null; + if (this.dirNodeMap.ContainsKey(name)) + { + dn = this.dirNodeMap[name]; + this.dirNodeMap.Remove(name); + } + + return dn; + } + + public DirNode Clone() + { + DirNode newDirNode = new DirNode(this.Name); + + foreach(FileNode fileNode in this.FileNodes) + { + newDirNode.AddFileNode(fileNode.Clone()); + } + + foreach(DirNode dirNode in this.DirNodes) + { + newDirNode.AddDirNode(dirNode.Clone()); + } + + return newDirNode; + } + + public IEnumerable EnumerateFileNodesRecursively() + { + foreach (var fileNode in this.FileNodes) + { + yield return fileNode; + } + + foreach (DirNode subDirNode in this.DirNodes) + { + foreach (var fileNode in 
subDirNode.EnumerateFileNodesRecursively()) + { + yield return fileNode; + } + } + } + + public IEnumerable EnumerateDirNodesRecursively() + { + foreach (DirNode subDirNode in this.DirNodes) + { + foreach (var dirNode in subDirNode.EnumerateDirNodesRecursively()) + { + yield return dirNode; + } + + yield return subDirNode; + } + } + + /// + /// for debug use, show DataInfo in tree format + /// + public void Display(int level) + { + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < level; ++i) + sb.Append("--"); + sb.Append(this.Name); + Console.WriteLine(sb.ToString()); + + foreach (FileNode fn in fileNodeMap.Values) + { + StringBuilder fileNode = new StringBuilder(); + for (int i = 0; i < level + 1; ++i) + { + fileNode.Append("--"); + } + fileNode.Append(fn.Name); + Console.WriteLine(fileNode.ToString()); + } + + foreach (DirNode dn in dirNodeMap.Values) + { + dn.Display(level + 1); + } + } + } +} diff --git a/test/DMLibTest/Framework/DMLibInputHelper.cs b/test/DMLibTest/Framework/DMLibInputHelper.cs new file mode 100644 index 00000000..89e6a3d8 --- /dev/null +++ b/test/DMLibTest/Framework/DMLibInputHelper.cs @@ -0,0 +1,31 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest +{ + using Microsoft.WindowsAzure.Storage.DataMovement; + using MS.Test.Common.MsTestLib; + + public static class DMLibInputHelper + { + public static OverwriteCallback GetDefaultOverwiteCallbackY() + { + return (sourcePath, destPath) => + { + Test.Info("Overwrite true: {0} -> {1}", sourcePath, destPath); + return true; + }; + } + + public static OverwriteCallback GetDefaultOverwiteCallbackN() + { + return (sourcePath, destPath) => + { + Test.Info("Overwrite false: {0} -> {1}", sourcePath, destPath); + return false; + }; + } + } +} diff --git a/test/DMLibTest/Framework/DMLibTestBase.cs 
b/test/DMLibTest/Framework/DMLibTestBase.cs new file mode 100644 index 00000000..fd519a87 --- /dev/null +++ b/test/DMLibTest/Framework/DMLibTestBase.cs @@ -0,0 +1,357 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest +{ + using System; + using System.Collections.Generic; + using System.Linq; + using System.Threading; + using System.Threading.Tasks; + using DMLibTestCodeGen; + using Microsoft.VisualStudio.TestTools.UnitTesting; + using Microsoft.WindowsAzure.Storage; + using Microsoft.WindowsAzure.Storage.DataMovement; + using MS.Test.Common.MsTestLib; + + public class DMLibTestBase : MultiDirectionTestBase + { + private static Dictionary sourceConnectionStrings = new Dictionary(); + private static Dictionary destConnectionStrings = new Dictionary(); + + public const string FolderName = "folder"; + public const string FileName = "testfile"; + public const string DirName = "testdir"; + + public static int FileSizeInKB + { + get; + set; + } + + public static void SetSourceConnectionString(string value, DMLibDataType dataType) + { + string key = DMLibTestBase.GetLocationKey(dataType); + sourceConnectionStrings[key] = value; + } + + public static void SetDestConnectionString(string value, DMLibDataType dataType) + { + string key = DMLibTestBase.GetLocationKey(dataType); + destConnectionStrings[key] = value; + } + + public static string GetSourceConnectionString(DMLibDataType dataType) + { + return GetConnectionString(SourceOrDest.Source, dataType); + } + + public static string GetDestConnectionString(DMLibDataType dataType) + { + return GetConnectionString(SourceOrDest.Dest, dataType); + } + + private static string GetConnectionString(SourceOrDest sourceOrDest, DMLibDataType dataType) + { + IDictionary connectionStrings = SourceOrDest.Source == sourceOrDest ? 
sourceConnectionStrings : destConnectionStrings; + string key = DMLibTestBase.GetLocationKey(dataType); + + string connectionString; + if (connectionStrings.TryGetValue(key, out connectionString)) + { + return connectionString; + } + + if (SourceOrDest.Dest == sourceOrDest) + { + return TestAccounts.Secondary.ConnectionString; + } + else + { + return TestAccounts.Primary.ConnectionString; + } + } + + public static new void BaseClassInitialize(TestContext testContext) + { + MultiDirectionTestBase.BaseClassInitialize(testContext); + FileSizeInKB = int.Parse(Test.Data.Get("FileSize")); + DMLibTestBase.InitializeDataAdaptor(); + } + + private static void InitializeDataAdaptor() + { + var srcBlobTestAccount = new TestAccount(GetSourceConnectionString(DMLibDataType.CloudBlob)); + var destBlobTestAccount = new TestAccount(GetDestConnectionString(DMLibDataType.CloudBlob)); + + var srcFileTestAccount = new TestAccount(GetSourceConnectionString(DMLibDataType.CloudFile)); + var destFileTestAccount = new TestAccount(GetDestConnectionString(DMLibDataType.CloudFile)); + + // Initialize data adaptor for normal location + SetSourceAdaptor(DMLibDataType.Local, new LocalDataAdaptor(DMLibTestBase.SourceRoot + DMLibTestHelper.RandomNameSuffix(), SourceOrDest.Source)); + SetSourceAdaptor(DMLibDataType.Stream, new LocalDataAdaptor(DMLibTestBase.SourceRoot + DMLibTestHelper.RandomNameSuffix(), SourceOrDest.Source, useStream: true)); + SetSourceAdaptor(DMLibDataType.URI, new URIBlobDataAdaptor(srcBlobTestAccount, DMLibTestBase.SourceRoot + DMLibTestHelper.RandomNameSuffix())); + SetSourceAdaptor(DMLibDataType.BlockBlob, new CloudBlobDataAdaptor(srcBlobTestAccount, DMLibTestBase.SourceRoot + DMLibTestHelper.RandomNameSuffix(), BlobType.Block, SourceOrDest.Source)); + SetSourceAdaptor(DMLibDataType.PageBlob, new CloudBlobDataAdaptor(srcBlobTestAccount, DMLibTestBase.SourceRoot + DMLibTestHelper.RandomNameSuffix(), BlobType.Page, SourceOrDest.Source)); + 
SetSourceAdaptor(DMLibDataType.AppendBlob, new CloudBlobDataAdaptor(srcBlobTestAccount, DMLibTestBase.SourceRoot + DMLibTestHelper.RandomNameSuffix(), BlobType.Append, SourceOrDest.Source)); + SetSourceAdaptor(DMLibDataType.CloudFile, new CloudFileDataAdaptor(srcFileTestAccount, DMLibTestBase.SourceRoot + DMLibTestHelper.RandomNameSuffix(), SourceOrDest.Source)); + + SetDestAdaptor(DMLibDataType.Local, new LocalDataAdaptor(DMLibTestBase.DestRoot + DMLibTestHelper.RandomNameSuffix(), SourceOrDest.Dest)); + SetDestAdaptor(DMLibDataType.Stream, new LocalDataAdaptor(DMLibTestBase.DestRoot + DMLibTestHelper.RandomNameSuffix(), SourceOrDest.Dest, useStream: true)); + SetDestAdaptor(DMLibDataType.BlockBlob, new CloudBlobDataAdaptor(destBlobTestAccount, DMLibTestBase.DestRoot + DMLibTestHelper.RandomNameSuffix(), BlobType.Block, SourceOrDest.Dest)); + SetDestAdaptor(DMLibDataType.PageBlob, new CloudBlobDataAdaptor(destBlobTestAccount, DMLibTestBase.DestRoot + DMLibTestHelper.RandomNameSuffix(), BlobType.Page, SourceOrDest.Dest)); + SetDestAdaptor(DMLibDataType.AppendBlob, new CloudBlobDataAdaptor(destBlobTestAccount, DMLibTestBase.DestRoot + DMLibTestHelper.RandomNameSuffix(), BlobType.Append, SourceOrDest.Dest)); + SetDestAdaptor(DMLibDataType.CloudFile, new CloudFileDataAdaptor(destFileTestAccount, DMLibTestBase.DestRoot + DMLibTestHelper.RandomNameSuffix(), SourceOrDest.Dest)); + } + + public TestResult ExecuteTestCase(DMLibDataInfo sourceDataInfo, TestExecutionOptions options) + { + this.CleanupData(); + SourceAdaptor.CreateIfNotExists(); + DestAdaptor.CreateIfNotExists(); + + if (sourceDataInfo != null) + { + SourceAdaptor.GenerateData(sourceDataInfo); + } + + if (options.DestTransferDataInfo != null) + { + DestAdaptor.GenerateData(options.DestTransferDataInfo); + } + + if (options.AfterDataPrepared != null) + { + options.AfterDataPrepared(); + } + + List allItems = new List(); + foreach(var fileNode in sourceDataInfo.EnumerateFileNodes()) + { + TransferItem item = 
new TransferItem() + { + SourceObject = SourceAdaptor.GetTransferObject(fileNode), + DestObject = DestAdaptor.GetTransferObject(fileNode), + SourceType = DMLibTestContext.SourceType, + DestType = DMLibTestContext.DestType, + IsServiceCopy = DMLibTestContext.IsAsync, + }; + + if (options.TransferItemModifier != null) + { + options.TransferItemModifier(fileNode, item); + } + + allItems.Add(item); + } + + return this.RunTransferItems(allItems, options); + } + + public TestResult RunTransferItems(IEnumerable items, TestExecutionOptions options) + { + List allTasks = new List(); + var testResult = new TestResult(); + + try + { + foreach (TransferItem item in items) + { + DMLibWrapper wrapper = GetDMLibWrapper(item.SourceType, item.DestType, DMLibTestContext.IsAsync); + + if (item.BeforeStarted != null) + { + item.BeforeStarted(); + } + + try + { + if (options.LimitSpeed) + { + OperationContext.GlobalSendingRequest += this.LimitSpeed; + TransferManager.Configurations.ParallelOperations = DMLibTestConstants.LimitedSpeedNC; + } + + allTasks.Add(wrapper.DoTransfer(item)); + } + catch (Exception e) + { + testResult.AddException(e); + } + + if (item.AfterStarted != null) + { + item.AfterStarted(); + } + } + + if (options.AfterAllItemAdded != null) + { + options.AfterAllItemAdded(); + } + + try + { + Task.WaitAll(allTasks.ToArray(), options.TimeoutInMs); + } + catch (Exception e) + { + AggregateException ae = e as AggregateException; + if (ae != null) + { + ae = ae.Flatten(); + foreach (var innerE in ae.InnerExceptions) + { + testResult.AddException(innerE); + } + } + else + { + testResult.AddException(e); + } + } + } + finally + { + if (options.LimitSpeed) + { + OperationContext.GlobalSendingRequest -= this.LimitSpeed; + TransferManager.Configurations.ParallelOperations = DMLibTestConstants.DefaultNC; + } + } + + Parallel.ForEach(items, currentItem => currentItem.CloseStreamIfNecessary()); + + if (!options.DisableDestinationFetch) + { + testResult.DataInfo = 
DestAdaptor.GetTransferDataInfo(string.Empty); + } + + foreach (var exception in testResult.Exceptions) + { + Test.Info("Exception from DMLib: {0}", exception.ToString()); + } + + return testResult; + } + + public DMLibWrapper GetDMLibWrapper(DMLibDataType sourceType, DMLibDataType destType, bool isServiceCopy) + { + if (DMLibTestBase.IsLocal(sourceType)) + { + return new UploadWrapper(); + } + else if (DMLibTestBase.IsLocal(destType)) + { + return new DownloadWrapper(); + } + else + { + return new CopyWrapper(); + } + } + + public static object DefaultTransferOptions + { + get + { + return DMLibTestBase.GetDefaultTransferOptions(DMLibTestContext.SourceType, DMLibTestContext.DestType); + } + } + + public static object GetDefaultTransferOptions(DMLibDataType sourceType, DMLibDataType destType) + { + if (DMLibTestBase.IsLocal(sourceType)) + { + return new UploadOptions(); + } + else if (DMLibTestBase.IsLocal(destType)) + { + return new DownloadOptions(); + } + else + { + return new CopyOptions(); + } + } + + public static string MapBlobDataTypeToBlobType(DMLibDataType blobDataType) + { + switch (blobDataType) + { + case DMLibDataType.BlockBlob: + return BlobType.Block; + case DMLibDataType.PageBlob: + return BlobType.Page; + case DMLibDataType.AppendBlob: + return BlobType.Append; + default: + throw new ArgumentException("blobDataType"); + } + } + + private void LimitSpeed(object sender, RequestEventArgs e) + { + Thread.Sleep(100); + } + + public DMLibDataInfo GenerateSourceDataInfo(FileNumOption fileNumOption, string folderName = "") + { + return this.GenerateSourceDataInfo(fileNumOption, DMLibTestBase.FileSizeInKB, folderName); + } + + public DMLibDataInfo GenerateSourceDataInfo(FileNumOption fileNumOption, int totalSizeInKB, string folderName = "") + { + DMLibDataInfo sourceDataInfo = new DMLibDataInfo(folderName); + + if (fileNumOption == FileNumOption.FileTree) + { + DMLibDataHelper.AddTreeTotalSize( + sourceDataInfo.RootNode, + DMLibTestBase.DirName, + 
DMLibTestBase.FileName, + DMLibTestConstants.RecursiveFolderWidth, + DMLibTestConstants.RecursiveFolderDepth, + totalSizeInKB); + } + else if (fileNumOption == FileNumOption.FlatFolder) + { + DMLibDataHelper.AddMultipleFilesTotalSize( + sourceDataInfo.RootNode, + DMLibTestBase.FileName, + DMLibTestConstants.FlatFileCount, + totalSizeInKB); + } + else if (fileNumOption == FileNumOption.OneFile) + { + DMLibDataHelper.AddOneFile(sourceDataInfo.RootNode, DMLibTestBase.FileName, totalSizeInKB); + } + + return sourceDataInfo; + } + + public enum FileNumOption + { + OneFile, + FlatFolder, + FileTree, + } + + public override bool IsCloudService(DMLibDataType dataType) + { + return DMLibDataType.Cloud.HasFlag(dataType); + } + + public static bool IsLocal(DMLibDataType dataType) + { + return dataType == DMLibDataType.Stream || dataType == DMLibDataType.Local; + } + + public static bool IsCloudBlob(DMLibDataType dataType) + { + return DMLibDataType.CloudBlob.HasFlag(dataType); + } + } +} diff --git a/test/DMLibTest/Framework/DMLibWrapper.cs b/test/DMLibTest/Framework/DMLibWrapper.cs new file mode 100644 index 00000000..2d4b650f --- /dev/null +++ b/test/DMLibTest/Framework/DMLibWrapper.cs @@ -0,0 +1,21 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest +{ + using System.Threading.Tasks; + using MS.Test.Common.MsTestLib; + + public abstract class DMLibWrapper + { + public Task DoTransfer(TransferItem item) + { + Test.Info("Do transfer: {0}", item.ToString()); + return this.DoTransferImp(item); + } + + protected abstract Task DoTransferImp(TransferItem item); + } +} diff --git a/test/DMLibTest/Framework/DataAdaptor.cs b/test/DMLibTest/Framework/DataAdaptor.cs new file mode 100644 index 00000000..72ef546f --- /dev/null +++ b/test/DMLibTest/Framework/DataAdaptor.cs @@ -0,0 +1,65 @@ 
+//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest +{ + public abstract class DataAdaptor where TDataInfo : IDataInfo + { + public abstract string StorageKey + { + get; + } + + public SourceOrDest SourceOrDest + { + get; + protected set; + } + + public abstract string GetAddress(params string[] list); + + public abstract string GetSecondaryAddress(params string[] list); + + public abstract object GetTransferObject(FileNode fileNode); + + public abstract object GetTransferObject(DirNode dirNode); + + public abstract void CreateIfNotExists(); + + public abstract bool Exists(); + + public abstract void WaitForGEO(); + + public void GenerateData(TDataInfo dataInfo) + { + this.GenerateDataImp(dataInfo); + + if (SourceOrDest.Source == this.SourceOrDest) + { + MultiDirectionTestInfo.GeneratedSourceDataInfos.Add(dataInfo == null ? dataInfo : dataInfo.Clone()); + } + else + { + MultiDirectionTestInfo.GeneratedDestDataInfos.Add(dataInfo == null ? 
dataInfo : dataInfo.Clone());
            }
        }

        // Abstract surface each concrete adaptor (local dir, blob, cloud file, ...) must provide.
        public abstract TDataInfo GetTransferDataInfo(string rootDir);

        public abstract void Cleanup();

        public abstract void DeleteLocation();

        public abstract string GenerateSAS(SharedAccessPermissions sap, int validatePeriod, string policySignedIdentifier = null);

        public abstract void RevokeSAS();

        public abstract void MakePublic();

        public abstract void Reset();

        protected abstract void GenerateDataImp(TDataInfo dataInfo);
    }
}
diff --git a/test/DMLibTest/Framework/DownloadWrapper.cs b/test/DMLibTest/Framework/DownloadWrapper.cs
new file mode 100644
index 00000000..e10becdf
--- /dev/null
+++ b/test/DMLibTest/Framework/DownloadWrapper.cs
//------------------------------------------------------------------------------
// <copyright file="DownloadWrapper.cs" company="Microsoft">
//    Copyright (c) Microsoft Corporation
// </copyright>
//------------------------------------------------------------------------------
namespace DMLibTest
{
    using System.IO;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.WindowsAzure.Storage.DataMovement;

    /// <summary>
    /// DMLibWrapper that invokes the TransferManager.DownloadAsync overload
    /// matching the optional pieces (options / context / cancellation) carried
    /// by the TransferItem.
    /// </summary>
    internal class DownloadWrapper : DMLibWrapper
    {
        public DownloadWrapper()
        {
        }

        protected override Task DoTransferImp(TransferItem item)
        {
            return this.Download(item.SourceObject, item);
        }

        private Task Download(dynamic sourceObject, TransferItem item)
        {
            DownloadOptions downloadOptions = item.Options as DownloadOptions;
            TransferContext transferContext = item.TransferContext;
            CancellationToken cancellationToken = item.CancellationToken;
            string destPath = item.DestObject as string;
            Stream destStream = item.DestObject as Stream;

            // NOTE(review): CancellationToken is a struct, so "!= null" is always
            // true; the effective test is "!= CancellationToken.None". Kept as-is.
            if (cancellationToken != null && cancellationToken != CancellationToken.None)
            {
                if (destPath != null)
                {
                    return TransferManager.DownloadAsync(sourceObject, destPath, downloadOptions, transferContext, cancellationToken);
                }
                else
                {
                    return TransferManager.DownloadAsync(sourceObject, destStream, downloadOptions, transferContext, cancellationToken);
                }
            }
            else if (transferContext != null || downloadOptions != null)
            {
                if (destPath != null)
                {
                    return TransferManager.DownloadAsync(sourceObject, destPath, downloadOptions, transferContext);
                }
                else
                {
                    return TransferManager.DownloadAsync(sourceObject, destStream, downloadOptions, transferContext);
                }
            }
            else
            {
                if (destPath != null)
                {
                    return TransferManager.DownloadAsync(sourceObject, destPath);
                }
                else
                {
                    return TransferManager.DownloadAsync(sourceObject, destStream);
                }
            }
        }
    }
}
diff --git a/test/DMLibTest/Framework/IDataInfo.cs b/test/DMLibTest/Framework/IDataInfo.cs
new file mode 100644
index 00000000..9d141b28
--- /dev/null
+++ b/test/DMLibTest/Framework/IDataInfo.cs
//------------------------------------------------------------------------------
// <copyright file="IDataInfo.cs" company="Microsoft">
//    Copyright (c) Microsoft Corporation
// </copyright>
//------------------------------------------------------------------------------
namespace DMLibTest
{
    /// <summary>
    /// Snapshot of the data at a transfer source or destination.
    /// </summary>
    public interface IDataInfo
    {
        string ToString();

        IDataInfo Clone();
    }
}
diff --git a/test/DMLibTest/Framework/LocalDataAdaptor.cs b/test/DMLibTest/Framework/LocalDataAdaptor.cs
new file mode 100644
index 00000000..c24a3f76
--- /dev/null
+++ b/test/DMLibTest/Framework/LocalDataAdaptor.cs
//------------------------------------------------------------------------------
// <copyright file="LocalDataAdaptor.cs" company="Microsoft">
//    Copyright (c) Microsoft Corporation
// </copyright>
//------------------------------------------------------------------------------
namespace DMLibTest
{
    using System;
    using System.Collections.Generic;
    using System.IO;

    /// <summary>
    /// Data adaptor over a local directory tree. When useStream is set, transfer
    /// objects are FileStreams instead of file paths.
    /// </summary>
    internal class LocalDataAdaptor : LocalDataAdaptorBase<DMLibDataInfo>
    {
        private bool useStream;

        public LocalDataAdaptor(string basePath, SourceOrDest sourceOrDest, bool useStream = false)
            : base(basePath, sourceOrDest)
        {
            this.useStream = useStream;
        }

        public override object GetTransferObject(FileNode fileNode)
        {
            string filePath = Path.Combine(this.BasePath, fileNode.GetLocalRelativePath());

            if (this.useStream)
            {
                if (SourceOrDest.Source == this.SourceOrDest)
                {
                    return new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read);
                }
                else
                {
                    return new FileStream(filePath, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.None);
                }
            }
            else
            {
                return filePath;
            }
        }

        public override object GetTransferObject(DirNode dirNode)
        {
            if (this.useStream)
            {
                throw new InvalidOperationException("Can't get directory transfer object in stream data adaptor.");
            }

            return Path.Combine(this.BasePath, dirNode.GetLocalRelativePath());
        }

        protected override void GenerateDataImp(DMLibDataInfo dataInfo)
        {
            this.GenerateDir(dataInfo.RootNode, Path.Combine(this.BasePath, dataInfo.RootPath));
        }

        public override DMLibDataInfo GetTransferDataInfo(string rootDir)
        {
            DirectoryInfo rootDirInfo = new DirectoryInfo(Path.Combine(this.BasePath, rootDir));
            if (!rootDirInfo.Exists)
            {
                return null;
            }

            DMLibDataInfo dataInfo = new DMLibDataInfo(rootDir);
            this.BuildDirNode(rootDirInfo, dataInfo.RootNode);

            return dataInfo;
        }

        // Recursively materializes the directory layout described by dirNode.
        private void GenerateDir(DirNode dirNode, string parentPath)
        {
            string dirPath = Path.Combine(parentPath, dirNode.Name);
            DMLibDataHelper.CreateLocalDirIfNotExists(dirPath);

            foreach (var subDir in dirNode.DirNodes)
            {
                GenerateDir(subDir, dirPath);
            }

            foreach (var file in dirNode.FileNodes)
            {
                GenerateFile(file, dirPath);
            }
        }

        // Local files carry no cloud-only attributes; reject any that are set.
        private void CheckFileNode(FileNode fileNode)
        {
            if (fileNode.MD5 != null)
            {
                throw new InvalidOperationException("Can't set MD5 to local file");
            }

            if (fileNode.ContentType != null)
            {
                throw new InvalidOperationException("Can't set ContentType to local file");
            }

            if (fileNode.CacheControl != null)
            {
                throw new InvalidOperationException("Can't set CacheControl to local file");
            }

            if (fileNode.ContentDisposition != null)
            {
                throw new InvalidOperationException("Can't set ContentDisposition to local file");
            }

            if (fileNode.ContentEncoding != null)
            {
                throw new InvalidOperationException("Can't set ContentEncoding to local file");
            }

            if (fileNode.ContentLanguage != null)
            {
                throw new InvalidOperationException("Can't set ContentLanguage to local file");
            }

            if (fileNode.Metadata != null && fileNode.Metadata.Count > 0)
            {
                throw new InvalidOperationException("Can't set Metadata to local file");
            }
        }

        private void GenerateFile(FileNode fileNode, string parentPath)
        {
            this.CheckFileNode(fileNode);

            string localFilePath = Path.Combine(parentPath, fileNode.Name);
            DMLibDataHelper.CreateLocalFile(fileNode, localFilePath);

            FileInfo fileInfo = new FileInfo(localFilePath);

            this.BuildFileNode(fileInfo, fileNode);
        }

        // Mirrors the on-disk tree under dirInfo into the DirNode graph.
        private void BuildDirNode(DirectoryInfo dirInfo, DirNode parent)
        {
            foreach (FileInfo fileInfo in dirInfo.GetFiles())
            {
                FileNode fileNode = new FileNode(fileInfo.Name);
                this.BuildFileNode(fileInfo, fileNode);
                parent.AddFileNode(fileNode);
            }

            foreach (DirectoryInfo subDirInfo in dirInfo.GetDirectories())
            {
                DirNode subDirNode = new DirNode(subDirInfo.Name);
                this.BuildDirNode(subDirInfo, subDirNode);
                parent.AddDirNode(subDirNode);
            }
        }

        private void BuildFileNode(FileInfo fileInfo, FileNode fileNode)
        {
            fileNode.MD5 = Helper.GetFileContentMD5(fileInfo.FullName);
            fileNode.LastModifiedTime = fileInfo.LastWriteTimeUtc;
            fileNode.SizeInByte = fileInfo.Length;
            // Generic arguments were lost in transcription; Dictionary<string, string>
            // matches the metadata usage above — TODO confirm against original source.
            fileNode.Metadata = new Dictionary<string, string>();
        }
    }
}
diff --git a/test/DMLibTest/Framework/LocalDataAdaptorBase.cs b/test/DMLibTest/Framework/LocalDataAdaptorBase.cs
new file mode 100644
index 00000000..38e06223
--- /dev/null
+++ b/test/DMLibTest/Framework/LocalDataAdaptorBase.cs
//------------------------------------------------------------------------------
// <copyright file="LocalDataAdaptorBase.cs" company="Microsoft">
//    Copyright (c) Microsoft Corporation
// </copyright>
//------------------------------------------------------------------------------
namespace DMLibTest
{
    using System;
    using System.IO;

    /// <summary>
    /// Base adaptor for data living in a local folder. Cloud-only operations
    /// (SAS, GEO, public access) are unsupported and throw.
    /// </summary>
    public abstract class LocalDataAdaptorBase<TDataInfo> : DataAdaptor<TDataInfo> where TDataInfo : IDataInfo
    {
        protected string BasePath
        {
            get;
            private set;
        }

        public override string StorageKey
        {
            get
            {
                throw new NotSupportedException("StorageKey is not supported in LocalDataAdaptorBase.");
            }
        }

        public LocalDataAdaptorBase(string basePath, SourceOrDest sourceOrDest)
        {
            // The folder pointed by basePath will be deleted when cleanup.
            this.BasePath = basePath;
            this.SourceOrDest = sourceOrDest;
        }

        public override string GetAddress(params string[] list)
        {
            string address = Path.Combine(this.BasePath, Path.Combine(list));
            return address + Path.DirectorySeparatorChar;
        }

        public override string GetSecondaryAddress(params string[] list)
        {
            throw new NotSupportedException("GetSecondaryAddress is not supported in LocalDataAdaptorBase.");
        }

        public override void CreateIfNotExists()
        {
            if (!Directory.Exists(this.BasePath))
            {
                Directory.CreateDirectory(this.BasePath);
            }
        }

        public override bool Exists()
        {
            return Directory.Exists(this.BasePath);
        }

        public override void WaitForGEO()
        {
            throw new NotSupportedException("WaitForGEO is not supported in LocalDataAdaptorBase.");
        }

        public override void Cleanup()
        {
            Helper.CleanupFolder(this.BasePath);
        }

        public override void DeleteLocation()
        {
            Helper.DeleteFolder(this.BasePath);
        }

        public override void MakePublic()
        {
            throw new NotSupportedException("MakePublic is not supported in LocalDataAdaptorBase.");
        }

        public override void Reset()
        {
            // Nothing to reset
        }

        public override string GenerateSAS(SharedAccessPermissions sap, int validatePeriod, string policySignedIdentifier = null)
        {
            throw new NotSupportedException("GenerateSAS is not supported in LocalDataAdaptorBase.");
        }

        public override void RevokeSAS()
        {
            throw new NotSupportedException("RevokeSAS is not supported in LocalDataAdaptorBase.");
        }
    }
}
diff --git a/test/DMLibTest/Framework/MultiDirectionTestBase.cs b/test/DMLibTest/Framework/MultiDirectionTestBase.cs
new file mode 100644
index 00000000..096357dd
--- /dev/null
+++ b/test/DMLibTest/Framework/MultiDirectionTestBase.cs
//------------------------------------------------------------------------------
// <copyright file="MultiDirectionTestBase.cs" company="Microsoft">
//    Copyright (c) Microsoft Corporation
// </copyright>
//------------------------------------------------------------------------------
namespace DMLibTest
{
    using System;
    using System.Collections.Generic;
    using System.Diagnostics;
    using System.Text;
    using System.Threading.Tasks;
    using DMLibTestCodeGen;
    using Microsoft.VisualStudio.TestTools.UnitTesting;
    using MS.Test.Common.MsTestLib;

    public enum StopDMLibType
    {
        None,
        Kill,
        TestHookCtrlC,
        BreakNetwork
    }

    public enum SourceOrDest
    {
        Source,
        Dest,
    }

    /// <summary>
    /// Base class for tests that run the same scenario across multiple
    /// source/destination location types, keyed by TDataType.
    /// </summary>
    public abstract class MultiDirectionTestBase<TDataInfo, TDataType>
        where TDataInfo : IDataInfo
        where TDataType : struct
    {
        public const string SourceRoot = "sourceroot";
        public const string DestRoot = "destroot";
        public const string SourceFolder = "sourcefolder";
        public const string DestFolder = "destfolder";

        protected static Random random = new Random();

        // Adaptors registered per data type; key is produced by GetLocationKey.
        private static Dictionary<string, DataAdaptor<TDataInfo>> sourceAdaptors = new Dictionary<string, DataAdaptor<TDataInfo>>();
        private static Dictionary<string, DataAdaptor<TDataInfo>> destAdaptors = new Dictionary<string, DataAdaptor<TDataInfo>>();

        private TestContext testContextInstance;

        /// <summary>
        /// Gets or sets the test context which provides
        /// information about and functionality for the current test run.
        /// </summary>
        public TestContext TestContext
        {
            get
            {
                return testContextInstance;
            }
            set
            {
                testContextInstance = value;
            }
        }

        public static string NetworkShare
        {
            get;
            set;
        }

        public static bool CleanupSource
        {
            get;
            set;
        }

        public static bool CleanupDestination
        {
            get;
            set;
        }

        public static DataAdaptor<TDataInfo> SourceAdaptor
        {
            get
            {
                return GetSourceAdaptor(MultiDirectionTestContext.SourceType);
            }
        }

        public static DataAdaptor<TDataInfo> DestAdaptor
        {
            get
            {
                return GetDestAdaptor(MultiDirectionTestContext.DestType);
            }
        }

        public static void BaseClassInitialize(TestContext testContext)
        {
            Test.Info("ClassInitialize");
            Test.FullClassName = testContext.FullyQualifiedTestClassName;

            MultiDirectionTestBase.CleanupSource = true;
            MultiDirectionTestBase.CleanupDestination = true;

            NetworkShare = Test.Data.Get("NetworkFolder");
        }

        public static void BaseClassCleanup()
        {
            Test.Info("ClassCleanup");
            DeleteAllLocations(sourceAdaptors);
            DeleteAllLocations(destAdaptors);
            Test.Info("ClassCleanup done.");
        }

        private static void DeleteAllLocations(Dictionary<string, DataAdaptor<TDataInfo>> adaptorDic)
        {
            Parallel.ForEach(adaptorDic, pair =>
            {
                try
                {
                    pair.Value.DeleteLocation();
                }
                catch
                {
                    Test.Warn("Fail to delete location for data adaptor: {0}", pair.Key);
                }
            });
        }

        public virtual void BaseTestInitialize()
        {
            Test.Start(TestContext.FullyQualifiedTestClassName, TestContext.TestName);
            Test.Info("TestInitialize");

            MultiDirectionTestInfo.Cleanup();
        }

        public virtual void BaseTestCleanup()
        {
            if (Test.ErrorCount > 0)
            {
                MultiDirectionTestInfo.Print();
            }

            Test.Info("TestCleanup");
            Test.End(TestContext.FullyQualifiedTestClassName, TestContext.TestName);

            try
            {
                this.CleanupData();
                MultiDirectionTestBase.SourceAdaptor.Reset();
                MultiDirectionTestBase.DestAdaptor.Reset();
            }
            catch
            {
                // ignore exception
            }
        }

        public virtual void CleanupData()
        {
            this.CleanupData(
                MultiDirectionTestBase.CleanupSource,
                MultiDirectionTestBase.CleanupDestination);
        }

        protected void CleanupData(bool cleanupSource, bool cleanupDestination)
        {
            if (cleanupSource)
            {
                MultiDirectionTestBase.SourceAdaptor.Cleanup();
            }

            if (cleanupDestination)
            {
                MultiDirectionTestBase.DestAdaptor.Cleanup();
            }
        }

        protected static string GetLocationKey(TDataType dataType)
        {
            return dataType.ToString();
        }

        public static DataAdaptor<TDataInfo> GetSourceAdaptor(TDataType dataType)
        {
            string key = MultiDirectionTestBase.GetLocationKey(dataType);

            if (!sourceAdaptors.ContainsKey(key))
            {
                throw new KeyNotFoundException(
                    string.Format("Can't find key of source data adaptor. DataType:{0}.", dataType.ToString()));
            }

            return sourceAdaptors[key];
        }

        public static DataAdaptor<TDataInfo> GetDestAdaptor(TDataType dataType)
        {
            string key = MultiDirectionTestBase.GetLocationKey(dataType);

            if (!destAdaptors.ContainsKey(key))
            {
                throw new KeyNotFoundException(
                    string.Format("Can't find key of destination data adaptor. DataType:{0}.", dataType.ToString()));
            }

            return destAdaptors[key];
        }

        protected static void SetSourceAdaptor(TDataType dataType, DataAdaptor<TDataInfo> adaptor)
        {
            string key = MultiDirectionTestBase.GetLocationKey(dataType);
            sourceAdaptors[key] = adaptor;
        }

        protected static void SetDestAdaptor(TDataType dataType, DataAdaptor<TDataInfo> adaptor)
        {
            string key = MultiDirectionTestBase.GetLocationKey(dataType);
            destAdaptors[key] = adaptor;
        }

        public abstract bool IsCloudService(TDataType dataType);

        public static CredentialType GetRandomCredentialType()
        {
            int credentialCount = Enum.GetNames(typeof(CredentialType)).Length;
            int randomNum = MultiDirectionTestBase.random.Next(0, credentialCount);

            CredentialType result;
            switch (randomNum)
            {
                case 0:
                    result = CredentialType.None;
                    break;
                case 1:
                    result = CredentialType.Public;
                    break;
                case 2:
                    result = CredentialType.Key;
                    break;
                case 3:
                    result = CredentialType.SAS;
                    break;
                default:
                    result = CredentialType.EmbeddedSAS;
                    break;
            }

            Test.Info("Random credential type: {0}", result.ToString());
            return result;
        }

        protected static string GetRelativePath(string basePath, string fullPath)
        {
            string normalizedBasePath = MultiDirectionTestBase.NormalizePath(basePath);
            string normalizedFullPath = MultiDirectionTestBase.NormalizePath(fullPath);

            int index = normalizedFullPath.IndexOf(normalizedBasePath);

            if (index < 0)
            {
                return null;
            }

            return normalizedFullPath.Substring(index + normalizedBasePath.Length);
        }

        // Strips surrounding quotes and, when the path parses as a URI, returns
        // its unescaped path component; otherwise returns the input unchanged.
        protected static string NormalizePath(string path)
        {
            if (path.StartsWith("\"") && path.EndsWith("\""))
            {
                path = path.Substring(1, path.Length - 2);
            }

            try
            {
                var uri = new Uri(path);
                return uri.GetComponents(UriComponents.Path, UriFormat.Unescaped);
            }
            catch (UriFormatException)
            {
                return path;
            }
        }
    }

    public enum CredentialType
    {
        None = 0,
        Public,
        Key,
        SAS,
        EmbeddedSAS,
    }
}
diff --git
a/test/DMLibTest/Framework/MultiDirectionTestHelper.cs b/test/DMLibTest/Framework/MultiDirectionTestHelper.cs
new file mode 100644
index 00000000..89e6832e
--- /dev/null
+++ b/test/DMLibTest/Framework/MultiDirectionTestHelper.cs
//------------------------------------------------------------------------------
// <copyright file="MultiDirectionTestHelper.cs" company="Microsoft">
//    Copyright (c) Microsoft Corporation
// </copyright>
//------------------------------------------------------------------------------
namespace DMLibTest
{
    using System;
    using System.IO;
    using System.Linq;
    using System.Threading;
    using DMLibTestCodeGen;
    using Microsoft.WindowsAzure.Storage;
    using Microsoft.WindowsAzure.Storage.Blob;
    using Microsoft.WindowsAzure.Storage.File;
    using MS.Test.Common.MsTestLib;

    public static class MultiDirectionTestHelper
    {
        /// <summary>
        /// Polls until the file represented by fileNode exists at the location
        /// behind dataAdaptor, or fails the test after timeoutInSec seconds.
        /// </summary>
        // NOTE(review): generic argument of dataAdaptor was lost in transcription;
        // DMLibDataInfo matches the DMLibDataType-based dispatch below — confirm.
        public static void WaitUntilFileCreated(FileNode fileNode, DataAdaptor<DMLibDataInfo> dataAdaptor, DMLibDataType dataType, int timeoutInSec = 300)
        {
            Func<bool> checkFileCreated = null;

            if (dataType == DMLibDataType.Local)
            {
                string filePath = dataAdaptor.GetAddress() + fileNode.GetLocalRelativePath();
                checkFileCreated = () =>
                {
                    return File.Exists(filePath);
                };
            }
            else if (dataType == DMLibDataType.PageBlob ||
                dataType == DMLibDataType.AppendBlob)
            {
                CloudBlobDataAdaptor blobAdaptor = dataAdaptor as CloudBlobDataAdaptor;

                checkFileCreated = () =>
                {
                    CloudBlob cloudBlob = blobAdaptor.GetCloudBlobReference(fileNode);
                    return cloudBlob.Exists(options: HelperConst.DefaultBlobOptions);
                };
            }
            else if (dataType == DMLibDataType.BlockBlob)
            {
                CloudBlobDataAdaptor blobAdaptor = dataAdaptor as CloudBlobDataAdaptor;

                checkFileCreated = () =>
                {
                    CloudBlockBlob blockBlob = blobAdaptor.GetCloudBlobReference(fileNode) as CloudBlockBlob;
                    try
                    {
                        return blockBlob.DownloadBlockList(BlockListingFilter.All, options: HelperConst.DefaultBlobOptions).Any();
                    }
                    catch (StorageException)
                    {
                        return false;
                    }
                };
            }
            else if (dataType == DMLibDataType.CloudFile)
            {
                CloudFileDataAdaptor fileAdaptor = dataAdaptor as CloudFileDataAdaptor;

                checkFileCreated = () =>
                {
                    CloudFile cloudFile = fileAdaptor.GetCloudFileReference(fileNode);
                    return cloudFile.Exists(options: HelperConst.DefaultFileOptions);
                };
            }
            else
            {
                // Fix: report the dataType argument (the original logged
                // DMLibTestContext.SourceType) and bail out instead of passing
                // a null delegate to WaitUntil.
                Test.Error("Unexpected data type: {0}", dataType);
                return;
            }

            MultiDirectionTestHelper.WaitUntil(checkFileCreated, timeoutInSec);
        }

        private static void WaitUntil(Func<bool> condition, int timeoutInSec)
        {
            DateTime nowTime = DateTime.Now;
            DateTime timeOut = nowTime.AddSeconds(timeoutInSec);
            while (timeOut > DateTime.Now)
            {
                if (condition())
                {
                    return;
                }

                Thread.Sleep(100);
            }

            Test.Error("WaitUntil: condition doesn't meet within timeout {0} second(s).", timeoutInSec);
        }

        public static void PrintTransferDataInfo(IDataInfo dataInfo)
        {
            if (null == dataInfo)
            {
                Test.Info("TransferDataInfo is null");
            }
            else
            {
                Test.Info(dataInfo.ToString());
            }
        }
    }
}
diff --git a/test/DMLibTest/Framework/MultiDirectionTestInfo.cs b/test/DMLibTest/Framework/MultiDirectionTestInfo.cs
new file mode 100644
index 00000000..bec3b9dc
--- /dev/null
+++ b/test/DMLibTest/Framework/MultiDirectionTestInfo.cs
//------------------------------------------------------------------------------
// <copyright file="MultiDirectionTestInfo.cs" company="Microsoft">
//    Copyright (c) Microsoft Corporation
// </copyright>
//------------------------------------------------------------------------------
namespace DMLibTest
{
    using System.Collections.Generic;
    using MS.Test.Common.MsTestLib;

    /// <summary>
    /// Records the data infos generated during a test so they can be dumped
    /// when the test fails.
    /// </summary>
    public class MultiDirectionTestInfo
    {
        public static List<IDataInfo> GeneratedSourceDataInfos = new List<IDataInfo>();
        public static List<IDataInfo> GeneratedDestDataInfos = new List<IDataInfo>();

        public static void Cleanup()
        {
            MultiDirectionTestInfo.GeneratedSourceDataInfos.Clear();
            MultiDirectionTestInfo.GeneratedDestDataInfos.Clear();
        }

        public static void Print()
        {
            Test.Info("-----Source Data-----");
            foreach (var sourceDataInfo in MultiDirectionTestInfo.GeneratedSourceDataInfos)
            {
                MultiDirectionTestHelper.PrintTransferDataInfo(sourceDataInfo);
            }

            Test.Info("-----Dest Data-----");
            foreach (var destDataInfo in MultiDirectionTestInfo.GeneratedDestDataInfos)
            {
                MultiDirectionTestHelper.PrintTransferDataInfo(destDataInfo);
            }
        }
    }
}
diff --git a/test/DMLibTest/Framework/ProgressChecker.cs b/test/DMLibTest/Framework/ProgressChecker.cs
new file mode 100644
index 00000000..aec055df
--- /dev/null
+++ b/test/DMLibTest/Framework/ProgressChecker.cs
//------------------------------------------------------------------------------
// <copyright file="ProgressChecker.cs" company="Microsoft">
//    Copyright (c) Microsoft Corporation
// </copyright>
//------------------------------------------------------------------------------
namespace DMLibTest
{
    using System;
    using System.Threading;
    using Microsoft.WindowsAzure.Storage.DataMovement;
    using MS.Test.Common.MsTestLib;

    /// <summary>
    /// Progress sink that asserts each reported counter is monotonically
    /// non-decreasing and never exceeds its expected maximum.
    /// </summary>
    public class ProgressChecker : IProgress<TransferProgress>
    {
        private ProgressValue<long> transferedNumber = new ProgressValue<long>();
        private ProgressValue<long> failedNumber = new ProgressValue<long>();
        private ProgressValue<long> skippedNumber = new ProgressValue<long>();
        private ProgressValue<long> transferedBytes = new ProgressValue<long>();
        private long totalNumber = 0;
        private long totalBytes = 0;
        private ManualResetEvent dataTransferred;

        public ProgressChecker(long totalNumber, long totalBytes) : this(totalNumber, totalBytes, totalNumber, 0, 0, totalBytes)
        {
        }

        public ProgressChecker(long totalNumber, long totalBytes, long transferedNumber, long failedNumber, long skippedNumber, long transferedBytes)
        {
            this.totalNumber = totalNumber;
            this.totalBytes = totalBytes;
            this.transferedNumber.MaxValue = transferedNumber;
            this.failedNumber.MaxValue = failedNumber;
            this.skippedNumber.MaxValue = skippedNumber;
            this.transferedBytes.MaxValue = transferedBytes;
            this.dataTransferred = new ManualResetEvent(false);
        }

        public IProgress<TransferProgress> GetProgressHandler()
        {
            return this;
        }

        public void Report(TransferProgress progress)
        {
            this.dataTransferred.Set();
            Test.Info("Check progress: {0}", progress.BytesTransferred);
            this.CheckIncrease(this.transferedBytes, progress.BytesTransferred, "BytesTransferred");
            this.CheckIncrease(this.transferedNumber, progress.NumberOfFilesTransferred, "NumberOfFilesTransferred");
            this.CheckIncrease(this.failedNumber, progress.NumberOfFilesFailed, "NumberOfFilesFailed");
            this.CheckIncrease(this.skippedNumber, progress.NumberOfFilesSkipped, "NumberOfFilesSkipped");
        }

        public void Reset()
        {
            this.transferedNumber.PreviousValue = 0;
            this.failedNumber.PreviousValue = 0;
            this.skippedNumber.PreviousValue = 0;
            this.transferedBytes.PreviousValue = 0;
            this.dataTransferred.Reset();
        }

        public WaitHandle DataTransferred
        {
            get
            {
                return this.dataTransferred;
            }
        }

        private void CheckEqual<T>(T expectedValue, T currentValue, string valueName) where T : IComparable<T>
        {
            if (currentValue.CompareTo(expectedValue) != 0)
            {
                Test.Error("Wrong {0} value: {1}, expected value: {2}", valueName, currentValue, expectedValue);
            }
        }

        // Fails the test if the counter went backwards or passed its maximum,
        // then remembers the value for the next report.
        private void CheckIncrease<T>(ProgressValue<T> progressValue, T currentValue, string valueName) where T : IComparable<T>
        {
            if (currentValue.CompareTo(progressValue.PreviousValue) < 0 ||
                currentValue.CompareTo(progressValue.MaxValue) > 0)
            {
                Test.Error("Wrong {0} value: {1}, previous value: {2}, max value {3}", valueName, currentValue, progressValue.PreviousValue, progressValue.MaxValue);
            }

            progressValue.PreviousValue = currentValue;
        }
    }

    class ProgressValue<T> where T : IComparable<T>
    {
        public ProgressValue()
        {
            this.MaxValue = default(T);
            this.PreviousValue = default(T);
        }

        public T MaxValue;
        public T PreviousValue;
    }
}
diff --git a/test/DMLibTest/Framework/SharedAccessPermissions.cs b/test/DMLibTest/Framework/SharedAccessPermissions.cs
new file mode 100644
index 00000000..43bbb125
--- /dev/null
+++ b/test/DMLibTest/Framework/SharedAccessPermissions.cs
//------------------------------------------------------------------------------
// <copyright file="SharedAccessPermissions.cs" company="Microsoft">
//    Copyright (c) Microsoft Corporation
// </copyright>
//------------------------------------------------------------------------------
namespace DMLibTest
{
    using System;
    using Microsoft.WindowsAzure.Storage.Blob;
    using Microsoft.WindowsAzure.Storage.File;
    using Microsoft.WindowsAzure.Storage.Table;

    /// <summary>
    /// Service-agnostic SAS permission flags; member names deliberately mirror
    /// the blob/file/table-specific enums so Enum.Parse can translate them.
    /// </summary>
    [Flags]
    public enum SharedAccessPermissions
    {
        None = 0,
        Read = 1,
        Write = 2,
        Delete = 4,
        List = 8,
        Query = 16,
        Add = 32,
        Update = 64,
    }

    public static class SharedAccessPermissionsExtensions
    {
        public const SharedAccessPermissions LeastPermissionDest = SharedAccessPermissions.Write | SharedAccessPermissions.Read;
        public const SharedAccessPermissions LeastPermissionSource = SharedAccessPermissions.List | SharedAccessPermissions.Read;
        public const SharedAccessPermissions LeastPermissionSourceList = SharedAccessPermissions.List;

        public static SharedAccessBlobPermissions ToBlobPermissions(this SharedAccessPermissions sap)
        {
            return (SharedAccessBlobPermissions)Enum.Parse(typeof(SharedAccessBlobPermissions), sap.ToString());
        }

        public static SharedAccessFilePermissions ToFilePermissions(this SharedAccessPermissions sap)
        {
            return (SharedAccessFilePermissions)Enum.Parse(typeof(SharedAccessFilePermissions), sap.ToString());
        }

        public static SharedAccessTablePermissions ToTablePermissions(this SharedAccessPermissions sap)
        {
            return (SharedAccessTablePermissions)Enum.Parse(typeof(SharedAccessTablePermissions), sap.ToString());
        }

        public static SharedAccessPermissions ToCommonPermissions(this Enum specificPermissions)
        {
            return (SharedAccessPermissions)Enum.Parse(typeof(SharedAccessPermissions), specificPermissions.ToString());
        }
    }
}
diff --git a/test/DMLibTest/Framework/TestExecutionOptions.cs b/test/DMLibTest/Framework/TestExecutionOptions.cs
new file mode 100644
index 00000000..f344ed5a
--- /dev/null
+++ b/test/DMLibTest/Framework/TestExecutionOptions.cs
//------------------------------------------------------------------------------
// <copyright file="TestExecutionOptions.cs" company="Microsoft">
//    Copyright (c) Microsoft Corporation
// </copyright>
//------------------------------------------------------------------------------
namespace DMLibTest
{
    using System;

    /// <summary>
    /// Knobs controlling how a multi-direction test execution runs.
    /// </summary>
    public class TestExecutionOptions<TDataInfo> where TDataInfo : IDataInfo
    {
        public const int DefaultTimeoutInMs = 20 * 60 * 1000; // 20 min

        public TestExecutionOptions()
        {
            this.TimeoutInMs = DefaultTimeoutInMs;
            this.DestTransferDataInfo = default(TDataInfo);
            this.DisableDestinationFetch = false;
            this.LimitSpeed = false;
        }

        public int TimeoutInMs
        {
            get;
            set;
        }

        public TDataInfo DestTransferDataInfo
        {
            get;
            set;
        }

        public bool DisableDestinationFetch
        {
            get;
            set;
        }

        public bool LimitSpeed
        {
            get;
            set;
        }

        // NOTE(review): the generic argument lists of these delegates appear to
        // have been lost in transcription — verify their signatures against the
        // call sites before relying on them.
        public Action TransferItemModifier;

        public Action AfterDataPrepared;

        public Action AfterAllItemAdded;
    }
}
diff --git a/test/DMLibTest/Framework/TestResult.cs b/test/DMLibTest/Framework/TestResult.cs
new file mode 100644
index 00000000..fe63f3c1
--- /dev/null
+++ b/test/DMLibTest/Framework/TestResult.cs
//------------------------------------------------------------------------------
// <copyright file="TestResult.cs" company="Microsoft">
//    Copyright (c) Microsoft Corporation
// </copyright>
//------------------------------------------------------------------------------
namespace DMLibTest
{
    using System;
    using System.Collections;
    using System.Collections.Generic;

    /// <summary>
    /// Outcome of a test execution: the resulting data info plus any exceptions
    /// thrown by the transfers.
    /// </summary>
    public class TestResult<TDataInfo> where TDataInfo : IDataInfo
    {
        private List<Exception> exceptions;

        public TestResult()
        {
            this.exceptions = new List<Exception>();
            this.DataInfo = default(TDataInfo);
        }

        public TDataInfo DataInfo
        {
            set;
            get;
        }

        public List<Exception> Exceptions
        {
            get
            {
                return this.exceptions;
            }
        }

        public void AddException(Exception e)
        {
            this.exceptions.Add(e);
        }
    }
}
diff
--git a/test/DMLibTest/Framework/TransferItem.cs b/test/DMLibTest/Framework/TransferItem.cs
new file mode 100644
index 00000000..fd405215
--- /dev/null
+++ b/test/DMLibTest/Framework/TransferItem.cs
//------------------------------------------------------------------------------
// <copyright file="TransferItem.cs" company="Microsoft">
//    Copyright (c) Microsoft Corporation
// </copyright>
//------------------------------------------------------------------------------

namespace DMLibTest
{
    using System;
    using System.Globalization;
    using System.IO;
    using System.Threading;
    using DMLibTestCodeGen;
    using Microsoft.WindowsAzure.Storage.Blob;
    using Microsoft.WindowsAzure.Storage.DataMovement;
    using Microsoft.WindowsAzure.Storage.File;

    /// <summary>
    /// Describes one transfer: source/destination objects, their types, transfer
    /// options, context, cancellation, and test callbacks.
    /// </summary>
    public class TransferItem
    {
        public object SourceObject
        {
            get;
            set;
        }

        public object DestObject
        {
            get;
            set;
        }

        public DMLibDataType SourceType
        {
            get;
            set;
        }

        public DMLibDataType DestType
        {
            get;
            set;
        }

        public bool IsServiceCopy
        {
            get;
            set;
        }

        public object Options
        {
            get;
            set;
        }

        public TransferContext TransferContext
        {
            get;
            set;
        }

        public CancellationToken CancellationToken
        {
            get;
            set;
        }

        public Action BeforeStarted
        {
            get;
            set;
        }

        public Action AfterStarted
        {
            get;
            set;
        }

        public void CloseStreamIfNecessary()
        {
            Stream sourceStream = this.SourceObject as Stream;
            Stream destStream = this.DestObject as Stream;

            if (sourceStream != null)
            {
                sourceStream.Close();
            }

            if (destStream != null)
            {
                destStream.Close();
            }
        }

        /// <summary>
        /// Copies the item with fresh storage references so the clone does not
        /// share client state; context, cancellation and callbacks are not cloned.
        /// </summary>
        public TransferItem Clone()
        {
            TransferItem newTransferItem = new TransferItem()
            {
                SourceObject = NewLocationObject(this.SourceObject),
                DestObject = NewLocationObject(this.DestObject),
                SourceType = this.SourceType,
                DestType = this.DestType,
                IsServiceCopy = this.IsServiceCopy,
                Options = this.Options,
            };

            return newTransferItem;
        }

        // Rebuilds a blob/file reference from its URI + credentials; anything
        // else (paths, streams) is returned as-is.
        private static object NewLocationObject(object locationObject)
        {
            if (locationObject is CloudBlob)
            {
                CloudBlob cloudBlob = locationObject as CloudBlob;
                if (cloudBlob is CloudPageBlob)
                {
                    return new CloudPageBlob(cloudBlob.SnapshotQualifiedUri, cloudBlob.ServiceClient.Credentials);
                }
                else if (cloudBlob is CloudBlockBlob)
                {
                    return new CloudBlockBlob(cloudBlob.SnapshotQualifiedUri, cloudBlob.ServiceClient.Credentials);
                }
                else if (cloudBlob is CloudAppendBlob)
                {
                    return new CloudAppendBlob(cloudBlob.SnapshotQualifiedUri, cloudBlob.ServiceClient.Credentials);
                }
                else
                {
                    throw new ArgumentException(string.Format("Unsupported blob type: {0}", cloudBlob.BlobType), "locationObject");
                }
            }
            else if (locationObject is CloudFile)
            {
                CloudFile cloudFile = locationObject as CloudFile;
                CloudFile newCloudFile = new CloudFile(cloudFile.Uri, cloudFile.ServiceClient.Credentials);
                return newCloudFile;
            }
            else
            {
                return locationObject;
            }
        }

        public override string ToString()
        {
            return string.Format(
                CultureInfo.InvariantCulture,
                "{0} -> {1}",
                this.GetDataObjectString(this.SourceObject),
                this.GetDataObjectString(this.DestObject));
        }

        private string GetDataObjectString(object dataObject)
        {
            if (dataObject is CloudBlob)
            {
                return (dataObject as CloudBlob).SnapshotQualifiedUri.ToString();
            }
            else if (dataObject is CloudFile)
            {
                return (dataObject as CloudFile).Uri.ToString();
            }

            return dataObject.ToString();
        }
    }
}
diff --git a/test/DMLibTest/Framework/URIBlobDataAdaptor.cs b/test/DMLibTest/Framework/URIBlobDataAdaptor.cs
new file mode 100644
index 00000000..a7e9f516
--- /dev/null
+++ b/test/DMLibTest/Framework/URIBlobDataAdaptor.cs
//------------------------------------------------------------------------------
// <copyright file="URIBlobDataAdaptor.cs" company="Microsoft">
//    Copyright (c) Microsoft Corporation
// </copyright>
//------------------------------------------------------------------------------
namespace DMLibTest
{
    using System;
    using DMLibTestCodeGen;
    using BlobTypeConst = DMLibTest.BlobType;

    /// <summary>
    /// Blob source adaptor that exposes blobs by public URI (container is made
    /// public on construction and kept public across Reset).
    /// </summary>
    internal class URIBlobDataAdaptor : CloudBlobDataAdaptor
    {
        public URIBlobDataAdaptor(TestAccount testAccount, string containerName)
            : base(testAccount, containerName, BlobTypeConst.Block, SourceOrDest.Source)
        {
            base.MakePublic();
        }

        public override void Reset()
        {
            // Do nothing, keep the container public
        }

        public override object GetTransferObject(FileNode fileNode)
        {
            return base.GetCloudBlobReference(fileNode).Uri;
        }

        public override object GetTransferObject(DirNode dirNode)
        {
            throw new InvalidOperationException("Can't get directory transfer object in URI data adaptor.");
        }

        // Source blob type mirrors the destination's blob flavor so that
        // service-side copies are homogeneous.
        protected override string BlobType
        {
            get
            {
                DMLibDataType destDataType = DMLibTestContext.DestType;
                if (destDataType == DMLibDataType.PageBlob)
                {
                    return BlobTypeConst.Page;
                }
                else if (destDataType == DMLibDataType.AppendBlob)
                {
                    return BlobTypeConst.Append;
                }
                else
                {
                    return BlobTypeConst.Block;
                }
            }
        }
    }
}
diff --git a/test/DMLibTest/Framework/UploadWrapper.cs b/test/DMLibTest/Framework/UploadWrapper.cs
new file mode 100644
index 00000000..525a9d62
--- /dev/null
+++ b/test/DMLibTest/Framework/UploadWrapper.cs
//------------------------------------------------------------------------------
// <copyright file="UploadWrapper.cs" company="Microsoft">
//    Copyright (c) Microsoft Corporation
// </copyright>
//------------------------------------------------------------------------------
namespace DMLibTest
{
    using System.IO;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.WindowsAzure.Storage.Blob;
    using Microsoft.WindowsAzure.Storage.DataMovement;
    using Microsoft.WindowsAzure.Storage.File;

    /// <summary>
    /// DMLibWrapper that invokes the TransferManager.UploadAsync overload
    /// matching the optional pieces (options / context / cancellation) carried
    /// by the TransferItem.
    /// </summary>
    internal class UploadWrapper : DMLibWrapper
    {
        public UploadWrapper()
        {
        }

        protected override Task DoTransferImp(TransferItem item)
        {
            return this.Upload(item.DestObject, item);
        }

        private Task Upload(dynamic destObject, TransferItem item)
        {
            UploadOptions uploadOptions = item.Options as UploadOptions;
            TransferContext transferContext = item.TransferContext;
            CancellationToken cancellationToken = item.CancellationToken;
            string sourcePath = item.SourceObject as string;
            Stream sourceStream = item.SourceObject as Stream;

            // NOTE(review): CancellationToken is a struct, so "!= null" is always
            // true; the effective test is "!= CancellationToken.None". Kept as-is.
            if (cancellationToken != null && cancellationToken != CancellationToken.None)
            {
                if (sourcePath != null)
                {
                    return TransferManager.UploadAsync(sourcePath, destObject, uploadOptions, transferContext, cancellationToken);
                }
                else
                {
                    return TransferManager.UploadAsync(sourceStream, destObject, uploadOptions, transferContext, cancellationToken);
                }
            }
            else if (transferContext != null || uploadOptions != null)
            {
                if (sourcePath != null)
                {
                    return TransferManager.UploadAsync(sourcePath, destObject, uploadOptions, transferContext);
                }
                else
                {
                    return TransferManager.UploadAsync(sourceStream, destObject, uploadOptions, transferContext);
                }
            }
            else
            {
                if (sourcePath != null)
                {
                    return TransferManager.UploadAsync(sourcePath, destObject);
                }
                else
                {
                    return TransferManager.UploadAsync(sourceStream, destObject);
                }
            }
        }
    }
}
diff --git a/test/DMLibTest/Framework/VerificationHelper.cs b/test/DMLibTest/Framework/VerificationHelper.cs
new file mode 100644
index 00000000..263ef9e4
--- /dev/null
+++ b/test/DMLibTest/Framework/VerificationHelper.cs
//------------------------------------------------------------------------------
// <copyright file="VerificationHelper.cs" company="Microsoft">
//    Copyright (c) Microsoft Corporation
// </copyright>
//------------------------------------------------------------------------------
namespace DMLibTest
{
    using System;
    using DMLibTestCodeGen;
    using Microsoft.WindowsAzure.Storage;
    using Microsoft.WindowsAzure.Storage.DataMovement;
    using MS.Test.Common.MsTestLib;

    public static class VerificationHelper
    {
        public static void VerifySingleObjectResumeResult(TestResult<DMLibDataInfo> result, DMLibDataInfo expectedDataInfo)
        {
            if (DMLibTestContext.SourceType != DMLibDataType.Stream &&
DMLibTestContext.DestType != DMLibDataType.Stream) + { + Test.Assert(result.Exceptions.Count == 0, "Verify no exception is thrown."); + Test.Assert(DMLibDataHelper.Equals(expectedDataInfo, result.DataInfo), "Verify transfer result."); + } + else + { + Test.Assert(result.Exceptions.Count == 1, "Verify stream resume is not supported"); + Exception exception = result.Exceptions[0]; + Test.Assert(exception is NotSupportedException, "Verify stream resume is not supported"); + } + } + + public static void VerifyTransferException(Exception exception, TransferErrorCode expectedErrorCode, params string[] expectedMessages) + { + TransferException transferException = exception as TransferException; + if (transferException == null) + { + Test.Error("Verify exception is a transfer exception."); + return; + } + + Test.Assert(transferException.ErrorCode == expectedErrorCode, "Verify error code: {0}, expected: {1}", transferException.ErrorCode, expectedErrorCode); + VerificationHelper.VerifyExceptionErrorMessage(exception, expectedMessages); + } + + public static void VerifyStorageException(Exception exception, int expectedHttpStatusCode, params string[] expectedMessages) + { + StorageException storageException = exception as StorageException; + if (storageException == null) + { + Test.Error("Verify exception is a storage exception."); + return; + } + + Test.Assert(storageException.RequestInformation.HttpStatusCode == expectedHttpStatusCode, "Verify http status code: {0}, expected: {1}", storageException.RequestInformation.HttpStatusCode, expectedHttpStatusCode); + VerificationHelper.VerifyExceptionErrorMessage(exception, expectedMessages); + } + + public static void VerifyExceptionErrorMessage(Exception exception, params string[] expectedMessages) + { + Test.Info("Error message: {0}", exception.Message); + + foreach (string expectedMessage in expectedMessages) + { + Test.Assert(exception.Message.Contains(expectedMessage), "Verify exception message contains {0}", expectedMessage); 
+ } + } + } +} diff --git a/test/DMLibTest/Properties/AssemblyInfo.cs b/test/DMLibTest/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..684f96e6 --- /dev/null +++ b/test/DMLibTest/Properties/AssemblyInfo.cs @@ -0,0 +1,14 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("DMLibTest")] +[assembly: AssemblyDescription("")] diff --git a/test/DMLibTest/TestData.xml b/test/DMLibTest/TestData.xml new file mode 100644 index 00000000..c70b7e18 --- /dev/null +++ b/test/DMLibTest/TestData.xml @@ -0,0 +1,22 @@ + + + true + false + DMLibTest.log + true + true + true + true + DevFabric + DefaultEndpointsProtocol=https;AccountName=testaccount1;AccountKey=FjUfNl1KiJttbXlsdkMzBTC7WagvrRM9/g6UPBuy0ypCpAbYTL6/KA+dI/7gyoWvLFYmah3IviUP1jykOHHOlA==;BlobEndpoint=http://127.0.0.1:10000/testaccount1;QueueEndpoint=http://127.0.0.1:10001/testaccount1;TableEndpoint=http://127.0.0.1:10002/testaccount1;FileEndpoint=http://127.0.0.1:10004/testaccount1 + DefaultEndpointsProtocol=https;AccountName=dmtestaccount1;AccountKey=FjUfNl1KiJttbXlsdkMzBTC7WagvrRM9/g6UPBuy0ypCpAbYTL6/KA+dI/7gyoWvLFYmah3IviUP1jykOHHOlA==;BlobEndpoint=http://127.0.0.1:10000/dmtestaccount1;QueueEndpoint=http://127.0.0.1:10001/dmtestaccount1;TableEndpoint=http://127.0.0.1:10002/dmtestaccount1;FileEndpoint=http://127.0.0.1:10004/dmtestaccount1 + testfile + 1024 + testfolder + \\127.0.0.1\Azcopy + journal + 200 + 20 + 3 + 3 + diff --git a/test/DMLibTest/Util/DMLibTestConstants.cs b/test/DMLibTest/Util/DMLibTestConstants.cs new file mode 
100644 index 00000000..fac23d1a --- /dev/null +++ b/test/DMLibTest/Util/DMLibTestConstants.cs @@ -0,0 +1,111 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest +{ + using System; + using Microsoft.WindowsAzure.Storage.DataMovement; + using MS.Test.Common.MsTestLib; + + public static class Tag + { + public const string BVT = "bvt"; + public const string Function = "function"; + public const string Stress = "stress"; + public const string Performance = "perf"; + } + + public static class Protocol + { + public const string Http = "http"; + + public static string Https + { + get + { + if (DMLibTestHelper.DisableHttps()) + { + return "http"; + } + + return "https"; + } + } + } + + public static class BlobType + { + public const string Page = "page"; + public const string Block = "block"; + public const string Append = "append"; + } + + public static class DMLibTestConstants + { + public const string ConnStr = "StorageConnectionString"; + public const string ConnStr2 = "StorageConnectionString2"; + public static readonly int DefaultNC = TransferManager.Configurations.ParallelOperations; + public static readonly int LimitedSpeedNC = 4; + + private static Random random = new Random(); + + public static int FlatFileCount + { + get + { + int flatFileCount; + try + { + flatFileCount = int.Parse(Test.Data.Get("FlatFileCount")); + } + catch + { + flatFileCount = 20; + } + + Test.Verbose("Flat file count: {0}", flatFileCount); + return flatFileCount; + } + } + + public static int RecursiveFolderWidth + { + get + { + int recursiveFolderWidth; + try + { + recursiveFolderWidth = int.Parse(Test.Data.Get("RecursiveFolderWidth")); + } + catch + { + recursiveFolderWidth = random.Next(3, 5); + } + + Test.Verbose("Recursive folder width: {0}", recursiveFolderWidth); + return recursiveFolderWidth; + } + } + 
+ public static int RecursiveFolderDepth + { + get + { + int recursiveFolderDepth; + try + { + recursiveFolderDepth = int.Parse(Test.Data.Get("RecursiveFolderDepth")); + } + catch + { + recursiveFolderDepth = random.Next(3, 5); + } + + Test.Verbose("Recursive folder depth: {0}", recursiveFolderDepth); + return recursiveFolderDepth; + } + } + } +} diff --git a/test/DMLibTest/Util/DMLibTestHelper.cs b/test/DMLibTest/Util/DMLibTestHelper.cs new file mode 100644 index 00000000..b9da5696 --- /dev/null +++ b/test/DMLibTest/Util/DMLibTestHelper.cs @@ -0,0 +1,411 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest +{ + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.Globalization; + using System.IO; + using System.Threading; + using Microsoft.WindowsAzure.Storage; + using MS.Test.Common.MsTestLib; + using StorageBlob = Microsoft.WindowsAzure.Storage.Blob; + + public enum FileSizeUnit + { + B, + KB, + MB, + GB, + } + + public enum TestAgainst + { + PublicAzure, + TestTenant, + DevFabric + } + + public class SummaryInterval + { + /// + /// Min value which is inclusive. + /// + private int minValue; + + /// + /// Max value which is inclusive. 
+ /// + private int maxValue; + + public SummaryInterval(int minValue, int maxValue) + { + this.minValue = minValue; + this.maxValue = maxValue; + } + + public int MinValue + { + get + { + return this.minValue; + } + } + + public int MaxValue + { + get + { + return this.maxValue; + } + } + + public bool InsideInterval(int value) + { + return value >= this.minValue && value <= this.maxValue; + } + } + + public static class DMLibTestHelper + { + private static Random random = new Random(); + + private static readonly char[] validSuffixChars = "abcdefghijkjlmnopqrstuvwxyz".ToCharArray(); + + public static void KeepFilesWhenCaseFail(params string[] filesToKeep) + { + if (Test.ErrorCount > 0) + { + const string debugFilePrefix = "debug_file_"; + string folderName = Guid.NewGuid().ToString(); + Directory.CreateDirectory(folderName); + + Test.Info("Move files to folder {0} for debug.", folderName); + + for (int i = 0; i < filesToKeep.Length; ++i) + { + string debugFileName = debugFilePrefix + i; + string debugFilePath = Path.Combine(folderName, debugFileName); + File.Move(filesToKeep[i], debugFilePath); + + Test.Info("{0} ---> {1}", filesToKeep[i], debugFileName); + } + } + } + + public static string RandomContainerName() + { + return Test.Data.Get("containerName") + RandomNameSuffix(); + } + + public static string RandomNameSuffix() + { + return FileOp.NextString(random, 6, validSuffixChars); + } + + public static bool WaitForProcessExit(Process p, int timeoutInSecond) + { + bool exit = p.WaitForExit(timeoutInSecond * 1000); + if (!exit) + { + Test.Assert(false, "Process {0} should exit in {1} s.", p.ProcessName, timeoutInSecond); + p.Kill(); + return false; + } + + return true; + } + + public static string RandomizeCase(string value) + { + return ConvertRandomCharsToUpperCase(value.ToLower()); + } + + public static void UploadFromByteArray(this StorageBlob.CloudBlob cloudBlob, byte[] randomData) + { + if (StorageBlob.BlobType.BlockBlob == cloudBlob.BlobType) + { + 
(cloudBlob as StorageBlob.CloudBlockBlob).UploadFromByteArray(randomData, 0, randomData.Length); + } + else if (StorageBlob.BlobType.PageBlob == cloudBlob.BlobType) + { + (cloudBlob as StorageBlob.CloudPageBlob).UploadFromByteArray(randomData, 0, randomData.Length); + } + else if (StorageBlob.BlobType.AppendBlob == cloudBlob.BlobType) + { + (cloudBlob as StorageBlob.CloudAppendBlob).UploadFromByteArray(randomData, 0, randomData.Length); + } + else + { + throw new InvalidOperationException(string.Format("Invalid blob type: {0}", cloudBlob.BlobType)); + } + } + + public static void UploadFromFile(this StorageBlob.CloudBlob cloudBlob, + string path, + FileMode mode, + AccessCondition accessCondition = null, + StorageBlob.BlobRequestOptions options = null, + OperationContext operationContext = null) + { + if (StorageBlob.BlobType.BlockBlob == cloudBlob.BlobType) + { + (cloudBlob as StorageBlob.CloudBlockBlob).UploadFromFile(path, mode, accessCondition, options, operationContext); + } + else if (StorageBlob.BlobType.PageBlob == cloudBlob.BlobType) + { + (cloudBlob as StorageBlob.CloudPageBlob).UploadFromFile(path, mode, accessCondition, options, operationContext); + } + else if (StorageBlob.BlobType.AppendBlob == cloudBlob.BlobType) + { + (cloudBlob as StorageBlob.CloudAppendBlob).UploadFromFile(path, mode, accessCondition, options, operationContext); + } + else + { + throw new InvalidOperationException(string.Format("Invalid blob type: {0}", cloudBlob.BlobType)); + } + } + + public static void UploadFromStream(this StorageBlob.CloudBlob cloudBlob, Stream source) + { + if (StorageBlob.BlobType.BlockBlob == cloudBlob.BlobType) + { + (cloudBlob as StorageBlob.CloudBlockBlob).UploadFromStream(source); + } + else if (StorageBlob.BlobType.PageBlob == cloudBlob.BlobType) + { + (cloudBlob as StorageBlob.CloudPageBlob).UploadFromStream(source); + } + else if (StorageBlob.BlobType.AppendBlob == cloudBlob.BlobType) + { + (cloudBlob as 
StorageBlob.CloudAppendBlob).UploadFromStream(source); + } + else + { + throw new InvalidOperationException(string.Format("Invalid blob type: {0}", cloudBlob.BlobType)); + } + } + + public static void WaitForACLTakeEffect() + { + if (DMLibTestHelper.GetTestAgainst() != TestAgainst.DevFabric) + { + Test.Info("Waiting for 30s to ensure the ACL take effect on server side..."); + Thread.Sleep(30 * 1000); + } + } + + public static string RandomProtocol() + { + if (0 == new Random().Next(2)) + { + return Protocol.Http; + } + + return Protocol.Https; + } + + public static bool DisableHttps() + { + if (DMLibTestHelper.GetTestAgainst() == TestAgainst.TestTenant) + { + return true; + } + + return false; + } + + public static TestAgainst GetTestAgainst() + { + string testAgainst = string.Empty; + try + { + testAgainst = Test.Data.Get("TestAgainst"); + } + catch + { + } + + if (String.Compare(testAgainst, "publicazure", true) == 0) + { + return TestAgainst.PublicAzure; + } + else if (String.Compare(testAgainst, "testtenant", true) == 0) + { + return TestAgainst.TestTenant; + } + else if (String.Compare(testAgainst, "devfabric", true) == 0) + { + return TestAgainst.DevFabric; + } + + // Use dev fabric by default + return TestAgainst.DevFabric; + } + + public static List GetFileAttributesFromParameter(string s) + { + List Lfa = new List(); + + if (null == s) + { + return Lfa; + } + + foreach (char c in s) + { + switch (c) + { + case 'R': + if (!Lfa.Contains(FileAttributes.ReadOnly)) + Lfa.Add(FileAttributes.ReadOnly); + break; + case 'A': + if (!Lfa.Contains(FileAttributes.Archive)) + Lfa.Add(FileAttributes.Archive); + break; + case 'S': + if (!Lfa.Contains(FileAttributes.System)) + Lfa.Add(FileAttributes.System); + break; + case 'H': + if (!Lfa.Contains(FileAttributes.Hidden)) + Lfa.Add(FileAttributes.Hidden); + break; + case 'C': + if (!Lfa.Contains(FileAttributes.Compressed)) + Lfa.Add(FileAttributes.Compressed); + break; + case 'N': + if 
(!Lfa.Contains(FileAttributes.Normal)) + Lfa.Add(FileAttributes.Normal); + break; + case 'E': + if (!Lfa.Contains(FileAttributes.Encrypted)) + Lfa.Add(FileAttributes.Encrypted); + break; + case 'T': + if (!Lfa.Contains(FileAttributes.Temporary)) + Lfa.Add(FileAttributes.Temporary); + break; + case 'O': + if (!Lfa.Contains(FileAttributes.Offline)) + Lfa.Add(FileAttributes.Offline); + break; + case 'I': + if (!Lfa.Contains(FileAttributes.NotContentIndexed)) + Lfa.Add(FileAttributes.NotContentIndexed); + break; + default: + break; + } + } + return Lfa; + } + + public static List GenerateFileWithAttributes( + string folder, + string filePrefix, + int number, + List includeAttributes, + List excludeAttributes, + int fileSizeInUnit = 1, + FileSizeUnit unit = FileSizeUnit.KB) + { + List fileNames = new List(number); + + for (int i = 0; i < number; i++) + { + string fileName = filePrefix + i.ToString(); + string filePath = Path.Combine(folder, fileName); + fileNames.Add(fileName); + + DMLibTestHelper.PrepareLocalFile(filePath, fileSizeInUnit, unit); + + if (includeAttributes != null) + foreach (FileAttributes fa in includeAttributes) + FileOp.SetFileAttribute(filePath, fa); + if (excludeAttributes != null) + foreach (FileAttributes fa in excludeAttributes) + FileOp.RemoveFileAttribute(filePath, fa); + } + + return fileNames; + } + + public static void PrepareLocalFile(string filePath, long fileSizeInUnit, FileSizeUnit fileSizeUnit) + { + if (FileSizeUnit.B == fileSizeUnit) + { + Helper.GenerateFileInBytes(filePath, fileSizeInUnit); + } + else if (FileSizeUnit.KB == fileSizeUnit) + { + Helper.GenerateFileInKB(filePath, fileSizeInUnit); + } + else if (FileSizeUnit.MB == fileSizeUnit) + { + Helper.GenerateFileInMB(filePath, fileSizeInUnit); + } + else + { + Helper.GenerateFileInGB(filePath, fileSizeInUnit); + } + } + + public static bool ContainsIgnoreCase(string baseString, string subString) + { + return (baseString.IndexOf(subString, StringComparison.OrdinalIgnoreCase) >= 
0); + } + + private static string ConvertRandomCharsToUpperCase(string input) + { + Random rnd = new Random(); + char[] array = input.ToCharArray(); + + for (int i = 0; i < array.Length; ++i) + { + if (Char.IsLower(array[i]) && rnd.Next() % 2 != 0) + { + array[i] = Char.ToUpper(array[i]); + } + } + + return new string(array); + } + + /// + /// Append snapshot time to a file name. + /// + /// Original file name. + /// Snapshot time to append. + /// A file name with appended snapshot time. + public static string AppendSnapShotTimeToFileName(string fileName, DateTimeOffset? snapshotTime) + { + string resultName = fileName; + + if (snapshotTime.HasValue) + { + string pathAndFileNameNoExt = Path.ChangeExtension(fileName, null); + string extension = Path.GetExtension(fileName); + string timeStamp = string.Format( + CultureInfo.InvariantCulture, + "{0:yyyy-MM-dd HHmmss fff}", + snapshotTime.Value); + + resultName = string.Format( + CultureInfo.InvariantCulture, + "{0} ({1}){2}", + pathAndFileNameNoExt, + timeStamp, + extension); + } + + return resultName; + } + } +} diff --git a/test/DMLibTest/Util/Helpers.cs b/test/DMLibTest/Util/Helpers.cs new file mode 100644 index 00000000..84a32be2 --- /dev/null +++ b/test/DMLibTest/Util/Helpers.cs @@ -0,0 +1,3668 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest +{ + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.Globalization; + using System.IO; + using System.IO.Compression; + using System.Linq; + using System.Runtime.InteropServices; + using System.Security.Cryptography; + using System.Text; + using System.Text.RegularExpressions; + using System.Threading; + using System.Threading.Tasks; + + using Microsoft.WindowsAzure.Storage; + using Microsoft.WindowsAzure.Storage.Auth; + using 
Microsoft.WindowsAzure.Storage.Blob; + using Microsoft.WindowsAzure.Storage.File; + using Microsoft.WindowsAzure.Storage.RetryPolicies; + using Microsoft.WindowsAzure.Storage.Table; + using MS.Test.Common.MsTestLib; + using StorageBlobType = Microsoft.WindowsAzure.Storage.Blob.BlobType; + + /// + /// this is a static helper class + /// + public static class Helper + { + private static Random random = new Random(); + + public static void CopyLocalDirectory(string sourceDir, string destDir, bool recursive) + { + if (!Directory.Exists(destDir)) + { + Directory.CreateDirectory(destDir); + } + + foreach (var subDir in Directory.GetDirectories(sourceDir, "*", recursive ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly)) + { + Directory.CreateDirectory(ConvertSourceToDestPath(sourceDir, destDir, subDir)); + } + + foreach (var file in Directory.GetFiles(sourceDir, "*", recursive ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly)) + { + File.Copy(file, ConvertSourceToDestPath(sourceDir, destDir, file)); + } + } + + private static string ConvertSourceToDestPath(string sourceRoot, string destRoot, string path) + { + int index = path.IndexOf(sourceRoot, StringComparison.OrdinalIgnoreCase); + string relativePath = path.Substring(index + sourceRoot.Length); + + if (relativePath.StartsWith(Path.DirectorySeparatorChar.ToString())) + { + relativePath = relativePath.Substring(1); + } + + return Path.Combine(destRoot, relativePath); + } + + /// + /// list blobs in a container, return blob name list and content MD5 list + /// + /// + /// + /// + public static bool ListBlobs(string connectionString, string containerName, out List blobNames, out List blobMD5s) + { + CloudBlobClient BlobClient = CloudStorageAccount.Parse(connectionString).CreateCloudBlobClient(); + BlobClient.DefaultRequestOptions.RetryPolicy = new LinearRetry(TimeSpan.Zero, 3); + + blobNames = new List(); + blobMD5s = new List(); + + try + { + CloudBlobContainer container = 
BlobClient.GetContainerReference(containerName); + IEnumerable blobs = container.ListBlobs(null, true, BlobListingDetails.All); + if (blobs != null) + { + foreach (CloudBlob blob in blobs) + { + blob.FetchAttributes(); + blobNames.Add(blob.Name); + blobMD5s.Add(blob.Properties.ContentMD5); + } + } + return true; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return false; + } + throw; + } + } + + public static void WaitForTakingEffect(dynamic cloudStorageClient) + { + Test.Assert( + cloudStorageClient is CloudBlobClient || cloudStorageClient is CloudTableClient || cloudStorageClient is CloudFileClient, + "The argument should only be CloudStorageClient."); + + if (DMLibTestHelper.GetTestAgainst() != TestAgainst.PublicAzure) + { + return; + } + + DateTimeOffset? lastSyncTime = cloudStorageClient.GetServiceStats().GeoReplication.LastSyncTime; + DateTimeOffset currentTime = DateTimeOffset.UtcNow; + int maxWaitCount = 120; + + DateTimeOffset? newLastSyncTime = cloudStorageClient.GetServiceStats().GeoReplication.LastSyncTime; + + while ((maxWaitCount > 0) + && (!newLastSyncTime.HasValue || (lastSyncTime.HasValue && newLastSyncTime.Value <= lastSyncTime.Value) || newLastSyncTime.Value <= currentTime)) + { + --maxWaitCount; + Test.Info("Waiting......"); + Thread.Sleep(10000); + newLastSyncTime = cloudStorageClient.GetServiceStats().GeoReplication.LastSyncTime; + } + + if (maxWaitCount <= 0) + { + Test.Info("NOTE: Wait for taking effect timed out, cases may fail..."); + } + } + + public static bool RandomBoolean() + { + return random.Next(0, 2) % 2 == 0; + } + + public static string RandomBlobType() + { + int rnd = random.Next(0, 3); + if (rnd == 0) + { + return BlobType.Block; + } + else if (rnd == 1) + { + return BlobType.Page; + } + else + { + return BlobType.Append; + } + } + + public static void GenerateFileInBytes(string filename, long sizeB) + { + Random r = new Random(); + byte[] data; + using (FileStream stream = new 
FileStream(filename, FileMode.Create)) + { + var oneMBInBytes = 1024 * 1024; + var sizeInMB = sizeB / oneMBInBytes; + data = new byte[oneMBInBytes]; + for (int i = 0; i < sizeInMB; i++) + { + r.NextBytes(data); + stream.Write(data, 0, data.Length); + } + + var restSizeInB = sizeB % oneMBInBytes; + data = new byte[restSizeInB]; + r.NextBytes(data); + stream.Write(data, 0, data.Length); + } + } + + public static void GenerateFileInKB(string filename, long sizeKB) + { + byte[] data = new byte[sizeKB * 1024]; + Random r = new Random(); + r.NextBytes(data); + File.WriteAllBytes(filename, data); + return; + } + + //it takes around 74 seconds to generate a 5G file + public static void GenerateFileInMB(string filename, long sizeMB) + { + byte[] data = new byte[1024 * 1024]; + Random r = new Random(); + using (FileStream stream = new FileStream(filename, FileMode.Create)) + { + for (int i = 0; i < sizeMB; i++) + { + r.NextBytes(data); + stream.Write(data, 0, data.Length); + } + } + + return; + } + + // the buffer is too large, better to use GenerateMediumFile + public static void GenerateFileInGB(string filename, long sizeGB) + { + byte[] data = new byte[4 * 1024 * 1024]; + long chunkCount = 256 * sizeGB; + Random r = new Random(); + using (FileStream stream = new FileStream(filename, FileMode.Create)) + { + for (int i = 0; i < chunkCount; i++) + { + r.NextBytes(data); + stream.Write(data, 0, data.Length); + } + } + + return; + } + + public static void GenerateEmptyFile(string filename) + { + if (File.Exists(filename)) + { + Test.Info("GenerateEmptyFile: delte existing file"); + File.Delete(filename); + } + + using (FileStream file = File.Create(filename)) + { + } + } + + public static void AggregateFile(string filename, int times) + { + using (FileStream outputStream = new FileStream(filename, FileMode.Create)) + { + using (FileStream inputStream = new FileStream("abc.txt", FileMode.Open)) + { + for (int i = 0; i < times; i++) + { + inputStream.CopyTo(outputStream); + 
inputStream.Seek(0, SeekOrigin.Begin); + } + } + } + } + + public static void CompressFile(string filename, int times) + { + using (FileStream outputStream = new FileStream(filename, FileMode.Create)) + { + using (GZipStream compress = new GZipStream(outputStream, CompressionMode.Compress)) + { + + using (FileStream inputStream = new FileStream("abc.txt", FileMode.Open)) + { + for (int i = 0; i < times; i++) + { + inputStream.CopyTo(compress); + inputStream.Seek(0, SeekOrigin.Begin); + } + } + } + } + } + + public static void GenerateLargeFileinKB(string filename, long sizeinKB) + { + byte[] data4MB = new byte[4 * 1024 * 1024]; + byte[] dataMB = new byte[1024 * 1024]; + Random r = new Random(); + using (FileStream stream = new FileStream(filename, FileMode.Create)) + { + long sizeGB = sizeinKB / (1024 * 1024); + long sizeMB = sizeinKB % (1024 * 1024) / 1024; + long sizeKB = sizeinKB % 1024; + for (long i = 0; i < sizeGB * 256; i++) + { + r.NextBytes(data4MB); + stream.Write(data4MB, 0, data4MB.Length); + } + for (long i = 0; i < sizeMB; i++) + { + r.NextBytes(dataMB); + stream.Write(dataMB, 0, dataMB.Length); + } + if (sizeKB != 0) + { + byte[] dataKB = new byte[sizeKB * 1024]; + r.NextBytes(dataKB); + stream.Write(dataKB, 0, dataKB.Length); + } + } + + return; + } + + //this is only for small data + public static byte[] GetMD5(byte[] data) + { + MD5 md5 = MD5.Create(); + return md5.ComputeHash(data); + } + + public static void GenerateRandomTestFile(string filename, long sizeKB, bool createDirIfNotExist = false) + { + if (createDirIfNotExist) + { + string dir = Path.GetDirectoryName(filename); + if (!string.IsNullOrEmpty(dir)) + { + Directory.CreateDirectory(dir); + } + } + + byte[] data = new byte[sizeKB * 1024]; + Random r = new Random(); + r.NextBytes(data); + File.WriteAllBytes(filename, data); + } + + public static void DeleteFile(string filename) + { + if (File.Exists(filename)) + { + File.Delete(filename); + } + } + + public static void CleanupFolder(string 
foldername) + { + DirectoryInfo rootFolderInfo = new DirectoryInfo(foldername); + + if (!rootFolderInfo.Exists) + { + return; + } + + foreach (FileInfo fileInfo in rootFolderInfo.GetFiles()) + { + ForceDeleteFile(fileInfo.FullName); + } + + foreach (DirectoryInfo subFolderInfo in rootFolderInfo.GetDirectories()) + { + ForceDeleteFiles(subFolderInfo.FullName); + } + } + + public static void DeleteFolder(string foldername) + { + if (Directory.Exists(foldername)) + { + ForceDeleteFiles(foldername); + } + } + + private static void ForceDeleteFile(string filename) + { + try + { + File.Delete(filename); + } + catch + { + FileOp.SetFileAttribute(filename, FileAttributes.Normal); + File.Delete(filename); + } + } + + private static void ForceDeleteFiles(string foldername) + { + try + { + Directory.Delete(foldername, true); + } + catch (Exception) + { + RecursiveRemoveReadOnlyAttribute(foldername); + Directory.Delete(foldername, true); + } + } + + private static void RecursiveRemoveReadOnlyAttribute(string foldername) + { + foreach (string filename in Directory.GetFiles(foldername)) + { + FileOp.SetFileAttribute(filename, FileAttributes.Normal); + } + + foreach (string folder in Directory.GetDirectories(foldername)) + { + RecursiveRemoveReadOnlyAttribute(folder); + } + } + + public static void DeletePattern(string pathPattern) + { + DirectoryInfo folder = new DirectoryInfo("."); + foreach (FileInfo fi in folder.GetFiles(pathPattern, SearchOption.TopDirectoryOnly)) + { + fi.Delete(); + } + foreach (DirectoryInfo di in folder.GetDirectories(pathPattern, SearchOption.TopDirectoryOnly)) + { + di.Delete(true); + } + } + + public static void CreateNewFolder(string foldername) + { + if (Directory.Exists(foldername)) + { + Directory.Delete(foldername, true); + } + if (File.Exists(foldername)) + { + File.Delete(foldername); + } + + Directory.CreateDirectory(foldername); + } + + // for a 5G file, this can be done in 20 seconds + public static string GetFileMD5Hash(string filename) + { 
+ using (FileStream fs = File.Open(filename, FileMode.Open)) + { + MD5 md5 = MD5.Create(); + byte[] md5Hash = md5.ComputeHash(fs); + + + StringBuilder sb = new StringBuilder(); + foreach (byte b in md5Hash) + { + sb.Append(b.ToString("x2").ToLower()); + } + + return sb.ToString(); + } + } + + public static string GetFileContentMD5(string filename) + { + using (FileStream fs = File.Open(filename, FileMode.Open, FileAccess.Read, FileShare.Read)) + { + MD5 md5 = MD5.Create(); + byte[] md5Hash = md5.ComputeHash(fs); + + return Convert.ToBase64String(md5Hash); + } + } + + public static List GenerateFlatTestFolder(string fileNamePrefix, string parentDir, int fileSizeInKB = -1, bool doNotGenerateFile = false) + { + return GenerateFixedTestTree(fileNamePrefix, string.Empty, parentDir, DMLibTestConstants.FlatFileCount, 0, fileSizeInKB, doNotGenerateFile); + } + + public static List GenerateRecursiveTestFolder(string fileNamePrefix, string dirNamePrefix, string parentDir, int fileSizeInKB = -1, bool doNotGenerateFile = false) + { + return GenerateFixedTestTree(fileNamePrefix, dirNamePrefix, parentDir, DMLibTestConstants.RecursiveFolderWidth, DMLibTestConstants.RecursiveFolderDepth, fileSizeInKB, doNotGenerateFile); + } + + public static List GenerateFixedTestTree(string fileNamePrefix, string dirNamePrefix, string parentDir, int width, int depth, int fileSizeInKB = -1, bool doNotGenerateFile = false) + { + var fileList = new List(); + for (int i = 0; i < width; i++) + { + var fileName = parentDir + "\\" + fileNamePrefix + "_" + i; + fileList.Add(fileName); + if (!doNotGenerateFile) + { + GenerateRandomTestFile(fileName, fileSizeInKB < 0 ? 
i : fileSizeInKB); + } + } + + if (depth > 0) + { + for (int i = 0; i < width; i++) + { + string dirName = parentDir + "\\" + dirNamePrefix + "_" + i; + + if (!doNotGenerateFile) + { + Directory.CreateDirectory(dirName); + } + + fileList.AddRange(GenerateFixedTestTree(fileNamePrefix, dirNamePrefix, dirName, width, depth - 1, fileSizeInKB, doNotGenerateFile)); + } + } + + return fileList; + } + + public static List TraversalFolderInDepth(string folderName) + { + List files = new List(); + Stack dirStack = new Stack(); + dirStack.Push(folderName); + + while (dirStack.Count > 0) + { + string currentFolder = dirStack.Pop(); + + foreach (string file in Directory.EnumerateFiles(currentFolder)) + { + files.Add(file); + } + + Stack foldersUnderCurrent = new Stack(); + + foreach (string folder in Directory.EnumerateDirectories(currentFolder)) + { + foldersUnderCurrent.Push(folder); + } + + foreach (string folderPath in foldersUnderCurrent) + { + dirStack.Push(folderPath); + } + } + + return files; + } + + public static void CompareBlobAndFile(string filename, CloudBlob blob) + { + string tempblob = "tempblob"; + DeleteFile(tempblob); + try + { + if (!File.Exists(filename)) + Test.Error("The file {0} should exist", filename); + if (blob == null) + Test.Error("The blob {0} should exist", blob.Name); + using (FileStream fileStream = new FileStream(tempblob, FileMode.Create)) + { + BlobRequestOptions bro = new BlobRequestOptions(); + bro.RetryPolicy = new LinearRetry(new TimeSpan(0, 0, 30), 3); + bro.ServerTimeout = new TimeSpan(1, 30, 0); + bro.MaximumExecutionTime = new TimeSpan(1, 30, 0); + blob.DownloadToStream(fileStream, null, bro); + fileStream.Close(); + } + string MD51 = Helper.GetFileContentMD5(tempblob); + string MD52 = Helper.GetFileContentMD5(filename); + + if (MD51 != MD52) + Test.Error("{2}: {0} == {1}", MD51, MD52, filename); + DeleteFile(tempblob); + } + catch (Exception e) + { + Test.Error("Meet Excpetion when download and compare blob {0}, file{1}, Excpetion: 
{2}", blob.Name, filename, e.ToString()); + DeleteFile(tempblob); + return; + } + } + + public static bool CompareTwoFiles(string filename, string filename2) + { + FileInfo fi = new FileInfo(filename); + FileInfo fi2 = new FileInfo(filename2); + return CompareTwoFiles(fi, fi2); + } + + public static bool CompareTwoFiles(FileInfo fi, FileInfo fi2) + { + if (!fi.Exists || !fi2.Exists) + { + return false; + } + if (fi.Length != fi2.Length) + { + return false; + } + + long fileLength = fi.Length; + // 4M a chunk + const int ChunkSizeByte = 4 * 1024 * 1024; + using (FileStream fs = new FileStream(fi.FullName, FileMode.Open, FileAccess.Read, FileShare.Read)) + { + using (FileStream fs2 = new FileStream(fi2.FullName, FileMode.Open, FileAccess.Read, FileShare.Read)) + { + BinaryReader reader = new BinaryReader(fs); + BinaryReader reader2 = new BinaryReader(fs2); + + long comparedLength = 0; + do + { + byte[] bytes = reader.ReadBytes(ChunkSizeByte); + byte[] bytes2 = reader2.ReadBytes(ChunkSizeByte); + + MD5 md5 = MD5.Create(); + byte[] md5Hash = md5.ComputeHash(bytes); + byte[] md5Hash2 = md5.ComputeHash(bytes2); + + if (!md5Hash.SequenceEqual(md5Hash2)) + { + return false; + } + + comparedLength += bytes.Length; + } while (comparedLength < fileLength); + } + } + + return true; + } + + public static bool CompareTwoFolders(string foldername, string foldername2, bool recursive = true) + { + DirectoryInfo folder = new DirectoryInfo(foldername); + DirectoryInfo folder2 = new DirectoryInfo(foldername2); + + IEnumerable list = folder.GetFiles("*.*", recursive ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly); + IEnumerable list2 = folder2.GetFiles("*.*", recursive ? 
SearchOption.AllDirectories : SearchOption.TopDirectoryOnly); + + FileCompare fc = new FileCompare(); + + return list.SequenceEqual(list2, fc); + } + + public static bool CompareFolderWithBlob(string foldername, string containerName) + { + return true; + } + + public static bool CompareTwoBlobs(string containerName, string containerName2) + { + return false; //todo: implement + } + + public static string[] ListToGetRelativePaths(string folderName) + { + DirectoryInfo folder = new DirectoryInfo(folderName); + IEnumerable list = folder.GetFiles("*.*", SearchOption.AllDirectories); + List relativePaths = new List(); + + string absolutePath = folder.FullName + "\\"; + + foreach (FileInfo fileInfo in list) + { + relativePaths.Add(fileInfo.FullName.Substring(absolutePath.Length, fileInfo.FullName.Length - absolutePath.Length)); + } + + return relativePaths.ToArray(); + } + + public static void verifyFilesExistinBlobDirectory(int fileNumber, CloudBlobDirectory blobDirectory, string FileName, String blobType) + { + for (int i = 0; i < fileNumber; i++) + { + string blobName = FileName + "_" + i; + CloudBlob blob = blobDirectory.GetBlobReference(blobName); + if (null == blob || !blob.Exists()) + { + Test.Error("the file {0} in the blob virtual directory does not exist:", blobName); + } + } + } + + public static void VerifyFilesExistInFileDirectory(int fileNumber, CloudFileDirectory fileDirectory, string fileName) + { + for (int i = 0; i < fileNumber; i++) + { + CloudFile cloudFile = fileDirectory.GetFileReference(fileName + "_" + i); + if (null == cloudFile || !cloudFile.Exists()) + Test.Error("the file {0}_{1} in the directory does not exist:", fileName, i); + } + } + + /// + /// calculate folder size in Byte + /// + /// the folder path + /// the folder size in Byte + public static long CalculateFolderSizeInByte(string folder) + { + long folderSize = 0; + try + { + //Checks if the path is valid or not + if (!Directory.Exists(folder)) + return folderSize; + else + { + try + 
{ + foreach (string file in Directory.GetFiles(folder)) + { + if (File.Exists(file)) + { + FileInfo finfo = new FileInfo(file); + folderSize += finfo.Length; + } + } + + foreach (string dir in Directory.GetDirectories(folder)) + folderSize += CalculateFolderSizeInByte(dir); + } + catch (NotSupportedException e) + { + Test.Error("Unable to calculate folder size: {0}", e.Message); + throw; + } + } + } + catch (UnauthorizedAccessException e) + { + Test.Error("Unable to calculate folder size: {0}", e.Message); + throw; + } + + return folderSize; + } + + /// + /// Count number of files in the folder + /// + /// the folder path + /// whether including subfolders recursively or not + /// number of files under the folder (and subfolders) + public static int GetFileCount(string folder, bool recursive) + { + int count = 0; + try + { + //Checks if the path is valid or not + if (Directory.Exists(folder)) + { + count += Directory.GetFiles(folder).Length; + + if (recursive) + { + foreach (string dir in Directory.GetDirectories(folder)) + count += GetFileCount(dir, true); + } + } + } + catch (NotSupportedException e) + { + Test.Error("Exception thrown when accessing folder: {0}", e.Message); + throw; + } + catch (UnauthorizedAccessException e) + { + Test.Error("Exception thrown when accessing folder: {0}", e.Message); + throw; + } + + return count; + } + + public static Process StartProcess(string cmd, string args) + { + Test.Info("Running: {0} {1}", cmd, args); + ProcessStartInfo psi = new ProcessStartInfo(cmd, args); + psi.CreateNoWindow = false; + psi.UseShellExecute = false; + Process p = Process.Start(psi); + return p; + } + + public static bool WaitUntilFileCreated(string fileName, int timeoutInSeconds, bool checkContent = true) + { + int i = 0; + while (i < timeoutInSeconds) + { + FileInfo f = new FileInfo(fileName); + + // wait for file size > 0 + if (f.Exists) + { + if (!checkContent || f.Length > 0) + { + return true; + } + } + + Test.Info("waiting file '{0}' to be 
created...", fileName); + Thread.Sleep(1000); + i++; + } + + return false; + } + + [DllImport("kernel32.dll", CallingConvention = CallingConvention.StdCall)] + static extern bool GenerateConsoleCtrlEvent(ConsoleCtrlEvent sigevent, int dwProcessGroupId); + + [DllImport("kernel32.dll", CharSet = CharSet.Auto)] + public static extern bool SetConsoleCtrlHandler(HandlerRoutine Handler, bool Add); + public delegate bool HandlerRoutine(ConsoleCtrlEvent CtrlType); + + // An enumerated type for the control messages + // sent to the handler routine. + public enum ConsoleCtrlEvent + { + CTRL_C_EVENT = 0, + CTRL_BREAK_EVENT, + CTRL_CLOSE_EVENT, + CTRL_LOGOFF_EVENT = 5, + CTRL_SHUTDOWN_EVENT + } + + public static Process StartProcess(string cmd, string args, out StreamReader stdout, out StreamReader stderr, out StreamWriter stdin) + { + Test.Logger.Verbose("Running: {0} {1}", cmd, args); + ProcessStartInfo psi = new ProcessStartInfo(cmd, args); + psi.CreateNoWindow = true; + psi.WindowStyle = ProcessWindowStyle.Hidden; + psi.UseShellExecute = false; + psi.RedirectStandardError = true; + psi.RedirectStandardOutput = true; + psi.RedirectStandardInput = true; + Process p = Process.Start(psi); + stdout = p.StandardOutput; + stderr = p.StandardError; + stdin = p.StandardInput; + return p; + } + + public static Process StartProcess(string cmd, string args, out StringBuilder stdout, out StringBuilder stderr, out StreamWriter stdin, Dictionary faultInjectionPoints = null) + { + Test.Logger.Verbose("Running: {0} {1}", cmd, args); + ProcessStartInfo psi = new ProcessStartInfo(cmd, args); + psi.CreateNoWindow = true; + psi.WindowStyle = ProcessWindowStyle.Hidden; + psi.UseShellExecute = false; + psi.RedirectStandardError = true; + psi.RedirectStandardOutput = true; + psi.RedirectStandardInput = true; + if (null != faultInjectionPoints) + { + foreach (var kv in faultInjectionPoints) + { + Test.Info("Envrionment {0}:{1}", kv.Key, kv.Value); + psi.EnvironmentVariables.Add(kv.Key, kv.Value); 
+ } + } + + Process p = Process.Start(psi); + + StringBuilder outString = new StringBuilder(); + p.OutputDataReceived += (sendingProcess, outLine) => + { + if (!String.IsNullOrEmpty(outLine.Data)) + { + outString.Append(outLine.Data + "\n"); + } + }; + + StringBuilder errString = new StringBuilder(); + p.ErrorDataReceived += (sendingProcess, outLine) => + { + if (!String.IsNullOrEmpty(outLine.Data)) + { + errString.Append(outLine.Data + "\n"); + } + }; + p.BeginOutputReadLine(); + p.BeginErrorReadLine(); + + stdout = outString; + stderr = errString; + stdin = p.StandardInput; + + return p; + } + + public static void PrintBlockBlobBlocks(CloudBlockBlob cloudBlob, bool printDetailBlock = true) + { + IEnumerable blocks = cloudBlob.DownloadBlockList(); + + Test.Info("There are {0} blocks in blob {1}: ", blocks.Count(), cloudBlob.Name); + + if (printDetailBlock) + { + foreach (var block in blocks) + { + Test.Info("BlockId:{0}, Length:{1}", block.Name, block.Length); + } + } + } + + public static void PrintPageBlobRanges(CloudPageBlob cloudBlob, bool printDetailPage = true) + { + //Write out the page ranges for the page blob. 
+ IEnumerable ranges = cloudBlob.GetPageRanges(options: HelperConst.DefaultBlobOptions); + + Test.Info("There are {0} pages range in blob {1}: ", ranges.Count(), cloudBlob.Name); + if (printDetailPage) + { + PrintRanges(ranges); + } + } + + public static void PrintCloudFileRanges(CloudFile cloudFile, bool printDetailRange = true) + { + IEnumerable ranges = cloudFile.ListRanges(options: HelperConst.DefaultFileOptions); + + Test.Info("There are {0} ranges in cloud file {1}: ", ranges.Count(), cloudFile.Name); + if (printDetailRange) + { + PrintRanges(ranges); + } + } + + private static void PrintRanges(IEnumerable ranges) + { + foreach (var range in ranges) + { + Test.Info(" [{0}-{1}]: {2} ", range.StartOffset, range.EndOffset, range.EndOffset - range.StartOffset + 1); + } + } + + public static char GenerateRandomDelimiter() + { + List notavailList = new List(new char[] { 't', 'e', 's', 'f', 'o', 'l', 'd', 'r', 'i', '\\', '?' }); + Random rnd = new Random(); + int random; + do + { + random = rnd.Next(0x20, 0xFF); + } + while (char.GetUnicodeCategory((char)random) == UnicodeCategory.Control || notavailList.Contains((char)random)); + + return (char)random; + } + + public static string GetAccountNameFromConnectString(string connectString) + { + Dictionary dict = connectString.Split(';') + .Select(s => s.Split('=')) + .ToDictionary(key => key[0].Trim(), value => value[1].Trim()); + + return dict["AccountName"]; + } + + public static string GetBlobDirectoryUri(string blobEndpoint, string containerName, string dirName) + { + string containerUri = string.Format("{0}/{1}", blobEndpoint, containerName); + var containerRef = new CloudBlobContainer(new Uri(containerUri)); + return containerRef.GetDirectoryReference(dirName).Uri.ToString(); + } + + public static string GetXsmbDirectoryUri(string fileEndpoint, string shareName, string dirName) + { + string shareUri = string.Format("{0}/{1}", fileEndpoint, shareName); + var shareRef = new CloudFileShare(new Uri(shareUri), new 
StorageCredentials()); + return shareRef.GetRootDirectoryReference().GetDirectoryReference(dirName).Uri.ToString(); + } + + public static string AppendSlash(string input) + { + if (input.EndsWith("/")) + { + return input; + } + else + { + return input + "/"; + } + } + + public static bool IsNotFoundException(StorageException e) + { + if (null != e.RequestInformation && + 404 == e.RequestInformation.HttpStatusCode) + { + Test.Info("Server returns 404 error: {0}", e.ToString()); + return true; + } + + return false; + } + + public static void GenerateSparseCloudObject( + List ranges, + List gaps, + Action createObject, + Action writeUnit) + { + if (ranges.Count != gaps.Count + 1) + { + Test.Error("Invalid input for SparseCloudObject."); + } + + Test.Info("Ranges:"); + ranges.PrintAllElements(); + Test.Info("Gaps:"); + gaps.PrintAllElements(); + + int totalSize = ranges.Sum() + gaps.Sum(); + createObject(totalSize); + + int offset = 0; + for (int i = 0; i < ranges.Count; ++i) + { + int range = ranges[i]; + + Helper.WriteRange(offset, range, writeUnit); + + offset += range; + + if (i != ranges.Count - 1) + { + offset += gaps[i]; + } + } + } + + private static void WriteRange(int offset, int length, Action writeUnit) + { + int remainingLength = length; + int currentOffset = offset; + const int MaxLength = 4 * 1024 * 1024; + + while (remainingLength > 0) + { + int lengthToWrite = Math.Min(MaxLength, remainingLength); + + using (MemoryStream randomData = Helper.GetRandomData(lengthToWrite)) + { + writeUnit(currentOffset, randomData); + } + + currentOffset += lengthToWrite; + remainingLength -= lengthToWrite; + } + } + + public static MemoryStream GetRandomData(int size) + { + Random random = new Random(); + byte[] data = new byte[size]; + random.NextBytes(data); + return new MemoryStream(data); + } + + public static void Shuffle(this List list) + { + Random random = new Random(); + int currentPosition = list.Count; + while (currentPosition > 1) + { + currentPosition--; + 
int swapPosition = random.Next(currentPosition + 1); + var temp = list[swapPosition]; + list[swapPosition] = list[currentPosition]; + list[currentPosition] = temp; + } + } + + public static void PrintAllElements(this List list) + { + Test.Info("[{0}]", string.Join(",", list)); + } + + /// + /// return setting from testData.xml if exist, else return default value + /// + /// the name of the setting + /// the default Value of the setting + /// the setting value + public static string ParseSetting(string settingName, object defaultValue) + { + try + { + return Test.Data.Get(settingName); + } + catch + { + return defaultValue.ToString(); + } + } + } + + public class FileCompare : IEqualityComparer + { + public FileCompare() { } + + public bool Equals(FileInfo f1, FileInfo f2) + { + if (f1.Name != f2.Name) + { + Test.Verbose("file name {0}:{1} not equal {2}:{3}", f1.FullName, f1.Name, f2.FullName, f2.Name); + return false; + } + + if (f1.Length != f2.Length) + { + Test.Verbose("file length {0}:{1} not equal {2}:{3}", f1.FullName, f1.Length, f2.FullName, f2.Length); + return false; + } + + if (f1.Length < 200 * 1024 * 1024) + { + string f1MD5Hash = f1.MD5Hash(); + string f2MD5Hash = f2.MD5Hash(); + if (f1MD5Hash != f2MD5Hash) + { + Test.Verbose("file MD5 mismatch {0}:{1} not equal {2}:{3}", f1.FullName, f1MD5Hash, f2.FullName, f2MD5Hash); + return false; + } + } + else + { + if (!Helper.CompareTwoFiles(f1, f2)) + { + Test.Verbose("file MD5 mismatch {0} not equal {1}", f1.FullName, f2.FullName); + return false; + } + } + return true; + } + + public int GetHashCode(FileInfo fi) + { + string s = String.Format("{0}{1}", fi.Name, fi.Length); + return s.GetHashCode(); + } + } + + public static class FileOp + { + private const int NumberBase = 48; + private const int UpperCaseLetterBase = 65; + private const int Underline = 95; + private const int LowerCaseLetterBase = 97; + + public static HashSet AllSpecialChars { get; private set; } + + public static HashSet 
InvalidCharsInLocalAndCloudFileName { get; private set; } + + public static HashSet InvalidCharsInBlobName { get; private set; } + + public static List ValidSpecialCharsInLocalFileName { get; private set; } + + static FileOp() + { + AllSpecialChars = new HashSet { '$', '&', '+', ',', '/', ':', '=', '?', '@', ' ', '"', '<', '>', '#', '%', '{', '}', '|', '\\', '^', '~', '[', ']', '`', '*', '!', '(', ')', '-', '_', '\'', '.', ';' }; + InvalidCharsInLocalAndCloudFileName = new HashSet { '/', ':', '?', '"', '<', '>', '|', '\\', '*' }; + InvalidCharsInBlobName = new HashSet { '\\' }; + + ValidSpecialCharsInLocalFileName = new List(); + foreach (var ch in AllSpecialChars) + { + if (!InvalidCharsInLocalAndCloudFileName.Contains(ch)) + { + ValidSpecialCharsInLocalFileName.Add(ch); + } + } + } + + public static string MD5Hash(this FileInfo fi) + { + return Helper.GetFileMD5Hash(fi.FullName); + } + + public static string NextString(Random Randomint) + { + int length = Randomint.Next(1, 100); + return NextString(Randomint, length); + } + + public static string NextString(Random Randomint, int length) + { + if (length == 0) + { + return string.Empty; + } + + while (true) + { + var result = new String( + Enumerable.Repeat(0, length) + .Select(p => GetRandomVisiableChar(Randomint)) + .ToArray()); + result = result.Trim(); + if (result.Length == length && !string.IsNullOrWhiteSpace(result) && !result.EndsWith(".")) + { + return result; + } + } + } + + public static string NextCIdentifierString(Random random) + { + int length = random.Next(1, 255); + return NextCIdentifierString(random, length); + } + + public static string NextCIdentifierString(Random random, int length) + { + char[] charArray = + Enumerable.Repeat(0, length) + .Select(p => GetCIdentifierChar(random)) + .ToArray(); + + if (charArray[0] >= NumberBase && charArray[0] <= (NumberBase + 10)) + { + charArray[0] = '_'; + } + + return new string(charArray); + } + + public static string NextNormalString(Random random) + { 
+ int length = random.Next(1, 255); + return NextNormalString(random, length); + } + + public static string NextNormalString(Random random, int length) + { + while (true) + { + var result = new String( + Enumerable.Repeat(0, length) + .Select(p => GetNormalChar(random)) + .ToArray()); + result = result.Trim(); + if (result.Length == length && !string.IsNullOrWhiteSpace(result) && !result.EndsWith(".")) + { + return result; + } + } + } + + public static char GetCIdentifierChar(Random random) + { + int i = random.Next(0, 63); + + if (i < 10) + { + return (char)(NumberBase + i); + } + + i = i - 10; + + if (i < 26) + { + return (char)(UpperCaseLetterBase + i); + } + + i = i - 26; + + if (i == 0) + { + return (char)(Underline); + } + + i--; + + return (char)(LowerCaseLetterBase + i); + } + + public static char GetNormalChar(Random random) + { + return (char)random.Next(0x20, 0x7E); + } + + public static string NextString(Random Randomint, int length, char[] ValidChars) + { + return new String( + Enumerable.Repeat(0, length) + .Select(p => GetRandomItem(Randomint, ValidChars)) + .ToArray()); + } + + public static string NextNonASCIIString(Random Randomint) + { + var builder = new StringBuilder(FileOp.NextString(Randomint)); + var countToInsert = Randomint.Next(1, 50); + for (int i = 0; i < countToInsert; i++) + { + char ch; + while (true) + { + ch = FileOp.GetRandomVisiableChar(Randomint); + if ((int)ch >= 0x80) + { + break; + } + } + + builder.Insert(Randomint.Next(0, builder.Length + 1), ch); + } + + return builder.ToString(); + } + + public static T GetRandomItem(Random Randomint, T[] items) + { + if (items.Length <= 0) + { + Test.Error("no candidate item"); + } + + int i = Randomint.Next(0, items.Length); + return items[i]; + } + + public static char GetRandomVisiableChar(Random Randomint) + { + double specialCharProbability = 0.05; + + if (Randomint.Next(0, 100) / 100.0 < specialCharProbability) + { + return ValidSpecialCharsInLocalFileName[Randomint.Next(0, 
ValidSpecialCharsInLocalFileName.Count)]; + } + else + { + while (true) + { + int i = Randomint.Next(0x20, 0xD7FF); + var ch = (char)i; + + // Control characters are all invalid to blob name. + // Characters U+200E, U+200F, U+202A, U+202B, U+202C, U+202D, U+202E are all invalid to URI. + if (char.GetUnicodeCategory(ch) != UnicodeCategory.Control && + !InvalidCharsInLocalAndCloudFileName.Contains(ch) && + i != 0x200e && i != 0x200f && + i != 0x202a && i != 0x202b && i != 0x202c && i != 0x202d && i != 0x202e) + { + return ch; + } + } + } + } + + public static string GetDriveMapping(char letter) + { + var sb = new StringBuilder(259); + if (QueryDosDevice(CreateDeviceName(letter), sb, sb.Capacity) == 0) + { + // Return empty string if the drive is not mapped + int err = Marshal.GetLastWin32Error(); + if (err == 2) return ""; + throw new System.ComponentModel.Win32Exception(); + } + return sb.ToString().Substring(4); + } + + private static string CreateDeviceName(char letter) + { + return new string(char.ToUpper(letter), 1) + ":"; + } + + [DllImport("kernel32.dll", CharSet = CharSet.Auto, SetLastError = true)] + private static extern bool DefineDosDevice(int flags, string devname, string path); + [DllImport("kernel32.dll", CharSet = CharSet.Auto, SetLastError = true)] + private static extern int QueryDosDevice(string devname, StringBuilder buffer, int bufSize); + + public static void SetFileAttribute(string Filename, FileAttributes attribute) + { + FileAttributes fa = File.GetAttributes(Filename); + if ((fa & attribute) == attribute) + { + Test.Info("Attribute {0} is already in file {1}. 
Don't need to add again.", attribute.ToString(), Filename); + return; + } + + switch (attribute) + { + case FileAttributes.Encrypted: + string fullPath = GetFullPath(Filename); + File.Encrypt(fullPath); + break; + case FileAttributes.Normal: + RemoveFileAttribute(Filename, FileAttributes.Encrypted); + RemoveFileAttribute(Filename, FileAttributes.Compressed); + fa = FileAttributes.Normal; + File.SetAttributes(Filename, fa); + break; + case FileAttributes.Compressed: + compress(Filename); + break; + default: + fa = fa | attribute; + File.SetAttributes(Filename, fa); + break; + } + Test.Info("Attribute {0} is added to file {1}.", attribute.ToString(), Filename); + } + + private static string GetFullPath(string Filename) + { + string fullPath = Path.GetFullPath(Filename); + char driveLetter = fullPath.ToCharArray()[0]; + String actualPath = GetDriveMapping(driveLetter); + // WAES will map c:\user\tasks\workitems\{jobid} to f:\wd, + // and File.Encrypt will throw DirectoryNotFoundException + // Thus it is necessary to convert the file path to original one + if (Regex.IsMatch(actualPath, @"\w:\\", RegexOptions.IgnoreCase) == true) + { + fullPath = String.Format("{0}{1}", actualPath, fullPath.Substring(2)); + } + return fullPath; + } + + public static void RemoveFileAttribute(string Filename, FileAttributes attribute) + { + FileAttributes fa = File.GetAttributes(Filename); + if ((fa & attribute) != attribute) + { + Test.Info("Attribute {0} is NOT in file{1}. 
Don't need to remove.", attribute.ToString(), Filename); + return; + } + + switch (attribute) + { + case FileAttributes.Encrypted: + File.Decrypt(GetFullPath(Filename)); + break; + case FileAttributes.Normal: + fa = fa | FileAttributes.Archive; + File.SetAttributes(Filename, fa); + break; + case FileAttributes.Compressed: + uncompress(Filename); + break; + default: + fa = fa & ~attribute; + File.SetAttributes(Filename, fa); + break; + } + Test.Info("Attribute {0} is removed from file{1}.", attribute.ToString(), Filename); + } + + [DllImport("kernel32.dll")] + public static extern int DeviceIoControl(IntPtr hDevice, int + dwIoControlCode, ref short lpInBuffer, int nInBufferSize, IntPtr + lpOutBuffer, int nOutBufferSize, ref int lpBytesReturned, IntPtr + lpOverlapped); + + private static int FSCTL_SET_COMPRESSION = 0x9C040; + private static short COMPRESSION_FORMAT_DEFAULT = 1; + private static short COMPRESSION_FORMAT_NONE = 0; + +#pragma warning disable 612, 618 + public static void compress(string filename) + { + if ((File.GetAttributes(filename) & FileAttributes.Encrypted) == FileAttributes.Encrypted) + { + Test.Info("Decrypt File {0} to prepare for compress.", filename); + File.Decrypt(GetFullPath(filename)); + } + int lpBytesReturned = 0; + FileStream f = File.Open(filename, System.IO.FileMode.Open, + System.IO.FileAccess.ReadWrite, System.IO.FileShare.None); + int result = DeviceIoControl(f.Handle, FSCTL_SET_COMPRESSION, + ref COMPRESSION_FORMAT_DEFAULT, 2 /*sizeof(short)*/, IntPtr.Zero, 0, + ref lpBytesReturned, IntPtr.Zero); + f.Close(); + } + + public static void uncompress(string filename) + { + int lpBytesReturned = 0; + FileStream f = File.Open(filename, System.IO.FileMode.Open, + System.IO.FileAccess.ReadWrite, System.IO.FileShare.None); + int result = DeviceIoControl(f.Handle, FSCTL_SET_COMPRESSION, + ref COMPRESSION_FORMAT_NONE, 2 /*sizeof(short)*/, IntPtr.Zero, 0, + ref lpBytesReturned, IntPtr.Zero); + f.Close(); + } +#pragma warning restore 612, 618 
+ + } + + public class CloudFileHelper + { + public const string AllowedCharactersInShareName = "abcdefghijklmnopqrstuvwxyz0123456789-"; + public const string InvalidCharactersInDirOrFileName = "\"\\/:|<>*?"; + public const int MinShareNameLength = 3; + public const int MaxShareNameLength = 63; + public const int MinDirOrFileNameLength = 1; + public const int MaxDirOrFileNameLength = 255; + + public CloudStorageAccount Account + { + get; + private set; + } + + public CloudFileClient FileClient + { + get; + private set; + } + + public CloudFileHelper(CloudStorageAccount account) + { + this.Account = account; + this.FileClient = account.CreateCloudFileClient(); + this.FileClient.DefaultRequestOptions.RetryPolicy = new LinearRetry(TimeSpan.Zero, 3); + } + + public bool Exists(string shareName) + { + CloudFileShare share = this.FileClient.GetShareReference(shareName); + return share.Exists(); + } + + public bool CreateShare(string shareName) + { + CloudFileShare share = this.FileClient.GetShareReference(shareName); + return share.CreateIfNotExists(); + } + + public bool CleanupShare(string shareName) + { + return this.CleanupFileDirectory(shareName, string.Empty); + } + + public bool CleanupShareByRecreateIt(string shareName) + { + try + { + CloudFileShare share = FileClient.GetShareReference(shareName); + if (share == null || !share.Exists()) return false; + + FileRequestOptions fro = new FileRequestOptions(); + fro.RetryPolicy = new LinearRetry(new TimeSpan(0, 1, 0), 3); + + share.DeleteIfExists(null, fro); + + Test.Info("Share deleted."); + fro.RetryPolicy = new LinearRetry(new TimeSpan(0, 3, 0), 3); + + bool createSuccess = false; + int retry = 0; + while (!createSuccess && retry++ < 100) //wait up to 5 minutes + { + try + { + share.Create(fro); + createSuccess = true; + Test.Info("Share recreated."); + } + catch (StorageException e) + { + if (e.Message.Contains("(409)")) //conflict, the container is still in deleteing + { + Thread.Sleep(3000); + } + else + { + 
throw; + } + } + } + + return createSuccess; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return false; + } + + throw; + } + } + + public bool DeleteShare(string shareName) + { + CloudFileShare share = this.FileClient.GetShareReference(shareName); + return share.DeleteIfExists(); + } + + public bool DownloadFile(string shareName, string fileName, string filePath) + { + try + { + CloudFileShare share = FileClient.GetShareReference(shareName); + FileRequestOptions fro = new FileRequestOptions(); + fro.RetryPolicy = new LinearRetry(new TimeSpan(0, 0, 30), 3); + fro.ServerTimeout = new TimeSpan(1, 30, 0); + fro.MaximumExecutionTime = new TimeSpan(1, 30, 0); + + CloudFileDirectory root = share.GetRootDirectoryReference(); + CloudFile cloudFile = root.GetFileReference(fileName); + + using (FileStream fileStream = new FileStream(filePath, FileMode.Create)) + { + cloudFile.DownloadToStream(fileStream, null, fro); + fileStream.Close(); + } + + return true; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return false; + } + + throw; + } + } + + public bool UploadFile(string shareName, string fileName, string filePath, bool createParentIfNotExist = true) + { + CloudFileShare share = FileClient.GetShareReference(shareName); + try + { + FileRequestOptions options = new FileRequestOptions + { + RetryPolicy = new ExponentialRetry(TimeSpan.FromSeconds(60), 3), + }; + + if (createParentIfNotExist) + { + share.CreateIfNotExists(options); + string parentDirectoryPath = GetFileDirectoryName(fileName); + this.CreateFileDirectory(shareName, parentDirectoryPath); + } + } + catch (StorageException e) + { + Test.Error("UploadFile: receives StorageException when creating parent: {0}", e.ToString()); + return false; + } + catch (Exception e) + { + Test.Error("UploadFile: receives Exception when creating parent: {0}", e.ToString()); + return false; + } + + CloudFileDirectory root = share.GetRootDirectoryReference(); + 
CloudFile cloudFile = root.GetFileReference(fileName); + return UploadFile(cloudFile, filePath); + } + + public static bool UploadFile(CloudFile destFile, string sourceFile) + { + try + { + FileInfo fi = new FileInfo(sourceFile); + if (!fi.Exists) + { + return false; + } + + FileRequestOptions fro = new FileRequestOptions(); + fro.RetryPolicy = new LinearRetry(new TimeSpan(0, 0, 60), 5); + fro.ServerTimeout = new TimeSpan(1, 90, 0); + fro.MaximumExecutionTime = new TimeSpan(1, 90, 0); + + destFile.Create(fi.Length, null, fro); + + using (FileStream fileStream = new FileStream(sourceFile, FileMode.Open)) + { + destFile.UploadFromStream(fileStream, null, fro); + fileStream.Close(); + } + + // update content md5 + destFile.Properties.ContentMD5 = Helper.GetFileContentMD5(sourceFile); + destFile.SetProperties(null, fro); + + Test.Info("Local file {0} has been uploaded to xSMB successfully", sourceFile); + + return true; + } + catch (StorageException e) + { + Test.Error("UploadFile: receives StorageException: {0}", e.ToString()); + return false; + } + catch (Exception e) + { + Test.Error("UploadFile: receives Exception: {0}", e.ToString()); + return false; + } + } + + public static void GenerateCloudFileWithRangedData(CloudFile cloudFile, List ranges, List gaps) + { + Helper.GenerateSparseCloudObject( + ranges, + gaps, + createObject: (totalSize) => + { + cloudFile.Create(totalSize); + }, + writeUnit: (unitOffset, randomData) => + { + cloudFile.WriteRange(randomData, unitOffset, options: HelperConst.DefaultFileOptions); + }); + + Helper.PrintCloudFileRanges(cloudFile, true); + + // Set correct MD5 to cloud file + string md5 = CalculateMD5ByDownloading(cloudFile); + cloudFile.Properties.ContentMD5 = md5; + cloudFile.SetProperties(options: HelperConst.DefaultFileOptions); + } + + public CloudFile QueryFile(string shareName, string fileName) + { + try + { + CloudFileShare share = this.FileClient.GetShareReference(shareName); + CloudFileDirectory root = 
share.GetRootDirectoryReference(); + CloudFile file = root.GetFileReference(fileName); + + if (file.Exists()) + { + file.FetchAttributes(); + return file; + } + else + { + return null; + } + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return null; + } + + throw; + } + } + + public bool DeleteFile(string shareName, string fileName) + { + CloudFileShare share = FileClient.GetShareReference(shareName); + if (share.Exists()) + { + CloudFileDirectory root = share.GetRootDirectoryReference(); + CloudFile file = root.GetFileReference(fileName); + + return file.DeleteIfExists(); + } + + return false; + } + + public bool DeleteFileDirectory(string shareName, string fileDirectoryName) + { + CloudFileShare share = FileClient.GetShareReference(shareName); + if (!share.Exists()) + { + return false; + } + + // do not try to delete a root directory + if (string.IsNullOrEmpty(fileDirectoryName)) + { + return false; + } + + CloudFileDirectory root = share.GetRootDirectoryReference(); + CloudFileDirectory dir = root.GetDirectoryReference(fileDirectoryName); + + if (!dir.Exists()) + { + return false; + } + + DeleteFileDirectory(dir); + + return true; + } + + public CloudFileDirectory QueryFileDirectory(string shareName, string fileDirectoryName) + { + try + { + CloudFileShare share = FileClient.GetShareReference(shareName); + if (share == null || !share.Exists()) return null; + + CloudFileDirectory root = share.GetRootDirectoryReference(); + if (string.IsNullOrEmpty(fileDirectoryName)) + { + return root; + } + + CloudFileDirectory dir = root.GetDirectoryReference(fileDirectoryName); + + return dir.Exists() ? 
dir : null; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return null; + } + + throw; + } + } + + public bool CreateFileDirectory(string shareName, string fileDirectoryName) + { + CloudFileShare share = FileClient.GetShareReference(shareName); + if (share == null || !share.Exists()) + { + return false; + } + + if (string.IsNullOrEmpty(fileDirectoryName)) + { + return false; + } + + CloudFileDirectory parent = share.GetRootDirectoryReference(); + + string[] directoryTokens = fileDirectoryName.Split('/'); + foreach (string directoryToken in directoryTokens) + { + parent = CreateFileDirectoryIfNotExist(parent, directoryToken); + } + + return true; + } + + // create a directory under the specified parent directory. + public static CloudFileDirectory CreateFileDirectoryIfNotExist(CloudFileDirectory parent, string fileDirectoryName) + { + CloudFileDirectory dir = parent.GetDirectoryReference(fileDirectoryName); + dir.CreateIfNotExists(); + + return dir; + } + + // upload all files & dirs(including empty dir) in a local directory to an xsmb directory + public void UploadDirectory(string localDirName, string shareName, string fileDirName, bool recursive) + { + DirectoryInfo srcDir = new DirectoryInfo(localDirName); + CloudFileDirectory destDir = QueryFileDirectory(shareName, fileDirName); + if (null == destDir) + { + this.CreateFileDirectory(shareName, fileDirName); + destDir = QueryFileDirectory(shareName, fileDirName); + Test.Assert(null != destDir, "{0} should exist in file share {1}.", fileDirName, shareName); + } + + UploadDirectory(srcDir, destDir, recursive); + } + + public static void UploadDirectory(DirectoryInfo sourceDir, CloudFileDirectory destDir, bool recursive) + { + destDir.CreateIfNotExists(); + + Parallel.ForEach( + sourceDir.EnumerateFiles(), + fi => + { + string fileName = Path.GetFileName(fi.Name); + CloudFile file = destDir.GetFileReference(fileName); + + bool uploaded = UploadFile(file, fi.FullName); + if (!uploaded) 
+ { + Test.Assert(false, "failed to upload file:{0}", fi.FullName); + } + }); + + if (recursive) + { + foreach (DirectoryInfo di in sourceDir.EnumerateDirectories()) + { + string subDirName = Path.GetFileName(di.Name); + CloudFileDirectory subDir = destDir.GetDirectoryReference(subDirName); + UploadDirectory(di, subDir, true); + } + } + } + + // compare an xsmb directory with a local directory. return true only if + // 1. all files under both dir are the same, and + // 2. all sub directories under both dir are the same + public bool CompareCloudFileDirAndLocalDir(string shareName, string fileDirName, string localDirName) + { + try + { + CloudFileDirectory dir = QueryFileDirectory(shareName, fileDirName); + if (null == dir) + { + return false; + } + + return CompareCloudFileDirAndLocalDir(dir, localDirName); + } + catch + { + return false; + } + } + + public static bool CompareCloudFileDirAndLocalDir(CloudFileDirectory dir, string localDirName) + { + if (!dir.Exists() || !Directory.Exists(localDirName)) + { + // return false if cloud dir or local dir not exist. + Test.Info("dir not exist. 
local dir={0}", localDirName); + return false; + } + + HashSet localSubFiles = new HashSet(); + foreach (string localSubFile in Directory.EnumerateFiles(localDirName)) + { + localSubFiles.Add(Path.GetFileName(localSubFile)); + } + + HashSet localSubDirs = new HashSet(); + foreach (string localSubDir in Directory.EnumerateDirectories(localDirName)) + { + localSubDirs.Add(Path.GetFileName(localSubDir)); + } + + foreach (IListFileItem item in dir.ListFilesAndDirectories()) + { + if (item is CloudFile) + { + CloudFile tmpFile = item as CloudFile; + + // TODO: tmpFile.RelativeName + string tmpFileName = Path.GetFileName(tmpFile.Name); + if (!localSubFiles.Remove(tmpFileName)) + { + Test.Info("file not found at local: {0}", tmpFile.Name); + return false; + } + + if (!CompareCloudFileAndLocalFile(tmpFile, Path.Combine(localDirName, tmpFileName))) + { + Test.Info("file content not consistent: {0}", tmpFile.Name); + return false; + } + } + else if (item is CloudFileDirectory) + { + CloudFileDirectory tmpDir = item as CloudFileDirectory; + string tmpDirName = tmpDir.Name; + if (!localSubDirs.Remove(tmpDirName)) + { + Test.Info("dir not found at local: {0}", tmpDir.Name); + return false; + } + + if (!CompareCloudFileDirAndLocalDir(tmpDir, Path.Combine(localDirName, tmpDirName))) + { + return false; + } + } + } + + return (localSubFiles.Count == 0 && localSubDirs.Count == 0); + } + + public bool CompareCloudFileAndLocalFile(string shareName, string fileName, string localFileName) + { + CloudFile file = QueryFile(shareName, fileName); + if (null == file) + { + return false; + } + + return CompareCloudFileAndLocalFile(file, localFileName); + } + + public static bool CompareCloudFileAndLocalFile(CloudFile file, string localFileName) + { + if (!file.Exists() || !File.Exists(localFileName)) + { + return false; + } + + file.FetchAttributes(); + return file.Properties.ContentMD5 == Helper.GetFileContentMD5(localFileName); + } + + public static string 
CalculateMD5ByDownloading(CloudFile cloudFile, bool disableMD5Check = false) + { + using (TemporaryTestFolder tempFolder = new TemporaryTestFolder(Guid.NewGuid().ToString())) + { + const string tempFileName = "tempFile"; + string tempFilePath = Path.Combine(tempFolder.Path, tempFileName); + var fileOptions = new FileRequestOptions(); + fileOptions.DisableContentMD5Validation = disableMD5Check; + fileOptions.RetryPolicy = HelperConst.DefaultFileOptions.RetryPolicy.CreateInstance(); + cloudFile.DownloadToFile(tempFilePath, FileMode.OpenOrCreate, options: fileOptions); + return Helper.GetFileContentMD5(tempFilePath); + } + } + + public CloudFile GetFileReference(string shareName, string cloudFileName) + { + CloudFileShare share = FileClient.GetShareReference(shareName); + CloudFileDirectory dir = share.GetRootDirectoryReference(); + return dir.GetFileReference(cloudFileName); + } + + public CloudFileDirectory GetDirReference(string shareName, string cloudDirName) + { + CloudFileShare share = FileClient.GetShareReference(shareName); + CloudFileDirectory dir = share.GetRootDirectoryReference(); + + if (cloudDirName == string.Empty) + { + return dir; + } + else + { + return dir.GetDirectoryReference(cloudDirName); + } + } + + // enumerate files under the specified cloud directory. + // Returns an enumerable collection of the full names(including dirName), for the files in the directory. + public IEnumerable EnumerateFiles(string shareName, string dirName, bool recursive) + { + CloudFileDirectory dir = QueryFileDirectory(shareName, dirName); + if (null == dir) + { + Test.Assert(false, "directory or share doesn't exist"); + } + + return EnumerateFiles(dir, recursive); + } + + // enumerate files under the specified cloud directory. + // Returns an enumerable collection of the full names(including dir name), for the files in the directory. 
+ public static IEnumerable EnumerateFiles(CloudFileDirectory dir, bool recursive) + { + var folders = new List(); + foreach (IListFileItem item in dir.ListFilesAndDirectories()) + { + if (item is CloudFile) + { + CloudFile file = item as CloudFile; + string fileName = Path.GetFileName(file.Name); + string filePath = dir.Name + "/" + fileName; + yield return filePath; + } + else if (item is CloudFileDirectory) + { + if (recursive) + { + CloudFileDirectory subDir = item as CloudFileDirectory; + folders.Add(subDir); + } + } + } + + foreach (var folder in folders) + { + foreach (var filePath in EnumerateFiles(folder, recursive)) + { + yield return dir.Name + "/" + filePath; + } + } + } + + // enumerate directory under the specified cloud directory. + // Returns an enumerable collection of the full names(including dirName), for the directories in the directory + public IEnumerable EnumerateDirectories(string shareName, string dirName, bool recursive) + { + CloudFileDirectory dir = QueryFileDirectory(shareName, dirName); + if (null == dir) + { + Test.Assert(false, "directory or share doesn't exist"); + } + + return EnumerateDirectories(dir, recursive); + } + + // enumerate directory under the specified cloud directory. 
+ // Returns an enumerable collection of the full names(including dir name), for the directories in the directory + public static IEnumerable EnumerateDirectories(CloudFileDirectory dir, bool recursive) + { + List dirs = new List(); + foreach (IListFileItem item in dir.ListFilesAndDirectories()) + { + if (item is CloudFileDirectory) + { + CloudFileDirectory subDir = item as CloudFileDirectory; + dirs.Add(dir.Name + "/" + subDir.Name); + + if (recursive) + { + foreach (string subSubDir in EnumerateDirectories(subDir, true)) + { + dirs.Add(dir.Name + "/" + subSubDir); + } + } + } + } + + return dirs; + } + + // convert xsmb file name to local file name by replacing "/" with DirectorySeparatorChar + public static string ConvertCloudFileNameToLocalFileName(string fileName) + { + if (Path.DirectorySeparatorChar == '/') + { + return fileName; + } + + return fileName.Replace('/', Path.DirectorySeparatorChar); + } + + // convert local file name to xsmb by replacing DirectorySeparatorChar with "/" + public static string ConvertLocalFileNameToCloudFileName(string fileName) + { + if (Path.DirectorySeparatorChar == '/') + { + return fileName; + } + + return fileName.Replace(Path.DirectorySeparatorChar, '/'); + } + + public static string GetFileDirectoryName(string fileName) + { + int index = fileName.LastIndexOf('/'); + + if (-1 == index) + { + return string.Empty; + } + + return fileName.Substring(0, index); + } + + public bool CleanupFileDirectory(string shareName, string fileDirectoryName) + { + CloudFileShare share = FileClient.GetShareReference(shareName); + if (!share.Exists()) + { + return false; + } + + CloudFileDirectory root = share.GetRootDirectoryReference(); + if (!string.IsNullOrEmpty(fileDirectoryName)) + { + root = root.GetDirectoryReference(fileDirectoryName); + } + else + { + if (root.ListFilesAndDirectories().Count() > 500) + return CleanupFileShareByRecreateIt(shareName); + } + + CleanupFileDirectory(root); + return true; + } + + public static void 
CleanupFileDirectory(CloudFileDirectory cloudDirectory) + { + foreach (IListFileItem item in cloudDirectory.ListFilesAndDirectories()) + { + if (item is CloudFile) + { + (item as CloudFile).Delete(); + } + + if (item is CloudFileDirectory) + { + DeleteFileDirectory(item as CloudFileDirectory); + } + } + } + + public bool CleanupFileShareByRecreateIt(string shareName) + { + CloudFileShare share = FileClient.GetShareReference(shareName); + if (!share.Exists()) + { + return true; + } + + try + { + share.Delete(); + + Test.Info("share deleted."); + + bool createSuccess = false; + int retry = 0; + while (!createSuccess && retry++ < 100) //wait up to 5 minutes + { + try + { + share.Create(); + createSuccess = true; + Test.Info("share recreated."); + } + catch (StorageException e) + { + if (e.Message.Contains("(409)")) //conflict, the share is still in deleteing + { + Thread.Sleep(3000); + } + else + { + throw; + } + } + } + return true; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return false; + } + throw; + } + } + + public static void DeleteFileDirectory(CloudFileDirectory cloudDirectory) + { + CleanupFileDirectory(cloudDirectory); + cloudDirectory.Delete(); + } + + /// + /// Get SAS of a share with specific permission and period. + /// + /// The name of the share. + /// The permission of the SAS. 
+ /// How long the SAS will be valid before expire, in second + /// the SAS + public string GetSASofShare( + string shareName, + SharedAccessFilePermissions permissions, + int validatePeriod, + bool UseSavedPolicy = true, + string policySignedIdentifier = "PolicyIdentifier") + { + var share = this.FileClient.GetShareReference(shareName); + string sas = string.Empty; + var policy = new SharedAccessFilePolicy(); + policy.Permissions = permissions; + policy.SharedAccessExpiryTime = DateTimeOffset.Now.AddSeconds(validatePeriod); + if (UseSavedPolicy) + { + var sharePermissions = share.GetPermissions(); + sharePermissions.SharedAccessPolicies.Clear(); + sharePermissions.SharedAccessPolicies.Add(policySignedIdentifier, policy); + share.SetPermissions(sharePermissions); + sas = share.GetSharedAccessSignature(new SharedAccessFilePolicy(), policySignedIdentifier); + + DMLibTestHelper.WaitForACLTakeEffect(); + } + else + { + sas = share.GetSharedAccessSignature(policy); + } + + Test.Info("The SAS is {0}", sas); + return sas; + } + + /// + /// Clears the SAS policy set to a container, used to revoke the SAS. + /// + /// The name of the share. 
+ public void ClearSASPolicyofShare(string shareName) + { + var share = this.FileClient.GetShareReference(shareName); + var bp = share.GetPermissions(); + bp.SharedAccessPolicies.Clear(); + share.SetPermissions(bp); + } + } + + + + /// + /// This class helps to do operations on cloud blobs + /// + public class CloudBlobHelper + { + public const string RootContainer = "$root"; + + private CloudStorageAccount account; + + /// + /// The storage account + /// + public CloudStorageAccount Account + { + get { return account; } + private set { account = value; } + } + + private CloudBlobClient blobClient; + /// + /// The blob client + /// + public CloudBlobClient BlobClient + { + get { return blobClient; } + set { blobClient = value; } + } + + /// + /// Construct the helper with the storage account + /// + /// + public CloudBlobHelper(CloudStorageAccount account) + { + Account = account; + BlobClient = account.CreateCloudBlobClient(); + BlobClient.DefaultRequestOptions.RetryPolicy = new LinearRetry(TimeSpan.Zero, 3); + } + + /// + /// Construct the helper with the storage account + /// + /// + public CloudBlobHelper(string ConnectionString) + { + Account = CloudStorageAccount.Parse(ConnectionString); + BlobClient = Account.CreateCloudBlobClient(); + BlobClient.DefaultRequestOptions.RetryPolicy = new LinearRetry(TimeSpan.Zero, 3); + } + + public bool Exists(string containerName) + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + return container.Exists(); + } + + /// + /// Create a container for blobs + /// + /// the name of the container + /// Return true on success, false if already exists, throw exception on error + public bool CreateContainer(string containerName) + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + return container.CreateIfNotExists(); + } + + /// + /// Delete the container for the blobs + /// + /// the name of container + /// Return true on success (or the container was deleted 
before), false if the container doesnot exist, throw exception on error + public bool DeleteContainer(string containerName) + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + return container.DeleteIfExists(); + } + + public CloudBlobContainer GetGRSContainer(string containerName) + { + return new CloudBlobContainer( + new Uri(string.Format("{0}/{1}", this.Account.BlobStorageUri.SecondaryUri.AbsoluteUri, containerName)), + this.Account.Credentials); + } + + public BlobContainerPermissions SetGRSContainerAccessType(string containerName, BlobContainerPublicAccessType accessType) + { + BlobContainerPermissions oldPermissions = this.SetContainerAccessType(containerName, accessType); + if (null == oldPermissions) + { + return null; + } + + CloudBlobContainer containerGRS = new CloudBlobContainer( + new Uri(string.Format("{0}/{1}", this.Account.BlobStorageUri.SecondaryUri.AbsoluteUri, containerName)), + this.blobClient.Credentials); + + Helper.WaitForTakingEffect(containerGRS.ServiceClient); + return oldPermissions; + } + + /// + /// Set the specific container to the accesstype + /// + /// container Name + /// the accesstype the contain will be set + /// the container 's permission before set, so can be set back when test case finish + public BlobContainerPermissions SetContainerAccessType(string containerName, BlobContainerPublicAccessType accesstype) + { + try + { + CloudBlobContainer container = blobClient.GetContainerReference(containerName); + container.CreateIfNotExists(); + BlobContainerPermissions oldPerm = container.GetPermissions(); + BlobContainerPermissions blobPermissions = new BlobContainerPermissions(); + blobPermissions.PublicAccess = accesstype; + container.SetPermissions(blobPermissions); + return oldPerm; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return null; + } + throw; + } + } + + public bool ListBlobs(string containerName, out List blobList) + { + return 
this.ListBlobs(containerName, BlobListingDetails.All, out blobList); + } + + + public bool ListBlobs(string containerName, BlobListingDetails listingDetails, out List blobList) + { + blobList = new List(); + try + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + IEnumerable blobs = container.ListBlobs(null, true, listingDetails); + if (blobs != null) + { + foreach (CloudBlob blob in blobs) + { + blobList.Add(blob); + } + } + return true; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return false; + } + throw; + } + } + + + /// + /// list blobs in a folder, TODO: implement this for batch operations on blobs + /// + /// + /// + /// + public bool ListBlobs(string containerName, string folderName, out List blobList) + { + blobList = new List(); + + try + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + CloudBlobDirectory blobDir = container.GetDirectoryReference(folderName); + IEnumerable blobs = blobDir.ListBlobs(true, BlobListingDetails.All); + if (blobs != null) + { + foreach (CloudBlob blob in blobs) + { + blobList.Add(blob); + } + } + return true; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return false; + } + throw; + } + } + + /// + /// Validate the uploaded tree which is created by Helper.GenerateFixedTestTree() + /// + /// the file prefix of the tree + /// the folder prefix of the tree + /// + /// + /// how many files in each folder + /// how many folder level to verify + /// the container which contain the uploaded tree + /// true means should verify the folder not exist. false means verify the folder exist. 
+ /// true if verify pass, false mean verify fail + public bool ValidateFixedTestTree(string filename, string foldername, string sourceFolder, string destFolder, int size, int layer, string containerName, bool empty = false) + { + Test.Info("Verify the folder {0}...", sourceFolder); + for (int i = 0; i < size; i++) + { + string sourcefilename = sourceFolder + "\\" + filename + "_" + i; + string destblobname = destFolder + "\\" + filename + "_" + i; + CloudBlob blob = this.QueryBlob(containerName, destblobname); + if (!empty) + { + if (blob == null) + { + Test.Error("Blob {0} not exist.", destblobname); + return false; + } + string source_MD5 = Helper.GetFileContentMD5(sourcefilename); + string Dest_MD5 = blob.Properties.ContentMD5; + if (source_MD5 != Dest_MD5) + { + Test.Error("sourcefile:{0}: {1} == destblob:{2}:{3}", sourcefilename, source_MD5, destblobname, Dest_MD5); + return false; + } + } + else + { + if (blob != null && blob.Properties.Length != 0) + { + Test.Error("Blob {0} should not exist.", destblobname); + return false; + } + } + } + if (layer > 0) + { + for (int i = 0; i < size; i++) + { + if (!ValidateFixedTestTree(filename, foldername, sourceFolder + "\\" + foldername + "_" + i, destFolder + "\\" + foldername + "_" + i, size, layer - 1, containerName, empty)) + return false; + } + + } + + return true; + } + + /// + /// Validate the uploaded tree which is created by Helper.GenerateFixedTestTree() + /// + /// the file prefix of the tree + /// the folder prefix of the tree + /// current folder to validate + /// how many files in each folder + /// how many folder level to verify + /// the container which contain the uploaded tree + /// true means should verify the folder not exist. false means verify the folder exist. 
+ /// true if verify pass, false mean verify fail + public bool ValidateFixedTestTree(string filename, string foldername, string currentFolder, int size, int layer, string containerName, bool empty = false) + { + Test.Info("Verify the folder {0}...", currentFolder); + return this.ValidateFixedTestTree(filename, foldername, currentFolder, currentFolder, size, layer, containerName, empty); + } + + /// + /// Get SAS of a container with specific permission and period + /// + /// the name of the container + /// the permission of the SAS + /// How long the SAS will be valid before expire, in second + /// the SAS + public string GetSASofContainer(string containerName, SharedAccessBlobPermissions SAB, int validatePeriod, bool UseSavedPolicy = true, string PolicySignedIdentifier = "PolicyIdentifier") + { + try + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + string SAS = string.Empty; + SharedAccessBlobPolicy sap = new SharedAccessBlobPolicy(); + sap.Permissions = SAB; + sap.SharedAccessExpiryTime = DateTimeOffset.Now.AddSeconds(validatePeriod); + if (UseSavedPolicy) + { + BlobContainerPermissions bp = container.GetPermissions(); + bp.SharedAccessPolicies.Clear(); + bp.SharedAccessPolicies.Add(PolicySignedIdentifier, sap); + container.SetPermissions(bp); + SAS = container.GetSharedAccessSignature(new SharedAccessBlobPolicy(), PolicySignedIdentifier); + + DMLibTestHelper.WaitForACLTakeEffect(); + } + else + { + SAS = container.GetSharedAccessSignature(sap); + } + Test.Info("The SAS is {0}", SAS); + return SAS; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return string.Empty; + } + throw; + } + } + + /// + /// Clear the SAS policy set to a container, used to revoke the SAS + /// + /// the name of the container + /// True for success + public bool ClearSASPolicyofContainer(string containerName) + { + try + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + 
BlobContainerPermissions bp = container.GetPermissions(); + bp.SharedAccessPolicies.Clear(); + container.SetPermissions(bp); + return true; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return false; + } + throw; + } + } + + + public bool CleanupContainer(string containerName) + { + string blobname = string.Empty; + try + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + if (!container.Exists()) + return true; + IEnumerable blobs = container.ListBlobs(null, true, BlobListingDetails.All); + if (blobs != null) + { + if (blobs.Count() > 500) + { + return CleanupContainerByRecreateIt(containerName); + } + foreach (CloudBlob blob in blobs) + { + blobname = blob.Name; + if (blob == null) continue; + if (!blob.Exists()) + { + try + { + blob.Delete(DeleteSnapshotsOption.IncludeSnapshots); + continue; + } + catch (Exception) + { + continue; + } + } + try + { + blob.Delete(DeleteSnapshotsOption.IncludeSnapshots); + } + catch (Exception) + { + blob.Delete(DeleteSnapshotsOption.None); + } + } + } + + Thread.Sleep(5 * 1000); + if (container.ListBlobs(null, true, BlobListingDetails.All).Any()) + { + Test.Warn("The container hasn't been cleaned actually."); + Test.Info("Trying to cleanup the container by recreating it..."); + return CleanupContainerByRecreateIt(containerName); + } + else + { + return true; + } + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e) || 409 == e.RequestInformation.HttpStatusCode) + { + if (!CleanupContainerByRecreateIt(containerName)) + return false; + } + throw; + } + } + + public bool CleanupContainerByRecreateIt(string containerName) + { + try + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + if (container == null || !container.Exists()) return false; + + BlobRequestOptions bro = new BlobRequestOptions(); + bro.RetryPolicy = new LinearRetry(new TimeSpan(0, 1, 0), 3); + + try + { + container.Delete(null, bro); + } + catch 
(StorageException e) + { + if (!Helper.IsNotFoundException(e)) + { + throw; + } + } + + Test.Info("container deleted."); + bro.RetryPolicy = new LinearRetry(new TimeSpan(0, 3, 0), 3); + + bool createSuccess = false; + int retry = 0; + while (!createSuccess && retry++ < 100) //wait up to 5 minutes + { + try + { + container.Create(bro); + createSuccess = true; + Test.Info("container recreated."); + } + catch (StorageException e) + { + if (e.Message.Contains("(409)")) //conflict, the container is still in deleteing + { + Thread.Sleep(3000); + } + else + { + throw; + } + } + } + return true; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return false; + } + throw; + } + } + + // upload all files & dirs(including empty dir) in a local directory to a blob directory + public void UploadDirectory(string localDirName, string containerName, string blobDirName, bool recursive, string blobType = BlobType.Block) + { + DirectoryInfo srcDir = new DirectoryInfo(localDirName); + CloudBlobDirectory destDir = QueryBlobDirectory(containerName, blobDirName); + Test.Assert(null != destDir, "dest blob directory exists"); + + UploadDirectory(srcDir, destDir, recursive, blobType); + } + + public void UploadDirectory(DirectoryInfo sourceDir, CloudBlobDirectory destDir, bool recursive, string blobType = BlobType.Block) + { + Parallel.ForEach( + sourceDir.EnumerateFiles(), + fi => + { + string fileName = Path.GetFileName(fi.Name); + CloudBlob blob = GetCloudBlobReference(destDir, fileName, blobType); + bool uploaded = UploadFileToBlob(destDir.Container.Name, blob.Name, blobType, fi.FullName); + if (!uploaded) + { + Test.Assert(false, "failed to upload file:{0}", fi.FullName); + } + }); + + if (recursive) + { + foreach (DirectoryInfo di in sourceDir.EnumerateDirectories()) + { + string subDirName = Path.GetFileName(di.Name); + CloudBlobDirectory subDir = destDir.GetDirectoryReference(subDirName); + UploadDirectory(di, subDir, true); + } + } + } + + // upload 
all files & dirs(including empty dir) in a local directory to a blob directory + public void UploadDirectoryIfNotExist(string localDirName, string containerName, string blobDirName, bool recursive, string blobType = BlobType.Block) + { + DirectoryInfo srcDir = new DirectoryInfo(localDirName); + CloudBlobDirectory destDir = QueryBlobDirectory(containerName, blobDirName); + + UploadDirectoryIfNotExist(srcDir, destDir, recursive, blobType); + } + + public void UploadDirectoryIfNotExist(DirectoryInfo sourceDir, CloudBlobDirectory destDir, bool recursive, string blobType = BlobType.Block) + { + Dictionary blobs = new Dictionary(); + + foreach (IListBlobItem blobItem in destDir.ListBlobs(true)) + { + CloudBlob blob = blobItem as CloudBlob; + + if (null != blob) + { + if (MapStorageBlobTypeToBlobType(blob.BlobType) == blobType) + { + blob.Delete(); + } + else + { + blobs.Add(blob.Name.Substring(destDir.Prefix.Length), blob); + } + } + } + + foreach (FileInfo fi in sourceDir.EnumerateFiles()) + { + string fileName = Path.GetFileName(fi.Name); + CloudBlob blob; + + if (blobs.TryGetValue(fileName, out blob) + && (Helper.GetFileContentMD5(fi.Name) == blob.Properties.ContentMD5)) + { + continue; + } + + blob = GetCloudBlobReference(destDir, fileName, blobType); + bool uploaded = UploadFileToBlob(destDir.Container.Name, blob.Name, blobType, fi.FullName); + if (!uploaded) + { + Test.Assert(false, "failed to upload file:{0}", fi.FullName); + } + } + + if (recursive) + { + foreach (DirectoryInfo di in sourceDir.EnumerateDirectories()) + { + string subDirName = Path.GetFileName(di.Name); + CloudBlobDirectory subDir = destDir.GetDirectoryReference(subDirName); + UploadDirectory(di, subDir, true); + } + } + } + + // compare blob directory with a local directory. return true only if + // 1. all files under both dir are the same, and + // 2. 
all sub directories under both dir are the same + public bool CompareCloudBlobDirAndLocalDir(string containerName, string blobDirName, string localDirName) + { + try + { + CloudBlobDirectory dir = QueryBlobDirectory(containerName, blobDirName); + if (null == dir) + { + return false; + } + + return CompareCloudBlobDirAndLocalDir(dir, localDirName); + } + catch + { + return false; + } + } + + public static bool CompareCloudBlobDirAndLocalDir(CloudBlobDirectory dir, string localDirName) + { + if (!Directory.Exists(localDirName)) + { + // return false if local dir not exist. + Test.Info("dir not exist. local dir={0}", localDirName); + return false; + } + + HashSet localSubFiles = new HashSet(); + foreach (string localSubFile in Directory.EnumerateFiles(localDirName)) + { + localSubFiles.Add(Path.GetFileName(localSubFile)); + } + + HashSet localSubDirs = new HashSet(); + foreach (string localSubDir in Directory.EnumerateDirectories(localDirName)) + { + localSubDirs.Add(Path.GetFileName(localSubDir)); + } + + foreach (IListBlobItem item in dir.ListBlobs()) + { + if (item is CloudBlob) + { + CloudBlob tmpBlob = item as CloudBlob; + + string tmpFileName = Path.GetFileName(tmpBlob.Name); + if (!localSubFiles.Remove(tmpFileName)) + { + Test.Info("file not found at local: {0}", tmpBlob.Name); + return false; + } + + if (!CompareCloudBlobAndLocalFile(tmpBlob, Path.Combine(localDirName, tmpFileName))) + { + Test.Info("file content not consistent: {0}", tmpBlob.Name); + return false; + } + } + else if (item is CloudBlobDirectory) + { + CloudBlobDirectory tmpDir = item as CloudBlobDirectory; + string tmpDirName = tmpDir.Prefix.TrimEnd(new char[] { '/' }); + tmpDirName = Path.GetFileName(tmpDirName); + + if (!localSubDirs.Remove(tmpDirName)) + { + Test.Info("dir not found at local: {0}", tmpDirName); + return false; + } + + if (!CompareCloudBlobDirAndLocalDir(tmpDir, Path.Combine(localDirName, tmpDirName))) + { + return false; + } + } + } + + return (localSubFiles.Count == 0 && 
localSubDirs.Count == 0); + } + + public bool CompareCloudBlobAndLocalFile(string containerName, string blobName, string localFileName) + { + CloudBlob blob = QueryBlob(containerName, blobName); + if (null == blob) + { + return false; + } + + return CompareCloudBlobAndLocalFile(blob, localFileName); + } + + public static bool CompareCloudBlobAndLocalFile(CloudBlob blob, string localFileName) + { + if (!blob.Exists() || !File.Exists(localFileName)) + { + return false; + } + + blob.FetchAttributes(); + return blob.Properties.ContentMD5 == Helper.GetFileContentMD5(localFileName); + } + + public static bool CompareCloudBlobAndCloudBlob(CloudBlob blobA, CloudBlob blobB) + { + if (blobA == null || blobB == null || !blobA.Exists() || !blobB.Exists()) + { + return false; + } + + blobA.FetchAttributes(); + blobB.FetchAttributes(); + return blobA.Properties.ContentMD5 == blobB.Properties.ContentMD5; + } + + public static string CalculateMD5ByDownloading(CloudBlob blob, bool disableMD5Check = false) + { + using (TemporaryTestFolder tempFolder = new TemporaryTestFolder(Guid.NewGuid().ToString())) + { + const string tempFileName = "tempFile"; + string tempFilePath = Path.Combine(tempFolder.Path, tempFileName); + var blobOptions = new BlobRequestOptions(); + blobOptions.DisableContentMD5Validation = disableMD5Check; + blobOptions.RetryPolicy = HelperConst.DefaultBlobOptions.RetryPolicy.CreateInstance(); + blob.DownloadToFile(tempFilePath, FileMode.OpenOrCreate, options: blobOptions); + return Helper.GetFileContentMD5(tempFilePath); + } + } + + /// + /// Query the blob + /// + /// + /// + /// + public CloudBlob QueryBlob(string containerName, string blobName) + { + try + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + CloudBlob blob = GetCloudBlobReference(container, blobName); + //since GetBlobReference method return no null value even if blob is not exist. 
+ //use FetchAttributes method to confirm the existence of the blob + blob.FetchAttributes(); + + return blob; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return null; + } + throw; + } + } + + + public BlobProperties QueryBlobProperties(string containerName, string blobName) + { + try + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + CloudBlob blob = container.GetBlobReference(blobName); + if (blob == null) + { + return null; + } + blob.FetchAttributes(); + return blob.Properties; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return null; + } + throw; + } + } + + /// + /// Query the blob virtual directory + /// + /// + /// + /// + public CloudBlobDirectory QueryBlobDirectory(string containerName, string blobDirectoryName) + { + try + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + if (container == null || !container.Exists()) return null; + CloudBlobDirectory blobDirectory = container.GetDirectoryReference(blobDirectoryName); + return blobDirectory; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return null; + } + throw; + } + } + + public static void GeneratePageBlobWithRangedData(CloudPageBlob pageBlob, List ranges, List gaps) + { + Helper.GenerateSparseCloudObject( + ranges, + gaps, + createObject: (totalSize) => + { + pageBlob.Create(totalSize); + }, + writeUnit: (unitOffset, randomData) => + { + pageBlob.WritePages(randomData, unitOffset, options: HelperConst.DefaultBlobOptions); + }); + + Helper.PrintPageBlobRanges(pageBlob); + + // Set correct MD5 to page blob + string md5 = CalculateMD5ByDownloading(pageBlob); + pageBlob.Properties.ContentMD5 = md5; + pageBlob.SetProperties(options: HelperConst.DefaultBlobOptions); + } + + public static void GenerateBlockBlob(CloudBlockBlob blockBlob, List blockSizes) + { + int blockIndex = 0; + List blocksToCommit = new List(); + foreach (int 
blockSize in blockSizes) + { + byte[] blockIdInBytes = System.Text.Encoding.UTF8.GetBytes(blockIndex.ToString("D4")); + string blockId = Convert.ToBase64String(blockIdInBytes); + blocksToCommit.Add(blockId); + + using (MemoryStream randomData = Helper.GetRandomData(blockSize)) + { + blockBlob.PutBlock(blockId, randomData, null, options: HelperConst.DefaultBlobOptions); + } + + ++blockIndex; + } + + // Commit + blockBlob.PutBlockList(blocksToCommit, options: HelperConst.DefaultBlobOptions); + + Helper.PrintBlockBlobBlocks(blockBlob); + + // Set correct MD5 to block blob + string md5 = CloudBlobHelper.CalculateMD5ByDownloading(blockBlob); + blockBlob.Properties.ContentMD5 = md5; + blockBlob.SetProperties(options: HelperConst.DefaultBlobOptions); + } + + /// + /// Create or update a blob by its name + /// + /// the name of the container + /// the name of the blob + /// the content to the blob + /// Return true on success, false if unable to create, throw exception on error + public bool PutBlob(string containerName, string blobName, string content) + { + try + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + if (container == null || !container.Exists()) return false; + CloudBlob blob = GetCloudBlobReference(container, blobName); + + using (MemoryStream MStream = new MemoryStream(ASCIIEncoding.Default.GetBytes(content))) + { + blob.UploadFromStream(MStream); + } + + return true; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return false; + } + throw; + } + } + + /// + /// change an exist Blob MD5 hash + /// + /// the name of the container + /// the name of the blob + /// the MD5 hash to set, must be a base 64 string + /// Return true on success, false if unable to set + public bool SetMD5Hash(string containerName, string blobName, string MD5Hash) + { + try + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + CloudBlob blob = container.GetBlobReference(blobName); + 
blob.FetchAttributes(); + blob.Properties.ContentMD5 = MD5Hash; + blob.SetProperties(); + return true; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return false; + } + throw; + } + } + + /// + /// put block list. TODO: implement this for large files + /// + /// + /// + /// + /// + public bool PutBlockList(string containerName, string blobName, string[] blockIds) + { + try + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + if (container == null || !container.Exists()) return false; + CloudBlockBlob blob = container.GetBlockBlobReference(blobName); + + blob.PutBlockList(blockIds); + + return true; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return false; + } + throw; + } + } + + public static string MapStorageBlobTypeToBlobType(StorageBlobType storageBlobType) + { + switch (storageBlobType) + { + case StorageBlobType.BlockBlob: + return BlobType.Block; + case StorageBlobType.PageBlob: + return BlobType.Page; + case StorageBlobType.AppendBlob: + return BlobType.Append; + default: + throw new ArgumentException("storageBlobType"); + } + } + + public static StorageBlobType MapBlobTypeToStorageBlobType(string blobType) + { + switch (blobType) + { + case BlobType.Block: + return StorageBlobType.BlockBlob; + case BlobType.Page: + return StorageBlobType.PageBlob; + case BlobType.Append: + return StorageBlobType.AppendBlob; + default: + throw new ArgumentException("blobType"); + } + } + + public static CloudBlob GetCloudBlobReference(CloudBlobContainer container, string blobName, string blobType) + { + switch (blobType) + { + case BlobType.Block: + return container.GetBlockBlobReference(blobName); + + case BlobType.Page: + return container.GetPageBlobReference(blobName); + + case BlobType.Append: + return container.GetAppendBlobReference(blobName); + + default: + throw new ArgumentException("blobType"); + } + } + + public static CloudBlob 
GetCloudBlobReference(CloudBlobContainer container, string blobName) + { + CloudBlob cloudBlob = container.GetBlobReference(blobName); + cloudBlob.FetchAttributes(); + + return GetCloudBlobReference(container, blobName, MapStorageBlobTypeToBlobType(cloudBlob.Properties.BlobType)); + } + + public static CloudBlob GetCloudBlobReference(CloudBlobDirectory directory, string blobName, string blobType) + { + switch (blobType) + { + case BlobType.Block: + return directory.GetBlockBlobReference(blobName); + + case BlobType.Page: + return directory.GetPageBlobReference(blobName); + + case BlobType.Append: + return directory.GetAppendBlobReference(blobName); + + default: + throw new ArgumentException("blobType"); + } + } + + public CloudBlob GetBlobReference(string containerName, string blobName, string blobType = BlobType.Block) + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + return GetCloudBlobReference(container, blobName, blobType); + } + + public CloudBlobDirectory GetDirReference(string containerName, string dirName) + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + return container.GetDirectoryReference(dirName); + } + + /// + /// Download Blob text by the blob name + /// + /// the name of the container + /// + /// + /// + public bool GetBlob(string containerName, string blobName, out string content) + { + content = null; + + try + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + CloudBlob blob = container.GetBlobReference(blobName); + //content = blob.DownloadText(); + string tempfile = "temp.txt"; + using (FileStream fileStream = new FileStream(tempfile, FileMode.Create)) + { + blob.DownloadToStream(fileStream); + fileStream.Close(); + } + content = File.ReadAllText(tempfile); + File.Delete(tempfile); + + return true; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return false; + } + throw; + } + } + + /// + /// Delete a 
blob by its name + /// + /// the name of the container + /// the name of the blob + /// Return true on success, false if blob not found, throw exception on error + public bool DeleteBlob(string containerName, string blobName) + { + blobName = blobName.Replace("\\", "/"); + try + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + if (container.Exists()) + { + IEnumerable blobs = container.ListBlobs(blobName, true, BlobListingDetails.All); + foreach (CloudBlob blob in blobs) + { + if (blob.Name == blobName) + { + return blob.DeleteIfExists(); + } + } + } + return true; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return false; + } + throw; + } + } + + public bool DeleteBlobDirectory(string containerName, string blobDirectoryName, bool recursive) + { + try + { + if (blobDirectoryName == string.Empty) + return true; + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + CloudBlobDirectory blobDirectory = container.GetDirectoryReference(blobDirectoryName); + + const int MaxRetryCount = 10; + int retryCount = 0; + while (true) + { + bool hasBlobDeleted = false; + if (recursive) + { + foreach (CloudBlob blob in blobDirectory.ListBlobs(recursive, BlobListingDetails.All)) + { + blob.Delete(); + hasBlobDeleted = true; + } + } + else + { + foreach (CloudBlob blob in blobDirectory.ListBlobs(recursive)) + { + blob.Delete(); + hasBlobDeleted = true; + } + } + + retryCount++; + + if (!hasBlobDeleted) + { + // Return from the method until no blob is listed. + break; + } + else + { + if (retryCount > MaxRetryCount) + { + Test.Error("Cannot delete the blob directory within max retry count"); + return false; + } + + // Wait for some time, and then attempt to delete all listed blobs again. 
+ Thread.Sleep(5 * 1000); + } + } + + return true; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return false; + } + throw; + } + } + + public bool UploadFileToBlockBlob(string containerName, string blobName, string filePath) + { + return UploadFileToBlob(containerName, blobName, BlobType.Block, filePath); + } + + public bool UploadFileToPageBlob(string containerName, string blobName, string filePath) + { + return UploadFileToBlob(containerName, blobName, BlobType.Page, filePath); + } + + public bool UploadFileToAppendBlob(string containerName, string blobName, string filePath) + { + return UploadFileToBlob(containerName, blobName, BlobType.Append, filePath); + } + + public bool UploadFileToBlob(string containerName, string blobName, string blobType, string filePath) + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + BlobRequestOptions options = new BlobRequestOptions + { + RetryPolicy = new ExponentialRetry(TimeSpan.FromSeconds(90), 3), + StoreBlobContentMD5 = true, + }; + + container.CreateIfNotExists(options); + + CloudBlob blob = GetCloudBlobReference(container, blobName, blobType); + blob.UploadFromFile(filePath, FileMode.Open, null, options, null); + Test.Info("block blob {0} has been uploaded successfully", blob.Name); + + return true; + } + + public bool DownloadFile(string containerName, string blobName, string filePath) + { + try + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + BlobRequestOptions bro = new BlobRequestOptions(); + bro.RetryPolicy = new LinearRetry(new TimeSpan(0, 0, 30), 3); + bro.ServerTimeout = new TimeSpan(1, 30, 0); + bro.MaximumExecutionTime = new TimeSpan(1, 30, 0); + CloudBlob blob = container.GetBlobReference(blobName); + + using (FileStream fileStream = new FileStream(filePath, FileMode.Create)) + { + blob.DownloadToStream(fileStream, null, bro); + fileStream.Close(); + } + + return true; + } + catch (StorageException e) + 
{ + if (Helper.IsNotFoundException(e)) + { + return false; + } + throw; + } + } + /// + /// Creates a snapshot of the blob + /// + /// the name of the container + /// the name of blob + /// blob snapshot + public CloudBlob CreateSnapshot(string containerName, string blobName) + { + try + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + CloudBlob blob = GetCloudBlobReference(container, blobName); + if (blob.Properties.BlobType == Microsoft.WindowsAzure.Storage.Blob.BlobType.BlockBlob) + { + CloudBlockBlob BBlock = blob as CloudBlockBlob; + return BBlock.Snapshot(); + } + else + { + CloudPageBlob BBlock = blob as CloudPageBlob; + return BBlock.Snapshot(); + } + + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return null; + } + throw; + } + } + + /// + /// delete snapshot of the blob (DO NOT delete blob) + /// + /// the name of the container + /// the name of blob + /// + public void DeleteSnapshotOnly(string containerName, string blobName) + { + try + { + CloudBlobContainer container = BlobClient.GetContainerReference(containerName); + CloudBlob blob = container.GetBlobReference(blobName); + + //Indicate that any snapshots should be deleted. 
+ blob.Delete(DeleteSnapshotsOption.DeleteSnapshotsOnly); + return; + } + catch (StorageException e) + { + if (Helper.IsNotFoundException(e)) + { + return; + } + throw; + } + } + /// + /// return name of snapshot + /// + /// the name of blob + /// A blob snapshot + /// name of snapshot + public string GetNameOfSnapshot(string fileName, CloudBlob snapshot) + { + string fileNameNoExt = Path.GetFileNameWithoutExtension(fileName); + string extension = Path.GetExtension(fileName); + string timeStamp = string.Format("{0:yyyy-MM-dd HHmmss fff}", snapshot.SnapshotTime.Value); + return string.Format("{0} ({1}){2}", fileNameNoExt, timeStamp, extension); + } + } + internal class TemporaryTestFile : IDisposable + { + private const int DefaultSizeInKB = 1; + private bool disposed = false; + + public string Path + { + get; + private set; + } + + public int Size + { + get; + private set; + } + + public TemporaryTestFile(string path) + : this(path, DefaultSizeInKB) + { + } + + public TemporaryTestFile(string path, int sizeInKB) + { + Path = path; + Size = sizeInKB; + + if (File.Exists(path)) + { + Test.Assert(false, "file {0} already exist", path); + } + + Helper.GenerateRandomTestFile(Path, Size); + } + + ~TemporaryTestFile() + { + Dispose(false); + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + protected virtual void Dispose(bool disposing) + { + if (!disposed) + { + try + { + Helper.DeleteFile(Path); + } + catch + { + } + + disposed = true; + } + } + } + + internal class TemporaryTestFolder : IDisposable + { + private bool disposed = false; + + public string Path + { + get; + private set; + } + + public TemporaryTestFolder(string path) + { + Path = path; + + if (Directory.Exists(path)) + { + Test.Assert(false, "folder {0} already exist", path); + } + + Helper.CreateNewFolder(path); + } + + ~TemporaryTestFolder() + { + Dispose(false); + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + protected virtual 
void Dispose(bool disposing) + { + if (!disposed) + { + try + { + Helper.DeleteFolder(Path); + } + catch + { + } + + disposed = true; + } + } + } +} diff --git a/test/DMLibTest/Util/TestAccounts.cs b/test/DMLibTest/Util/TestAccounts.cs new file mode 100644 index 00000000..fd627be6 --- /dev/null +++ b/test/DMLibTest/Util/TestAccounts.cs @@ -0,0 +1,163 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTest +{ + using System; + using Microsoft.WindowsAzure.Storage; + using MS.Test.Common.MsTestLib; + + public static class TestAccounts + { + public static TestAccount Primary + { + get + { + return new TestAccount(AccountInConfig.Primary); + } + } + + public static TestAccount Secondary + { + get + { + return new TestAccount(AccountInConfig.Secondary); + } + } + } + + public class TestAccount + { + public TestAccount(AccountInConfig accountInConfig) + : this(GetConnectionString(accountInConfig)) + { + } + + public TestAccount(string connectionString) + { + this.ConnectionString = connectionString; + this.Account = CloudStorageAccount.Parse(connectionString); + } + + public CloudStorageAccount Account { get; private set; } + + public string ConnectionString { get; private set; } + + public string AccountName + { + get + { + return this.Account.Credentials.AccountName; + } + } + + public string StorageKey + { + get + { + return this.Account.Credentials.ExportBase64EncodedKey(); + } + } + + public string GetEndpointBaseUri(EndpointType endpoint, bool secondary = false) + { + return this.GetEndpointBaseUri(endpoint, DMLibTestHelper.RandomProtocol(), secondary); + } + + public string GetEndpointBaseUri(EndpointType endpoint, string protocol, bool secondary = false) + { + string url = string.Empty; + bool isHttps = (string.Compare(protocol, "https", StringComparison.InvariantCultureIgnoreCase) == 
0); + if (DMLibTestHelper.GetTestAgainst() == TestAgainst.DevFabric) + { + int port; + string host; + if (endpoint == EndpointType.Blob) + { + port = isHttps ? 10100 : 10000; + host = this.Account.BlobEndpoint.Host; + } + else if (endpoint == EndpointType.Queue) + { + port = isHttps ? 10101 : 10001; + host = this.Account.QueueEndpoint.Host; + } + else if (endpoint == EndpointType.Table) + { + port = isHttps ? 10102 : 10002; + host = this.Account.TableEndpoint.Host; + } + else + { + port = isHttps ? 10104 : 10004; + host = this.Account.FileEndpoint.Host; + } + + url = string.Format(@"{0}://{1}:{2}/{3}", protocol, host, port, this.AccountName); + if (secondary) + { + Test.Error("DevFabric doesn't have secondary endpoint."); + } + } + else + { + Uri endpointUri; + if (endpoint == EndpointType.Blob) + { + endpointUri = secondary ? this.Account.BlobStorageUri.SecondaryUri : this.Account.BlobStorageUri.PrimaryUri; + } + else if (endpoint == EndpointType.Queue) + { + endpointUri = secondary ? this.Account.QueueStorageUri.SecondaryUri : this.Account.QueueStorageUri.PrimaryUri; + } + else if (endpoint == EndpointType.Table) + { + endpointUri = secondary ? this.Account.TableStorageUri.SecondaryUri : this.Account.TableStorageUri.PrimaryUri; + } + else + { + endpointUri = secondary ? 
this.Account.FileStorageUri.SecondaryUri : this.Account.FileStorageUri.PrimaryUri; + } + + url = endpointUri.AbsoluteUri.Replace(endpointUri.Scheme, protocol); + } + + if (url.EndsWith("/")) + { + url = url.Remove(url.Length - 1); + } + + return url; + } + + private static string GetConnectionString(AccountInConfig accountInConfig) + { + if (accountInConfig == AccountInConfig.Primary) + { + return Test.Data.Get(DMLibTestConstants.ConnStr); + } + else if (accountInConfig == AccountInConfig.Secondary) + { + return Test.Data.Get(DMLibTestConstants.ConnStr2); + } + + throw new ArgumentException(string.Format("Invalid accountInConfig value: {0}", accountInConfig), "accountInConfig"); + } + } + + public enum AccountInConfig + { + Primary, + Secondary, + } + + public enum EndpointType + { + Blob, + Queue, + Table, + File, + } +} diff --git a/test/DMLibTest/packages.config b/test/DMLibTest/packages.config new file mode 100644 index 00000000..f48ace35 --- /dev/null +++ b/test/DMLibTest/packages.config @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/test/DMLibTestCodeGen/App.config b/test/DMLibTestCodeGen/App.config new file mode 100644 index 00000000..8e156463 --- /dev/null +++ b/test/DMLibTestCodeGen/App.config @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/test/DMLibTestCodeGen/DMLibDataType.cs b/test/DMLibTestCodeGen/DMLibDataType.cs new file mode 100644 index 00000000..b1eaf2a5 --- /dev/null +++ b/test/DMLibTestCodeGen/DMLibDataType.cs @@ -0,0 +1,52 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTestCodeGen +{ + using System; + using System.Collections.Generic; + + [Flags] + public enum DMLibDataType : int + { + Unspecified = 0x0, + Stream = 0x01, + URI = 0x02, + Local = 0x04, + CloudFile = 0x08, + BlockBlob = 0x10, + PageBlob = 0x20, + 
AppendBlob = 0x40, + + CloudBlob = PageBlob | BlockBlob | AppendBlob, + Cloud = CloudBlob | CloudFile, + All = Local | Cloud, + } + + internal static class DMLibDataTypeExtentions + { + public static IEnumerable Extract(this DMLibDataType type) + { + DMLibDataType[] dataTypesToExtract = + { + DMLibDataType.Stream, + DMLibDataType.URI, + DMLibDataType.Local, + DMLibDataType.CloudFile, + DMLibDataType.BlockBlob, + DMLibDataType.PageBlob, + DMLibDataType.AppendBlob, + }; + + foreach (var dataType in dataTypesToExtract) + { + if (type.HasFlag(dataType)) + { + yield return dataType; + } + } + } + } +} diff --git a/test/DMLibTestCodeGen/DMLibDirectionFilter.cs b/test/DMLibTestCodeGen/DMLibDirectionFilter.cs new file mode 100644 index 00000000..33fa1569 --- /dev/null +++ b/test/DMLibTestCodeGen/DMLibDirectionFilter.cs @@ -0,0 +1,85 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTestCodeGen +{ + using System; + + internal class DMLibDirectionFilter : DirectionFilter + { + public bool? 
IsAsync + { + get; + set; + } + + public DMLibDataType SourceType + { + get; + set; + } + + public DMLibDataType DestType + { + get; + set; + } + + public DMLibDirectionFilter(string queryString = null) + { + this.IsAsync = null; + this.SourceType = DMLibDataType.Unspecified; + this.DestType = DMLibDataType.Unspecified; + + this.SetProperties(queryString); + } + + protected override void AddValueGenerators() + { + base.AddValueGenerators(); + + this.AddValueGenerator("IsAsync", ParseNullableBoolean); + this.AddValueGenerator("SourceType", ParseDMLibDataType); + this.AddValueGenerator("DestType", ParseDMLibDataType); + } + + private static object ParseNullableBoolean(string value) + { + return (bool?)Boolean.Parse(value); + } + + private static object ParseDMLibDataType(string value) + { + return Enum.Parse(typeof(DMLibDataType), value, true); + } + + public override bool Filter(TestMethodDirection direction) + { + DMLibTransferDirection DMLibDirection = direction as DMLibTransferDirection; + + if (DMLibDirection == null) + { + throw new ArgumentException("DMLibDirectionFilter is only applicable to DMLibTransferDirection.", "direction"); + } + + if (this.IsAsync != null && this.IsAsync != DMLibDirection.IsAsync) + { + return false; + } + + if (this.SourceType != DMLibDataType.Unspecified && !this.SourceType.HasFlag(DMLibDirection.SourceType)) + { + return false; + } + + if (this.DestType != DMLibDataType.Unspecified && !this.DestType.HasFlag(DMLibDirection.DestType)) + { + return false; + } + + return true; + } + } +} diff --git a/test/DMLibTestCodeGen/DMLibTestCodeGen.csproj b/test/DMLibTestCodeGen/DMLibTestCodeGen.csproj new file mode 100644 index 00000000..05e4efef --- /dev/null +++ b/test/DMLibTestCodeGen/DMLibTestCodeGen.csproj @@ -0,0 +1,79 @@ + + + + + Debug + AnyCPU + {7018EE4E-D389-424E-A8DD-F9B4FFDA5194} + Exe + Properties + DMLibTestCodeGen + DMLibTestCodeGen + v4.5 + 512 + + + AnyCPU + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + + + 
AnyCPU + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + + + + + + + + + + + + + + SharedAssemblyInfo.cs + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/test/DMLibTestCodeGen/DMLibTestContext.cs b/test/DMLibTestCodeGen/DMLibTestContext.cs new file mode 100644 index 00000000..16eb43b6 --- /dev/null +++ b/test/DMLibTestCodeGen/DMLibTestContext.cs @@ -0,0 +1,16 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTestCodeGen +{ + public class DMLibTestContext : MultiDirectionTestContext + { + public static bool IsAsync + { + get; + set; + } + } +} diff --git a/test/DMLibTestCodeGen/DMLibTestMethodSet.cs b/test/DMLibTestCodeGen/DMLibTestMethodSet.cs new file mode 100644 index 00000000..da8ff9ca --- /dev/null +++ b/test/DMLibTestCodeGen/DMLibTestMethodSet.cs @@ -0,0 +1,163 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTestCodeGen +{ + using System; + using System.Collections.Generic; + + public enum DMLibTestMethodSet + { + AllValidDirection, + Cloud2Cloud, + AllAsync, + AllSync, + CloudSource, + CloudBlobSource, + CloudFileSource, + LocalSource, + CloudDest, + CloudBlobDest, + CloudFileDest, + LocalDest, + } + + [AttributeUsage(AttributeTargets.Method, AllowMultiple = true, Inherited = false)] + public class DMLibTestMethodSetAttribute : MultiDirectionTestMethodSetAttribute + { + public static DMLibTestMethodSetAttribute AllValidDirectionSet; + + static DMLibTestMethodSetAttribute() + { + // All valid direction + AllValidDirectionSet = new DMLibTestMethodSetAttribute(); + // Sync copy + AllValidDirectionSet.AddTestMethodAttribute(new 
DMLibTestMethodAttribute(DMLibDataType.Local, DMLibDataType.Cloud)); + AllValidDirectionSet.AddTestMethodAttribute(new DMLibTestMethodAttribute(DMLibDataType.Stream, DMLibDataType.Cloud)); + AllValidDirectionSet.AddTestMethodAttribute(new DMLibTestMethodAttribute(DMLibDataType.Cloud, DMLibDataType.Local)); + AllValidDirectionSet.AddTestMethodAttribute(new DMLibTestMethodAttribute(DMLibDataType.Cloud, DMLibDataType.Stream)); + AllValidDirectionSet.AddTestMethodAttribute(new DMLibTestMethodAttribute(DMLibDataType.CloudFile, DMLibDataType.Cloud)); + AllValidDirectionSet.AddTestMethodAttribute(new DMLibTestMethodAttribute(DMLibDataType.Cloud, DMLibDataType.CloudFile)); + AllValidDirectionSet.AddTestMethodAttribute(new DMLibTestMethodAttribute(DMLibDataType.CloudBlob)); + + // Async copy + AllValidDirectionSet.AddTestMethodAttribute(new DMLibTestMethodAttribute(DMLibDataType.URI, DMLibDataType.Cloud, isAsync: true)); + AllValidDirectionSet.AddTestMethodAttribute(new DMLibTestMethodAttribute(DMLibDataType.CloudBlob, isAsync: true)); + AllValidDirectionSet.AddTestMethodAttribute(new DMLibTestMethodAttribute(DMLibDataType.Cloud, DMLibDataType.CloudFile, isAsync: true)); + AllValidDirectionSet.AddTestMethodAttribute(new DMLibTestMethodAttribute(DMLibDataType.CloudFile, DMLibDataType.BlockBlob, isAsync: true)); + } + + public DMLibTestMethodSetAttribute() + { + } + + /// + /// Create a new instance of containing specific + /// valid transfer directions from a query string. Query string format: + /// propertyName1=value1,propertyName2=value2... + /// e.g. 
+ /// To specify all valid async copy directions to blob: + /// DestType=CloudBlob,IsAsync=true + /// + /// Query string + public DMLibTestMethodSetAttribute(string queryString) + { + this.AddTestMethodAttribute(AllValidDirectionSet); + + DMLibDirectionFilter directionFilter = new DMLibDirectionFilter(queryString); + this.AddDirectionFilter(directionFilter); + } + + public DMLibTestMethodSetAttribute(DMLibTestMethodSet directionSet) + { + switch (directionSet) + { + case DMLibTestMethodSet.AllValidDirection: + this.AddTestMethodAttribute(AllValidDirectionSet); + break; + case DMLibTestMethodSet.Cloud2Cloud: + this.AddTestMethodAttribute(AllValidDirectionSet); + this.AddDirectionFilter(new DMLibDirectionFilter() + { + SourceType = DMLibDataType.Cloud, + DestType = DMLibDataType.Cloud, + }); + break; + case DMLibTestMethodSet.AllAsync: + this.AddTestMethodAttribute(AllValidDirectionSet); + this.AddDirectionFilter(new DMLibDirectionFilter() + { + IsAsync = true, + }); + break; + case DMLibTestMethodSet.AllSync: + this.AddTestMethodAttribute(AllValidDirectionSet); + this.AddDirectionFilter(new DMLibDirectionFilter() + { + IsAsync = false, + }); + break; + case DMLibTestMethodSet.CloudSource: + this.AddTestMethodAttribute(AllValidDirectionSet); + this.AddDirectionFilter(new DMLibDirectionFilter() + { + SourceType = DMLibDataType.Cloud, + }); + break; + case DMLibTestMethodSet.CloudBlobSource: + this.AddTestMethodAttribute(AllValidDirectionSet); + this.AddDirectionFilter(new DMLibDirectionFilter() + { + SourceType = DMLibDataType.CloudBlob, + }); + break; + case DMLibTestMethodSet.CloudFileSource: + this.AddTestMethodAttribute(AllValidDirectionSet); + this.AddDirectionFilter(new DMLibDirectionFilter() + { + SourceType = DMLibDataType.CloudFile, + }); + break; + case DMLibTestMethodSet.LocalSource: + this.AddTestMethodAttribute(AllValidDirectionSet); + this.AddDirectionFilter(new DMLibDirectionFilter() + { + SourceType = DMLibDataType.Local, + }); + break; + case 
DMLibTestMethodSet.CloudDest: + this.AddTestMethodAttribute(AllValidDirectionSet); + this.AddDirectionFilter(new DMLibDirectionFilter() + { + DestType = DMLibDataType.Cloud, + }); + break; + case DMLibTestMethodSet.CloudBlobDest: + this.AddTestMethodAttribute(AllValidDirectionSet); + this.AddDirectionFilter(new DMLibDirectionFilter() + { + DestType = DMLibDataType.CloudBlob, + }); + break; + case DMLibTestMethodSet.CloudFileDest: + this.AddTestMethodAttribute(AllValidDirectionSet); + this.AddDirectionFilter(new DMLibDirectionFilter() + { + DestType = DMLibDataType.CloudFile, + }); + break; + case DMLibTestMethodSet.LocalDest: + this.AddTestMethodAttribute(AllValidDirectionSet); + this.AddDirectionFilter(new DMLibDirectionFilter() + { + DestType = DMLibDataType.Local, + }); + break; + default: + throw new ArgumentException(string.Format("Invalid MultiDirectionSet: {0}", directionSet.ToString()), "directionSet"); + } + } + } +} diff --git a/test/DMLibTestCodeGen/DMLibTestMetholdAttribute.cs b/test/DMLibTestCodeGen/DMLibTestMetholdAttribute.cs new file mode 100644 index 00000000..3fc9c257 --- /dev/null +++ b/test/DMLibTestCodeGen/DMLibTestMetholdAttribute.cs @@ -0,0 +1,89 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTestCodeGen +{ + using System; + using System.Collections.Generic; + + [AttributeUsage(AttributeTargets.Method, AllowMultiple = true, Inherited = false)] + public class DMLibTestMethodAttribute : MultiDirectionTestMethodAttribute, ITestDirection + { + public bool IsAsync + { + get; + private set; + } + + public DMLibDataType SourceType + { + get; + private set; + } + + public DMLibDataType DestType + { + get; + private set; + } + + public DMLibTestMethodAttribute( + DMLibDataType dataType, + bool isAsync = false, + string[] tags = null) + : this( + dataType, + 
DMLibDataType.Unspecified, + isAsync, + tags) + { + } + + public DMLibTestMethodAttribute( + DMLibDataType sourceType, + DMLibDataType destType, + bool isAsync = false, + string[] tags = null) + : base(tags) + { + this.SourceType = sourceType; + this.DestType = destType; + this.IsAsync = isAsync; + } + + internal override IEnumerable ExtractDirections() + { + if (this.DestType == DMLibDataType.Unspecified) + { + foreach (DMLibDataType sourceType in this.SourceType.Extract()) + { + DMLibTransferDirection transferDirection = + new DMLibTransferDirection( + sourceType, + sourceType, + this.IsAsync, + new List(this.Tags)); + yield return transferDirection; + } + } + else + { + foreach (DMLibDataType sourceType in this.SourceType.Extract()) + { + foreach (DMLibDataType destType in this.DestType.Extract()) + { + DMLibTransferDirection transferDirection = + new DMLibTransferDirection( + sourceType, + destType, + this.IsAsync, + new List(this.Tags)); + yield return transferDirection; + } + } + } + } + } +} diff --git a/test/DMLibTestCodeGen/DMLibTransferDirection.cs b/test/DMLibTestCodeGen/DMLibTransferDirection.cs new file mode 100644 index 00000000..a6576df6 --- /dev/null +++ b/test/DMLibTestCodeGen/DMLibTransferDirection.cs @@ -0,0 +1,113 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTestCodeGen +{ + using System.CodeDom; + using System.Collections.Generic; + + internal class DMLibTransferDirection : TestMethodDirection, ITestDirection + { + public DMLibTransferDirection( + DMLibDataType sourceType, + DMLibDataType destType, + bool isAsync, + List tags) + : base(tags) + { + this.SourceType = sourceType; + this.DestType = destType; + this.IsAsync = isAsync; + } + + public bool IsAsync + { + get; + private set; + } + + public DMLibDataType SourceType + { + get; + private set; + } + + public 
DMLibDataType DestType + { + get; + private set; + } + + public override bool Equals(object obj) + { + DMLibTransferDirection other = obj as DMLibTransferDirection; + if (other == null) + { + return false; + } + + return this.SourceType == other.SourceType && + this.DestType == other.DestType && + this.IsAsync == other.IsAsync; + } + + public override int GetHashCode() + { + int factor = 31; + int hash = this.IsAsync ? 1 : 0; + hash = hash * factor + (int)this.SourceType; + hash = hash * factor + (int)this.DestType; + + return hash; + } + + public override string GetTestMethodNameSuffix() + { + // [SourceType]2[DestType][Async] + return string.Format("{0}2{1}{2}", + this.SourceType.ToString(), + this.DestType.ToString(), + this.IsAsync ? "Async" : string.Empty); + } + + protected override IEnumerable GetExtraTags() + { + yield return string.Format("{0}2{1}{2}", this.SourceType, this.DestType, this.IsAsync ? "Async" : string.Empty); + + if (this.IsAsync) + { + yield return MultiDirectionTag.Async; + } + } + + public override IEnumerable EnumerateUpdateContextStatements() + { + CodeFieldReferenceExpression sourceType = new CodeFieldReferenceExpression( + new CodeTypeReferenceExpression(typeof(DMLibDataType)), + this.SourceType.ToString()); + CodeFieldReferenceExpression destType = new CodeFieldReferenceExpression( + new CodeTypeReferenceExpression(typeof(DMLibDataType)), + this.DestType.ToString()); + + CodePropertyReferenceExpression sourceTypeProperty = new CodePropertyReferenceExpression( + new CodeTypeReferenceExpression(typeof(DMLibTestContext)), + "SourceType"); + + CodePropertyReferenceExpression destTypeProperty = new CodePropertyReferenceExpression( + new CodeTypeReferenceExpression(typeof(DMLibTestContext)), + "DestType"); + + CodePropertyReferenceExpression isAsyncProperty = new CodePropertyReferenceExpression( + new CodeTypeReferenceExpression(typeof(DMLibTestContext)), + "IsAsync"); + + yield return new CodeAssignStatement(sourceTypeProperty, 
sourceType); + + yield return new CodeAssignStatement(destTypeProperty, destType); + + yield return new CodeAssignStatement(isAsyncProperty, new CodePrimitiveExpression(this.IsAsync)); + } + } +} diff --git a/test/DMLibTestCodeGen/DirectionFilter.cs b/test/DMLibTestCodeGen/DirectionFilter.cs new file mode 100644 index 00000000..72d7b02f --- /dev/null +++ b/test/DMLibTestCodeGen/DirectionFilter.cs @@ -0,0 +1,72 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTestCodeGen +{ + using System; + using System.Collections.Generic; + using System.Reflection; + + internal abstract class DirectionFilter + { + private IDictionary> valueGenerators = new Dictionary>(); + + protected void SetProperties(string queryString) + { + if (string.IsNullOrEmpty(queryString)) + { + return; + } + + this.AddValueGenerators(); + + string[] keyValuePairs = queryString.Split(new char[] { ',' }, StringSplitOptions.None); + + foreach (var keyValuePair in keyValuePairs) + { + string key; + string value; + if (this.TryParseKeyValuePair(keyValuePair, out key, out value)) + { + var valueGen = this.valueGenerators[key]; + object valueObject = valueGen(value); + + PropertyInfo prop = this.GetType().GetProperty(key); + prop.SetValue(this, valueObject); + } + else + { + throw new ArgumentException(string.Format("Invalid queryString: {0}", queryString), "queryString"); + } + } + } + + private bool TryParseKeyValuePair(string keyValuePair, out string key, out string value) + { + string[] keyValueArray = keyValuePair.Split(new char[] { '=' }, StringSplitOptions.None); + if (keyValueArray.Length != 2) + { + key = null; + value = null; + return false; + } + + key = keyValueArray[0].Trim(); + value = keyValueArray[1].Trim(); + return true; + } + + protected virtual void AddValueGenerators() + { + } + + protected void 
AddValueGenerator(string propertyName, Func valueGenerator) + { + this.valueGenerators.Add(propertyName, valueGenerator); + } + + public abstract bool Filter(TestMethodDirection direction); + } +} diff --git a/test/DMLibTestCodeGen/ITestDirection.cs b/test/DMLibTestCodeGen/ITestDirection.cs new file mode 100644 index 00000000..70c946bb --- /dev/null +++ b/test/DMLibTestCodeGen/ITestDirection.cs @@ -0,0 +1,20 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTestCodeGen +{ + public interface ITestDirection where TDataType : struct + { + TDataType SourceType + { + get; + } + + TDataType DestType + { + get; + } + } +} diff --git a/test/DMLibTestCodeGen/MultiDirectionTag.cs b/test/DMLibTestCodeGen/MultiDirectionTag.cs new file mode 100644 index 00000000..7f8bc9bd --- /dev/null +++ b/test/DMLibTestCodeGen/MultiDirectionTag.cs @@ -0,0 +1,13 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTestCodeGen +{ + public static class MultiDirectionTag + { + public const string MultiDirection = "multiDirection"; + public const string Async = "async"; + } +} diff --git a/test/DMLibTestCodeGen/MultiDirectionTestClass.cs b/test/DMLibTestCodeGen/MultiDirectionTestClass.cs new file mode 100644 index 00000000..57f76a15 --- /dev/null +++ b/test/DMLibTestCodeGen/MultiDirectionTestClass.cs @@ -0,0 +1,104 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTestCodeGen +{ + using System; + using System.Collections.Generic; + using System.Reflection; + using 
Microsoft.VisualStudio.TestTools.UnitTesting; + + internal class MultiDirectionTestClass + { + public Type ClassType + { + private set; + get; + } + + public MethodInfo TestInit + { + private set; + get; + } + + public MethodInfo TestCleanup + { + private set; + get; + } + + public MethodInfo ClassInit + { + private set; + get; + } + + public MethodInfo ClassCleanup + { + private set; + get; + } + + public List MultiDirectionMethods + { + private set; + get; + } + + public MultiDirectionTestClass(Type type) + { + this.ClassType = type; + this.MultiDirectionMethods = new List(); + + this.ParseTestMethods(type); + } + + private void ParseTestMethods(Type type) + { + foreach (MethodInfo methodInfo in type.GetMethods()) + { + this.ParseTestMethod(methodInfo); + } + } + + private void ParseTestMethod(MethodInfo methodInfo) + { + bool isMultiDirectionMethod = false; + foreach (Attribute attribute in methodInfo.GetCustomAttributes(true)) + { + if (attribute is ClassInitializeAttribute) + { + this.ClassInit = methodInfo; + } + else if (attribute is ClassCleanupAttribute) + { + this.ClassCleanup = methodInfo; + } + else if (attribute is TestInitializeAttribute) + { + this.TestInit = methodInfo; + } + else if (attribute is TestCleanupAttribute) + { + this.TestCleanup = methodInfo; + } + else if (attribute is MultiDirectionTestMethodAttribute) + { + isMultiDirectionMethod = true; + } + else if (attribute is MultiDirectionTestMethodSetAttribute) + { + isMultiDirectionMethod = true; + } + } + + if (isMultiDirectionMethod) + { + this.MultiDirectionMethods.Add(new MultiDirectionTestMethod(methodInfo)); + } + } + } +} diff --git a/test/DMLibTestCodeGen/MultiDirectionTestClassAttribute.cs b/test/DMLibTestCodeGen/MultiDirectionTestClassAttribute.cs new file mode 100644 index 00000000..8592e8b6 --- /dev/null +++ b/test/DMLibTestCodeGen/MultiDirectionTestClassAttribute.cs @@ -0,0 +1,17 @@ +//------------------------------------------------------------------------------ +// +// 
Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTestCodeGen +{ + using System; + + [AttributeUsage(AttributeTargets.Class, Inherited = false)] + public class MultiDirectionTestClassAttribute : Attribute + { + public MultiDirectionTestClassAttribute() + { + } + } +} diff --git a/test/DMLibTestCodeGen/MultiDirectionTestContext.cs b/test/DMLibTestCodeGen/MultiDirectionTestContext.cs new file mode 100644 index 00000000..5d43004f --- /dev/null +++ b/test/DMLibTestCodeGen/MultiDirectionTestContext.cs @@ -0,0 +1,22 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTestCodeGen +{ + public class MultiDirectionTestContext where TDataType : struct + { + public static TDataType SourceType + { + get; + set; + } + + public static TDataType DestType + { + get; + set; + } + } +} diff --git a/test/DMLibTestCodeGen/MultiDirectionTestMethod.cs b/test/DMLibTestCodeGen/MultiDirectionTestMethod.cs new file mode 100644 index 00000000..eb9523f3 --- /dev/null +++ b/test/DMLibTestCodeGen/MultiDirectionTestMethod.cs @@ -0,0 +1,56 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTestCodeGen +{ + using System; + using System.Collections.Generic; + using System.Linq; + using System.Reflection; + + internal class MultiDirectionTestMethod + { + private HashSet transferDirections; + + public MethodInfo MethodInfoObj + { + get; + private set; + } + + public MultiDirectionTestMethod(MethodInfo methodInfo) + { + this.MethodInfoObj = methodInfo; + transferDirections = new HashSet(); + + foreach (Attribute attribute in methodInfo.GetCustomAttributes(true)) + 
{ + MultiDirectionTestMethodAttribute multiDirectionAttr = attribute as MultiDirectionTestMethodAttribute; + if (null != multiDirectionAttr) + { + this.ParseMultiDirectionAttribute(multiDirectionAttr); + } + } + } + + public IEnumerable GetTransferDirections() + { + return this.transferDirections; + } + + private void ParseMultiDirectionAttribute(MultiDirectionTestMethodAttribute multiDirectionAttr) + { + foreach (var direction in multiDirectionAttr.ExtractDirections()) + { + if (this.transferDirections.Contains(direction) && direction.Tags.Any()) + { + this.transferDirections.Remove(direction); + } + + this.transferDirections.Add(direction); + } + } + } +} diff --git a/test/DMLibTestCodeGen/MultiDirectionTestMethodAttribute.cs b/test/DMLibTestCodeGen/MultiDirectionTestMethodAttribute.cs new file mode 100644 index 00000000..a5f979d4 --- /dev/null +++ b/test/DMLibTestCodeGen/MultiDirectionTestMethodAttribute.cs @@ -0,0 +1,26 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTestCodeGen +{ + using System; + using System.Collections.Generic; + + public abstract class MultiDirectionTestMethodAttribute : Attribute + { + protected string[] Tags + { + get; + private set; + } + + protected MultiDirectionTestMethodAttribute(string[] tags = null) + { + this.Tags = tags ?? 
new string[0]; + } + + internal abstract IEnumerable ExtractDirections(); + } +} diff --git a/test/DMLibTestCodeGen/MultiDirectionTestMethodSetAttribute.cs b/test/DMLibTestCodeGen/MultiDirectionTestMethodSetAttribute.cs new file mode 100644 index 00000000..95a81c6e --- /dev/null +++ b/test/DMLibTestCodeGen/MultiDirectionTestMethodSetAttribute.cs @@ -0,0 +1,60 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTestCodeGen +{ + using System; + using System.Collections.Generic; + using System.Reflection; + + public abstract class MultiDirectionTestMethodSetAttribute : MultiDirectionTestMethodAttribute + { + private List testMethodAttributes = new List(); + + private List directionFilters = new List(); + + protected void AddTestMethodAttribute(MultiDirectionTestMethodAttribute testMethodAttribute) + { + if (testMethodAttribute == null) + { + throw new ArgumentNullException("testMethodAttribute"); + } + + this.testMethodAttributes.Add(testMethodAttribute); + } + + internal void AddDirectionFilter(DirectionFilter directionFilter) + { + this.directionFilters.Add(directionFilter); + } + + internal override IEnumerable ExtractDirections() + { + foreach(var attribute in this.testMethodAttributes) + { + foreach(var direction in attribute.ExtractDirections()) + { + if (this.Filter(direction)) + { + yield return direction; + } + } + } + } + + private bool Filter(TestMethodDirection direction) + { + foreach(var directionFilter in this.directionFilters) + { + if (!directionFilter.Filter(direction)) + { + return false; + } + } + + return true; + } + } +} diff --git a/test/DMLibTestCodeGen/Program.cs b/test/DMLibTestCodeGen/Program.cs new file mode 100644 index 00000000..1b892b7c --- /dev/null +++ b/test/DMLibTestCodeGen/Program.cs @@ -0,0 +1,48 @@ 
+//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTestCodeGen +{ + using System; + using System.Reflection; + + public class Program + { + public static void Main(string[] args) + { + if (args == null || args.Length != 2) + { + PrintHelp(); + return; + } + + string dllName = args[0]; + string sourceFolder = args[1]; + + GenerateCode(dllName, sourceFolder); + } + + private static void GenerateCode(string dllName, string outputFolder) + { + SourceCodeGenerator codeGen = new SourceCodeGenerator(outputFolder); + + Assembly assembly = Assembly.LoadFrom(dllName); + + foreach (Type type in assembly.GetTypes()) + { + if (null != type.GetCustomAttribute(typeof(MultiDirectionTestClassAttribute))) + { + MultiDirectionTestClass testClass = new MultiDirectionTestClass(type); + codeGen.GenerateSourceCode(testClass); + } + } + } + + private static void PrintHelp() + { + Console.WriteLine("Usage: DMLibTestCodeGen.exe [InputDll] [OutputSourceFolder]"); + } + } +} diff --git a/test/DMLibTestCodeGen/Properties/AssemblyInfo.cs b/test/DMLibTestCodeGen/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..04676b2b --- /dev/null +++ b/test/DMLibTestCodeGen/Properties/AssemblyInfo.cs @@ -0,0 +1,14 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. 
+[assembly: AssemblyTitle("DMLibTestCodeGen")] +[assembly: AssemblyDescription("")] diff --git a/test/DMLibTestCodeGen/SourceCodeGenerator.cs b/test/DMLibTestCodeGen/SourceCodeGenerator.cs new file mode 100644 index 00000000..93db7847 --- /dev/null +++ b/test/DMLibTestCodeGen/SourceCodeGenerator.cs @@ -0,0 +1,263 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTestCodeGen +{ + using System; + using System.CodeDom; + using System.CodeDom.Compiler; + using System.IO; + using Microsoft.CSharp; + using Microsoft.VisualStudio.TestTools.UnitTesting; + + internal static class GodeGeneratorConst + { + public const string RootNameSpace = "DMLibTest.Generated"; + public const string ClassInitMethodName = "GeneratedClassInit"; + public const string ClassCleanupMethodName = "GeneratedClassCleanup"; + public const string TestInitMethodName = "GeneratedTestInit"; + public const string TestCleanupMethodName = "GeneratedTestCleanup"; + } + + internal class SourceCodeGenerator + { + private const string SourceFileExtention = ".cs"; + private const string GeneratedSuffix = "_Generated"; + + private string outputPath; + + public SourceCodeGenerator(string outputPath) + { + this.outputPath = outputPath; + } + + public void GenerateSourceCode(MultiDirectionTestClass testClass) + { + string sourceFileName = this.GetSourceFileName(testClass); + + if (testClass.MultiDirectionMethods.Count == 0) + { + Console.WriteLine("{0} has no multiple direction test case. 
Skip code generating...", testClass.ClassType.Name); + return; + } + + Console.WriteLine("Generating code for {0}", testClass.ClassType.Name); + + CodeCompileUnit compileUnit = new CodeCompileUnit(); + CodeNamespace rootNameSpace = new CodeNamespace(GodeGeneratorConst.RootNameSpace); + + this.AddImport(rootNameSpace); + + rootNameSpace.Types.Add(GetGeneratedClass(testClass)); + + compileUnit.Namespaces.Add(rootNameSpace); + + this.WriteCodeToFile(compileUnit, Path.Combine(this.outputPath, sourceFileName)); + } + + private void AddImport(CodeNamespace nameSpace) + { + nameSpace.Imports.Add(new CodeNamespaceImport("DMLibTestCodeGen")); + nameSpace.Imports.Add(new CodeNamespaceImport("Microsoft.VisualStudio.TestTools.UnitTesting")); + nameSpace.Imports.Add(new CodeNamespaceImport("MS.Test.Common.MsTestLib")); + nameSpace.Imports.Add(new CodeNamespaceImport("System")); + } + + private CodeTypeDeclaration GetGeneratedClass(MultiDirectionTestClass testClass) + { + CodeTypeDeclaration result = new CodeTypeDeclaration(this.GetGeneratedClassName(testClass)); + result.Attributes = MemberAttributes.Public; + result.BaseTypes.Add(testClass.ClassType); + + CodeAttributeDeclaration testClassAttribute = new CodeAttributeDeclaration( + new CodeTypeReference(typeof(TestClassAttribute))); + + result.CustomAttributes.Add(testClassAttribute); + + // Add initialize and cleanup method + result.Members.Add(this.GetInitCleanupMethod(typeof(ClassInitializeAttribute), testClass)); + result.Members.Add(this.GetInitCleanupMethod(typeof(ClassCleanupAttribute), testClass)); + + // No need to generate TestInitialize and TestCleanup Method. + // Generated class can inherit from base class. 
+ + // Expand multiple direction test case + foreach (MultiDirectionTestMethod testMethod in testClass.MultiDirectionMethods) + { + this.AddGeneratedMethod(result, testMethod); + } + + return result; + } + + private CodeMemberMethod GetInitCleanupMethod(Type methodAttributeType, MultiDirectionTestClass testClass) + { + bool isStatic = false; + string generatedMetholdName = string.Empty; + string methodToInvokeName = string.Empty; + CodeParameterDeclarationExpression parameterDec = null; + + if (methodAttributeType == typeof(ClassInitializeAttribute)) + { + isStatic = true; + generatedMetholdName = GodeGeneratorConst.ClassInitMethodName; + methodToInvokeName = testClass.ClassInit.Name; + parameterDec = new CodeParameterDeclarationExpression(typeof(TestContext), "testContext"); + } + else if (methodAttributeType == typeof(ClassCleanupAttribute)) + { + isStatic = true; + generatedMetholdName = GodeGeneratorConst.ClassCleanupMethodName; + methodToInvokeName = testClass.ClassCleanup.Name; + } + else + { + throw new ArgumentException("methodAttributeType"); + } + + CodeMemberMethod result = new CodeMemberMethod(); + result.Name = generatedMetholdName; + + // Add parameter list if needed + if (parameterDec != null) + { + result.Parameters.Add(parameterDec); + } + + CodeExpression callBase = null; + if (isStatic) + { + result.Attributes = MemberAttributes.Public | MemberAttributes.Static; + callBase = new CodeTypeReferenceExpression(testClass.ClassType.FullName); + } + else + { + result.Attributes = MemberAttributes.Public | MemberAttributes.Final; + callBase = new CodeBaseReferenceExpression(); + } + + // Add methold attribute + CodeAttributeDeclaration methodAttribute = new CodeAttributeDeclaration( + new CodeTypeReference(methodAttributeType)); + result.CustomAttributes.Add(methodAttribute); + + // Add invoke statement + CodeMethodInvokeExpression invokeExp = null; + if (parameterDec != null) + { + CodeVariableReferenceExpression sourceParameter = new 
CodeVariableReferenceExpression(parameterDec.Name); + invokeExp = new CodeMethodInvokeExpression(callBase, methodToInvokeName, sourceParameter); + } + else + { + invokeExp = new CodeMethodInvokeExpression(callBase, methodToInvokeName); + } + + result.Statements.Add(invokeExp); + + return result; + } + + private void AddGeneratedMethod(CodeTypeDeclaration generatedClass, MultiDirectionTestMethod testMethod) + { + foreach (var transferDirection in testMethod.GetTransferDirections()) + { + string generatedMethodName = this.GetGeneratedMethodName(testMethod, transferDirection); + + CodeMemberMethod generatedMethod = new CodeMemberMethod(); + generatedMethod.Name = generatedMethodName; + generatedMethod.Attributes = MemberAttributes.Public | MemberAttributes.Final; + + // Add TestCategoryAttribute to the generated method + this.AddTestCategoryAttributes(generatedMethod, testMethod); + this.AddTestCategoryAttribute(generatedMethod, MultiDirectionTag.MultiDirection); + foreach (var tag in transferDirection.GetTags()) + { + this.AddTestCategoryAttribute(generatedMethod, tag); + } + + CodeAttributeDeclaration testMethodAttribute = new CodeAttributeDeclaration( + new CodeTypeReference(typeof(TestMethodAttribute))); + + generatedMethod.CustomAttributes.Add(testMethodAttribute); + + foreach (var statement in transferDirection.EnumerateUpdateContextStatements()) + { + generatedMethod.Statements.Add(statement); + } + + CodeMethodReferenceExpression callee = new CodeMethodReferenceExpression( + new CodeBaseReferenceExpression(), testMethod.MethodInfoObj.Name); + CodeMethodInvokeExpression invokeExp = new CodeMethodInvokeExpression(callee); + generatedMethod.Statements.Add(invokeExp); + generatedClass.Members.Add(generatedMethod); + } + } + + private void AddTestCategoryAttributes(CodeMemberMethod method, MultiDirectionTestMethod testMethod) + { + foreach (var customAttribute in testMethod.MethodInfoObj.CustomAttributes) + { + if (customAttribute.AttributeType == 
typeof(TestCategoryAttribute)) + { + if (customAttribute.ConstructorArguments.Count != 1) + { + // Unrecognized attribute, skip + continue; + } + + this.AddTestCategoryAttribute( + method, + new CodeSnippetExpression(customAttribute.ConstructorArguments[0].ToString())); + } + } + } + + private void AddTestCategoryAttribute(CodeMemberMethod method, string tagName) + { + this.AddTestCategoryAttribute(method, new CodePrimitiveExpression(tagName)); + } + + private void AddTestCategoryAttribute(CodeMemberMethod method, CodeExpression expression) + { + CodeAttributeArgument testCategoryTag = new CodeAttributeArgument(expression); + + CodeAttributeDeclaration testCategoryAttribute = new CodeAttributeDeclaration( + new CodeTypeReference(typeof(TestCategoryAttribute)), + testCategoryTag); + + method.CustomAttributes.Add(testCategoryAttribute); + } + + private string GetSourceFileName(MultiDirectionTestClass testClass) + { + return this.GetGeneratedClassName(testClass) + SourceFileExtention; + } + + private string GetGeneratedClassName(MultiDirectionTestClass testClass) + { + return testClass.ClassType.Name + GeneratedSuffix; + } + + private string GetGeneratedMethodName(MultiDirectionTestMethod testMethod, TestMethodDirection transferDirection) + { + // [MethodName]_[DirectionSuffix] + return String.Format("{0}_{1}", testMethod.MethodInfoObj.Name, transferDirection.GetTestMethodNameSuffix()); + } + + private void WriteCodeToFile(CodeCompileUnit compileUnit, string sourceFileName) + { + CSharpCodeProvider provider = new CSharpCodeProvider(); + + using (StreamWriter sw = new StreamWriter(sourceFileName, false)) + { + using (IndentedTextWriter tw = new IndentedTextWriter(sw, " ")) + { + provider.GenerateCodeFromCompileUnit(compileUnit, tw, new CodeGeneratorOptions()); + } + } + } + } +} diff --git a/test/DMLibTestCodeGen/TestMethodDirection.cs b/test/DMLibTestCodeGen/TestMethodDirection.cs new file mode 100644 index 00000000..1ecfce0e --- /dev/null +++ 
b/test/DMLibTestCodeGen/TestMethodDirection.cs @@ -0,0 +1,50 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace DMLibTestCodeGen +{ + using System.CodeDom; + using System.Collections.Generic; + + internal abstract class TestMethodDirection + { + public List Tags + { + get; + private set; + } + + public TestMethodDirection(List tags) + { + if (null != tags) + { + this.Tags = new List(tags); + } + else + { + this.Tags = new List(); + } + } + + public abstract string GetTestMethodNameSuffix(); + + protected abstract IEnumerable GetExtraTags(); + + public IEnumerable GetTags() + { + foreach(var tag in Tags) + { + yield return tag; + } + + foreach (var extraTag in this.GetExtraTags()) + { + yield return extraTag; + } + } + + public abstract IEnumerable EnumerateUpdateContextStatements(); + } +} diff --git a/test/MsTestLib/ClassConfig.cs b/test/MsTestLib/ClassConfig.cs new file mode 100644 index 00000000..b4c481e9 --- /dev/null +++ b/test/MsTestLib/ClassConfig.cs @@ -0,0 +1,55 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; + +namespace MS.Test.Common.MsTestLib +{ + + public class ClassConfig + { + public ClassConfig() + { + classParams = new Dictionary(); + classMethods = new Dictionary(); + } + + private Dictionary classParams; + + public Dictionary ClassParams + { + get { return classParams; } + set { classParams = value; } + } + + private Dictionary classMethods; + + public MethodConfig this[string methodName] + { + get + { + if (classMethods.ContainsKey(methodName)) + { + return classMethods[methodName]; + } + else + { + return null; + } + } 
+ + set + { + classMethods[methodName] = value; + } + + } + + } + +} diff --git a/test/MsTestLib/ConsoleLogger.cs b/test/MsTestLib/ConsoleLogger.cs new file mode 100644 index 00000000..8bd4a603 --- /dev/null +++ b/test/MsTestLib/ConsoleLogger.cs @@ -0,0 +1,163 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; + +namespace MS.Test.Common.MsTestLib +{ + public class ConsoleLogger : ILogger + { + + private const ConsoleColor ERROR_FG_COLOR = ConsoleColor.Red; + private const ConsoleColor INFO_FG_COLOR = ConsoleColor.White; + private const ConsoleColor WARN_FG_COLOR = ConsoleColor.Green; + private const ConsoleColor NOTE_FG_COLOR = ConsoleColor.DarkYellow; + + private ConsoleColor m_prevFGColor; + + public ConsoleLogger() + { + m_prevFGColor = Console.ForegroundColor; + } + + /// + /// + /// Writes an error log + /// + /// Format message string + /// exception object + /// Objects that need to be serialized in the message + /// + public void WriteError(string msg, params object[] objToLog) + { + DateTime dt = DateTime.Now; + StringBuilder sBuilder = new StringBuilder("[ERROR][" + dt.ToString() + "." + dt.Millisecond + "]"); + sBuilder.Append( MessageBuilder.FormatString( msg, objToLog ) ); + Console.ForegroundColor = ERROR_FG_COLOR; + Console.WriteLine( sBuilder.ToString() ); + Console.ForegroundColor = m_prevFGColor; + } + + /// + /// + /// Writes a warn log + /// + /// Format message string + /// Objects that need to be serialized in the message + /// + public void WriteWarning(string msg, params object[] objToLog) + { + DateTime dt = DateTime.Now; + StringBuilder sBuilder = new StringBuilder("[WARN][" + dt.ToString() + "." 
+ dt.Millisecond + "]"); + sBuilder.Append( MessageBuilder.FormatString( msg, objToLog ) ); + Console.ForegroundColor = WARN_FG_COLOR; + Console.WriteLine( sBuilder.ToString() ); + Console.ForegroundColor = m_prevFGColor; + } + + /// + /// + /// Writes an info log + /// + /// Format message string + /// Objects that need to be serialized in the message + /// + public void WriteInfo(string msg, params object[] objToLog) + { + DateTime dt = DateTime.Now; + StringBuilder sBuilder = new StringBuilder( "[INFO][" + dt.ToString()+"."+ dt.Millisecond+ "]" ); + sBuilder.Append( MessageBuilder.FormatString( msg, objToLog) ); + Console.ForegroundColor = INFO_FG_COLOR; + Console.WriteLine( sBuilder.ToString() ); + Console.ForegroundColor = m_prevFGColor; + } + + /// + /// + /// Writes a verbose log + /// + /// Format message string + /// Objects that need to be serialized in the message + /// + public void WriteVerbose(string msg, params object[] objToLog) + { + DateTime dt = DateTime.Now; + StringBuilder sBuilder = new StringBuilder("[VERB][" + dt.ToString() + "." + dt.Millisecond + "]"); + sBuilder.Append( MessageBuilder.FormatString( msg, objToLog) ); + Console.ForegroundColor = INFO_FG_COLOR; + Console.WriteLine( sBuilder.ToString() ); + Console.ForegroundColor = m_prevFGColor; + } + + + /// + /// + /// Starts a test (as a child of the current context) + /// + /// Test id + /// + public void StartTest(string testId) + { + StringBuilder sBuilder = new StringBuilder("[START] Test: "); + sBuilder.Append( testId ); + + Console.ForegroundColor = NOTE_FG_COLOR; + Console.WriteLine( sBuilder.ToString() ); + Console.ForegroundColor = m_prevFGColor; + } + + /// + /// + /// Ends the specified test with the specified test result. 
+ /// + /// Test id + /// Result of the Test + /// + public void EndTest(string testId, TestResult result ) + { + Console.ForegroundColor = NOTE_FG_COLOR; + + if (result == TestResult.FAIL || result == TestResult.SKIP) + { + Console.ForegroundColor = ERROR_FG_COLOR; + } + + StringBuilder sBuilder = new StringBuilder("[END] Test: "); + sBuilder.Append( testId ); + sBuilder.Append( " RESULT: " ); + sBuilder.Append( result.ToString() ); + + Console.WriteLine( sBuilder.ToString() ); + + Console.ForegroundColor = m_prevFGColor; + return; + } + + /// + /// + /// Returns "this" object + /// + /// SimpleConsoleLogger object + /// + public object GetLogger() + { + return this; + } + + /// + /// + /// Releases any resource held + /// + /// + public void Close() + { + //Do nothing + } + } +} + diff --git a/test/MsTestLib/Exceptions.cs b/test/MsTestLib/Exceptions.cs new file mode 100644 index 00000000..1cbe1437 --- /dev/null +++ b/test/MsTestLib/Exceptions.cs @@ -0,0 +1,30 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; + +namespace MS.Test.Common.MsTestLib +{ + public class TestPauseException : Exception + { + public TestPauseException() + { + } + + public TestPauseException(string message) + : base(message) + { + } + + public TestPauseException(string message, Exception innerException) + : base(message, innerException) + { + } + + } +} diff --git a/test/MsTestLib/FileLogger.cs b/test/MsTestLib/FileLogger.cs new file mode 100644 index 00000000..f956c5af --- /dev/null +++ b/test/MsTestLib/FileLogger.cs @@ -0,0 +1,192 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// 
+//------------------------------------------------------------------------------ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; + +namespace MS.Test.Common.MsTestLib +{ + public class FileLogger : ILogger + { + + private System.IO.StreamWriter m_file; + + /// + /// + /// Creates a new instance of this class + /// + /// + /// + public FileLogger() + { + string fileName = Environment.UserName + "_" + Environment.MachineName + " " + DateTime.Now.ToString().Replace('/', '-').Replace(':', '_') + ".txt"; + m_file = new System.IO.StreamWriter(fileName.ToString(), true); + } + + /// + /// + /// Creates a new instance of this class + /// + /// + /// File to which logs should be appended + /// + public FileLogger(string fileName) + : this(fileName, true) + { + + } + + /// + /// + /// Creates a new instance of this class + /// + /// + /// File to which logs should be written/appended + /// denotes whether the file is to be appended or over-written + /// + public FileLogger(string fileName, bool append) + { + // Open the file and assign to member variable + m_file = new System.IO.StreamWriter(fileName, append); + } + + /// + /// + /// Writes an error log + /// + /// Format message string + /// exception object + /// Objects that need to be serialized in the message + /// + public void WriteError( + string msg, + params object[] objToLog) + { + DateTime dt = DateTime.Now; + StringBuilder sBuilder = new StringBuilder("[ERROR][" + dt.ToLongTimeString() + "." + dt.Millisecond + "]"); + sBuilder.Append(MessageBuilder.FormatString(msg, objToLog)); + m_file.WriteLine(sBuilder.ToString()); + m_file.Flush(); + } + + /// + /// + /// Writes a warn log + /// + /// Format message string + /// Objects that need to be serialized in the message + /// + public void WriteWarning( + string msg, + params object[] objToLog) + { + DateTime dt = DateTime.Now; + StringBuilder sBuilder = new StringBuilder("[WARN][" + dt.ToLongTimeString() + "." 
+ dt.Millisecond + "]"); + sBuilder.Append(MessageBuilder.FormatString(msg, objToLog)); + m_file.WriteLine(sBuilder.ToString()); + m_file.Flush(); + } + + /// + /// + /// Writes an info log + /// + /// Format message string + /// Objects that need to be serialized in the message + /// + public void WriteInfo( + string msg, + params object[] objToLog) + { + DateTime dt = DateTime.Now; + StringBuilder sBuilder = new StringBuilder("[INFO][" + dt.ToLongTimeString() + "." + dt.Millisecond + "]"); + sBuilder.Append(MessageBuilder.FormatString(msg, objToLog)); + m_file.WriteLine(sBuilder.ToString()); + m_file.Flush(); + } + + /// + /// + /// Writes a verbose log + /// + /// Format message string + /// Objects that need to be serialized in the message + /// + public void WriteVerbose( + string msg, + params object[] objToLog) + { + DateTime dt = DateTime.Now; + StringBuilder sBuilder = new StringBuilder("[VERB][" + dt.ToLongTimeString() + "." + dt.Millisecond + "]"); + sBuilder.Append(MessageBuilder.FormatString(msg, objToLog)); + m_file.WriteLine(sBuilder.ToString()); + m_file.Flush(); + } + + + /// + /// + /// Starts a test (as a child of the current context) + /// + /// Test id + /// + public void StartTest( + string testId) + { + StringBuilder sBuilder = new StringBuilder("[START] Test: "); + sBuilder.Append(testId); + m_file.WriteLine(sBuilder.ToString()); + m_file.Flush(); + } + + /// + /// + /// Ends the specified test with the specified test result + /// + /// Test id + /// Result of the Test + /// + public void EndTest( + string testId, + TestResult result) + { + StringBuilder sBuilder = new StringBuilder("[END] Test: "); + sBuilder.Append(testId); + sBuilder.Append(" RESULT: "); + sBuilder.Append(result.ToString()); + m_file.WriteLine(sBuilder.ToString()); + m_file.Flush(); + return; + } + + + /// + /// + /// Returns "this" object + /// + /// SimpleFileLogger object + /// + public object GetLogger() + { + return this; + } + + /// + /// + /// Closes the log file + 
/// + /// + public void Close() + { + if (m_file != null) + { + m_file.Flush(); + m_file.Close(); + } + } + } +} diff --git a/test/MsTestLib/ILogger.cs b/test/MsTestLib/ILogger.cs new file mode 100644 index 00000000..6aa5f028 --- /dev/null +++ b/test/MsTestLib/ILogger.cs @@ -0,0 +1,52 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; + +namespace MS.Test.Common.MsTestLib +{ + public interface ILogger + { + void WriteError( + string msg, + params object[] objToLog); + + void WriteWarning( + string msg, + params object[] objToLog); + + void WriteInfo( + string msg, + params object[] objToLog); + + void WriteVerbose( + string msg, + params object[] objToLog); + + void StartTest( + string testId); + + void EndTest( + string testId, + TestResult result); + + object GetLogger(); + + void Close(); + + } + + public enum TestResult + { + PASS, + FAIL, + SKIP + } + + +} diff --git a/test/MsTestLib/MessageBuilder.cs b/test/MsTestLib/MessageBuilder.cs new file mode 100644 index 00000000..dc52094e --- /dev/null +++ b/test/MsTestLib/MessageBuilder.cs @@ -0,0 +1,85 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; + +namespace MS.Test.Common.MsTestLib +{ + public class MessageBuilder + { + /// + /// + /// Uses String.Format method for formatting. Incase of any Exceptions due + /// to null arguments or incorrect message format, it formats the message in + /// an internal standard format. For example: + /// MSG: my-message + /// Obj-1: objToLog[1] + /// Obj-2: objToLog[2] + /// ... 
+ /// + /// + /// Objects that need to be serialized in the message + /// + /// + public static string FormatString(string msgFormat, params object[] objToLog) + { + if ((string.IsNullOrEmpty(msgFormat) == false) + && (msgFormat.IndexOf('{') != -1) + && (msgFormat.IndexOf('}') != -1)) + { + try + { + return String.Format(msgFormat, objToLog); + } + catch + { + //ignore exception + } + } + + string prefix = string.Empty; + if (objToLog != null && objToLog.Length > 1) + { + prefix = " "; + } + + StringBuilder sBuilder = new StringBuilder(prefix); + sBuilder.Append(msgFormat); + sBuilder.Append(SerializeObjects(objToLog)); + return sBuilder.ToString(); + } + + private static string SerializeObjects(object[] objToLog) + { + StringBuilder sBuilder = new StringBuilder(); + if (objToLog != null) + { + for (int i = 0; i < objToLog.Length; i++) + { + if (objToLog != null) + { + try + { + sBuilder.Append("\n"); + sBuilder.Append(" Obj-"); + sBuilder.Append(i); + sBuilder.Append(" : "); + sBuilder.Append(objToLog[i]); + } + catch + { + //Ignore any serialization exceptions + + } + } + } + } + return sBuilder.ToString(); + } + } +} diff --git a/test/MsTestLib/MethodConfig.cs b/test/MsTestLib/MethodConfig.cs new file mode 100644 index 00000000..5ae37bf8 --- /dev/null +++ b/test/MsTestLib/MethodConfig.cs @@ -0,0 +1,44 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; + +namespace MS.Test.Common.MsTestLib +{ + + public class MethodConfig + { + public MethodConfig() + { + methodParams = new Dictionary(); + } + + private Dictionary methodParams; + + public Dictionary MethodParams + { + get { return methodParams; } + set { methodParams = value; } + } + + public string this[string key] + { + get + { + return methodParams[key]; + } + 
+ set + { + methodParams[key] = value; + } + } + } + + +} diff --git a/test/MsTestLib/MsTestLib.csproj b/test/MsTestLib/MsTestLib.csproj new file mode 100644 index 00000000..2ef0e25a --- /dev/null +++ b/test/MsTestLib/MsTestLib.csproj @@ -0,0 +1,73 @@ + + + + + Debug + AnyCPU + {AC39B50F-DC27-4411-9ED4-A4A137190ACB} + Library + MS.Test.Common.MsTestLib + MsTestLib + Properties + v4.5 + 512 + ..\ + true + + + + false + + + true + + + ..\..\tools\strongnamekeys\fake\windows.snk + + + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + false + ManagedMinimumRules.ruleset + + + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + false + ManagedMinimumRules.ruleset + + + + ..\..\..\..\imports\VisualStudio\VS10RTM\MsTest\Microsoft.VisualStudio.QualityTools.UnitTestFramework.dll + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/test/MsTestLib/Test.cs b/test/MsTestLib/Test.cs new file mode 100644 index 00000000..5d8225ef --- /dev/null +++ b/test/MsTestLib/Test.cs @@ -0,0 +1,139 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using Microsoft.VisualStudio.TestTools.UnitTesting; + +namespace MS.Test.Common.MsTestLib +{ + public static class Test + { + public static string TestDataFile; + public static TestConfig Data; + public static TestLogger Logger; + public static int TestCount = 0; + public static int FailCount = 0; + public static int SkipCount = 0; + + public static string FullClassName = string.Empty; + public static string MethodName = string.Empty; + + public static int ErrorCount = 0; + public static int SkipErrorCount = 0; + + public static List FailedCases = null; + public static List SkippedCases = null; + + public static void Init() + { + Init(TestDataFile); + 
} + + public static void Init(string testDataFile) + { + Data = new TestConfig(testDataFile); + Logger = new TestLogger(Data); + FailedCases = new List(); + SkippedCases = new List(); + } + + public static void Close() + { + Logger.Close(); + } + + public static void Info( + string msg, + params object[] objToLog) + { + Logger.Info(msg, objToLog); + } + + public static void Warn( + string msg, + params object[] objToLog) + { + Logger.Warning(msg, objToLog); + } + + public static void Verbose( + string msg, + params object[] objToLog) + { + Logger.Verbose(msg, objToLog); + } + + public static void Error( + string msg, + params object[] objToLog) + { + ErrorCount++; + Logger.Error(msg, objToLog); + } + + public static void SkipError( + string msg, + params object[] objToLog) + { + SkipErrorCount++; + Logger.Error(msg, objToLog); + } + + public static void Assert(bool condition, + string msg, + params object[] objToLog) + { + if (condition) + { + Verbose("[Assert Pass] " + msg, objToLog); + } + else + { + Error("[Assert Fail] " + msg, objToLog); + } + } + + public static void Start(string testClass, string testMethod) + { + TestCount++; + ErrorCount = 0; + SkipErrorCount = 0; + Logger.StartTest(testClass + "." + testMethod); + Test.FullClassName = testClass; + Test.MethodName = testMethod; + } + + public static void End(string testClass, string testMethod) + { + if (ErrorCount == 0 && SkipErrorCount == 0) + { + Logger.EndTest(testClass + "." + testMethod, TestResult.PASS); + } + else if (SkipErrorCount > 0) + { + SkipCount++; + Logger.EndTest(testClass + "." + testMethod, TestResult.SKIP); + AssertFail(string.Format("The case is skipped since Test init fail. Please check the detailed case log.")); + SkippedCases.Add(String.Format("{0}.{1}", testClass, testMethod)); + } + else + { + FailCount++; + Logger.EndTest(testClass + "." + testMethod, TestResult.FAIL); + AssertFail(string.Format("There " + (ErrorCount > 1 ? 
"are {0} errors" : "is {0} error") + " so the case fails. Please check the detailed case log.", ErrorCount)); + FailedCases.Add(String.Format("{0}.{1}", testClass, testMethod)); + } + + } + + public static AssertFailDelegate AssertFail; + + } + + public delegate void AssertFailDelegate(string msg); +} diff --git a/test/MsTestLib/TestConfig.cs b/test/MsTestLib/TestConfig.cs new file mode 100644 index 00000000..d1081f99 --- /dev/null +++ b/test/MsTestLib/TestConfig.cs @@ -0,0 +1,181 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Xml; +using System.IO; + +namespace MS.Test.Common.MsTestLib +{ + public class TestConfig + { + private string DefaultConfigFileName = "TestData.xml"; + + public TestConfig(string configFile) + { + testParams = new Dictionary(); + testClasses = new Dictionary(); + + //Initialze: read default config file TestData.xml and then read configFile file + if(string.IsNullOrEmpty(configFile)) + { + configFile = "TestData.xml"; + } + if (File.Exists(DefaultConfigFileName)) + { + ReadConfig(DefaultConfigFileName); //read default config file: TestData.xml + } + if (File.Exists(configFile)) + { + ReadConfig(configFile); //read configFile file: e.g MyTestData.xml, configuration in this file will cover settings in TestData.xml + } + else + { + throw new FileNotFoundException(String.Format("{0} not found", configFile)); + } + } + private void ReadConfig(string configFile) + { + if (string.IsNullOrEmpty(configFile)) + { + throw new ArgumentNullException(); //illegal use + } + XmlDocument config = new XmlDocument(); + try + { + config.Load(configFile); + } + catch (FileNotFoundException) + { + string errorMsg = string.Format("{0} file not found", configFile); + throw new 
FileNotFoundException(errorMsg); + } + catch (Exception) + { + throw; + } + XmlNode root = config.SelectSingleNode("TestConfig"); + if (root != null) + { + foreach (XmlNode node in root.ChildNodes) + { + XmlElement eleNode = node as XmlElement; + if (eleNode == null) + { + continue; + } + + if (string.Compare(eleNode.Name.ToLower(), "testclass") == 0 && eleNode.Attributes["name"] != null) + { + ClassConfig classConfig = this[eleNode.Attributes["name"].Value]; + if(classConfig == null) + classConfig = new ClassConfig(); + foreach (XmlNode subnode in eleNode.ChildNodes) + { + XmlElement eleSubnode = subnode as XmlElement; + if (eleSubnode == null) + { + continue; + } + + if (string.Compare(eleSubnode.Name.ToLower(), "testmethod") == 0 && eleSubnode.Attributes["name"] != null) + { + MethodConfig methodConfig = classConfig[eleSubnode.Attributes["name"].Value]; + if (methodConfig == null) + methodConfig = new MethodConfig(); + foreach (XmlNode methodParamNode in eleSubnode.ChildNodes) + { + XmlElement eleMethodParamNode = methodParamNode as XmlElement; + if (eleMethodParamNode == null) + { + continue; + } + methodConfig[eleMethodParamNode.Name] = eleMethodParamNode.InnerText; + + } + classConfig[eleSubnode.Attributes["name"].Value] = methodConfig; + continue; + } + + classConfig.ClassParams[eleSubnode.Name] = eleSubnode.InnerText; + + } + this[eleNode.Attributes["name"].Value] = classConfig; + continue; + + } + + TestParams[eleNode.Name] = eleNode.InnerText; + + } + } + } + + private Dictionary testParams = null; + + public Dictionary TestParams + { + get { return testParams; } + set { testParams = value; } + } + + private Dictionary testClasses; + + public ClassConfig this[string className] + { + get + { + if (testClasses.ContainsKey(className)) + { + return testClasses[className]; + } + else + { + return null; + } + } + + set + { + testClasses[className] = value; + } + } + + public string Get(string paramName) + { + //first search the method params + if 
(this[Test.FullClassName] != null) + { + if (this[Test.FullClassName][Test.MethodName] != null) + { + if (this[Test.FullClassName][Test.MethodName].MethodParams.ContainsKey(paramName)) + { + return this[Test.FullClassName][Test.MethodName].MethodParams[paramName].Trim(); + } + } + + if (this[Test.FullClassName].ClassParams.ContainsKey(paramName)) + { + return this[Test.FullClassName].ClassParams[paramName].Trim(); + } + } + + if (TestParams.ContainsKey(paramName)) + { + return TestParams[paramName].Trim(); + } + + throw new ArgumentException("The test param does not exist.", paramName); + + //return null; + + } + + } + +} diff --git a/test/MsTestLib/TestHelper.cs b/test/MsTestLib/TestHelper.cs new file mode 100644 index 00000000..50305539 --- /dev/null +++ b/test/MsTestLib/TestHelper.cs @@ -0,0 +1,258 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Diagnostics; +using System.Text.RegularExpressions; +using System.Threading; + +namespace MS.Test.Common.MsTestLib +{ + public class TestHelper + { + // default time out for runcmd + // AzCopy will retry for 15 min when server error happens + // Set command timeout to 20 min in case azcopy is terminated during retry and cause no error output + public const int CommandTimeoutInSec = 1200; + public const int CommandTimeoutInMs = CommandTimeoutInSec * 1000; + public const int WaitForKillTimeoutInMs = 30 * 1000; + + public static int RunCmd(string cmd, string args, string input = null) + { + return RunCmd(cmd, args, CommandTimeoutInMs, input); + } + + public static int RunCmd(string cmd, string args, out string stdout, out string stderr, string input = null) + { + return RunCmd(cmd, args, out stdout, out stderr, CommandTimeoutInMs, input); + } + + public 
static int RunCmd(string cmd, string args, int timeout, string input = null) + { + string stdout, stderr; + return RunCmd(cmd, args, out stdout, out stderr, timeout, input); + } + + public static int RunCmd(string cmd, string args, out string stdout, out string stderr, int timeout, string input = null) + { + Test.Logger.Verbose("Running: {0} {1}", cmd, args); + ProcessStartInfo psi = new ProcessStartInfo(cmd, args); + psi.CreateNoWindow = true; + psi.WindowStyle = ProcessWindowStyle.Hidden; + psi.UseShellExecute = false; + psi.RedirectStandardError = true; + psi.RedirectStandardOutput = true; + if (string.IsNullOrEmpty(input)) + { + psi.RedirectStandardInput = false; + } + else + { + psi.RedirectStandardInput = true; + } + + Process p = Process.Start(psi); + // To avoid deadlock between Process.WaitForExit and Process output redirection buffer filled up, we need to async read output before calling Process.WaitForExit + StringBuilder outputBuffer = new StringBuilder(); + var outputBufferLock = new object(); + p.OutputDataReceived += (sendingProcess, outLine) => + { + if (!String.IsNullOrEmpty(outLine.Data)) + { + lock (outputBufferLock) + { + outputBuffer.Append(outLine.Data + "\n"); + } + } + }; + StringBuilder errorBuffer = new StringBuilder(); + var errorBufferLock = new object(); + p.ErrorDataReceived += (sendingProcess, outLine) => + { + if (!String.IsNullOrEmpty(outLine.Data)) + { + lock (errorBufferLock) + { + errorBuffer.Append(outLine.Data + "\n"); + } + } + }; + + if (!string.IsNullOrEmpty(input)) + { + var writer = p.StandardInput; + writer.AutoFlush = true; + writer.WriteLine(input); + writer.Close(); + } + + p.BeginOutputReadLine(); + p.BeginErrorReadLine(); + if (p.WaitForExit(timeout)) + { + GetStdOutAndStdErr(p, outputBuffer, errorBuffer, out stdout, out stderr); + return p.ExitCode; + } + else + { + Test.Logger.Verbose("--Command timed out!"); + TestHelper.KillProcess(p); + GetStdOutAndStdErr(p, outputBuffer, errorBuffer, out stdout, out stderr); + 
return int.MinValue; + } + } + + private static void GetStdOutAndStdErr(Process p, StringBuilder outputBuffer, StringBuilder errorBuffer, out string stdout, out string stderr) + { + // Call this overload of WaitForExit to make sure all stdout/stderr strings are flushed. + p.WaitForExit(); + + stdout = outputBuffer.ToString(); + stderr = errorBuffer.ToString(); + + Test.Logger.Verbose("Stdout: {0}", stdout); + if (!string.IsNullOrEmpty(stderr) + && !string.Equals(stdout, stderr, StringComparison.InvariantCultureIgnoreCase)) + { + Test.Logger.Verbose("Stderr: {0}", stderr); + } + } + + public delegate bool RunningCondition(object arg); + /// + /// run cmd and specify the running condition. If running condition is not met, process will be terminated. + /// + public static int RunCmd(string cmd, string args, out string stdout, out string stderr, RunningCondition rc, object rcArg, string input = null) + { + Test.Logger.Verbose("Running: {0} {1}", cmd, args); + ProcessStartInfo psi = new ProcessStartInfo(cmd, args); + psi.CreateNoWindow = true; + psi.WindowStyle = ProcessWindowStyle.Hidden; + psi.UseShellExecute = false; + psi.RedirectStandardError = true; + psi.RedirectStandardOutput = true; + if (string.IsNullOrEmpty(input)) + { + psi.RedirectStandardInput = false; + } + else + { + psi.RedirectStandardInput = true; + } + + Process p = Process.Start(psi); + // To avoid deadlock between Process.WaitForExit and Process output redirection buffer filled up, we need to async read output before calling Process.WaitForExit + StringBuilder outputBuffer = new StringBuilder(); + p.OutputDataReceived += (sendingProcess, outLine) => + { + if (!String.IsNullOrEmpty(outLine.Data)) + { + outputBuffer.Append(outLine.Data + "\n"); + } + }; + StringBuilder errorBuffer = new StringBuilder(); + p.ErrorDataReceived += (sendingProcess, outLine) => + { + if (!String.IsNullOrEmpty(outLine.Data)) + { + errorBuffer.Append(outLine.Data + "\n"); + } + }; + + if (!string.IsNullOrEmpty(input)) + { + 
var writer = p.StandardInput; + writer.AutoFlush = true; + writer.WriteLine(input); + writer.Close(); + } + + p.BeginOutputReadLine(); + p.BeginErrorReadLine(); + DateTime nowTime = DateTime.Now; + DateTime timeOut = nowTime.AddMilliseconds(CommandTimeoutInMs); + + bool isTimedOut = false; + + while (rc(rcArg)) + { + if (p.HasExited) + { + // process has existed + break; + } + else if (timeOut < DateTime.Now) + { + //time out + isTimedOut = true; + break; + } + else + { + //continue to wait + Thread.Sleep(100); + } + } + stdout = outputBuffer.ToString(); + stderr = errorBuffer.ToString(); + if (p.HasExited) + { + Test.Logger.Verbose("Stdout: {0}", stdout); + if (!string.IsNullOrEmpty(stderr) + && !string.Equals(stdout, stderr, StringComparison.InvariantCultureIgnoreCase)) + Test.Logger.Verbose("Stderr: {0}", stderr); + return p.ExitCode; + } + else + { + if (isTimedOut) + { + Test.Logger.Verbose("--Command timed out!"); + } + + TestHelper.KillProcess(p); + + Test.Logger.Verbose("Stdout: {0}", stdout); + if (!string.IsNullOrEmpty(stderr) + && !string.Equals(stdout, stderr, StringComparison.InvariantCultureIgnoreCase)) + Test.Logger.Verbose("Stderr: {0}", stderr); + return int.MinValue; + } + } + + public static bool StringMatch(string source, string pattern, RegexOptions? 
regexOptions = null) + { + Regex r = null; + if (regexOptions.HasValue) + { + r = new Regex(pattern, regexOptions.Value); + } + else + { + r = new Regex(pattern); + } + + Match m = r.Match(source); + return m.Success; + } + + public static void KillProcess(Process process) + { + try + { + process.Kill(); + bool exit = process.WaitForExit(WaitForKillTimeoutInMs); + Test.Assert(exit, "Process {0} should exit after being killed", process.Id); + } + catch (InvalidOperationException e) + { + Test.Info("InvalidOperationException caught while trying to kill process {0}: {1}", process.Id, e.ToString()); + } + } + } +} diff --git a/test/MsTestLib/TestLogger.cs b/test/MsTestLib/TestLogger.cs new file mode 100644 index 00000000..eede97a3 --- /dev/null +++ b/test/MsTestLib/TestLogger.cs @@ -0,0 +1,148 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +//------------------------------------------------------------------------------ +namespace MS.Test.Common.MsTestLib +{ + using System; + using System.Collections.Generic; + + /// + /// the wrapper for the loggers + /// + public class TestLogger + { + public List Loggers; + private Object loggersLock = new Object(); + + public TestLogger() + { + Loggers = new List(); + } + + public TestLogger(TestConfig testConfig) + { + Loggers = new List(); + Init(testConfig); + } + + public bool LogVerbose = false; + public bool LogInfo = true; + public bool LogWarning = false; + public bool LogError = true; + + public void Init(TestConfig testConfig) + { + bool consoleLogger = false; + bool fileLogger = true; + bool.TryParse(testConfig.TestParams["consolelogger"], out consoleLogger); + bool.TryParse(testConfig.TestParams["filelogger"], out fileLogger); + + string logfileName = testConfig.TestParams["logfilename"]; + + if (consoleLogger) + { + Loggers.Add(new ConsoleLogger()); + } + + if (fileLogger) + { + string fileNameString = logfileName + ".txt"; + 
Loggers.Add(new FileLogger(fileNameString)); + } + + bool.TryParse(testConfig.TestParams["loginfo"], out LogInfo); + bool.TryParse(testConfig.TestParams["logverbose"], out LogVerbose); + bool.TryParse(testConfig.TestParams["logerror"], out LogError); + bool.TryParse(testConfig.TestParams["logwarning"], out LogWarning); + } + + public void Error( + string msg, + params object[] objToLog) + { + this.ForEachLogger((logger) => + { + if (LogError) + { + logger.WriteError(msg, objToLog); + } + }); + } + + public void Info( + string msg, + params object[] objToLog) + { + this.ForEachLogger((logger) => + { + if (LogInfo) + { + logger.WriteInfo(msg, objToLog); + } + }); + } + + public void Warning( + string msg, + params object[] objToLog) + { + this.ForEachLogger((logger) => + { + if (LogWarning) + { + logger.WriteWarning(msg, objToLog); + } + }); + } + + public void Verbose( + string msg, + params object[] objToLog) + { + this.ForEachLogger((logger) => + { + if (LogVerbose) + { + logger.WriteVerbose(msg, objToLog); + } + }); + } + + public void StartTest(string testId) + { + this.ForEachLogger((logger) => + { + logger.StartTest(testId); + }); + } + + public void EndTest(string testId, TestResult testResult) + { + this.ForEachLogger((logger) => + { + logger.EndTest(testId, testResult); + }); + } + + public void Close() + { + this.ForEachLogger((logger) => + { + logger.Close(); + }); + } + + private void ForEachLogger(Action action) + { + lock (this.loggersLock) + { + foreach (ILogger logger in Loggers) + { + action(logger); + } + } + } + } +} diff --git a/tools/AssemblyInfo/SharedAssemblyInfo.cs b/tools/AssemblyInfo/SharedAssemblyInfo.cs new file mode 100644 index 00000000..02c8f72a --- /dev/null +++ b/tools/AssemblyInfo/SharedAssemblyInfo.cs @@ -0,0 +1,32 @@ +//------------------------------------------------------------------------------ +// +// Copyright (c) Microsoft Corporation +// +// +// Assembly global configuration. 
+// +//------------------------------------------------------------------------------ + +using System; +using System.Reflection; +using System.Resources; +using System.Runtime.InteropServices; + +[assembly: AssemblyVersion("0.1.0.0")] +[assembly: AssemblyFileVersion("0.1.0.2")] + +[assembly: AssemblyCompany("Microsoft")] +[assembly: AssemblyProduct("Microsoft Azure Storage")] +[assembly: AssemblyCopyright("Copyright © 2015 Microsoft Corp.")] +[assembly: AssemblyTrademark("Microsoft ® is a registered trademark of Microsoft Corporation.")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. +[assembly: ComVisible(false)] + +[assembly: NeutralResourcesLanguageAttribute("en-US")] + +[assembly: CLSCompliant(false)] + diff --git a/tools/analysis/fxcop/azure-storage-dm.ruleset b/tools/analysis/fxcop/azure-storage-dm.ruleset new file mode 100644 index 00000000..332a0a32 --- /dev/null +++ b/tools/analysis/fxcop/azure-storage-dm.ruleset @@ -0,0 +1,363 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/tools/apidoc/.gitignore b/tools/apidoc/.gitignore new file mode 100644 index 
00000000..1d39e8b1 --- /dev/null +++ b/tools/apidoc/.gitignore @@ -0,0 +1 @@ +Help/ \ No newline at end of file diff --git a/tools/apidoc/dmlib.shfbproj b/tools/apidoc/dmlib.shfbproj new file mode 100644 index 00000000..de501509 --- /dev/null +++ b/tools/apidoc/dmlib.shfbproj @@ -0,0 +1,62 @@ + + + + + Debug + AnyCPU + 2.0 + {c33e44be-7d4f-428f-bed5-2d5b03ac5d90} + 1.9.9.0 + + Documentation + Documentation + Documentation + + .NET Framework 4.5 + .\Help\ + Documentation + en-US + OnlyWarningsAndErrors + shfb.log + Website + False + True + False + False + True + 2 + False + C# + Blank + False + VS2013 + False + Guid + Microsoft Azure Storage Data Movement Library + AboveNamespaces + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/tools/nupkg/Microsoft.Azure.Storage.DataMovement.nuspec b/tools/nupkg/Microsoft.Azure.Storage.DataMovement.nuspec new file mode 100644 index 00000000..ad3261d6 --- /dev/null +++ b/tools/nupkg/Microsoft.Azure.Storage.DataMovement.nuspec @@ -0,0 +1,38 @@ + + + + Microsoft.Azure.Storage.DataMovement + 0.0.2 + Microsoft Azure Storage Data Movement Library + Microsoft + Microsoft + http://go.microsoft.com/fwlink/?LinkId=331471 + http://go.microsoft.com/fwlink/?LinkId=235168 + http://go.microsoft.com/fwlink/?LinkID=288890 + true + Microsoft Azure Storage DataMovement Library offers a set of APIs extending the existing Azure Storage .Net Client Library to help customer transfer Azure Blob and File Storage with high-performance, scalability and reliability. 
+ For this release, see notes - https://github.com/Azure/azure-storage-net-data-movement/blob/master/README.md and https://github.com/Azure/azure-storage-net-data-movement/blob/master/changelog.txt + Microsoft Azure Storage team's blog - http://blogs.msdn.com/b/windowsazurestorage/ + A client library designed for high-performance and scalable uploading, downloading and copying data to and from Microsoft Azure Blob and File Storage + Microsoft, Azure, Storage, Blob, File, DataMovement, Upload, Download, Copy, High-Performance, Scalable, Reliable, windowsazureofficial + + + + + + + + + + + + + + + + + + + + + diff --git a/tools/nupkg/buildNupkg.cmd b/tools/nupkg/buildNupkg.cmd new file mode 100644 index 00000000..5c4eff7a --- /dev/null +++ b/tools/nupkg/buildNupkg.cmd @@ -0,0 +1,10 @@ +pushd %~dp0 +rmdir /s /q .\lib +mkdir .\lib\net45 +pushd ..\.. +del /q /f *.nupkg +copy .\lib\bin\Release\Microsoft.WindowsAzure.Storage.DataMovement.dll .\tools\nupkg\lib\net45 +.\.nuget\nuget.exe pack .\tools\nupkg\Microsoft.Azure.Storage.DataMovement.nuspec +popd +rmdir /s /q .\lib +popd \ No newline at end of file diff --git a/tools/scripts/InjectBuildNumber.ps1 b/tools/scripts/InjectBuildNumber.ps1 new file mode 100644 index 00000000..622ccfdc --- /dev/null +++ b/tools/scripts/InjectBuildNumber.ps1 @@ -0,0 +1,30 @@ +Function UpdateVersionInFile +{ + Param ([string]$path, [string]$prefix, [string]$suffix, [int]$verNum) + + if ($env:BUILD_NUMBER) + { + + $lines = Get-Content $path -Encoding UTF8 + + $new_lines = $lines | %{ + if ($_.StartsWith($prefix)) + { + $num = $_.Substring($prefix.Length, $_.Length - $prefix.Length - $suffix.Length) + $num_p = $num.Split('.') + $new_num = [System.String]::Join('.', $num_p[0 .. 
($verNum-2)] + $env:BUILD_NUMBER) + return $prefix + $new_num + $suffix + } + else + { + return $_ + } + } + + Set-Content -Path $path -Value $new_lines -Encoding UTF8 + } +} + +UpdateVersionInFile ((Split-Path -Parent $PSCommandPath) + '\..\nupkg\Microsoft.Azure.Storage.DataMovement.nuspec') ' ' '' 3 + +UpdateVersionInFile ((Split-Path -Parent $PSCommandPath) + '\..\AssemblyInfo\SharedAssemblyInfo.cs') '[assembly: AssemblyFileVersion("' '")]' 4 \ No newline at end of file diff --git a/tools/strongnamekeys/fake/windows.snk b/tools/strongnamekeys/fake/windows.snk new file mode 100644 index 00000000..695f1b38 Binary files /dev/null and b/tools/strongnamekeys/fake/windows.snk differ