diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 00000000..1ff0c423
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,63 @@
+###############################################################################
+# Set default behavior to automatically normalize line endings.
+###############################################################################
+* text=auto
+
+###############################################################################
+# Set default behavior for command prompt diff.
+#
+# This is needed for earlier builds of msysgit that do not have it on by
+# default for csharp files.
+# Note: This is only used by command line
+###############################################################################
+#*.cs diff=csharp
+
+###############################################################################
+# Set the merge driver for project and solution files
+#
+# Merging from the command prompt will add diff markers to the files if there
+# are conflicts (Merging from VS is not affected by the settings below, in VS
+# the diff markers are never inserted). Diff markers may cause the following
+# file extensions to fail to load in VS. An alternative would be to treat
+# these files as binary and thus will always conflict and require user
+# intervention with every merge. To do so, just uncomment the entries below
+###############################################################################
+#*.sln merge=binary
+#*.csproj merge=binary
+#*.vbproj merge=binary
+#*.vcxproj merge=binary
+#*.vcproj merge=binary
+#*.dbproj merge=binary
+#*.fsproj merge=binary
+#*.lsproj merge=binary
+#*.wixproj merge=binary
+#*.modelproj merge=binary
+#*.sqlproj merge=binary
+#*.wwaproj merge=binary
+
+###############################################################################
+# behavior for image files
+#
+# image files are treated as binary by default.
+###############################################################################
+#*.jpg binary
+#*.png binary
+#*.gif binary
+
+###############################################################################
+# diff behavior for common document formats
+#
+# Convert binary document formats to text before diffing them. This feature
+# is only available from the command line. Turn it on by uncommenting the
+# entries below.
+###############################################################################
+#*.doc diff=astextplain
+#*.DOC diff=astextplain
+#*.docx diff=astextplain
+#*.DOCX diff=astextplain
+#*.dot diff=astextplain
+#*.DOT diff=astextplain
+#*.pdf diff=astextplain
+#*.PDF diff=astextplain
+#*.rtf diff=astextplain
+#*.RTF diff=astextplain
diff --git a/.nuget/NuGet.Config b/.nuget/NuGet.Config
new file mode 100644
index 00000000..67f8ea04
--- /dev/null
+++ b/.nuget/NuGet.Config
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.nuget/NuGet.exe b/.nuget/NuGet.exe
new file mode 100644
index 00000000..9ca66594
Binary files /dev/null and b/.nuget/NuGet.exe differ
diff --git a/.nuget/NuGet.targets b/.nuget/NuGet.targets
new file mode 100644
index 00000000..3f8c37b2
--- /dev/null
+++ b/.nuget/NuGet.targets
@@ -0,0 +1,144 @@
+
+
+
+ $(MSBuildProjectDirectory)\..\
+
+
+ false
+
+
+ false
+
+
+ true
+
+
+ false
+
+
+
+
+
+
+
+
+
+
+ $([System.IO.Path]::Combine($(SolutionDir), ".nuget"))
+
+
+
+
+ $(SolutionDir).nuget
+
+
+
+ $(MSBuildProjectDirectory)\packages.$(MSBuildProjectName.Replace(' ', '_')).config
+ $(MSBuildProjectDirectory)\packages.$(MSBuildProjectName).config
+
+
+
+ $(MSBuildProjectDirectory)\packages.config
+ $(PackagesProjectConfig)
+
+
+
+
+ $(NuGetToolsPath)\NuGet.exe
+ @(PackageSource)
+
+ "$(NuGetExePath)"
+ mono --runtime=v4.0.30319 "$(NuGetExePath)"
+
+ $(TargetDir.Trim('\\'))
+
+ -RequireConsent
+ -NonInteractive
+
+ "$(SolutionDir) "
+ "$(SolutionDir)"
+
+
+ $(NuGetCommand) install "$(PackagesConfig)" -source "$(PackageSources)" $(NonInteractiveSwitch) $(RequireConsentSwitch) -solutionDir $(PaddedSolutionDir)
+ $(NuGetCommand) pack "$(ProjectPath)" -Properties "Configuration=$(Configuration);Platform=$(Platform)" $(NonInteractiveSwitch) -OutputDirectory "$(PackageOutputDir)" -symbols
+
+
+
+ RestorePackages;
+ $(BuildDependsOn);
+
+
+
+
+ $(BuildDependsOn);
+ BuildPackage;
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/DataMovement.sln b/DataMovement.sln
new file mode 100644
index 00000000..0edc60f6
--- /dev/null
+++ b/DataMovement.sln
@@ -0,0 +1,82 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio 2013
+VisualStudioVersion = 12.0.21005.1
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DataMovement", "lib\DataMovement.csproj", "{B821E031-09CC-48F0-BDC6-2793228D4027}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = ".nuget", ".nuget", "{E2E6D76F-6339-4E02-96EB-94CC8E6D62B2}"
+ ProjectSection(SolutionItems) = preProject
+ .nuget\NuGet.Config = .nuget\NuGet.Config
+ .nuget\NuGet.exe = .nuget\NuGet.exe
+ .nuget\NuGet.targets = .nuget\NuGet.targets
+ EndProjectSection
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Test", "Test", "{4353D299-C4E9-41FF-BB35-6769BACA424A}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DMLibTest", "test\DMLibTest\DMLibTest.csproj", "{2A4656A4-F744-4653-A9D6-15112E9AB352}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DMLibTestCodeGen", "test\DMLibTestCodeGen\DMLibTestCodeGen.csproj", "{7018EE4E-D389-424E-A8DD-F9B4FFDA5194}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MsTestLib", "test\MsTestLib\MsTestLib.csproj", "{AC39B50F-DC27-4411-9ED4-A4A137190ACB}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Debug|Mixed Platforms = Debug|Mixed Platforms
+ Debug|Win32 = Debug|Win32
+ Release|Any CPU = Release|Any CPU
+ Release|Mixed Platforms = Release|Mixed Platforms
+ Release|Win32 = Release|Win32
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {B821E031-09CC-48F0-BDC6-2793228D4027}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {B821E031-09CC-48F0-BDC6-2793228D4027}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {B821E031-09CC-48F0-BDC6-2793228D4027}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
+ {B821E031-09CC-48F0-BDC6-2793228D4027}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU
+ {B821E031-09CC-48F0-BDC6-2793228D4027}.Debug|Win32.ActiveCfg = Debug|Any CPU
+ {B821E031-09CC-48F0-BDC6-2793228D4027}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {B821E031-09CC-48F0-BDC6-2793228D4027}.Release|Any CPU.Build.0 = Release|Any CPU
+ {B821E031-09CC-48F0-BDC6-2793228D4027}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU
+ {B821E031-09CC-48F0-BDC6-2793228D4027}.Release|Mixed Platforms.Build.0 = Release|Any CPU
+ {B821E031-09CC-48F0-BDC6-2793228D4027}.Release|Win32.ActiveCfg = Release|Any CPU
+ {2A4656A4-F744-4653-A9D6-15112E9AB352}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {2A4656A4-F744-4653-A9D6-15112E9AB352}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {2A4656A4-F744-4653-A9D6-15112E9AB352}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
+ {2A4656A4-F744-4653-A9D6-15112E9AB352}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU
+ {2A4656A4-F744-4653-A9D6-15112E9AB352}.Debug|Win32.ActiveCfg = Debug|Any CPU
+ {2A4656A4-F744-4653-A9D6-15112E9AB352}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {2A4656A4-F744-4653-A9D6-15112E9AB352}.Release|Any CPU.Build.0 = Release|Any CPU
+ {2A4656A4-F744-4653-A9D6-15112E9AB352}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU
+ {2A4656A4-F744-4653-A9D6-15112E9AB352}.Release|Mixed Platforms.Build.0 = Release|Any CPU
+ {2A4656A4-F744-4653-A9D6-15112E9AB352}.Release|Win32.ActiveCfg = Release|Any CPU
+ {7018EE4E-D389-424E-A8DD-F9B4FFDA5194}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {7018EE4E-D389-424E-A8DD-F9B4FFDA5194}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {7018EE4E-D389-424E-A8DD-F9B4FFDA5194}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
+ {7018EE4E-D389-424E-A8DD-F9B4FFDA5194}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU
+ {7018EE4E-D389-424E-A8DD-F9B4FFDA5194}.Debug|Win32.ActiveCfg = Debug|Any CPU
+ {7018EE4E-D389-424E-A8DD-F9B4FFDA5194}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {7018EE4E-D389-424E-A8DD-F9B4FFDA5194}.Release|Any CPU.Build.0 = Release|Any CPU
+ {7018EE4E-D389-424E-A8DD-F9B4FFDA5194}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU
+ {7018EE4E-D389-424E-A8DD-F9B4FFDA5194}.Release|Mixed Platforms.Build.0 = Release|Any CPU
+ {7018EE4E-D389-424E-A8DD-F9B4FFDA5194}.Release|Win32.ActiveCfg = Release|Any CPU
+ {AC39B50F-DC27-4411-9ED4-A4A137190ACB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {AC39B50F-DC27-4411-9ED4-A4A137190ACB}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {AC39B50F-DC27-4411-9ED4-A4A137190ACB}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
+ {AC39B50F-DC27-4411-9ED4-A4A137190ACB}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU
+ {AC39B50F-DC27-4411-9ED4-A4A137190ACB}.Debug|Win32.ActiveCfg = Debug|Any CPU
+ {AC39B50F-DC27-4411-9ED4-A4A137190ACB}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {AC39B50F-DC27-4411-9ED4-A4A137190ACB}.Release|Any CPU.Build.0 = Release|Any CPU
+ {AC39B50F-DC27-4411-9ED4-A4A137190ACB}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU
+ {AC39B50F-DC27-4411-9ED4-A4A137190ACB}.Release|Mixed Platforms.Build.0 = Release|Any CPU
+ {AC39B50F-DC27-4411-9ED4-A4A137190ACB}.Release|Win32.ActiveCfg = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+ GlobalSection(NestedProjects) = preSolution
+ {2A4656A4-F744-4653-A9D6-15112E9AB352} = {4353D299-C4E9-41FF-BB35-6769BACA424A}
+ {7018EE4E-D389-424E-A8DD-F9B4FFDA5194} = {4353D299-C4E9-41FF-BB35-6769BACA424A}
+ {AC39B50F-DC27-4411-9ED4-A4A137190ACB} = {4353D299-C4E9-41FF-BB35-6769BACA424A}
+ EndGlobalSection
+EndGlobal
diff --git a/LICENSE b/LICENSE
index ad410e11..5761bc66 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,201 +1,21 @@
-Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "{}"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright {yyyy} {name of copyright owner}
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
\ No newline at end of file
+The MIT License (MIT)
+
+Copyright (c) 2015 Microsoft Corporation
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ THE SOFTWARE.
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 00000000..fb536c37
--- /dev/null
+++ b/README.md
@@ -0,0 +1,154 @@
+# Microsoft Azure Storage Data Movement Library (0.1.0)
+
+The Microsoft Azure Storage Data Movement Library is designed for high-performance uploading, downloading and copying of Azure Storage Blobs and Files.
+
+[AzCopy](https://azure.microsoft.com/documentation/articles/storage-use-azcopy/), the Azure Storage data management command line utility, is built on this library.
+
+For more information about the Azure Storage, please visit [Microsoft Azure Storage Documentation](https://azure.microsoft.com/documentation/services/storage/).
+
+# Features
+
+- Blobs
+ - Download/Upload/Copy Blobs.
+ - Synchronous and asynchronous copy Blobs
+  - Concurrently transfer Blobs and Blob chunks, with a configurable number of concurrent operations
+ - Download Specific Blob Snapshot
+
+- Files
+ - Download/Upload/Copy Files.
+ - Synchronous and asynchronous copy Files
+  - Concurrently transfer Files and File ranges, with a configurable number of concurrent operations
+
+- General
+ - Track data transfer progress
+ - Recover the data transfer
+ - Set Access Condition
+ - Set User Agent Suffix
+
+# Getting started
+
+For the best development experience, we recommend that developers use the official Microsoft NuGet packages for libraries. NuGet packages are regularly updated with new functionality and hotfixes.
+
+
+## Requirements
+
+To call Azure services, you must first have an Azure subscription. Sign up for a [free trial](/en-us/pricing/free-trial/) or use your [MSDN subscriber benefits](/en-us/pricing/member-offers/msdn-benefits-details/).
+
+
+## Download & Install
+
+
+### Via Git
+
+To get the source code of the SDK via git just type:
+
+```bash
+git clone https://github.com/Azure/azure-storage-net-data-movement.git
+cd azure-storage-net-data-movement
+```
+
+### Via NuGet
+
+To get the binaries of this library as distributed by Microsoft, ready for use
+within your project you can also have them installed by the .NET package manager [NuGet](http://www.nuget.org/).
+
+`Install-Package WindowsAzure.Storage.DataMovement`
+
+
+## Dependencies
+
+### Azure Storage Client Library for .NET
+
+This version depends on Azure Storage Client Library for .NET.
+
+- [WindowsAzure.Storage](https://www.nuget.org/packages/WindowsAzure.Storage/)
+
+
+
+## Code Samples
+
+Find more samples at [getting started with Storage Data Movement Library (TBC)]() and the [sample folder (TBC)]().
+
+### Upload a blob
+
+First, include the classes you need; here we include the Storage client library, the Storage data movement library and .NET threading, because the data movement library provides Task Asynchronous interfaces to transfer storage objects:
+
+```csharp
+using System;
+using System.Threading;
+using Microsoft.WindowsAzure.Storage;
+using Microsoft.WindowsAzure.Storage.Auth;
+using Microsoft.WindowsAzure.Storage.Blob;
+using Microsoft.WindowsAzure.Storage.DataMovement;
+```
+
+Now use the interfaces provided by Storage client lib to setup the storage context (find more details at [how to use Blob Storage from .NET](https://azure.microsoft.com/documentation/articles/storage-dotnet-how-to-use-blobs/)):
+
+```csharp
+CloudStorageAccount account = CloudStorageAccount.Parse(
+ configurationManager.ConnectionStrings["StorageConnectionString"]);
+CloudBlobClient blobClient = account.CreateCloudBlobClient();
+CloudBlobContainer blobContainer = blobClient.GetContainerReference("mycontainer");
+blobContainer.CreateIfNotExists();
+string sourcePath = "path\\to\\test.txt";
+CloudBlockBlob destBlob = blobContainer.GetBlockBlobReference("myblob");
+```
+
+Once you setup the storage blob context, you can start to use `WindowsAzure.Storage.DataMovement.TransferManager` to upload the blob and track the upload progress,
+
+```csharp
+// Setup the number of the concurrent operations
+TransferManager.Configurations.ParallelOperations = 64;
+// Setup the transfer context and track the upload progress
+TransferContext context = new TransferContext();
+context.ProgressHandler = new Progress((progress) =>
+{
+ Console.WriteLine("Bytes uploaded: {0}/{1}",
+ progress.BytesTransferred, progress.TotalSize);
+});
+// Upload a local blob
+var task = TransferManager.UploadAsync(
+ sourcePath, destBlob, null, context, CancellationToken.None);
+task.Wait();
+```
+# Best Practice
+
+### Increase .NET HTTP connections limit
+By default, the .Net HTTP connection limit is 2. This implies that only two concurrent connections can be maintained. It prevents more parallel connections accessing Azure blob storage from your application.
+
+AzCopy sets ServicePointManager.DefaultConnectionLimit to eight times the number of cores by default. To achieve comparable performance when using the Data Movement Library alone, we recommend you set this value as well.
+
+```csharp
+ServicePoint myServicePoint = ServicePointManager.FindServicePoint(myServiceUri);
+myServicePoint.ConnectionLimit = 48;
+```
+
+### Turn off 100-continue
+When the property "Expect100Continue" is set to true, client requests that use the PUT and POST methods will add an Expect: 100-continue header to the request and it will expect to receive a 100-Continue response from the server to indicate that the client should send the data to be posted. This mechanism allows clients to avoid sending large amounts of data over the network when the server, based on the request headers, intends to reject the request.
+
+However, once the entire payload is received on the server end, other errors may still occur. And if Windows Azure clients have tested the client well enough to ensure that it is not sending any bad requests, clients could turn off 100-continue so that the entire request is sent in one roundtrip. This is especially true when clients send small size storage objects.
+
+```csharp
+ServicePointManager.Expect100Continue = false;
+```
+
+# Need Help?
+Be sure to check out the Microsoft Azure [Developer Forums on MSDN](http://go.microsoft.com/fwlink/?LinkId=234489) if you have trouble with the provided code or use StackOverflow.
+
+
+# Collaborate & Contribute
+
+We gladly accept community contributions.
+
+- Issues: Please report bugs using the Issues section of GitHub
+- Forums: Interact with the development teams on StackOverflow or the Microsoft Azure Forums
+- Source Code Contributions: Please follow the [contribution guidelines for Microsoft Azure open source](http://azure.github.io/guidelines.html) that details information on onboarding as a contributor
+
+For general suggestions about Microsoft Azure please use our [UserVoice forum](http://feedback.azure.com/forums/34192--general-feedback).
+
+
+# Learn More
+
+- [Storage Data Movement Library API reference (TBC)]()
+- [Storage Client Library Reference for .NET - MSDN](http://msdn.microsoft.com/library/azure/dn495001(v=azure.10).aspx)
+- [Azure Storage Team Blog](http://blogs.msdn.com/b/windowsazurestorage/)
diff --git a/changelog.txt b/changelog.txt
new file mode 100644
index 00000000..3d65d23f
--- /dev/null
+++ b/changelog.txt
@@ -0,0 +1,2 @@
+2015.07.17 Version 0.1.0
+ * Initial Release
diff --git a/lib/AssemblyInfo.cs b/lib/AssemblyInfo.cs
new file mode 100644
index 00000000..513b08a9
--- /dev/null
+++ b/lib/AssemblyInfo.cs
@@ -0,0 +1,15 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+using System;
+using System.Reflection;
+using System.Resources;
+using System.Runtime.InteropServices;
+
+// General Information about an assembly is controlled through the following
+// set of attributes. Change these attribute values to modify the information
+// associated with an assembly.
+[assembly: AssemblyTitle("Microsoft.WindowsAzure.Storage.DataMovement.dll")]
+[assembly: AssemblyDescription("")]
diff --git a/lib/Constants.cs b/lib/Constants.cs
new file mode 100644
index 00000000..0dc9f3d6
--- /dev/null
+++ b/lib/Constants.cs
@@ -0,0 +1,140 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+namespace Microsoft.WindowsAzure.Storage.DataMovement
+{
+ using System;
+ using System.Reflection;
+
+ ///
+ /// Constants for use with the transfer classes.
+ ///
+ public static class Constants
+ {
+ ///
+ /// Stores the max block size, 4MB.
+ ///
+ public const int MaxBlockSize = 4 * 1024 * 1024;
+
+ ///
+ /// Default block size, 4MB.
+ ///
+ public const int DefaultBlockSize = 4 * 1024 * 1024;
+
+ ///
+ /// Define cache size for one parallel operation.
+ ///
+ internal const long CacheSizeMultiplierInByte = 12 * 1024 * 1024;
+
+ ///
+ /// Default to root container name if none is specified.
+ ///
+ internal const string DefaultContainerName = "$root";
+
+ ///
+ /// Minimum block size, 256KB.
+ ///
+ internal const int MinBlockSize = 256 * 1024;
+
+ ///
+ /// Stores the max page blob file size, 1TB.
+ ///
+ internal const long MaxPageBlobFileSize = (long)1024 * 1024 * 1024 * 1024;
+
+ ///
+ /// Stores the max block blob file size, 50000 * 4M.
+ ///
+ internal const long MaxBlockBlobFileSize = (long)50000 * 4 * 1024 * 1024;
+
+ ///
+ /// Stores the max cloud file size, 1TB.
+ ///
+ internal const long MaxCloudFileSize = (long)1024 * 1024 * 1024 * 1024;
+
+ ///
+ /// Max transfer window size.
+ /// There can be multiple threads to transfer a file,
+ /// and we need to record transfer window
+ /// and have constant length for a transfer entry record in restart journal,
+ /// so set a limitation for transfer window here.
+ ///
+ internal const int MaxCountInTransferWindow = 128;
+
+ ///
+ /// Length to get page ranges in one request.
+ /// The blog post http://blogs.msdn.com/b/windowsazurestorage/archive/2012/03/26/getting-the-page-ranges-of-a-large-page-blob-in-segments.aspx
+ /// notes that it's safe to get page ranges of 150MB in one request.
+ /// We use 148MB which is multiples of 4MB.
+ ///
+ internal const long PageRangesSpanSize = 148 * 1024 * 1024;
+
+ ///
+ /// Length to get file ranges in one request.
+ /// Use the same number as page blob for now because cloud file leverages page blob in implementation.
+ /// TODO: update this number when doc for cloud file is available.
+ ///
+ internal const long FileRangeSpanSize = 148 * 1024 * 1024;
+
+ ///
+ /// Percentage of available memory we'll try to use for our memory cache.
+ ///
+ internal const double MemoryCacheMultiplier = 0.5;
+
+ ///
+ /// Maximum amount of memory to use for our memory cache.
+ ///
+ internal static readonly long MemoryCacheMaximum = GetMemoryCacheMaximum();
+
+ ///
+ /// Maximum amount of cells in memory manager.
+ ///
+ internal const int MemoryManagerCellsMaximum = 8 * 1024;
+
+ ///
+ /// The life time in minutes of SAS auto generated for blob to blob copy.
+ ///
+ internal const int CopySASLifeTimeInMinutes = 7 * 24 * 60;
+
+ ///
+ /// The time in milliseconds to wait to refresh copy status for asynchronous copy.
+ ///
+ internal const long AsyncCopyStatusRefreshWaitTimeInMilliseconds = 100;
+
+ internal const string BlobTypeMismatch = "Blob type of the blob reference doesn't match blob type of the blob.";
+
+ ///
+ /// The product name used in UserAgent header.
+ ///
+ internal const string UserAgentProductName = "DataMovement";
+
+ ///
+ /// UserAgent header.
+ ///
+ internal static readonly string UserAgent = GetUserAgent();
+
+ internal static readonly string FormatVersion = GetFormatVersion();
+
+ ///
+ /// Gets the UserAgent string.
+ ///
+ /// UserAgent string.
+ private static string GetUserAgent()
+ {
+ AssemblyName assemblyName = Assembly.GetExecutingAssembly().GetName();
+ return UserAgentProductName + "/" + assemblyName.Version.ToString();
+ }
+
+ private static string GetFormatVersion()
+ {
+ AssemblyName assemblyName = Assembly.GetExecutingAssembly().GetName();
+ return assemblyName.Name + "/" + assemblyName.Version.ToString();
+ }
+
+ private static long GetMemoryCacheMaximum()
+ {
+ return Environment.Is64BitProcess ? (long)2 * 1024 * 1024 * 1024 : (long)512 * 1024 * 1024;
+ }
+ }
+}
diff --git a/lib/DataMovement.csproj b/lib/DataMovement.csproj
new file mode 100644
index 00000000..bdd9ec6d
--- /dev/null
+++ b/lib/DataMovement.csproj
@@ -0,0 +1,170 @@
+
+
+
+
+ Debug
+ AnyCPU
+ {B821E031-09CC-48F0-BDC6-2793228D4027}
+ Library
+ Properties
+ Microsoft.WindowsAzure.Storage.DataMovement
+ Microsoft.WindowsAzure.Storage.DataMovement
+ v4.5
+ 512
+ ..\
+
+ true
+
+
+ true
+ full
+ false
+ bin\Debug\
+ DEBUG;TRACE
+ prompt
+ 4
+ false
+ ..\tools\analysis\fxcop\azure-storage-dm.ruleset
+ false
+ true
+ bin\Debug\Microsoft.WindowsAzure.Storage.DataMovement.XML
+
+
+ pdbonly
+ true
+ bin\Release\
+ TRACE
+ prompt
+ 4
+ false
+ ..\tools\analysis\fxcop\azure-storage-dm.ruleset
+ true
+ true
+ true
+
+
+ true
+ true
+ ..\tools\strongnamekeys\fake\windows.snk
+
+
+
+ False
+ ..\packages\Microsoft.Data.Edm.5.6.4\lib\net40\Microsoft.Data.Edm.dll
+
+
+ False
+ ..\packages\Microsoft.Data.OData.5.6.4\lib\net40\Microsoft.Data.OData.dll
+
+
+ False
+ ..\packages\Microsoft.Data.Services.Client.5.6.4\lib\net40\Microsoft.Data.Services.Client.dll
+
+
+ False
+ ..\packages\Microsoft.WindowsAzure.ConfigurationManager.1.8.0.0\lib\net35-full\Microsoft.WindowsAzure.Configuration.dll
+
+
+ False
+ ..\packages\WindowsAzure.Storage.5.0.0\lib\net40\Microsoft.WindowsAzure.Storage.dll
+
+
+ False
+ ..\packages\Newtonsoft.Json.6.0.8\lib\net45\Newtonsoft.Json.dll
+
+
+
+
+
+
+ False
+ ..\packages\System.Spatial.5.6.4\lib\net40\System.Spatial.dll
+
+
+
+
+ SharedAssemblyInfo.cs
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ True
+ True
+ Resources.resx
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ResXFileCodeGenerator
+ Resources.Designer.cs
+ Designer
+
+
+
+
+
+
+
+
+
+ This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}.
+
+
+
+
\ No newline at end of file
diff --git a/lib/Exceptions/TransferErrorCode.cs b/lib/Exceptions/TransferErrorCode.cs
new file mode 100644
index 00000000..562b30f1
--- /dev/null
+++ b/lib/Exceptions/TransferErrorCode.cs
@@ -0,0 +1,104 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+
+namespace Microsoft.WindowsAzure.Storage.DataMovement
+{
+ using System;
+
+ ///
+ /// Error codes for TransferException.
+ ///
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1027:MarkEnumsWithFlags")]
+ public enum TransferErrorCode
+ {
+ ///
+ /// No error.
+ ///
+ None = 0,
+
+ ///
+ /// Invalid source location specified.
+ ///
+ InvalidSourceLocation = 1,
+
+ ///
+ /// Invalid destination location specified.
+ ///
+ InvalidDestinationLocation = 2,
+
+ ///
+ /// Failed to open file for upload or download.
+ ///
+ OpenFileFailed = 3,
+
+ ///
+ /// The file to transfer is too large for the destination.
+ ///
+ UploadSourceFileSizeTooLarge = 4,
+
+ ///
+ /// The file size is invalid for the specified blob type.
+ ///
+ UploadBlobSourceFileSizeInvalid = 5,
+
+ ///
+ /// User canceled.
+ ///
+ OperationCanceled = 6,
+
+ ///
+ /// Both Source and Destination are locally accessible locations.
+ /// At least one of source and destination should be an Azure Storage location.
+ ///
+ LocalToLocalTransfersUnsupported = 7,
+
+ ///
+ /// Failed to do asynchronous copy.
+ ///
+ AsyncCopyFailed = 8,
+
+ ///
+ /// Source and destination are the same.
+ ///
+ SameSourceAndDestination = 9,
+
+ ///
+ /// AsyncCopyController detects mismatch between copy id stored in transfer entry and
+ /// that retrieved from server.
+ ///
+ MismatchCopyId = 10,
+
+ ///
+ /// AsyncCopyController fails to retrieve CopyState for the object which we are to monitor.
+ ///
+ FailToRetrieveCopyStateForObject = 11,
+
+ ///
+ /// Fails to allocate memory in MemoryManager.
+ ///
+ FailToAllocateMemory = 12,
+
+ ///
+ /// Fails to get source's last write time.
+ ///
+ FailToGetSourceLastWriteTime = 13,
+
+ ///
+ /// User chose not to overwrite the existing destination.
+ ///
+ NotOverwriteExistingDestination = 14,
+
+ ///
+ /// Transfer with the same source and destination already exists.
+ ///
+ TransferAlreadyExists = 15,
+
+ ///
+ /// Uncategorized transfer error.
+ ///
+ Unknown = 32,
+ }
+}
diff --git a/lib/Exceptions/TransferException.cs b/lib/Exceptions/TransferException.cs
new file mode 100644
index 00000000..07cfa787
--- /dev/null
+++ b/lib/Exceptions/TransferException.cs
@@ -0,0 +1,152 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+
+namespace Microsoft.WindowsAzure.Storage.DataMovement
+{
+ using System;
+ using System.Runtime.Serialization;
+
+ ///
+ /// Base exception class for exceptions thrown by Blob/FileTransferJobs.
+ ///
+ [Serializable]
+ public sealed class TransferException : Exception
+ {
+ ///
+ /// Version of current TransferException serialization format.
+ ///
+ private const int ExceptionVersion = 1;
+
+ ///
+ /// Serialization field name for Version.
+ ///
+ private const string VersionFieldName = "Version";
+
+ ///
+ /// Serialization field name for ErrorCode.
+ ///
+ private const string ErrorCodeFieldName = "ErrorCode";
+
+ ///
+ /// Transfer error code.
+ ///
+ private TransferErrorCode errorCode;
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ public TransferException()
+ {
+ }
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// The message that describes the error.
+ public TransferException(string message)
+ : base(message)
+ {
+ }
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// The error message that explains the reason for the exception.
+ /// The exception that is the cause of the current exception, or a null reference
+ /// if no inner exception is specified.
+ public TransferException(string message, Exception ex)
+ : base(message, ex)
+ {
+ }
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// Transfer error code.
+ public TransferException(TransferErrorCode errorCode)
+ {
+ this.errorCode = errorCode;
+ }
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// Transfer error code.
+ /// Exception message.
+ public TransferException(
+ TransferErrorCode errorCode,
+ string message)
+ : base(message)
+ {
+ this.errorCode = errorCode;
+ }
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// Transfer error code.
+ /// Exception message.
+ /// Inner exception.
+ public TransferException(
+ TransferErrorCode errorCode,
+ string message,
+ Exception innerException)
+ : base(message, innerException)
+ {
+ this.errorCode = errorCode;
+ }
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// Serialization information.
+ /// Streaming context.
+ private TransferException(
+ SerializationInfo info,
+ StreamingContext context)
+ : base(info, context)
+ {
+ int exceptionVersion = info.GetInt32(VersionFieldName);
+
+ if (exceptionVersion >= 1)
+ {
+ this.errorCode = (TransferErrorCode)info.GetInt32(ErrorCodeFieldName);
+ }
+ }
+
+ ///
+ /// Gets the detailed error code.
+ ///
+ /// The error code of the exception.
+ public TransferErrorCode ErrorCode
+ {
+ get
+ {
+ return this.errorCode;
+ }
+ }
+
+ ///
+ /// Serializes the exception.
+ ///
+ /// Serialization info object.
+ /// Streaming context.
+ public override void GetObjectData(
+ SerializationInfo info,
+ StreamingContext context)
+ {
+ if (null == info)
+ {
+ throw new ArgumentNullException("info");
+ }
+
+ info.AddValue(VersionFieldName, ExceptionVersion);
+ info.AddValue(ErrorCodeFieldName, this.errorCode);
+
+ base.GetObjectData(info, context);
+ }
+ }
+}
diff --git a/lib/Extensions/CloudBlobExtensions.cs b/lib/Extensions/CloudBlobExtensions.cs
new file mode 100644
index 00000000..0f65f070
--- /dev/null
+++ b/lib/Extensions/CloudBlobExtensions.cs
@@ -0,0 +1,158 @@
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+
+namespace Microsoft.WindowsAzure.Storage.DataMovement
+{
+ using System;
+ using System.IO;
+ using Microsoft.WindowsAzure.Storage.Blob;
+ using Microsoft.WindowsAzure.Storage.DataMovement.TransferJobs;
+
+ ///
+ /// Defines extension methods for ICloudBlob for use with BlobTransfer.
+ ///
+ public static class CloudBlobExtensions
+ {
+ ///
+ /// Creates a job to start copying from a blob.
+ ///
+ /// Destination blob to copy to.
+ /// User should call the method on this object.
+ /// Source blob to copy from.
+ /// Job object to start copying.
+ public static BlobStartCopyJob CreateStartCopyJob(
+ this ICloudBlob destBlob,
+ ICloudBlob sourceBlob)
+ {
+ return new BlobStartCopyJob()
+ {
+ SourceBlob = sourceBlob,
+ DestBlob = destBlob
+ };
+ }
+
+ ///
+ /// Creates a job to start copying from a URI source.
+ ///
+ /// Destination blob to copy to.
+ /// User should call the method on this object.
+ /// Source to copy from.
+ /// Job object to start copying.
+ public static BlobStartCopyJob CreateStartCopyJob(
+ this ICloudBlob destBlob,
+ Uri sourceUri)
+ {
+ return new BlobStartCopyJob()
+ {
+ SourceUri = sourceUri,
+ DestBlob = destBlob
+ };
+ }
+
+ ///
+ /// Creates a job to copy from a blob.
+ ///
+ /// Destination blob to copy to.
+ /// User should call the method on this object.
+ /// Source blob to copy from.
+ /// Job object to do copying.
+ public static BlobCopyJob CreateCopyJob(
+ this ICloudBlob destBlob,
+ ICloudBlob sourceBlob)
+ {
+ return new BlobCopyJob()
+ {
+ SourceBlob = sourceBlob,
+ DestBlob = destBlob
+ };
+ }
+
+ ///
+ /// Creates a job to copy from a URI source.
+ ///
+ /// Destination blob to copy to.
+ /// User should call the method on this object.
+ /// Source to copy from.
+ /// Job object to do copying.
+ public static BlobCopyJob CreateCopyJob(
+ this ICloudBlob destBlob,
+ Uri sourceUri)
+ {
+ return new BlobCopyJob()
+ {
+ SourceUri = sourceUri,
+ DestBlob = destBlob
+ };
+ }
+
+ ///
+ /// Creates a job to download a blob.
+ ///
+ /// Source blob that to be downloaded.
+ /// Path of destination to download to.
+ /// Job instance to download blob.
+ public static BlobDownloadJob CreateDownloadJob(
+ this ICloudBlob sourceBlob,
+ string destPath)
+ {
+ return new BlobDownloadJob()
+ {
+ SourceBlob = sourceBlob,
+ DestPath = destPath
+ };
+ }
+
+ ///
+ /// Creates a job to download a blob.
+ ///
+ /// Source blob that to be downloaded.
+ /// Destination stream to download to.
+ /// Job instance to download blob.
+ public static BlobDownloadJob CreateDownloadJob(
+ this ICloudBlob sourceBlob,
+ Stream destStream)
+ {
+ return new BlobDownloadJob()
+ {
+ SourceBlob = sourceBlob,
+ DestStream = destStream
+ };
+ }
+
+ ///
+ /// Creates a job to upload a blob.
+ ///
+ /// Destination blob to upload to.
+ /// Path of source file to upload from.
+ /// Job instance to upload blob.
+ public static BlobUploadJob CreateUploadJob(
+ this ICloudBlob destBlob,
+ string sourcePath)
+ {
+ return new BlobUploadJob()
+ {
+ SourcePath = sourcePath,
+ DestBlob = destBlob
+ };
+ }
+
+ ///
+ /// Creates a job to upload a blob.
+ ///
+ /// Destination blob to upload to.
+ /// Path of source file to upload from.
+ /// Job instance to upload blob.
+ public static BlobUploadJob CreateUploadJob(
+ this ICloudBlob destBlob,
+ Stream sourceStream)
+ {
+ return new BlobUploadJob()
+ {
+ SourceStream = sourceStream,
+ DestBlob = destBlob
+ };
+ }
+ }
+}
diff --git a/lib/Extensions/CloudFileExtensions.cs b/lib/Extensions/CloudFileExtensions.cs
new file mode 100644
index 00000000..e4a701c5
--- /dev/null
+++ b/lib/Extensions/CloudFileExtensions.cs
@@ -0,0 +1,100 @@
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+
+namespace Microsoft.WindowsAzure.Storage.DataMovement
+{
+ using System;
+ using System.IO;
+ using Microsoft.WindowsAzure.Storage.DataMovement.TransferJobs;
+ using Microsoft.WindowsAzure.Storage.File;
+
+ ///
+ /// Defines extension methods for CloudFile to create FileTransferJobs.
+ ///
+ public static class CloudFileExtensions
+ {
+ ///
+ /// Creates a job to download a cloud file.
+ ///
+ /// Source file that to be downloaded.
+ /// Path of destination to download to.
+ /// Job instance to download file.
+ public static FileDownloadJob CreateDownloadJob(
+ this CloudFile sourceFile,
+ string destPath)
+ {
+ return new FileDownloadJob()
+ {
+ SourceFile = sourceFile,
+ DestPath = destPath
+ };
+ }
+
+ ///
+ /// Creates a job to download a cloud file.
+ ///
+ /// Source file that to be downloaded.
+ /// Destination stream to download to.
+ /// Job instance to download file.
+ public static FileDownloadJob CreateDownloadJob(
+ this CloudFile sourceFile,
+ Stream destStream)
+ {
+ return new FileDownloadJob()
+ {
+ SourceFile = sourceFile,
+ DestStream = destStream
+ };
+ }
+
+ ///
+ /// Creates a job to upload a cloud file.
+ ///
+ /// Destination file to upload to.
+ /// Path of source file to upload from.
+ /// Job instance to upload file.
+ public static FileUploadJob CreateUploadJob(
+ this CloudFile destFile,
+ string sourcePath)
+ {
+ return new FileUploadJob()
+ {
+ DestFile = destFile,
+ SourcePath = sourcePath
+ };
+ }
+
+ ///
+ /// Creates a job to upload a cloud file.
+ ///
+ /// Destination file to upload to.
+ /// Path of source file to upload from.
+ /// Job instance to upload file.
+ public static FileUploadJob CreateUploadJob(
+ this CloudFile destFile,
+ Stream sourceStream)
+ {
+ return new FileUploadJob()
+ {
+ DestFile = destFile,
+ SourceStream = sourceStream
+ };
+ }
+
+ ///
+ /// Creates a job to delete a cloud file.
+ ///
+ /// File to delete.
+ /// Job instance to delete file.
+ public static FileDeleteJob CreateDeleteJob(
+ this CloudFile fileToDelete)
+ {
+ return new FileDeleteJob()
+ {
+ File = fileToDelete
+ };
+ }
+ }
+}
diff --git a/lib/Extensions/StorageExtensions.cs b/lib/Extensions/StorageExtensions.cs
new file mode 100644
index 00000000..d177cf3e
--- /dev/null
+++ b/lib/Extensions/StorageExtensions.cs
@@ -0,0 +1,189 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+
+namespace Microsoft.WindowsAzure.Storage.DataMovement
+{
+ using System;
+ using System.Globalization;
+ using Microsoft.WindowsAzure.Storage.Auth;
+ using Microsoft.WindowsAzure.Storage.Blob;
+ using Microsoft.WindowsAzure.Storage.File;
+
+ ///
+ /// Extension methods for CloudBlobs for use with BlobTransfer.
+ ///
+ internal static class StorageExtensions
+ {
+ ///
+ /// Determines whether two blobs have the same Uri and SnapshotTime.
+ ///
+ /// Blob to compare.
+ /// Comparand object.
+ /// True if the two blobs have the same Uri and SnapshotTime; otherwise, false.
+ internal static bool Equals(
+ CloudBlob blob,
+ CloudBlob comparand)
+ {
+ if (blob == comparand)
+ {
+ return true;
+ }
+
+ if (null == blob || null == comparand)
+ {
+ return false;
+ }
+
+ return blob.Uri.Equals(comparand.Uri) &&
+ blob.SnapshotTime.Equals(comparand.SnapshotTime);
+ }
+
+ internal static CloudFile GenerateCopySourceFile(
+ this CloudFile file)
+ {
+ if (null == file)
+ {
+ throw new ArgumentNullException("file");
+ }
+
+ string sasToken = GetFileSASToken(file);
+
+ if (string.IsNullOrEmpty(sasToken))
+ {
+ return file;
+ }
+
+ return new CloudFile(file.Uri, new StorageCredentials(sasToken));
+ }
+
+ private static string GetFileSASToken(CloudFile file)
+ {
+ if (null == file.ServiceClient.Credentials
+ || file.ServiceClient.Credentials.IsAnonymous)
+ {
+ return string.Empty;
+ }
+ else if (file.ServiceClient.Credentials.IsSAS)
+ {
+ return file.ServiceClient.Credentials.SASToken;
+ }
+
+ // SAS lifetime is set by Constants.CopySASLifeTimeInMinutes (currently 7 days).
+ TimeSpan sasLifeTime = TimeSpan.FromMinutes(Constants.CopySASLifeTimeInMinutes);
+
+ SharedAccessFilePolicy policy = new SharedAccessFilePolicy()
+ {
+ SharedAccessExpiryTime = DateTime.Now.Add(sasLifeTime),
+ Permissions = SharedAccessFilePermissions.Read,
+ };
+
+ return file.GetSharedAccessSignature(policy);
+ }
+
+ ///
+ /// Append an auto generated SAS to a blob uri.
+ ///
+ /// Blob to append SAS.
+ /// Blob Uri with SAS appended.
+ internal static CloudBlob GenerateCopySourceBlob(
+ this CloudBlob blob)
+ {
+ if (null == blob)
+ {
+ throw new ArgumentNullException("blob");
+ }
+
+ string sasToken = GetBlobSasToken(blob);
+
+ if (string.IsNullOrEmpty(sasToken))
+ {
+ return blob;
+ }
+
+ Uri blobUri = null;
+
+ if (blob.IsSnapshot)
+ {
+ blobUri = blob.SnapshotQualifiedUri;
+ }
+ else
+ {
+ blobUri = blob.Uri;
+ }
+
+ return Utils.GetBlobReference(blobUri, new StorageCredentials(sasToken), blob.BlobType);
+ }
+
+ ///
+ /// Append an auto generated SAS to a blob uri.
+ ///
+ /// Blob to append SAS.
+ /// Blob Uri with SAS appended.
+ internal static Uri GenerateUriWithCredentials(
+ this CloudBlob blob)
+ {
+ if (null == blob)
+ {
+ throw new ArgumentNullException("blob");
+ }
+
+ string sasToken = GetBlobSasToken(blob);
+
+ if (string.IsNullOrEmpty(sasToken))
+ {
+ return blob.SnapshotQualifiedUri;
+ }
+
+ string uriStr = null;
+
+ if (blob.IsSnapshot)
+ {
+ uriStr = string.Format(CultureInfo.InvariantCulture, "{0}&{1}", blob.SnapshotQualifiedUri.AbsoluteUri, sasToken.Substring(1));
+ }
+ else
+ {
+ uriStr = string.Format(CultureInfo.InvariantCulture, "{0}{1}", blob.Uri.AbsoluteUri, sasToken);
+ }
+
+ return new Uri(uriStr);
+ }
+
+ private static string GetBlobSasToken(CloudBlob blob)
+ {
+ if (null == blob.ServiceClient.Credentials
+ || blob.ServiceClient.Credentials.IsAnonymous)
+ {
+ return string.Empty;
+ }
+ else if (blob.ServiceClient.Credentials.IsSAS)
+ {
+ return blob.ServiceClient.Credentials.SASToken;
+ }
+
+ // SAS lifetime is set by Constants.CopySASLifeTimeInMinutes (currently 7 days).
+ TimeSpan sasLifeTime = TimeSpan.FromMinutes(Constants.CopySASLifeTimeInMinutes);
+
+ SharedAccessBlobPolicy policy = new SharedAccessBlobPolicy()
+ {
+ SharedAccessExpiryTime = DateTime.Now.Add(sasLifeTime),
+ Permissions = SharedAccessBlobPermissions.Read,
+ };
+
+ CloudBlob rootBlob = null;
+
+ if (!blob.IsSnapshot)
+ {
+ rootBlob = blob;
+ }
+ else
+ {
+ rootBlob = Utils.GetBlobReference(blob.Uri, blob.ServiceClient.Credentials, blob.BlobType);
+ }
+
+ return rootBlob.GetSharedAccessSignature(policy);
+ }
+ }
+}
diff --git a/lib/GlobalMemoryStatusNativeMethods.cs b/lib/GlobalMemoryStatusNativeMethods.cs
new file mode 100644
index 00000000..50d35491
--- /dev/null
+++ b/lib/GlobalMemoryStatusNativeMethods.cs
@@ -0,0 +1,52 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+namespace Microsoft.WindowsAzure.Storage.DataMovement
+{
+ using System.Runtime.InteropServices;
+
+ internal class GlobalMemoryStatusNativeMethods
+ {
+ private MEMORYSTATUSEX memStatus;
+
+ public GlobalMemoryStatusNativeMethods()
+ {
+ this.memStatus = new MEMORYSTATUSEX();
+ if (GlobalMemoryStatusEx(this.memStatus))
+ {
+ this.AvailablePhysicalMemory = this.memStatus.ullAvailPhys;
+ }
+ }
+
+ public ulong AvailablePhysicalMemory
+ {
+ get;
+ private set;
+ }
+
+ [return: MarshalAs(UnmanagedType.Bool)]
+ [DllImport("kernel32.dll", CharSet = CharSet.Auto, SetLastError = true)]
+ private static extern bool GlobalMemoryStatusEx([In, Out] MEMORYSTATUSEX lpBuffer);
+
+ [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Auto)]
+ private class MEMORYSTATUSEX
+ {
+ public uint dwLength;
+ public uint dwMemoryLoad;
+ public ulong ullTotalPhys;
+ public ulong ullAvailPhys;
+ public ulong ullTotalPageFile;
+ public ulong ullAvailPageFile;
+ public ulong ullTotalVirtual;
+ public ulong ullAvailVirtual;
+ public ulong ullAvailExtendedVirtual;
+
+ public MEMORYSTATUSEX()
+ {
+ this.dwLength = (uint)Marshal.SizeOf(typeof(MEMORYSTATUSEX));
+ }
+ }
+ }
+}
diff --git a/lib/MD5HashStream.cs b/lib/MD5HashStream.cs
new file mode 100644
index 00000000..c0784b40
--- /dev/null
+++ b/lib/MD5HashStream.cs
@@ -0,0 +1,462 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+namespace Microsoft.WindowsAzure.Storage.DataMovement
+{
+ using System;
+ using System.Diagnostics;
+ using System.Diagnostics.CodeAnalysis;
+ using System.Globalization;
+ using System.IO;
+ using System.Security.Cryptography;
+ using System.Threading;
+ using System.Threading.Tasks;
+
+ ///
+ /// Class to make thread safe stream access and calculate MD5 hash.
+ ///
+ internal class MD5HashStream : IDisposable
+ {
+ ///
+ /// Stream object.
+ ///
+ private Stream stream;
+
+ ///
+ /// Semaphore object. In our case, we can only have one operation at the same time.
+ ///
+ private SemaphoreSlim semaphore;
+
+ ///
+ /// In restart mode, we start a separate thread to calculate MD5hash of transferred part.
+ /// This variable indicates whether finished to calculate this part of MD5hash.
+ ///
+ private volatile bool finishedSeparateMd5Calculator = false;
+
+ ///
+ /// Indicates whether succeeded in calculating MD5hash of the transferred bytes.
+ ///
+ private bool succeededSeparateMd5Calculator = false;
+
+ ///
+ /// Running md5 hash of the blob being downloaded.
+ ///
+ private MD5CryptoServiceProvider md5hash;
+
+ ///
+ /// Offset of the transferred bytes. We should calculate MD5hash on all bytes before this offset.
+ ///
+ private long md5hashOffset;
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// Stream object.
+ /// Offset of the transferred bytes.
+ /// Whether need to calculate MD5Hash.
+ public MD5HashStream(
+ Stream stream,
+ long lastTransferOffset,
+ bool md5hashCheck)
+ {
+ this.stream = stream;
+ this.md5hashOffset = lastTransferOffset;
+
+ if ((0 == this.md5hashOffset)
+ || (!md5hashCheck))
+ {
+ this.finishedSeparateMd5Calculator = true;
+ this.succeededSeparateMd5Calculator = true;
+ }
+ else
+ {
+ this.semaphore = new SemaphoreSlim(1, 1);
+ }
+
+ if (md5hashCheck)
+ {
+ this.md5hash = new MD5CryptoServiceProvider();
+ }
+
+ if ((!this.finishedSeparateMd5Calculator)
+ && (!this.stream.CanRead))
+ {
+ throw new NotSupportedException(string.Format(
+ CultureInfo.CurrentCulture,
+ Resources.StreamMustSupportReadException,
+ "Stream"));
+ }
+
+ if (!this.stream.CanSeek)
+ {
+ throw new NotSupportedException(string.Format(
+ CultureInfo.CurrentCulture,
+ Resources.StreamMustSupportSeekException,
+ "Stream"));
+ }
+ }
+
+ ///
+ /// Gets a value indicating whether need to calculate MD5 hash.
+ ///
+ public bool CheckMd5Hash
+ {
+ get
+ {
+ return null != this.md5hash;
+ }
+ }
+
+ ///
+ /// Gets MD5 hash bytes.
+ ///
+ public byte[] Hash
+ {
+ get
+ {
+ return null == this.md5hash ? null : this.md5hash.Hash;
+ }
+ }
+
+ ///
+ /// Gets a value indicating whether already finished to calculate MD5 hash of transferred bytes.
+ ///
+ public bool FinishedSeparateMd5Calculator
+ {
+ get
+ {
+ return this.finishedSeparateMd5Calculator;
+ }
+ }
+
+ ///
+ /// Gets a value indicating whether already succeeded in calculating MD5 hash of transferred bytes.
+ ///
+ public bool SucceededSeparateMd5Calculator
+ {
+ get
+ {
+ this.WaitMD5CalculationToFinish();
+ return this.succeededSeparateMd5Calculator;
+ }
+ }
+
+ ///
+ /// Calculate MD5 hash of transferred bytes.
+ ///
+ /// Reference to MemoryManager object to require buffer from.
+ /// Action to check whether to cancel this calculation.
+ public void CalculateMd5(MemoryManager memoryManager, Action checkCancellation)
+ {
+ if (null == this.md5hash)
+ {
+ return;
+ }
+
+ byte[] buffer = null;
+
+ try
+ {
+ buffer = Utils.RequireBuffer(memoryManager, checkCancellation);
+ }
+ catch (Exception)
+ {
+ lock (this.md5hash)
+ {
+ this.finishedSeparateMd5Calculator = true;
+ }
+
+ throw;
+ }
+
+ long offset = 0;
+ int readLength = 0;
+
+ while (true)
+ {
+ lock (this.md5hash)
+ {
+ if (offset >= this.md5hashOffset)
+ {
+ Debug.Assert(
+ offset == this.md5hashOffset,
+ "We should stop the separate calculator thread just at the transferred offset");
+
+ this.succeededSeparateMd5Calculator = true;
+ this.finishedSeparateMd5Calculator = true;
+ break;
+ }
+
+ readLength = (int)Math.Min(this.md5hashOffset - offset, buffer.Length);
+ }
+
+ try
+ {
+ checkCancellation();
+ readLength = this.Read(offset, buffer, 0, readLength);
+
+ lock (this.md5hash)
+ {
+ this.md5hash.TransformBlock(buffer, 0, readLength, null, 0);
+ }
+ }
+ catch (Exception)
+ {
+ lock (this.md5hash)
+ {
+ this.finishedSeparateMd5Calculator = true;
+ }
+
+ memoryManager.ReleaseBuffer(buffer);
+
+ throw;
+ }
+
+ offset += readLength;
+ }
+
+ memoryManager.ReleaseBuffer(buffer);
+ }
+
+ ///
+ /// Begin async read from stream.
+ ///
+ /// Offset in stream to read from.
+ /// The buffer to read the data into.
+ /// The byte offset in buffer at which to begin writing data read from the stream.
+ /// The maximum number of bytes to read.
+ /// Token used to cancel the asynchronous reading.
+ /// A task that represents the asynchronous read operation. The value of the
+ /// TResult parameter contains the total number of bytes read into the buffer.
+ public async Task ReadAsync(long readOffset, byte[] buffer, int offset, int count, CancellationToken cancellationToken)
+ {
+ await this.WaitOnSemaphoreAsync(cancellationToken);
+
+ try
+ {
+ this.stream.Position = readOffset;
+
+ return await this.stream.ReadAsync(
+ buffer,
+ offset,
+ count,
+ cancellationToken);
+ }
+ finally
+ {
+ this.ReleaseSemaphore();
+ }
+ }
+
+ ///
+ /// Begin async write to stream.
+ ///
+ /// Offset in stream to write to.
+ /// The buffer to write the data from.
+ /// The byte offset in buffer from which to begin writing.
+ /// The maximum number of bytes to write.
+ /// Token used to cancel the asynchronous writing.
+ /// A task that represents the asynchronous write operation.
+ public async Task WriteAsync(long writeOffset, byte[] buffer, int offset, int count, CancellationToken cancellationToken)
+ {
+ await this.WaitOnSemaphoreAsync(cancellationToken);
+
+ try
+ {
+ this.stream.Position = writeOffset;
+ await this.stream.WriteAsync(
+ buffer,
+ offset,
+ count,
+ cancellationToken);
+ }
+ finally
+ {
+ this.ReleaseSemaphore();
+ }
+ }
+
+ ///
+ /// Computes the hash value for the specified region of the input byte array
+ /// and copies the specified region of the input byte array to the specified
+ /// region of the output byte array.
+ ///
+ /// Offset in stream of the block on which to calculate MD5 hash.
+ /// The input to compute the hash code for.
+ /// The offset into the input byte array from which to begin using data.
+ /// The number of bytes in the input byte array to use as data.
+ /// A copy of the part of the input array used to compute the hash code.
+ /// The offset into the output byte array from which to begin writing data.
+ /// Whether succeeded in calculating MD5 hash
+ /// or not finished the separate thread to calculate MD5 hash at the time.
+ public bool MD5HashTransformBlock(long streamOffset, byte[] inputBuffer, int inputOffset, int inputCount, byte[] outputBuffer, int outputOffset)
+ {
+ if (null == this.md5hash)
+ {
+ return true;
+ }
+
+ if (!this.finishedSeparateMd5Calculator)
+ {
+ lock (this.md5hash)
+ {
+ if (!this.finishedSeparateMd5Calculator)
+ {
+ if (streamOffset == this.md5hashOffset)
+ {
+ this.md5hashOffset += inputCount;
+ }
+
+ return true;
+ }
+ else
+ {
+ if (!this.succeededSeparateMd5Calculator)
+ {
+ return false;
+ }
+ }
+ }
+ }
+
+ if (streamOffset >= this.md5hashOffset)
+ {
+ Debug.Assert(
+ this.finishedSeparateMd5Calculator,
+ "The separate thread to calculate MD5 hash should have finished or md5hashOffset should get updated.");
+
+ this.md5hash.TransformBlock(inputBuffer, inputOffset, inputCount, outputBuffer, outputOffset);
+ }
+
+ return true;
+ }
+
+ ///
+ /// Computes the hash value for the specified region of the specified byte array.
+ ///
+ /// The input to compute the hash code for.
+ /// The offset into the byte array from which to begin using data.
+ /// The number of bytes in the byte array to use as data.
+ /// An array that is a copy of the part of the input that is hashed.
+ public byte[] MD5HashTransformFinalBlock(byte[] inputBuffer, int inputOffset, int inputCount)
+ {
+ this.WaitMD5CalculationToFinish();
+
+ if (!this.succeededSeparateMd5Calculator)
+ {
+ return null;
+ }
+
+ return null == this.md5hash ? null : this.md5hash.TransformFinalBlock(inputBuffer, inputOffset, inputCount);
+ }
+
+ ///
+ /// Releases or resets unmanaged resources.
+ ///
+ public virtual void Dispose()
+ {
+ this.Dispose(true);
+ }
+
+ ///
+ /// Private dispose method to release managed/unmanaged objects.
+ /// If disposing = true clean up managed resources as well as unmanaged resources.
+ /// If disposing = false only clean up unmanaged resources.
+ ///
+ /// Indicates whether or not to dispose managed resources.
+ protected virtual void Dispose(bool disposing)
+ {
+ if (disposing)
+ {
+ if (null != this.md5hash)
+ {
+ this.md5hash.Clear();
+ this.md5hash = null;
+ }
+
+ if (null != this.semaphore)
+ {
+ this.semaphore.Dispose();
+ this.semaphore = null;
+ }
+ }
+ }
+
+ ///
+ /// Read from stream.
+ ///
+ /// Offset in stream to read from.
+ /// An array of bytes. When this method returns, the buffer contains the specified
+ /// byte array with the values between offset and (offset + count - 1) replaced
+ /// by the bytes read from the current source.
+ /// The zero-based byte offset in buffer at which to begin storing the data read from the current stream.
+ /// The maximum number of bytes to be read from the current stream.
+ /// The total number of bytes read into the buffer.
+ private int Read(long readOffset, byte[] buffer, int offset, int count)
+ {
+ if (!this.finishedSeparateMd5Calculator)
+ {
+ this.semaphore.Wait();
+ }
+
+ try
+ {
+ this.stream.Position = readOffset;
+ int readBytes = this.stream.Read(buffer, offset, count);
+
+ return readBytes;
+ }
+ finally
+ {
+ this.ReleaseSemaphore();
+ }
+ }
+
+ ///
+ /// Wait for one semaphore.
+ ///
+ /// Token used to cancel waiting on the semaphore.
+ private async Task WaitOnSemaphoreAsync(CancellationToken cancellationToken)
+ {
+ if (!this.finishedSeparateMd5Calculator)
+ {
+ await this.semaphore.WaitAsync(cancellationToken);
+ }
+ }
+
+ ///
+ /// Release semaphore.
+ ///
+ private void ReleaseSemaphore()
+ {
+ if (!this.finishedSeparateMd5Calculator)
+ {
+ this.semaphore.Release();
+ }
+ }
+
+ ///
+ /// Wait for MD5 calculation to be finished.
+ /// In our test, MD5 calculation is really fast,
+ /// and SpinOnce has sleep mechanism, so use Spin instead of sleep here.
+ ///
+ private void WaitMD5CalculationToFinish()
+ {
+ if (this.finishedSeparateMd5Calculator)
+ {
+ return;
+ }
+
+ SpinWait sw = new SpinWait();
+
+ while (!this.finishedSeparateMd5Calculator)
+ {
+ sw.SpinOnce();
+ }
+
+ sw.Reset();
+ }
+ }
+}
diff --git a/lib/MemoryManager.cs b/lib/MemoryManager.cs
new file mode 100644
index 00000000..04ec40f3
--- /dev/null
+++ b/lib/MemoryManager.cs
@@ -0,0 +1,139 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+namespace Microsoft.WindowsAzure.Storage.DataMovement
+{
+ using System;
+ using System.Collections.Concurrent;
+
+ ///
+ /// Class for maintaining a pool of memory buffer objects.
+ ///
+ internal class MemoryManager
+ {
+ private MemoryPool memoryPool;
+
+ public MemoryManager(
+ long capacity, int bufferSize)
+ {
+ long availableCells = capacity / bufferSize;
+
+ int cellNumber = (int)Math.Min((long)Constants.MemoryManagerCellsMaximum, availableCells);
+
+ this.memoryPool = new MemoryPool(cellNumber, bufferSize);
+ }
+
+ public byte[] RequireBuffer()
+ {
+ return this.memoryPool.GetBuffer();
+ }
+
+ public void ReleaseBuffer(byte[] buffer)
+ {
+ this.memoryPool.AddBuffer(buffer);
+ }
+
+ private class MemoryPool
+ {
+ public readonly int BufferSize;
+
+ private int availableCells;
+ private int allocatedCells;
+ private object cellsListLock;
+ private MemoryCell cellsListHeadCell;
+ private ConcurrentDictionary cellsInUse;
+
+ public MemoryPool(int cellsCount, int bufferSize)
+ {
+ this.BufferSize = bufferSize;
+
+ this.availableCells = cellsCount;
+ this.allocatedCells = 0;
+ this.cellsListLock = new object();
+ this.cellsListHeadCell = null;
+ this.cellsInUse = new ConcurrentDictionary();
+ }
+
+ public byte[] GetBuffer()
+ {
+ if (this.availableCells > 0)
+ {
+ MemoryCell retCell = null;
+
+ lock (this.cellsListLock)
+ {
+ if (this.availableCells > 0)
+ {
+ if (null != this.cellsListHeadCell)
+ {
+ retCell = this.cellsListHeadCell;
+ this.cellsListHeadCell = retCell.NextCell;
+ retCell.NextCell = null;
+ }
+ else
+ {
+ retCell = new MemoryCell(this.BufferSize);
+ ++this.allocatedCells;
+ }
+
+ --this.availableCells;
+ }
+ }
+
+ if (null != retCell)
+ {
+ this.cellsInUse.TryAdd(retCell.Buffer, retCell);
+ return retCell.Buffer;
+ }
+ }
+
+ return null;
+ }
+
+ public void AddBuffer(byte[] buffer)
+ {
+ if (null == buffer)
+ {
+ throw new ArgumentNullException("buffer");
+ }
+
+ MemoryCell cell;
+ if (this.cellsInUse.TryRemove(buffer, out cell))
+ {
+ lock (this.cellsListLock)
+ {
+ cell.NextCell = this.cellsListHeadCell;
+ this.cellsListHeadCell = cell;
+ ++this.availableCells;
+ }
+ }
+ }
+ }
+
+ private class MemoryCell
+ {
+ private byte[] buffer;
+
+ public MemoryCell(int size)
+ {
+ this.buffer = new byte[size];
+ }
+
+ public MemoryCell NextCell
+ {
+ get;
+ set;
+ }
+
+ public byte[] Buffer
+ {
+ get
+ {
+ return this.buffer;
+ }
+ }
+ }
+ }
+}
diff --git a/lib/OverwriteCallback.cs b/lib/OverwriteCallback.cs
new file mode 100644
index 00000000..a3f70e65
--- /dev/null
+++ b/lib/OverwriteCallback.cs
@@ -0,0 +1,17 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+namespace Microsoft.WindowsAzure.Storage.DataMovement
+{
    /// <summary>
    /// Callback invoked to tell whether to overwrite an existing destination.
    /// </summary>
    /// <param name="sourcePath">Path of the source file used to overwrite the destination.</param>
    /// <param name="destinationPath">Path of the file to be overwritten.</param>
    /// <returns>True if the file should be overwritten; otherwise false.</returns>
    public delegate bool OverwriteCallback(
        string sourcePath,
        string destinationPath);
+}
diff --git a/lib/Resources.Designer.cs b/lib/Resources.Designer.cs
new file mode 100644
index 00000000..e7e704fa
--- /dev/null
+++ b/lib/Resources.Designer.cs
@@ -0,0 +1,569 @@
+//------------------------------------------------------------------------------
+//
+// This code was generated by a tool.
+// Runtime Version:4.0.30319.42000
+//
+// Changes to this file may cause incorrect behavior and will be lost if
+// the code is regenerated.
+//
+//------------------------------------------------------------------------------
+
+namespace Microsoft.WindowsAzure.Storage.DataMovement {
+ using System;
+
+
+ ///
+ /// A strongly-typed resource class, for looking up localized strings, etc.
+ ///
+ // This class was auto-generated by the StronglyTypedResourceBuilder
+ // class via a tool like ResGen or Visual Studio.
+ // To add or remove a member, edit your .ResX file then rerun ResGen
+ // with the /str option, or rebuild your VS project.
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")]
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+ [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
+ internal class Resources {
+
+ private static global::System.Resources.ResourceManager resourceMan;
+
+ private static global::System.Globalization.CultureInfo resourceCulture;
+
+ [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
+ internal Resources() {
+ }
+
+ ///
+ /// Returns the cached ResourceManager instance used by this class.
+ ///
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Resources.ResourceManager ResourceManager {
+ get {
+ if (object.ReferenceEquals(resourceMan, null)) {
+ global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("Microsoft.WindowsAzure.Storage.DataMovement.Resources", typeof(Resources).Assembly);
+ resourceMan = temp;
+ }
+ return resourceMan;
+ }
+ }
+
+ ///
+ /// Overrides the current thread's CurrentUICulture property for all
+ /// resource lookups using this strongly typed resource class.
+ ///
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Globalization.CultureInfo Culture {
+ get {
+ return resourceCulture;
+ }
+ set {
+ resourceCulture = value;
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to AppendBlob.
+ ///
+ internal static string AppendBlob {
+ get {
+ return ResourceManager.GetString("AppendBlob", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Copying from File Storage to append Blob Storage asynchronously is not supported..
+ ///
+ internal static string AsyncCopyFromFileToAppendBlobNotSupportException {
+ get {
+ return ResourceManager.GetString("AsyncCopyFromFileToAppendBlobNotSupportException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Copying from File Storage to page Blob Storage asynchronously is not supported..
+ ///
+ internal static string AsyncCopyFromFileToPageBlobNotSupportException {
+ get {
+ return ResourceManager.GetString("AsyncCopyFromFileToPageBlobNotSupportException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to File size {0} is invalid for {1}, must be a multiple of {2}..
+ ///
+ internal static string BlobFileSizeInvalidException {
+ get {
+ return ResourceManager.GetString("BlobFileSizeInvalidException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to File size {0} is larger than {1} maximum size {2}..
+ ///
+ internal static string BlobFileSizeTooLargeException {
+ get {
+ return ResourceManager.GetString("BlobFileSizeTooLargeException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to The blob transfer has been cancelled..
+ ///
+ internal static string BlobTransferCancelledException {
+ get {
+ return ResourceManager.GetString("BlobTransferCancelledException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to BlockBlob.
+ ///
+ internal static string BlockBlob {
+ get {
+ return ResourceManager.GetString("BlockBlob", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to BlockSize must be between {0} and {1}..
+ ///
+ internal static string BlockSizeOutOfRangeException {
+ get {
+ return ResourceManager.GetString("BlockSizeOutOfRangeException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Cannot deserialize to TransferLocation when its TransferLocationType is {0}..
+ ///
+ internal static string CannotDeserializeLocationType {
+ get {
+ return ResourceManager.GetString("CannotDeserializeLocationType", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to The TransferLocation cannot be serialized when it represents a stream location..
+ ///
+ internal static string CannotSerializeStreamLocation {
+ get {
+ return ResourceManager.GetString("CannotSerializeStreamLocation", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Destination of asynchronous copying must be File Storage or Blob Storage..
+ ///
+ internal static string CanOnlyCopyToFileOrBlobException {
+ get {
+ return ResourceManager.GetString("CanOnlyCopyToFileOrBlobException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to File size {0} is larger than cloud file maximum size {1} bytes..
+ ///
+ internal static string CloudFileSizeTooLargeException {
+ get {
+ return ResourceManager.GetString("CloudFileSizeTooLargeException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to {0} Deserialization failed: Version number doesn't match. Version number:{1}, expect:{2}..
+ ///
+ internal static string DeserializationVersionNotMatchException {
+ get {
+ return ResourceManager.GetString("DeserializationVersionNotMatchException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to User specified blob type does not match the blob type of the existing destination blob..
+ ///
+ internal static string DestinationBlobTypeNotMatch {
+ get {
+ return ResourceManager.GetString("DestinationBlobTypeNotMatch", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Destination might be changed by other process or application..
+ ///
+ internal static string DestinationChangedException {
+ get {
+ return ResourceManager.GetString("DestinationChangedException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Destination must be a base blob..
+ ///
+ internal static string DestinationMustBeBaseBlob {
+ get {
+ return ResourceManager.GetString("DestinationMustBeBaseBlob", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to The MD5 hash calculated from the downloaded data does not match the MD5 hash stored in the property of source: {0}. Please refer to help or documentation for detail.
+ ///MD5 calculated: {1}
+ ///MD5 in property: {2}.
+ ///
+ internal static string DownloadedMd5MismatchException {
+ get {
+ return ResourceManager.GetString("DownloadedMd5MismatchException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Failed to allocate required memory..
+ ///
+ internal static string FailedToAllocateMemoryException {
+ get {
+ return ResourceManager.GetString("FailedToAllocateMemoryException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Failed to copy from "{0}" to "{1}". Copy status: {2}; Description: {3}..
+ ///
+ internal static string FailedToAsyncCopyObjectException {
+ get {
+ return ResourceManager.GetString("FailedToAsyncCopyObjectException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Failed to retrieve the original BlobType..
+ ///
+ internal static string FailedToGetBlobTypeException {
+ get {
+ return ResourceManager.GetString("FailedToGetBlobTypeException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Failed to open file {0}: {1}..
+ ///
+ internal static string FailedToOpenFileException {
+ get {
+ return ResourceManager.GetString("FailedToOpenFileException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Failed to retrieve CopyState for object "{0}"..
+ ///
+ internal static string FailedToRetrieveCopyStateForObjectException {
+ get {
+ return ResourceManager.GetString("FailedToRetrieveCopyStateForObjectException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to The initial entry status {0} is invalid for {1}..
+ ///
+ internal static string InvalidInitialEntryStatusForControllerException {
+ get {
+ return ResourceManager.GetString("InvalidInitialEntryStatusForControllerException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Both Source and Destination are locally accessible locations. At least one of source and destination should be an Azure Storage location..
+ ///
+ internal static string LocalToLocalTransferUnsupportedException {
+ get {
+ return ResourceManager.GetString("LocalToLocalTransferUnsupportedException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to The local copy id is different from the one returned from the server..
+ ///
+ internal static string MismatchFoundBetweenLocalAndServerCopyIdsException {
+ get {
+ return ResourceManager.GetString("MismatchFoundBetweenLocalAndServerCopyIdsException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Blob type '{0}' is not supported..
+ ///
+ internal static string NotSupportedBlobType {
+ get {
+ return ResourceManager.GetString("NotSupportedBlobType", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Skiped file "{0}" because target "{1}" already exists..
+ ///
+ internal static string OverwriteCallbackCancelTransferException {
+ get {
+ return ResourceManager.GetString("OverwriteCallbackCancelTransferException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to PageBlob.
+ ///
+ internal static string PageBlob {
+ get {
+ return ResourceManager.GetString("PageBlob", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Parallel operations count must be positive..
+ ///
+ internal static string ParallelCountNotPositiveException {
+ get {
+ return ResourceManager.GetString("ParallelCountNotPositiveException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to {0} cannot be null..
+ ///
+ internal static string ParameterCannotBeNullException {
+ get {
+ return ResourceManager.GetString("ParameterCannotBeNullException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Exactly one of these parameters must be provided: {0}, {1}, {2}..
+ ///
+ internal static string ProvideExactlyOneOfThreeParameters {
+ get {
+ return ResourceManager.GetString("ProvideExactlyOneOfThreeParameters", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to {0:0.##} bytes.
+ ///
+ internal static string ReadableSizeFormatBytes {
+ get {
+ return ResourceManager.GetString("ReadableSizeFormatBytes", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to {0:0.##}EB.
+ ///
+ internal static string ReadableSizeFormatExaBytes {
+ get {
+ return ResourceManager.GetString("ReadableSizeFormatExaBytes", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to {0:0.##}GB.
+ ///
+ internal static string ReadableSizeFormatGigaBytes {
+ get {
+ return ResourceManager.GetString("ReadableSizeFormatGigaBytes", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to {0:0.##}KB.
+ ///
+ internal static string ReadableSizeFormatKiloBytes {
+ get {
+ return ResourceManager.GetString("ReadableSizeFormatKiloBytes", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to {0:0.##}MB.
+ ///
+ internal static string ReadableSizeFormatMegaBytes {
+ get {
+ return ResourceManager.GetString("ReadableSizeFormatMegaBytes", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to {0:0.##}PB.
+ ///
+ internal static string ReadableSizeFormatPetaBytes {
+ get {
+ return ResourceManager.GetString("ReadableSizeFormatPetaBytes", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to {0:0.##}TB.
+ ///
+ internal static string ReadableSizeFormatTeraBytes {
+ get {
+ return ResourceManager.GetString("ReadableSizeFormatTeraBytes", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Failed to read restartable info from file..
+ ///
+ internal static string RestartableInfoCorruptedException {
+ get {
+ return ResourceManager.GetString("RestartableInfoCorruptedException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to MaximumCacheSize cannot be less than {0}..
+ ///
+ internal static string SmallMemoryCacheSizeLimitationException {
+ get {
+ return ResourceManager.GetString("SmallMemoryCacheSizeLimitationException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Blob type of source and destination must be the same..
+ ///
+ internal static string SourceAndDestinationBlobTypeDifferent {
+ get {
+ return ResourceManager.GetString("SourceAndDestinationBlobTypeDifferent", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Source and destination cannot be the same..
+ ///
+ internal static string SourceAndDestinationLocationCannotBeEqualException {
+ get {
+ return ResourceManager.GetString("SourceAndDestinationLocationCannotBeEqualException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Source blob does not exist..
+ ///
+ internal static string SourceBlobDoesNotExistException {
+ get {
+ return ResourceManager.GetString("SourceBlobDoesNotExistException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to User specified blob type does not match the blob type of the existing source blob..
+ ///
+ internal static string SourceBlobTypeNotMatch {
+ get {
+ return ResourceManager.GetString("SourceBlobTypeNotMatch", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Source does not exist..
+ ///
+ internal static string SourceDoesNotExistException {
+ get {
+ return ResourceManager.GetString("SourceDoesNotExistException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to {0} must support Read..
+ ///
+ internal static string StreamMustSupportReadException {
+ get {
+ return ResourceManager.GetString("StreamMustSupportReadException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to {0} must support Seek..
+ ///
+ internal static string StreamMustSupportSeekException {
+ get {
+ return ResourceManager.GetString("StreamMustSupportSeekException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to {0} must support Write..
+ ///
+ internal static string StreamMustSupportWriteException {
+ get {
+ return ResourceManager.GetString("StreamMustSupportWriteException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to The stream is not expandable..
+ ///
+ internal static string StreamNotExpandable {
+ get {
+ return ResourceManager.GetString("StreamNotExpandable", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Copying from uri to Azure Blob Storage synchronously is not supported..
+ ///
+ internal static string SyncCopyFromUriToAzureBlobNotSupportedException {
+ get {
+ return ResourceManager.GetString("SyncCopyFromUriToAzureBlobNotSupportedException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to Copying from uri to Azure File Storage synchronously is not supported..
+ ///
+ internal static string SyncCopyFromUriToAzureFileNotSupportedException {
+ get {
+ return ResourceManager.GetString("SyncCopyFromUriToAzureFileNotSupportedException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to A transfer operation with the same source and destination already exists..
+ ///
+ internal static string TransferAlreadyExists {
+ get {
+ return ResourceManager.GetString("TransferAlreadyExists", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to TransferEntry.CopyId cannot be null or empty because we need it to verify we are monitoring the right blob copying process..
+ ///
+ internal static string TransferEntryCopyIdCannotBeNullOrEmptyException {
+ get {
+ return ResourceManager.GetString("TransferEntryCopyIdCannotBeNullOrEmptyException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to The transfer failed..
+ ///
+ internal static string UncategorizedException {
+ get {
+ return ResourceManager.GetString("UncategorizedException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to The given blob type {0} is not supported..
+ ///
+ internal static string UnsupportedBlobTypeException {
+ get {
+ return ResourceManager.GetString("UnsupportedBlobTypeException", resourceCulture);
+ }
+ }
+
+ ///
+ /// Looks up a localized string similar to The given transfer location type {0} is not supported..
+ ///
+ internal static string UnsupportedTransferLocationException {
+ get {
+ return ResourceManager.GetString("UnsupportedTransferLocationException", resourceCulture);
+ }
+ }
+ }
+}
diff --git a/lib/Resources.resx b/lib/Resources.resx
new file mode 100644
index 00000000..9e7ab94d
--- /dev/null
+++ b/lib/Resources.resx
@@ -0,0 +1,322 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ text/microsoft-resx
+
+
+ 2.0
+
+
+ System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
+ System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
+
+
+ File size {0} is invalid for {1}, must be a multiple of {2}.
+ {0} is file size. {1} is the destination blob type. {2} should be 512 bytes for pageblob.
+
+
+ File size {0} is larger than {1} maximum size {2}.
+ {0} is file size. {1} is the destination blob type. {2} is the size limit of the destination.
+
+
+ The blob transfer has been cancelled.
+
+
+ BlockSize must be between {0} and {1}.
+
+
+ Cannot deserialize to TransferLocation when its TransferLocationType is {0}.
+
+
+ The TransferLocation cannot be serialized when it represents a stream location.
+
+
+ File size {0} is larger than cloud file maximum size {1} bytes.
+ {0} is file size. {1} is the size limit of the destination.
+
+
+ Destination of asynchronous copying must be File Storage or Blob Storage.
+
+
+ Copying from File Storage to page Blob Storage asynchronously is not supported.
+
+
+ {0} Deserialization failed: Version number doesn't match. Version number:{1}, expect:{2}.
+ {0} is the class name.
+ {1} is the version number in serialization binary.
+ {2} is the expect version number.
+
+
+ Destination might be changed by other process or application.
+
+
+ The MD5 hash calculated from the downloaded data does not match the MD5 hash stored in the property of source: {0}. Please refer to help or documentation for detail.
+MD5 calculated: {1}
+MD5 in property: {2}
+ {0} is the uri of source, {1} is the calculated MD5, {2} is the MD5 stored in the source property
+
+
+ User specified blob type does not match the blob type of the existing destination blob.
+
+
+ Destination must be a base blob.
+
+
+ Failed to allocate required memory.
+
+
+ Failed to copy from "{0}" to "{1}". Copy status: {2}; Description: {3}.
+ {0} is uri of source, {1} is uri of destination. {2} is the copy status, {3} is the copy status description.
+
+
+ Failed to retrieve CopyState for object "{0}".
+ {0} is uri of target object.
+
+
+ Failed to retrieve the original BlobType.
+
+
+ Failed to open file {0}: {1}.
+ {0} is file name, {1} is detailed error message.
+
+
+ The initial entry status {0} is invalid for {1}.
+ {0} is the initial entry status, {1} is the controller.
+
+
+ Both Source and Destination are locally accessible locations. At least one of source and destination should be an Azure Storage location.
+
+
+ The local copy id is different from the one returned from the server.
+
+
+ Blob type '{0}' is not supported.
+ {0} is the blob type name.
+
+
+ Skipped file "{0}" because target "{1}" already exists.
+ {0} is source file name, {1} is destination file name.
+
+
+ Parallel operations count must be positive.
+
+
+ {0} cannot be null.
+ {0} is the property or parameter name.
+
+
+ Exactly one of these parameters must be provided: {0}, {1}, {2}.
+ {0} is the first parameter, {1} is the second one, {2} is the third one.
+
+
+ {0:0.##} bytes
+ {0: -> take value from the first parameter.
+0.##} ->Display at least 1 digit before the decimal point and up to 2 digits after the decimal point.
+
+
+ {0:0.##}EB
+ {0: -> take value from the first parameter.
+0.##} ->Display at least 1 digit before the decimal point and up to 2 digits after the decimal point.
+
+
+ {0:0.##}GB
+ {0: -> take value from the first parameter.
+0.##} ->Display at least 1 digit before the decimal point and up to 2 digits after the decimal point.
+
+
+ {0:0.##}KB
+ {0: -> take value from the first parameter.
+0.##} ->Display at least 1 digit before the decimal point and up to 2 digits after the decimal point.
+
+
+ {0:0.##}MB
+ {0: -> take value from the first parameter.
+0.##} ->Display at least 1 digit before the decimal point and up to 2 digits after the decimal point.
+
+
+ {0:0.##}PB
+ {0: -> take value from the first parameter.
+0.##} ->Display at least 1 digit before the decimal point and up to 2 digits after the decimal point.
+
+
+ {0:0.##}TB
+ {0: -> take value from the first parameter.
+0.##} ->Display at least 1 digit before the decimal point and up to 2 digits after the decimal point.
+
+
+ Failed to read restartable info from file.
+
+
+ MaximumCacheSize cannot be less than {0}.
+ {0} is minimum memory cache size limitation
+
+
+ Blob type of source and destination must be the same.
+
+
+ Source and destination cannot be the same.
+
+
+ Source does not exist.
+
+
+ Source blob does not exist.
+
+
+ User specified blob type does not match the blob type of the existing source blob.
+
+
+ {0} must support Read.
+
+
+ {0} must support Seek.
+
+
+ {0} must support Write.
+
+
+ The stream is not expandable.
+
+
+ TransferEntry.CopyId cannot be null or empty because we need it to verify we are monitoring the right blob copying process.
+
+
+ The given blob type {0} is not supported.
+ {0} is the given blob type.
+
+
+ The given transfer location type {0} is not supported.
+ {0} is the given transfer location type.
+
+
+ Copying from File Storage to append Blob Storage asynchronously is not supported.
+
+
+ AppendBlob
+
+
+ BlockBlob
+
+
+ PageBlob
+
+
+ Copying from uri to Azure Blob Storage synchronously is not supported.
+
+
+ Copying from uri to Azure File Storage synchronously is not supported.
+
+
+ A transfer operation with the same source and destination already exists.
+
+
+ The transfer failed.
+
+
\ No newline at end of file
diff --git a/lib/SerializationHelper/SerializableAccessCondition.cs b/lib/SerializationHelper/SerializableAccessCondition.cs
new file mode 100644
index 00000000..88650747
--- /dev/null
+++ b/lib/SerializationHelper/SerializableAccessCondition.cs
@@ -0,0 +1,161 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+
+namespace Microsoft.WindowsAzure.Storage.DataMovement.SerializationHelper
+{
+ using System;
+ using System.Runtime.Serialization;
+
    /// <summary>
    /// Serializable wrapper for <see cref="AccessCondition"/>, implementing
    /// <see cref="ISerializable"/> so the wrapped condition (possibly null)
    /// can round-trip through .NET serialization.
    /// </summary>
    [Serializable]
    internal sealed class SerializableAccessCondition : ISerializable
    {
        // Names of the serialized fields. These are part of the persisted format;
        // renaming any of them breaks deserialization of previously saved data.
        private const string IfMatchETagName = "IfMatchETag";
        private const string IfModifiedSinceTimeName = "IfModifiedSinceTime";
        private const string IfNoneMatchETagName = "IfNoneMatchETag";
        private const string IfNotModifiedSinceTimeName = "IfNotModifiedSinceTime";
        private const string IfSequenceNumberEqualName = "IfSequenceNumberEqual";
        private const string IfSequenceNumberLessThanName = "IfSequenceNumberLessThan";
        private const string IfSequenceNumberLessThanOrEqualName = "IfSequenceNumberLessThanOrEqual";
        private const string LeaseIdName = "LeaseId";

        // Wrapped condition; null means "no access condition".
        private AccessCondition accessCondition;

        /// <summary>
        /// Initializes a new instance of the <see cref="SerializableAccessCondition"/> class
        /// with no wrapped condition.
        /// </summary>
        public SerializableAccessCondition()
        {
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="SerializableAccessCondition"/> class
        /// from serialized data.
        /// </summary>
        /// <param name="info">Serialization information.</param>
        /// <param name="context">Streaming context.</param>
        private SerializableAccessCondition(SerializationInfo info, StreamingContext context)
        {
            if (info == null)
            {
                throw new ArgumentNullException("info");
            }

            string ifMatchETag = info.GetString(IfMatchETagName);
            DateTimeOffset? ifModifiedSinceTime = (DateTimeOffset?)info.GetValue(IfModifiedSinceTimeName, typeof(DateTimeOffset?));
            string ifNoneMatchETag = info.GetString(IfNoneMatchETagName);
            DateTimeOffset? ifNotModifiedSinceTime = (DateTimeOffset?)info.GetValue(IfNotModifiedSinceTimeName, typeof(DateTimeOffset?));
            long? ifSequenceNumberEqual = (long?)info.GetValue(IfSequenceNumberEqualName, typeof(long?));
            long? ifSequenceNumberLessThan = (long?)info.GetValue(IfSequenceNumberLessThanName, typeof(long?));
            long? ifSequenceNumberLessThanOrEqual = (long?)info.GetValue(IfSequenceNumberLessThanOrEqualName, typeof(long?));
            string leaseId = info.GetString(LeaseIdName);

            // Only materialize an AccessCondition when at least one component was
            // actually set; otherwise keep the wrapper empty (null condition),
            // mirroring how GetObjectData persists the null case.
            if (!string.IsNullOrEmpty(ifMatchETag)
                || null != ifModifiedSinceTime
                || !string.IsNullOrEmpty(ifNoneMatchETag)
                || null != ifNotModifiedSinceTime
                || null != ifSequenceNumberEqual
                || null != ifSequenceNumberLessThan
                || null != ifSequenceNumberLessThanOrEqual
                || !string.IsNullOrEmpty(leaseId))
            {
                this.accessCondition = new AccessCondition()
                {
                    IfMatchETag = ifMatchETag,
                    IfModifiedSinceTime = ifModifiedSinceTime,
                    IfNoneMatchETag = ifNoneMatchETag,
                    IfNotModifiedSinceTime = ifNotModifiedSinceTime,
                    IfSequenceNumberEqual = ifSequenceNumberEqual,
                    IfSequenceNumberLessThan = ifSequenceNumberLessThan,
                    IfSequenceNumberLessThanOrEqual = ifSequenceNumberLessThanOrEqual,
                    LeaseId = leaseId
                };
            }
            else
            {
                this.accessCondition = null;
            }
        }

        /// <summary>
        /// Gets or sets the wrapped <see cref="AccessCondition"/>; may be null.
        /// </summary>
        internal AccessCondition AccessCondition
        {
            get
            {
                return this.accessCondition;
            }

            set
            {
                this.accessCondition = value;
            }
        }

        /// <summary>
        /// Serializes the object.
        /// </summary>
        /// <param name="info">Serialization info object.</param>
        /// <param name="context">Streaming context.</param>
        public void GetObjectData(SerializationInfo info, StreamingContext context)
        {
            if (info == null)
            {
                throw new ArgumentNullException("info");
            }

            if (null == this.accessCondition)
            {
                // Write every field as null so the deserialization constructor can
                // read all of them unconditionally.
                info.AddValue(IfMatchETagName, null);
                info.AddValue(IfModifiedSinceTimeName, null);
                info.AddValue(IfNoneMatchETagName, null);
                info.AddValue(IfNotModifiedSinceTimeName, null);
                info.AddValue(IfSequenceNumberEqualName, null);
                info.AddValue(IfSequenceNumberLessThanName, null);
                info.AddValue(IfSequenceNumberLessThanOrEqualName, null);
                info.AddValue(LeaseIdName, null);
            }
            else
            {

                info.AddValue(IfMatchETagName, this.accessCondition.IfMatchETag);
                info.AddValue(IfModifiedSinceTimeName, this.accessCondition.IfModifiedSinceTime);
                info.AddValue(IfNoneMatchETagName, this.accessCondition.IfNoneMatchETag);
                info.AddValue(IfNotModifiedSinceTimeName, this.accessCondition.IfNotModifiedSinceTime);
                info.AddValue(IfSequenceNumberEqualName, this.accessCondition.IfSequenceNumberEqual);
                info.AddValue(IfSequenceNumberLessThanName, this.accessCondition.IfSequenceNumberLessThan);
                info.AddValue(IfSequenceNumberLessThanOrEqualName, this.accessCondition.IfSequenceNumberLessThanOrEqual);
                info.AddValue(LeaseIdName, this.accessCondition.LeaseId);
            }
        }

        /// <summary>
        /// Extracts the wrapped <see cref="AccessCondition"/> from a wrapper that may itself be null.
        /// </summary>
        /// <param name="serialization">Wrapper to unwrap; may be null.</param>
        /// <returns>The wrapped condition, or null when the wrapper is null.</returns>
        internal static AccessCondition GetAccessCondition(SerializableAccessCondition serialization)
        {
            if (null == serialization)
            {
                return null;
            }

            return serialization.AccessCondition;
        }

        /// <summary>
        /// Stores an <see cref="AccessCondition"/> into a wrapper, creating the
        /// wrapper on demand when a non-null value must be stored.
        /// </summary>
        /// <param name="serialization">Wrapper reference to update; created when null and a value is supplied.</param>
        /// <param name="value">Condition to store; may be null.</param>
        internal static void SetAccessCondition(
            ref SerializableAccessCondition serialization,
            AccessCondition value)
        {
            // Nothing to do when both sides are already null.
            if ((null == serialization)
                && (null == value))
            {
                return;
            }

            if (null != serialization)
            {
                serialization.AccessCondition = value;
            }
            else
            {
                serialization = new SerializableAccessCondition()
                {
                    AccessCondition = value
                };
            }
        }
    }
+}
diff --git a/lib/SerializationHelper/SerializableBlobRequestOptions.cs b/lib/SerializationHelper/SerializableBlobRequestOptions.cs
new file mode 100644
index 00000000..c886d84e
--- /dev/null
+++ b/lib/SerializationHelper/SerializableBlobRequestOptions.cs
@@ -0,0 +1,105 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+
+namespace Microsoft.WindowsAzure.Storage.DataMovement.SerializationHelper
+{
+ using System;
+ using System.Diagnostics;
+ using System.Runtime.Serialization;
+ using Microsoft.WindowsAzure.Storage.Blob;
+
+ [Serializable]
+ internal sealed class SerializableBlobRequestOptions : SerializableRequestOptions
+ {
+ private const string DisableContentMD5ValidationName = "DisableContentMD5Validation";
+ private const string MaximumExecutionTimeName = "MaximumExecutionTime";
+ private const string ServerTimeoutName = "ServerTimeout";
+ private const string StoreBlobContentMD5Name = "StoreBlobContentMD5";
+ private const string UseTransactionalMD5Name = "UseTransactionalMD5";
+
+ private BlobRequestOptions blobRequestOptions;
+
+ public SerializableBlobRequestOptions()
+ {
+ }
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="SerializableBlobRequestOptions"/> class.
+ /// </summary>
+ /// <param name="info">Serialization information.</param>
+ /// <param name="context">Streaming context.</param>
+ private SerializableBlobRequestOptions(SerializationInfo info, StreamingContext context)
+ : base(info, context)
+ {
+ bool? disableContentMD5Validation = (bool?)info.GetValue(DisableContentMD5ValidationName, typeof(bool?));
+ TimeSpan? maximumExecutionTime = (TimeSpan?)info.GetValue(MaximumExecutionTimeName, typeof(TimeSpan?));
+ TimeSpan? serverTimeout = (TimeSpan?)info.GetValue(ServerTimeoutName, typeof(TimeSpan?));
+ bool? storeBlobContentMD5 = (bool?)info.GetValue(StoreBlobContentMD5Name, typeof(bool?));
+ bool? useTransactionalMD5 = (bool?)info.GetValue(UseTransactionalMD5Name, typeof(bool?));
+
+ if (null != disableContentMD5Validation
+ || null != maximumExecutionTime
+ || null != serverTimeout
+ || null != storeBlobContentMD5
+ || null != useTransactionalMD5)
+ {
+ this.blobRequestOptions = Transfer_RequestOptions.DefaultBlobRequestOptions;
+
+ this.blobRequestOptions.DisableContentMD5Validation = disableContentMD5Validation;
+ this.blobRequestOptions.MaximumExecutionTime = maximumExecutionTime;
+ this.blobRequestOptions.ServerTimeout = serverTimeout;
+ this.blobRequestOptions.StoreBlobContentMD5 = storeBlobContentMD5;
+ this.blobRequestOptions.UseTransactionalMD5 = useTransactionalMD5;
+ }
+ else
+ {
+ this.blobRequestOptions = null;
+ }
+ }
+
+ protected override IRequestOptions RequestOptions
+ {
+ get
+ {
+ return this.blobRequestOptions;
+ }
+
+ set
+ {
+ BlobRequestOptions requestOptions = value as BlobRequestOptions;
+ Debug.Assert(null != requestOptions, "Setting RequestOptions in BlobRequestOptionsSerializer, but the value is not a BlobRequestOptions instance.");
+ this.blobRequestOptions = requestOptions;
+ }
+ }
+
+ /// <summary>
+ /// Serializes the object.
+ /// </summary>
+ /// <param name="info">Serialization info object.</param>
+ /// <param name="context">Streaming context.</param>
+ public override void GetObjectData(SerializationInfo info, StreamingContext context)
+ {
+ base.GetObjectData(info, context);
+
+ if (null == this.blobRequestOptions)
+ {
+ info.AddValue(DisableContentMD5ValidationName, null);
+ info.AddValue(MaximumExecutionTimeName, null, typeof(TimeSpan?));
+ info.AddValue(ServerTimeoutName, null, typeof(TimeSpan?));
+ info.AddValue(StoreBlobContentMD5Name, null);
+ info.AddValue(UseTransactionalMD5Name, null);
+ }
+ else
+ {
+ info.AddValue(DisableContentMD5ValidationName, this.blobRequestOptions.DisableContentMD5Validation);
+ info.AddValue(MaximumExecutionTimeName, this.blobRequestOptions.MaximumExecutionTime, typeof(TimeSpan?));
+ info.AddValue(ServerTimeoutName, this.blobRequestOptions.ServerTimeout, typeof(TimeSpan?));
+ info.AddValue(StoreBlobContentMD5Name, this.blobRequestOptions.StoreBlobContentMD5);
+ info.AddValue(UseTransactionalMD5Name, this.blobRequestOptions.UseTransactionalMD5);
+ }
+ }
+ }
+}
diff --git a/lib/SerializationHelper/SerializableCloudBlob.cs b/lib/SerializationHelper/SerializableCloudBlob.cs
new file mode 100644
index 00000000..45ece249
--- /dev/null
+++ b/lib/SerializationHelper/SerializableCloudBlob.cs
@@ -0,0 +1,134 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+
+namespace Microsoft.WindowsAzure.Storage.DataMovement.SerializationHelper
+{
+ using System;
+ using System.Globalization;
+ using System.Runtime.Serialization;
+ using Microsoft.WindowsAzure.Storage.Auth;
+ using Microsoft.WindowsAzure.Storage.Blob;
+
+ [Serializable]
+ internal class SerializableCloudBlob : ISerializable
+ {
+ private const string BlobUriName = "BlobUri";
+ private const string BlobTypeName = "BlobType";
+
+ private Uri blobUri;
+
+ private BlobType blobType;
+
+ private CloudBlob blob;
+
+ public SerializableCloudBlob()
+ {
+ }
+
+ private SerializableCloudBlob(SerializationInfo info, StreamingContext context)
+ {
+ if (info == null)
+ {
+ throw new ArgumentNullException("info");
+ }
+
+ this.blobUri = (Uri)info.GetValue(BlobUriName, typeof(Uri));
+ this.blobType = (BlobType)info.GetValue(BlobTypeName, typeof(BlobType));
+ this.CreateCloudBlobInstance(null);
+ }
+
+ internal CloudBlob Blob
+ {
+ get
+ {
+ return this.blob;
+ }
+
+ set
+ {
+ this.blob = value;
+
+ if (null == this.blob)
+ {
+ this.blobUri = null;
+ this.blobType = BlobType.Unspecified;
+ }
+ else
+ {
+ this.blobUri = this.blob.SnapshotQualifiedUri;
+ this.blobType = this.blob.BlobType;
+ }
+ }
+ }
+
+ public void GetObjectData(SerializationInfo info, StreamingContext context)
+ {
+ if (info == null)
+ {
+ throw new ArgumentNullException("info");
+ }
+
+ info.AddValue(BlobUriName, this.blobUri, typeof(Uri));
+ info.AddValue(BlobTypeName, this.blobType);
+ }
+
+ internal static CloudBlob GetBlob(SerializableCloudBlob blobSerialization)
+ {
+ if (null == blobSerialization)
+ {
+ return null;
+ }
+
+ return blobSerialization.Blob;
+ }
+
+ internal static void SetBlob(ref SerializableCloudBlob blobSerialization, CloudBlob value)
+ {
+ if ((null == blobSerialization)
+ && (null == value))
+ {
+ return;
+ }
+
+ if (null != blobSerialization)
+ {
+ blobSerialization.Blob = value;
+ }
+ else
+ {
+ blobSerialization = new SerializableCloudBlob()
+ {
+ Blob = value
+ };
+ }
+ }
+
+ internal void UpdateStorageCredentials(StorageCredentials credentials)
+ {
+ this.CreateCloudBlobInstance(credentials);
+ }
+
+ private void CreateCloudBlobInstance(StorageCredentials credentials)
+ {
+ if ((null != this.blob)
+ && this.blob.ServiceClient.Credentials == credentials)
+ {
+ return;
+ }
+
+ if (null == this.blobUri)
+ {
+ throw new InvalidOperationException(
+ string.Format(
+ CultureInfo.CurrentCulture,
+ Resources.ParameterCannotBeNullException,
+ "blobUri"));
+ }
+
+ this.blob = Utils.GetBlobReference(this.blobUri, credentials, this.blobType);
+ }
+ }
+}
diff --git a/lib/SerializationHelper/SerializableCloudFile.cs b/lib/SerializationHelper/SerializableCloudFile.cs
new file mode 100644
index 00000000..cbc2773f
--- /dev/null
+++ b/lib/SerializationHelper/SerializableCloudFile.cs
@@ -0,0 +1,127 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+namespace Microsoft.WindowsAzure.Storage.DataMovement.SerializationHelper
+{
+ using System;
+ using System.Diagnostics;
+ using System.Globalization;
+ using System.Runtime.Serialization;
+ using Microsoft.WindowsAzure.Storage.Auth;
+ using Microsoft.WindowsAzure.Storage.File;
+
+ [Serializable]
+ internal class SerializableCloudFile : ISerializable
+ {
+ private const string FileUriName = "FileUri";
+
+ private Uri fileUri;
+
+ private CloudFile file;
+
+ public SerializableCloudFile()
+ {
+ }
+
+ private SerializableCloudFile(SerializationInfo info, StreamingContext context)
+ {
+ if (info == null)
+ {
+ throw new ArgumentNullException("info");
+ }
+
+ this.fileUri = (Uri)info.GetValue(FileUriName, typeof(Uri));
+ this.CreateCloudFileInstance(null);
+ }
+
+ internal CloudFile File
+ {
+ get
+ {
+ return this.file;
+ }
+
+ set
+ {
+ this.file = value;
+
+ if (null == this.file)
+ {
+ this.fileUri = null;
+ }
+ else
+ {
+ this.fileUri = this.file.Uri;
+ }
+ }
+ }
+
+ public void GetObjectData(SerializationInfo info, StreamingContext context)
+ {
+ if (info == null)
+ {
+ throw new ArgumentNullException("info");
+ }
+
+ info.AddValue(FileUriName, this.fileUri, typeof(Uri));
+ }
+
+ internal static CloudFile GetFile(SerializableCloudFile fileSerialization)
+ {
+ if (null == fileSerialization)
+ {
+ return null;
+ }
+
+ return fileSerialization.File;
+ }
+
+ internal static void SetFile(ref SerializableCloudFile fileSerialization, CloudFile value)
+ {
+ if (null == fileSerialization
+ && null == value)
+ {
+ return;
+ }
+
+ if (null != fileSerialization)
+ {
+ fileSerialization.File = value;
+ }
+ else
+ {
+ fileSerialization = new SerializableCloudFile()
+ {
+ File = value
+ };
+ }
+ }
+
+ internal void UpdateStorageCredentials(StorageCredentials credentials)
+ {
+ this.CreateCloudFileInstance(credentials);
+ }
+
+ private void CreateCloudFileInstance(StorageCredentials credentials)
+ {
+ if (null != this.file
+ && this.file.ServiceClient.Credentials == credentials)
+ {
+ return;
+ }
+
+ if (null == this.fileUri)
+ {
+ throw new InvalidOperationException(
+ string.Format(
+ CultureInfo.CurrentCulture,
+ Resources.ParameterCannotBeNullException,
+ "fileUri"));
+ }
+
+ this.file = new CloudFile(this.fileUri, credentials);
+ }
+ }
+}
diff --git a/lib/SerializationHelper/SerializableFileRequestOptions.cs b/lib/SerializationHelper/SerializableFileRequestOptions.cs
new file mode 100644
index 00000000..03014954
--- /dev/null
+++ b/lib/SerializationHelper/SerializableFileRequestOptions.cs
@@ -0,0 +1,111 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+namespace Microsoft.WindowsAzure.Storage.DataMovement.SerializationHelper
+{
+ using System;
+ using System.Diagnostics;
+ using System.Runtime.Serialization;
+ using Microsoft.WindowsAzure.Storage.File;
+
+ /// <summary>
+ /// Define class to serialize FileRequestOptions instance.
+ /// </summary>
+ [Serializable]
+ internal sealed class SerializableFileRequestOptions : SerializableRequestOptions, ISerializable
+ {
+ private const string DisableContentMD5ValidationName = "DisableContentMD5Validation";
+ private const string MaximumExecutionTimeName = "MaximumExecutionTime";
+ private const string ServerTimeoutName = "ServerTimeout";
+ private const string StoreFileContentMD5Name = "StoreFileContentMD5";
+ private const string UseTransactionalMD5Name = "UseTransactionalMD5";
+
+ private FileRequestOptions fileRequestOptions;
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="SerializableFileRequestOptions"/> class.
+ /// </summary>
+ public SerializableFileRequestOptions()
+ {
+ }
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="SerializableFileRequestOptions"/> class.
+ /// </summary>
+ /// <param name="info">Serialization information.</param>
+ /// <param name="context">Streaming context.</param>
+ private SerializableFileRequestOptions(SerializationInfo info, StreamingContext context)
+ : base (info, context)
+ {
+ bool? disableContentMD5Validation = (bool?)info.GetValue(DisableContentMD5ValidationName, typeof(bool?));
+ TimeSpan? maximumExecutionTime = (TimeSpan?)info.GetValue(MaximumExecutionTimeName, typeof(TimeSpan?));
+ TimeSpan? serverTimeout = (TimeSpan?)info.GetValue(ServerTimeoutName, typeof(TimeSpan?));
+ bool? storeFileContentMD5 = (bool?)info.GetValue(StoreFileContentMD5Name, typeof(bool?));
+ bool? useTransactionalMD5 = (bool?)info.GetValue(UseTransactionalMD5Name, typeof(bool?));
+
+ if (null != disableContentMD5Validation
+ || null != maximumExecutionTime
+ || null != serverTimeout
+ || null != storeFileContentMD5
+ || null != useTransactionalMD5)
+ {
+ this.fileRequestOptions = Transfer_RequestOptions.DefaultFileRequestOptions;
+
+ this.fileRequestOptions.DisableContentMD5Validation = disableContentMD5Validation;
+ this.fileRequestOptions.MaximumExecutionTime = maximumExecutionTime;
+ this.fileRequestOptions.ServerTimeout = serverTimeout;
+ this.fileRequestOptions.StoreFileContentMD5 = storeFileContentMD5;
+ this.fileRequestOptions.UseTransactionalMD5 = useTransactionalMD5;
+ }
+ else
+ {
+ this.fileRequestOptions = null;
+ }
+ }
+
+ protected override IRequestOptions RequestOptions
+ {
+ get
+ {
+ return this.fileRequestOptions;
+ }
+
+ set
+ {
+ FileRequestOptions requestOptions = value as FileRequestOptions;
+ Debug.Assert(null != requestOptions, "Setting RequestOptions in FileRequestOptionsSerializer, but the value is not a FileRequestOptions instance.");
+
+ this.fileRequestOptions = requestOptions;
+ }
+ }
+
+ /// <summary>
+ /// Serializes the object.
+ /// </summary>
+ /// <param name="info">Serialization info object.</param>
+ /// <param name="context">Streaming context.</param>
+ public override void GetObjectData(SerializationInfo info, StreamingContext context)
+ {
+ base.GetObjectData(info, context);
+
+ if (null == this.fileRequestOptions)
+ {
+ info.AddValue(DisableContentMD5ValidationName, null);
+ info.AddValue(MaximumExecutionTimeName, null, typeof(TimeSpan?));
+ info.AddValue(ServerTimeoutName, null, typeof(TimeSpan?));
+ info.AddValue(StoreFileContentMD5Name, null);
+ info.AddValue(UseTransactionalMD5Name, null);
+ }
+ else
+ {
+ info.AddValue(DisableContentMD5ValidationName, this.fileRequestOptions.DisableContentMD5Validation);
+ info.AddValue(MaximumExecutionTimeName, this.fileRequestOptions.MaximumExecutionTime, typeof(TimeSpan?));
+ info.AddValue(ServerTimeoutName, this.fileRequestOptions.ServerTimeout, typeof(TimeSpan?));
+ info.AddValue(StoreFileContentMD5Name, this.fileRequestOptions.StoreFileContentMD5);
+ info.AddValue(UseTransactionalMD5Name, this.fileRequestOptions.UseTransactionalMD5);
+ }
+ }
+ }
+}
diff --git a/lib/SerializationHelper/SerializableRequestOptions.cs b/lib/SerializationHelper/SerializableRequestOptions.cs
new file mode 100644
index 00000000..5424c771
--- /dev/null
+++ b/lib/SerializationHelper/SerializableRequestOptions.cs
@@ -0,0 +1,111 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+
+namespace Microsoft.WindowsAzure.Storage.DataMovement.SerializationHelper
+{
+ using System;
+ using System.Diagnostics;
+ using System.Runtime.Serialization;
+ using Microsoft.WindowsAzure.Storage.Blob;
+ using Microsoft.WindowsAzure.Storage.File;
+
+ [Serializable]
+ internal abstract class SerializableRequestOptions : ISerializable
+ {
+ protected SerializableRequestOptions()
+ {
+ }
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="SerializableRequestOptions"/> class.
+ /// </summary>
+ /// <param name="info">Serialization information.</param>
+ /// <param name="context">Streaming context.</param>
+ protected SerializableRequestOptions(SerializationInfo info, StreamingContext context)
+ {
+ if (info == null)
+ {
+ throw new System.ArgumentNullException("info");
+ }
+ }
+
+ abstract protected IRequestOptions RequestOptions
+ {
+ get;
+ set;
+ }
+
+ /// <summary>
+ /// Serializes the object.
+ /// </summary>
+ /// <param name="info">Serialization info object.</param>
+ /// <param name="context">Streaming context.</param>
+ public virtual void GetObjectData(SerializationInfo info, StreamingContext context)
+ {
+ if (info == null)
+ {
+ throw new System.ArgumentNullException("info");
+ }
+ }
+
+ internal static IRequestOptions GetRequestOptions(SerializableRequestOptions serializer)
+ {
+ if (null == serializer)
+ {
+ return null;
+ }
+
+ return serializer.RequestOptions;
+ }
+
+ internal static void SetRequestOptions(ref SerializableRequestOptions serializer, IRequestOptions requestOptions)
+ {
+ if (null == serializer && null == requestOptions)
+ {
+ return;
+ }
+
+ if (null == serializer)
+ {
+ serializer = CreateSerializableRequestOptions(requestOptions);
+ }
+ else
+ {
+ if ((requestOptions is FileRequestOptions)
+ && (serializer is SerializableBlobRequestOptions))
+ {
+ serializer = new SerializableFileRequestOptions();
+ }
+ else if ((requestOptions is BlobRequestOptions)
+ && (serializer is SerializableFileRequestOptions))
+ {
+ serializer = new SerializableBlobRequestOptions();
+ }
+
+ serializer.RequestOptions = requestOptions;
+ }
+ }
+
+ private static SerializableRequestOptions CreateSerializableRequestOptions(IRequestOptions requestOptions)
+ {
+ if (requestOptions is FileRequestOptions)
+ {
+ return new SerializableFileRequestOptions()
+ {
+ RequestOptions = requestOptions
+ };
+ }
+ else
+ {
+ Debug.Assert(requestOptions is BlobRequestOptions, "Request options should be an instance of BlobRequestOptions when code reach here.");
+ return new SerializableBlobRequestOptions()
+ {
+ RequestOptions = requestOptions
+ };
+ }
+ }
+ }
+}
diff --git a/lib/TransferCheckpoint.cs b/lib/TransferCheckpoint.cs
new file mode 100644
index 00000000..faf9e7c9
--- /dev/null
+++ b/lib/TransferCheckpoint.cs
@@ -0,0 +1,139 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+namespace Microsoft.WindowsAzure.Storage.DataMovement
+{
+ using System;
+ using System.Collections.Concurrent;
+ using System.Collections.Generic;
+ using System.Runtime.Serialization;
+ using TransferKey = System.Tuple<TransferLocation, TransferLocation>;
+
+ /// <summary>
+ /// Represents a checkpoint from which a transfer may be resumed and continue.
+ /// </summary>
+ [Serializable]
+ public class TransferCheckpoint : ISerializable
+ {
+ private const string SingleObjectTransfersName = "SingleObjectTransfers";
+
+ /// <summary>
+ /// Transfers associated with this transfer checkpoint.
+ /// </summary>
+ private ConcurrentDictionary<TransferKey, Transfer> transfers = new ConcurrentDictionary<TransferKey, Transfer>();
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="TransferCheckpoint"/> class.
+ /// </summary>
+ internal TransferCheckpoint()
+ {
+ }
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="TransferCheckpoint"/> class.
+ /// </summary>
+ /// <param name="info">Serialization information.</param>
+ /// <param name="context">Streaming context.</param>
+ protected TransferCheckpoint(SerializationInfo info, StreamingContext context)
+ {
+ if (info == null)
+ {
+ throw new System.ArgumentNullException("info");
+ }
+
+ var singleObjectTransfers = (List<SingleObjectTransfer>)info.GetValue(SingleObjectTransfersName, typeof(List<SingleObjectTransfer>));
+ foreach(var transfer in singleObjectTransfers)
+ {
+ this.AddTransfer(transfer);
+ }
+ }
+
+
+ /// <summary>
+ /// Gets a list of all transfers
+ /// </summary>
+ internal ICollection<Transfer> AllTransfers
+ {
+ get
+ {
+ return this.transfers.Values;
+ }
+ }
+
+ /// <summary>
+ /// Serializes the checkpoint.
+ /// </summary>
+ /// <param name="info">Serialization info object.</param>
+ /// <param name="context">Streaming context.</param>
+ public virtual void GetObjectData(SerializationInfo info, StreamingContext context)
+ {
+ if (info == null)
+ {
+ throw new ArgumentNullException("info");
+ }
+
+ List<SingleObjectTransfer> singleObjectTransfers = new List<SingleObjectTransfer>();
+ foreach(var kvPair in this.transfers)
+ {
+ SingleObjectTransfer transfer = kvPair.Value as SingleObjectTransfer;
+ if (transfer != null)
+ {
+ singleObjectTransfers.Add(transfer);
+ }
+ }
+
+ info.AddValue(SingleObjectTransfersName, singleObjectTransfers, typeof(List<SingleObjectTransfer>));
+ }
+
+ /// <summary>
+ /// Adds a transfer to the transfer checkpoint.
+ /// </summary>
+ /// <param name="transfer">The transfer to be kept track of.</param>
+ internal void AddTransfer(Transfer transfer)
+ {
+ this.transfers.TryAdd(new TransferKey(transfer.Source, transfer.Destination), transfer);
+ }
+
+ /// <summary>
+ /// Gets a transfer with the specified source location, destination location and transfer method.
+ /// </summary>
+ /// <param name="sourceLocation">Source location of the transfer.</param>
+ /// <param name="destLocation">Destination location of the transfer.</param>
+ /// <param name="transferMethod">Transfer method.</param>
+ /// <returns>A transfer that matches the specified source location, destination location and transfer method; Or null if no matches.</returns>
+ internal Transfer GetTransfer(TransferLocation sourceLocation, TransferLocation destLocation, TransferMethod transferMethod)
+ {
+ Transfer transfer = null;
+ if (this.transfers.TryGetValue(new TransferKey(sourceLocation, destLocation), out transfer))
+ {
+ if (transfer.TransferMethod == transferMethod)
+ {
+ return transfer;
+ }
+ }
+
+ return null;
+ }
+
+ /// <summary>
+ /// Gets a static snapshot of this transfer checkpoint
+ /// </summary>
+ /// <returns>A snapshot of current transfer checkpoint</returns>
+ internal TransferCheckpoint Copy()
+ {
+ TransferCheckpoint copyObj = new TransferCheckpoint();
+ foreach (var kvPair in this.transfers)
+ {
+ SingleObjectTransfer transfer = kvPair.Value as SingleObjectTransfer;
+ if (transfer != null)
+ {
+ copyObj.AddTransfer(transfer.Copy());
+ }
+ }
+
+ return copyObj;
+ }
+ }
+}
diff --git a/lib/TransferConfigurations.cs b/lib/TransferConfigurations.cs
new file mode 100644
index 00000000..ce3284e4
--- /dev/null
+++ b/lib/TransferConfigurations.cs
@@ -0,0 +1,156 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+namespace Microsoft.WindowsAzure.Storage.DataMovement
+{
+ using System;
+ using System.Globalization;
+ using System.Reflection;
+ using ClientLibraryConstants = Microsoft.WindowsAzure.Storage.Shared.Protocol.Constants;
+
+ /// <summary>
+ /// TransferConfigurations class.
+ /// </summary>
+ public class TransferConfigurations
+ {
+ /// <summary>
+ /// Stores the BlockSize to use for Windows Azure Storage transfers.
+ /// </summary>
+ private int blockSize;
+
+ /// <summary>
+ /// How many work items to process in parallel.
+ /// </summary>
+ private int parallelOperations;
+
+ /// <summary>
+ /// Maximum amount of cache memory to use in bytes.
+ /// </summary>
+ private long maximumCacheSize;
+
+ /// <summary>
+ /// Instance to call native methods to get current memory status.
+ /// </summary>
+ private GlobalMemoryStatusNativeMethods memStatus = new GlobalMemoryStatusNativeMethods();
+
+ /// <summary>
+ /// Initializes a new instance of the
+ /// <see cref="TransferConfigurations"/> class.
+ /// </summary>
+ public TransferConfigurations()
+ {
+ // setup default values.
+ this.ParallelOperations = Environment.ProcessorCount * 8;
+ this.BlockSize = Constants.DefaultBlockSize;
+ }
+
+ /// <summary>
+ /// Gets or sets a value indicating how many work items to process
+ /// concurrently. Downloading or uploading a single blob can consist
+ /// of a large number of work items.
+ /// </summary>
+ /// <value>How many work items to process concurrently.</value>
+ public int ParallelOperations
+ {
+ get
+ {
+ return this.parallelOperations;
+ }
+
+ set
+ {
+ if (value <= 0)
+ {
+ throw new ArgumentException(string.Format(
+ CultureInfo.CurrentCulture,
+ Resources.ParallelCountNotPositiveException));
+ }
+
+ this.parallelOperations = value;
+ this.SetMaxMemoryCacheSize();
+ }
+ }
+
+ /// <summary>
+ /// Gets or sets the user agent suffix
+ /// </summary>
+ public string UserAgentSuffix
+ {
+ get;
+ set;
+ }
+
+ /// <summary>
+ /// Gets or sets a value indicating how much memory we can cache
+ /// during upload/download.
+ /// </summary>
+ /// <value>Maximum amount of cache memory to use in bytes.</value>
+ internal long MaximumCacheSize
+ {
+ get
+ {
+ return this.maximumCacheSize;
+ }
+
+ set
+ {
+ if (value < Constants.MaxBlockSize)
+ {
+ throw new ArgumentException(string.Format(
+ CultureInfo.CurrentCulture,
+ Resources.SmallMemoryCacheSizeLimitationException,
+ Utils.BytesToHumanReadableSize(Constants.MaxBlockSize)));
+ }
+
+ this.maximumCacheSize = value;
+ }
+ }
+
+ /// <summary>
+ /// Gets or sets the BlockSize to use for Windows Azure Storage transfers.
+ /// </summary>
+ /// <value>BlockSize to use for Windows Azure Storage transfers.</value>
+ internal int BlockSize
+ {
+ get
+ {
+ return this.blockSize;
+ }
+
+ set
+ {
+ if (Constants.MinBlockSize > value || value > Constants.MaxBlockSize)
+ {
+ string errorMessage = string.Format(
+ CultureInfo.CurrentCulture,
+ Resources.BlockSizeOutOfRangeException,
+ Utils.BytesToHumanReadableSize(Constants.MinBlockSize),
+ Utils.BytesToHumanReadableSize(Constants.MaxBlockSize));
+
+ throw new ArgumentOutOfRangeException("value", value, errorMessage);
+ }
+
+ this.blockSize = value;
+ }
+ }
+
+ private void SetMaxMemoryCacheSize()
+ {
+ if (0 == this.memStatus.AvailablePhysicalMemory)
+ {
+ this.MaximumCacheSize = Constants.CacheSizeMultiplierInByte * this.ParallelOperations;
+ }
+ else
+ {
+ this.MaximumCacheSize =
+ Math.Min(
+ Constants.CacheSizeMultiplierInByte * this.ParallelOperations,
+ Math.Min(
+ (long)(this.memStatus.AvailablePhysicalMemory * Constants.MemoryCacheMultiplier),
+ Constants.MemoryCacheMaximum));
+ }
+ }
+ }
+}
diff --git a/lib/TransferContext.cs b/lib/TransferContext.cs
new file mode 100644
index 00000000..ec35df8c
--- /dev/null
+++ b/lib/TransferContext.cs
@@ -0,0 +1,124 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+namespace Microsoft.WindowsAzure.Storage.DataMovement
+{
+ using System;
+
+ /// <summary>
+ /// Represents the context for a transfer, and provides additional runtime information about its execution.
+ /// </summary>
+ public class TransferContext
+ {
+ /// <summary>
+ /// Initializes a new instance of the <see cref="TransferContext"/> class.
+ /// </summary>
+ public TransferContext()
+ : this(null)
+ {
+ }
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="TransferContext"/> class.
+ /// </summary>
+ /// <param name="checkpoint">An object representing the last checkpoint from which the transfer continues on.</param>
+ public TransferContext(TransferCheckpoint checkpoint)
+ {
+ if (checkpoint == null)
+ {
+ this.Checkpoint = new TransferCheckpoint();
+ }
+ else
+ {
+ this.Checkpoint = checkpoint.Copy();
+ }
+
+ this.OverallProgressTracker = new TransferProgressTracker();
+ foreach(Transfer transfer in this.Checkpoint.AllTransfers)
+ {
+ this.OverallProgressTracker.AddBytesTransferred(transfer.ProgressTracker.BytesTransferred);
+ this.OverallProgressTracker.AddNumberOfFilesTransferred(transfer.ProgressTracker.NumberOfFilesTransferred);
+ this.OverallProgressTracker.AddNumberOfFilesSkipped(transfer.ProgressTracker.NumberOfFilesSkipped);
+ this.OverallProgressTracker.AddNumberOfFilesFailed(transfer.ProgressTracker.NumberOfFilesFailed);
+ }
+ }
+
+ /// <summary>
+ /// Gets or sets the client request id.
+ /// </summary>
+ /// <value>A string containing the client request id.</value>
+ /// <remarks>
+ /// Setting this property modifies all the requests involved in the related transfer operation to include the HTTP x-ms-client-request-id header.
+ /// </remarks>
+ public string ClientRequestId
+ {
+ get;
+ set;
+ }
+
+ /// <summary>
+ /// Gets or sets the logging level to be used for the related transfer operation.
+ /// </summary>
+ /// <value>A value of type <see cref="LogLevel"/> that specifies which events are logged for the related transfer operation.</value>
+ public LogLevel LogLevel
+ {
+ get;
+ set;
+ }
+
+ /// <summary>
+ /// Gets the last checkpoint of the transfer.
+ /// </summary>
+ public TransferCheckpoint LastCheckpoint
+ {
+ get
+ {
+ return this.Checkpoint.Copy();
+ }
+ }
+
+ /// <summary>
+ /// Callback invoked to tell whether to overwrite an existing destination.
+ /// </summary>
+ public OverwriteCallback OverwriteCallback
+ {
+ get;
+ set;
+ }
+
+ /// <summary>
+ /// Gets or sets the progress update handler.
+ /// </summary>
+ public IProgress<TransferProgress> ProgressHandler
+ {
+ get
+ {
+ return this.OverallProgressTracker.ProgressHandler;
+ }
+ set
+ {
+ this.OverallProgressTracker.ProgressHandler = value;
+ }
+ }
+
+ /// <summary>
+ /// Gets the overall transfer progress.
+ /// </summary>
+ internal TransferProgressTracker OverallProgressTracker
+ {
+ get;
+ set;
+ }
+
+ /// <summary>
+ /// Gets the transfer checkpoint that tracks all transfers related to this transfer context.
+ /// </summary>
+ internal TransferCheckpoint Checkpoint
+ {
+ get;
+ private set;
+ }
+ }
+}
diff --git a/lib/TransferControllers/AsyncCopyControllers/AsyncCopyController.cs b/lib/TransferControllers/AsyncCopyControllers/AsyncCopyController.cs
new file mode 100644
index 00000000..ed5a80a0
--- /dev/null
+++ b/lib/TransferControllers/AsyncCopyControllers/AsyncCopyController.cs
@@ -0,0 +1,678 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers
+{
+ using System;
+ using System.Collections.Generic;
+ using System.Diagnostics;
+ using System.Globalization;
+ using System.Net;
+ using System.Threading;
+ using System.Threading.Tasks;
+ using Microsoft.WindowsAzure.Storage.Blob;
+ using Microsoft.WindowsAzure.Storage.Blob.Protocol;
+ using Microsoft.WindowsAzure.Storage.File;
+
+ internal abstract class AsyncCopyController : TransferControllerBase
+ {
+ ///
+ /// Timer to signal refresh status.
+ ///
+ private Timer statusRefreshTimer;
+
+ ///
+ /// Lock to protect statusRefreshTimer.
+ ///
+ private object statusRefreshTimerLock = new object();
+
+ ///
+ /// Keeps track of the internal state-machine state.
+ ///
+ private volatile State state;
+
+ ///
+ /// Indicates whether the controller has work available
+ /// or not for the calling code.
+ ///
+ private bool hasWork;
+
+ ///
+ /// Indicates the BytesCopied value of last CopyState
+ ///
+ private long lastBytesCopied;
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// Scheduler object which creates this object.
+ /// Instance of job to start async copy.
+ /// Token user input to notify about cancellation.
+ internal AsyncCopyController(
+ TransferScheduler scheduler,
+ TransferJob transferJob,
+ CancellationToken userCancellationToken)
+ : base(scheduler, transferJob, userCancellationToken)
+ {
+ if (null == transferJob.Destination)
+ {
+ throw new ArgumentException(
+ string.Format(
+ CultureInfo.CurrentCulture,
+ Resources.ParameterCannotBeNullException,
+ "Dest"),
+ "transferJob");
+ }
+
+ if ((null == transferJob.Source.SourceUri && null == transferJob.Source.Blob && null == transferJob.Source.AzureFile)
+ || (null != transferJob.Source.SourceUri && null != transferJob.Source.Blob)
+ || (null != transferJob.Source.Blob && null != transferJob.Source.AzureFile)
+ || (null != transferJob.Source.SourceUri && null != transferJob.Source.AzureFile))
+ {
+ throw new ArgumentException(
+ string.Format(
+ CultureInfo.CurrentCulture,
+ Resources.ProvideExactlyOneOfThreeParameters,
+ "Source.SourceUri",
+ "Source.Blob",
+ "Source.AzureFile"),
+ "transferJob");
+ }
+
+ this.SourceUri = this.TransferJob.Source.SourceUri;
+ this.SourceBlob = this.TransferJob.Source.Blob;
+ this.SourceFile = this.TransferJob.Source.AzureFile;
+
+ // initialize the status refresh timer
+ this.statusRefreshTimer = new Timer(
+ new TimerCallback(
+ delegate(object timerState)
+ {
+ this.hasWork = true;
+ }));
+
+ this.SetInitialStatus();
+ }
+
+ ///
+ /// Internal state values.
+ ///
+ private enum State
+ {
+ FetchSourceAttributes,
+ GetDestination,
+ StartCopy,
+ GetCopyState,
+ Finished,
+ Error,
+ }
+
+ public override bool HasWork
+ {
+ get
+ {
+ return this.hasWork;
+ }
+ }
+
+ protected CloudBlob SourceBlob
+ {
+ get;
+ private set;
+ }
+
+ protected CloudFile SourceFile
+ {
+ get;
+ private set;
+ }
+
+ protected Uri SourceUri
+ {
+ get;
+ private set;
+ }
+
+ protected abstract Uri DestUri
+ {
+ get;
+ }
+
+ public static AsyncCopyController CreateAsyncCopyController(TransferScheduler transferScheduler, TransferJob transferJob, CancellationToken cancellationToken)
+ {
+ if (transferJob.Destination.TransferLocationType == TransferLocationType.AzureFile)
+ {
+ return new FileAsyncCopyController(transferScheduler, transferJob, cancellationToken);
+ }
+
+ if (transferJob.Destination.TransferLocationType == TransferLocationType.AzureBlob)
+ {
+ return new BlobAsyncCopyController(transferScheduler, transferJob, cancellationToken);
+ }
+
+ throw new InvalidOperationException(Resources.CanOnlyCopyToFileOrBlobException);
+ }
+
+ ///
+ /// Do work in the controller.
+ /// A controller controls the whole transfer from source to destination,
+ /// which could be split into several work items. This method lets the controller perform one of those work items.
+ /// There could be several work items to do at the same time in the controller.
+ ///
+ /// Whether the controller has completed. This is to tell TransferScheduler
+ /// whether the controller can be disposed.
+ protected override async Task DoWorkInternalAsync()
+ {
+ switch (this.state)
+ {
+ case State.FetchSourceAttributes:
+ await this.FetchSourceAttributesAsync();
+ break;
+ case State.GetDestination:
+ await this.GetDestinationAsync();
+ break;
+ case State.StartCopy:
+ await this.StartCopyAsync();
+ break;
+ case State.GetCopyState:
+ await this.GetCopyStateAsync();
+ break;
+ case State.Finished:
+ case State.Error:
+ default:
+ break;
+ }
+
+ return (State.Error == this.state || State.Finished == this.state);
+ }
+
+ ///
+ /// Sets the state of the controller to Error, while recording
+ /// the last occurred exception and setting the HasWork and
+ /// IsFinished fields.
+ ///
+ /// Exception to record.
+ protected override void SetErrorState(Exception ex)
+ {
+ Debug.Assert(
+ this.state != State.Finished,
+ "SetErrorState called, while controller already in Finished state");
+
+ this.state = State.Error;
+ this.hasWork = false;
+ }
+
+ ///
+ /// Taken from Microsoft.WindowsAzure.Storage.Core.Util.HttpUtility: Parse the http query string.
+ ///
+ /// Http query string.
+ /// A dictionary of query pairs.
+ protected static Dictionary ParseQueryString(string query)
+ {
+ Dictionary retVal = new Dictionary();
+ if (query == null || query.Length == 0)
+ {
+ return retVal;
+ }
+
+ // remove ? if present
+ if (query.StartsWith("?", StringComparison.OrdinalIgnoreCase))
+ {
+ query = query.Substring(1);
+ }
+
+ string[] valuePairs = query.Split(new string[] { "&" }, StringSplitOptions.RemoveEmptyEntries);
+
+ foreach (string vp in valuePairs)
+ {
+ int equalDex = vp.IndexOf("=", StringComparison.OrdinalIgnoreCase);
+ if (equalDex < 0)
+ {
+ retVal.Add(Uri.UnescapeDataString(vp), null);
+ continue;
+ }
+
+ string key = vp.Substring(0, equalDex);
+ string value = vp.Substring(equalDex + 1);
+
+ retVal.Add(Uri.UnescapeDataString(key), Uri.UnescapeDataString(value));
+ }
+
+ return retVal;
+ }
+
+ private void SetInitialStatus()
+ {
+ switch (this.TransferJob.Status)
+ {
+ case TransferJobStatus.NotStarted:
+ this.TransferJob.Status = TransferJobStatus.Transfer;
+ break;
+ case TransferJobStatus.Transfer:
+ break;
+ case TransferJobStatus.Monitor:
+ break;
+ case TransferJobStatus.Finished:
+ default:
+ throw new ArgumentException(string.Format(
+ CultureInfo.CurrentCulture,
+ Resources.InvalidInitialEntryStatusForControllerException,
+ this.TransferJob.Status,
+ this.GetType().Name));
+ }
+
+ this.SetHasWorkAfterStatusChanged();
+ }
+
+ private void SetHasWorkAfterStatusChanged()
+ {
+ if (TransferJobStatus.Transfer == this.TransferJob.Status)
+ {
+ if (null != this.SourceUri)
+ {
+ this.state = State.GetDestination;
+ }
+ else
+ {
+ this.state = State.FetchSourceAttributes;
+ }
+ }
+ else if(TransferJobStatus.Monitor == this.TransferJob.Status)
+ {
+ this.state = State.GetCopyState;
+ }
+ else
+ {
+ Debug.Fail("We should never be here");
+ }
+
+ this.hasWork = true;
+ }
+
+ private async Task FetchSourceAttributesAsync()
+ {
+ Debug.Assert(
+ this.state == State.FetchSourceAttributes,
+ "FetchSourceAttributesAsync called, but state isn't FetchSourceAttributes");
+
+ this.hasWork = false;
+ this.StartCallbackHandler();
+
+ try
+ {
+ await this.DoFetchSourceAttributesAsync();
+ }
+ catch (StorageException e)
+ {
+ HandleFetchSourceAttributesException(e);
+ throw;
+ }
+
+ this.TransferJob.Source.CheckedAccessCondition = true;
+
+ this.state = State.GetDestination;
+ this.hasWork = true;
+ }
+
+ private static void HandleFetchSourceAttributesException(StorageException e)
+ {
+ // Getting a storage exception is expected if the source doesn't
+ // exist. For those cases that indicate the source doesn't exist
+ // we will set a specific error state.
+ if (null != e.RequestInformation &&
+ e.RequestInformation.HttpStatusCode == (int)HttpStatusCode.NotFound)
+ {
+ throw new InvalidOperationException(Resources.SourceDoesNotExistException);
+ }
+ }
+
+ private async Task GetDestinationAsync()
+ {
+ Debug.Assert(
+ this.state == State.GetDestination,
+ "GetDestinationAsync called, but state isn't GetDestination");
+
+ this.hasWork = false;
+ this.StartCallbackHandler();
+
+ try
+ {
+ await this.DoFetchDestAttributesAsync();
+ }
+ catch (StorageException se)
+ {
+ if (!this.HandleGetDestinationResult(se))
+ {
+ throw se;
+ }
+ return;
+ }
+
+ this.HandleGetDestinationResult(null);
+ }
+
+ private bool HandleGetDestinationResult(Exception e)
+ {
+ bool destExist = true;
+
+ if (null != e)
+ {
+ StorageException se = e as StorageException;
+
+ // Getting a storage exception is expected if the destination doesn't
+ // exist. In this case we won't error out, but set the
+ // destExist flag to false to indicate we will copy to
+ // a new blob/file instead of overwriting an existing one.
+ if (null != se &&
+ null != se.RequestInformation &&
+ se.RequestInformation.HttpStatusCode == (int)HttpStatusCode.NotFound)
+ {
+ destExist = false;
+ }
+ else
+ {
+ this.DoHandleGetDestinationException(se);
+ return false;
+ }
+ }
+
+ this.TransferJob.Destination.CheckedAccessCondition = true;
+
+ if ((TransferJobStatus.Monitor == this.TransferJob.Status)
+ && string.IsNullOrEmpty(this.TransferJob.CopyId))
+ {
+ throw new InvalidOperationException(Resources.RestartableInfoCorruptedException);
+ }
+
+ // If destination file exists, query user whether to overwrite it.
+
+ Uri sourceUri = this.GetSourceUri();
+ this.CheckOverwrite(
+ destExist,
+ sourceUri.ToString(),
+ this.DestUri.ToString());
+
+ this.UpdateProgressAddBytesTransferred(0);
+
+ this.state = State.StartCopy;
+
+ this.hasWork = true;
+ return true;
+ }
+
+ private async Task StartCopyAsync()
+ {
+ Debug.Assert(
+ this.state == State.StartCopy,
+ "StartCopyAsync called, but state isn't StartCopy");
+
+ this.hasWork = false;
+
+ try
+ {
+ this.TransferJob.CopyId = await this.DoStartCopyAsync();
+ }
+ catch (StorageException se)
+ {
+ if (!this.HandleStartCopyResult(se))
+ {
+ throw;
+ }
+
+ return;
+ }
+
+ this.HandleStartCopyResult(null);
+ }
+
+ private bool HandleStartCopyResult(StorageException se)
+ {
+ if (null != se)
+ {
+ if (null != se.RequestInformation
+ && null != se.RequestInformation.ExtendedErrorInformation
+ && BlobErrorCodeStrings.PendingCopyOperation == se.RequestInformation.ExtendedErrorInformation.ErrorCode)
+ {
+ CopyState copyState = this.FetchCopyStateAsync().Result;
+
+ if (null == copyState)
+ {
+ return false;
+ }
+
+ string baseUriString = copyState.Source.GetComponents(
+ UriComponents.Host | UriComponents.Port | UriComponents.Path, UriFormat.UriEscaped);
+
+ Uri sourceUri = this.GetSourceUri();
+
+ string ourBaseUriString = sourceUri.GetComponents(UriComponents.Host | UriComponents.Port | UriComponents.Path, UriFormat.UriEscaped);
+
+ DateTimeOffset? baseSnapshot = null;
+ DateTimeOffset? ourSnapshot = null == this.SourceBlob ? null : this.SourceBlob.SnapshotTime;
+
+ string snapshotString;
+ if (ParseQueryString(copyState.Source.Query).TryGetValue("snapshot", out snapshotString))
+ {
+ if (!string.IsNullOrEmpty(snapshotString))
+ {
+ DateTimeOffset snapshotTime;
+ if (DateTimeOffset.TryParse(
+ snapshotString,
+ CultureInfo.CurrentCulture,
+ DateTimeStyles.AdjustToUniversal,
+ out snapshotTime))
+ {
+ baseSnapshot = snapshotTime;
+ }
+ }
+ }
+
+ if (!baseUriString.Equals(ourBaseUriString) ||
+ !baseSnapshot.Equals(ourSnapshot))
+ {
+ return false;
+ }
+
+ if (string.IsNullOrEmpty(this.TransferJob.CopyId))
+ {
+ this.TransferJob.CopyId = copyState.CopyId;
+ }
+ }
+ else
+ {
+ return false;
+ }
+ }
+
+ this.state = State.GetCopyState;
+ this.hasWork = true;
+ return true;
+ }
+
+ private async Task GetCopyStateAsync()
+ {
+ Debug.Assert(
+ this.state == State.GetCopyState,
+ "GetCopyStateAsync called, but state isn't GetCopyState");
+
+ this.hasWork = false;
+ this.StartCallbackHandler();
+
+ CopyState copyState = null;
+
+ try
+ {
+ copyState = await this.FetchCopyStateAsync();
+ }
+ catch (StorageException se)
+ {
+ if (null != se.RequestInformation &&
+ se.RequestInformation.HttpStatusCode == (int)HttpStatusCode.NotFound)
+ {
+ // A 404 (Not Found) may occur because the destination blob has not been created yet.
+ this.RestartTimer();
+ }
+ else
+ {
+ throw;
+ }
+ }
+
+ this.HandleFetchCopyStateResult(copyState);
+ }
+
+ private void HandleFetchCopyStateResult(CopyState copyState)
+ {
+ if (null == copyState)
+ {
+ // Reach here, the destination should already exist.
+ string exceptionMessage = string.Format(
+ CultureInfo.CurrentCulture,
+ Resources.FailedToRetrieveCopyStateForObjectException,
+ this.DestUri.ToString());
+
+ throw new TransferException(
+ TransferErrorCode.FailToRetrieveCopyStateForObject,
+ exceptionMessage);
+ }
+ else
+ {
+ // Verify we are monitoring the right blob copying process.
+ if (!this.TransferJob.CopyId.Equals(copyState.CopyId))
+ {
+ throw new TransferException(
+ TransferErrorCode.MismatchCopyId,
+ Resources.MismatchFoundBetweenLocalAndServerCopyIdsException);
+ }
+
+ if (CopyStatus.Success == copyState.Status)
+ {
+ this.UpdateTransferProgress(copyState);
+
+ this.DisposeStatusRefreshTimer();
+
+ this.SetFinished();
+ }
+ else if (CopyStatus.Pending == copyState.Status)
+ {
+ this.UpdateTransferProgress(copyState);
+
+ // Wait for a period before refreshing the copy status again.
+ this.RestartTimer();
+ }
+ else
+ {
+ string exceptionMessage = string.Format(
+ CultureInfo.CurrentCulture,
+ Resources.FailedToAsyncCopyObjectException,
+ this.GetSourceUri().ToString(),
+ this.DestUri.ToString(),
+ copyState.Status.ToString(),
+ copyState.StatusDescription);
+
+ // CopyStatus.Invalid | Failed | Aborted
+ throw new TransferException(
+ TransferErrorCode.AsyncCopyFailed,
+ exceptionMessage);
+ }
+ }
+ }
+
+ private void UpdateTransferProgress(CopyState copyState)
+ {
+ if (null != copyState &&
+ copyState.TotalBytes.HasValue)
+ {
+ Debug.Assert(
+ copyState.BytesCopied.HasValue,
+ "BytesCopied cannot be null as TotalBytes is not null.");
+
+ if (this.TransferContext != null)
+ {
+ long bytesTransferred = copyState.BytesCopied.Value;
+ this.UpdateProgressAddBytesTransferred(bytesTransferred - this.lastBytesCopied);
+
+ this.lastBytesCopied = bytesTransferred;
+ }
+ }
+ }
+
+ private void SetFinished()
+ {
+ this.state = State.Finished;
+ this.hasWork = false;
+
+ this.FinishCallbackHandler(null);
+ }
+
+ private void RestartTimer()
+ {
+ // Wait for a period before refreshing the copy status again.
+ this.statusRefreshTimer.Change(
+ TimeSpan.FromMilliseconds(Constants.AsyncCopyStatusRefreshWaitTimeInMilliseconds),
+ new TimeSpan(-1));
+ }
+
+ private void DisposeStatusRefreshTimer()
+ {
+ if (null != this.statusRefreshTimer)
+ {
+ lock (this.statusRefreshTimerLock)
+ {
+ if (null != this.statusRefreshTimer)
+ {
+ this.statusRefreshTimer.Dispose();
+ this.statusRefreshTimer = null;
+ }
+ }
+ }
+ }
+
+ private Uri GetSourceUri()
+ {
+ if (null != this.SourceUri)
+ {
+ return this.SourceUri;
+ }
+
+ if (null != this.SourceBlob)
+ {
+ return this.SourceBlob.SnapshotQualifiedUri;
+ }
+
+ return this.SourceFile.Uri;
+ }
+
+ protected async Task DoFetchSourceAttributesAsync()
+ {
+ AccessCondition accessCondition = Utils.GenerateConditionWithCustomerCondition(
+ this.TransferJob.Source.AccessCondition,
+ this.TransferJob.Source.CheckedAccessCondition);
+ OperationContext operationContext = Utils.GenerateOperationContext(this.TransferContext);
+
+ if (this.SourceBlob != null)
+ {
+ await this.SourceBlob.FetchAttributesAsync(
+ accessCondition,
+ Utils.GenerateBlobRequestOptions(this.TransferJob.Source.BlobRequestOptions),
+ operationContext,
+ this.CancellationToken);
+ }
+ else if (this.SourceFile != null)
+ {
+ await this.SourceFile.FetchAttributesAsync(
+ accessCondition,
+ Utils.GenerateFileRequestOptions(this.TransferJob.Source.FileRequestOptions),
+ operationContext,
+ this.CancellationToken);
+ }
+ }
+
+ protected abstract Task DoFetchDestAttributesAsync();
+ protected abstract Task DoStartCopyAsync();
+ protected abstract void DoHandleGetDestinationException(StorageException se);
+ protected abstract Task FetchCopyStateAsync();
+ }
+}
diff --git a/lib/TransferControllers/AsyncCopyControllers/BlobAsyncCopyController.cs b/lib/TransferControllers/AsyncCopyControllers/BlobAsyncCopyController.cs
new file mode 100644
index 00000000..848d7598
--- /dev/null
+++ b/lib/TransferControllers/AsyncCopyControllers/BlobAsyncCopyController.cs
@@ -0,0 +1,173 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+
+namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers
+{
+ using System;
+ using System.Globalization;
+ using System.Threading;
+ using System.Threading.Tasks;
+ using Microsoft.WindowsAzure.Storage;
+ using Microsoft.WindowsAzure.Storage.Blob;
+ using Microsoft.WindowsAzure.Storage.DataMovement;
+
+ ///
+ /// Blob asynchronous copy.
+ ///
+ internal class BlobAsyncCopyController : AsyncCopyController
+ {
+ private CloudBlob destBlob;
+
+ public BlobAsyncCopyController(
+ TransferScheduler transferScheduler,
+ TransferJob transferJob,
+ CancellationToken cancellationToken)
+ : base(transferScheduler, transferJob, cancellationToken)
+ {
+ CloudBlob transferDestBlob = transferJob.Destination.Blob;
+ if (null == transferDestBlob)
+ {
+ throw new ArgumentException(
+ string.Format(
+ CultureInfo.CurrentCulture,
+ Resources.ParameterCannotBeNullException,
+ "Dest.Blob"),
+ "transferJob");
+ }
+
+ if (transferDestBlob.IsSnapshot)
+ {
+ throw new ArgumentException(Resources.DestinationMustBeBaseBlob, "transferJob");
+ }
+
+ CloudBlob transferSourceBlob = transferJob.Source.Blob;
+
+ if (null != transferSourceBlob && transferDestBlob.BlobType != transferSourceBlob.BlobType)
+ {
+ throw new ArgumentException(Resources.SourceAndDestinationBlobTypeDifferent, "transferJob");
+ }
+
+ if ((null != transferSourceBlob)
+ && (StorageExtensions.Equals(transferSourceBlob, transferDestBlob)))
+ {
+ throw new InvalidOperationException(Resources.SourceAndDestinationLocationCannotBeEqualException);
+ }
+
+ this.destBlob = transferDestBlob;
+ }
+
+ protected override Uri DestUri
+ {
+ get
+ {
+ return this.destBlob.Uri;
+ }
+ }
+
+ protected override Task DoFetchDestAttributesAsync()
+ {
+ AccessCondition accessCondition = Utils.GenerateConditionWithCustomerCondition(
+ this.TransferJob.Destination.AccessCondition,
+ this.TransferJob.Destination.CheckedAccessCondition);
+
+ return this.destBlob.FetchAttributesAsync(
+ accessCondition,
+ Utils.GenerateBlobRequestOptions(this.TransferJob.Destination.BlobRequestOptions),
+ Utils.GenerateOperationContext(this.TransferContext),
+ this.CancellationToken);
+ }
+
+ protected override Task DoStartCopyAsync()
+ {
+ AccessCondition destAccessCondition = Utils.GenerateConditionWithCustomerCondition(this.TransferJob.Destination.AccessCondition);
+
+ if (null != this.SourceUri)
+ {
+ return this.destBlob.StartCopyAsync(
+ this.SourceUri,
+ null,
+ destAccessCondition,
+ Utils.GenerateBlobRequestOptions(this.TransferJob.Destination.BlobRequestOptions),
+ Utils.GenerateOperationContext(this.TransferContext),
+ this.CancellationToken);
+ }
+ else if (null != this.SourceBlob)
+ {
+ AccessCondition sourceAccessCondition =
+ AccessCondition.GenerateIfMatchCondition(this.SourceBlob.Properties.ETag);
+
+ return this.destBlob.StartCopyAsync(
+ this.SourceBlob.GenerateUriWithCredentials(),
+ sourceAccessCondition,
+ destAccessCondition,
+ Utils.GenerateBlobRequestOptions(this.TransferJob.Destination.BlobRequestOptions),
+ Utils.GenerateOperationContext(this.TransferContext),
+ this.CancellationToken);
+ }
+ else
+ {
+ if (BlobType.BlockBlob == this.destBlob.BlobType)
+ {
+ return (this.destBlob as CloudBlockBlob).StartCopyAsync(
+ this.SourceFile.GenerateCopySourceFile(),
+ null,
+ destAccessCondition,
+ Utils.GenerateBlobRequestOptions(this.TransferJob.Destination.BlobRequestOptions),
+ Utils.GenerateOperationContext(this.TransferContext),
+ this.CancellationToken);
+ }
+ else if (BlobType.PageBlob == this.destBlob.BlobType)
+ {
+ throw new InvalidOperationException(Resources.AsyncCopyFromFileToPageBlobNotSupportException);
+ }
+ else if (BlobType.AppendBlob == this.destBlob.BlobType)
+ {
+ throw new InvalidOperationException(Resources.AsyncCopyFromFileToAppendBlobNotSupportException);
+ }
+ else
+ {
+ throw new InvalidOperationException(
+ string.Format(
+ CultureInfo.CurrentCulture,
+ Resources.NotSupportedBlobType,
+ this.destBlob.BlobType));
+ }
+ }
+ }
+
+ protected override void DoHandleGetDestinationException(StorageException se)
+ {
+ if (null != se)
+ {
+ if (0 == string.Compare(se.Message, Constants.BlobTypeMismatch, StringComparison.OrdinalIgnoreCase))
+ {
+ // Currently we use the error message to decide whether this was caused by a blob type mismatch;
+ // we should ask XSCL to expose an error code for this.
+ // Opened workitem 1487579 to track this.
+ throw new InvalidOperationException(Resources.DestinationBlobTypeNotMatch);
+ }
+ }
+ else
+ {
+ if (null != this.SourceBlob && this.SourceBlob.Properties.BlobType != this.destBlob.Properties.BlobType)
+ {
+ throw new InvalidOperationException(Resources.SourceAndDestinationBlobTypeDifferent);
+ }
+ }
+ }
+
+ protected override async Task FetchCopyStateAsync()
+ {
+ await this.destBlob.FetchAttributesAsync(
+ Utils.GenerateConditionWithCustomerCondition(this.TransferJob.Destination.AccessCondition),
+ Utils.GenerateBlobRequestOptions(this.TransferJob.Destination.BlobRequestOptions),
+ Utils.GenerateOperationContext(this.TransferContext),
+ this.CancellationToken);
+
+ return this.destBlob.CopyState;
+ }
+ }
+}
diff --git a/lib/TransferControllers/AsyncCopyControllers/FileAsyncCopyController.cs b/lib/TransferControllers/AsyncCopyControllers/FileAsyncCopyController.cs
new file mode 100644
index 00000000..e4f31628
--- /dev/null
+++ b/lib/TransferControllers/AsyncCopyControllers/FileAsyncCopyController.cs
@@ -0,0 +1,125 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers
+{
+ using System;
+ using System.Globalization;
+ using System.Threading;
+ using System.Threading.Tasks;
+ using Microsoft.WindowsAzure.Storage;
+ using Microsoft.WindowsAzure.Storage.Blob;
+ using Microsoft.WindowsAzure.Storage.DataMovement;
+ using Microsoft.WindowsAzure.Storage.File;
+
+ ///
+ /// Azure file asynchronous copy.
+ ///
+ internal class FileAsyncCopyController : AsyncCopyController
+ {
+ private CloudFile destFile;
+
+ public FileAsyncCopyController(
+ TransferScheduler transferScheduler,
+ TransferJob transferJob,
+ CancellationToken cancellationToken)
+ : base(transferScheduler, transferJob, cancellationToken)
+ {
+ if (null == transferJob.Destination.AzureFile)
+ {
+ throw new ArgumentException(
+ string.Format(
+ CultureInfo.CurrentCulture,
+ Resources.ParameterCannotBeNullException,
+ "Dest.AzureFile"),
+ "transferJob");
+ }
+
+ if ((null == transferJob.Source.SourceUri && null == transferJob.Source.Blob && null == transferJob.Source.AzureFile)
+ || (null != transferJob.Source.SourceUri && null != transferJob.Source.Blob)
+ || (null != transferJob.Source.Blob && null != transferJob.Source.AzureFile)
+ || (null != transferJob.Source.SourceUri && null != transferJob.Source.AzureFile))
+ {
+ throw new ArgumentException(
+ string.Format(
+ CultureInfo.CurrentCulture,
+ Resources.ProvideExactlyOneOfThreeParameters,
+ "Source.SourceUri",
+ "Source.Blob",
+ "Source.AzureFile"),
+ "transferJob");
+ }
+
+ this.destFile = this.TransferJob.Destination.AzureFile;
+ }
+
+ protected override Uri DestUri
+ {
+ get
+ {
+ return this.destFile.Uri;
+ }
+ }
+
+ protected override Task DoFetchDestAttributesAsync()
+ {
+ return this.destFile.FetchAttributesAsync(
+ null,
+ Utils.GenerateFileRequestOptions(this.TransferJob.Destination.FileRequestOptions),
+ null,
+ this.CancellationToken);
+ }
+
+ protected override Task DoStartCopyAsync()
+ {
+ OperationContext operationContext = Utils.GenerateOperationContext(this.TransferContext);
+ if (null != this.SourceUri)
+ {
+ return this.destFile.StartCopyAsync(
+ this.SourceUri,
+ null,
+ null,
+ Utils.GenerateFileRequestOptions(this.TransferJob.Destination.FileRequestOptions),
+ operationContext,
+ this.CancellationToken);
+ }
+ else if (null != this.SourceBlob)
+ {
+ return this.destFile.StartCopyAsync(
+ this.SourceBlob.GenerateCopySourceBlob(),
+ null,
+ null,
+ Utils.GenerateFileRequestOptions(this.TransferJob.Destination.FileRequestOptions),
+ operationContext,
+ this.CancellationToken);
+ }
+ else
+ {
+ return this.destFile.StartCopyAsync(
+ this.SourceFile.GenerateCopySourceFile(),
+ null,
+ null,
+ Utils.GenerateFileRequestOptions(this.TransferJob.Destination.FileRequestOptions),
+ operationContext,
+ this.CancellationToken);
+ }
+ }
+
+ protected override void DoHandleGetDestinationException(StorageException se)
+ {
+ }
+
+ protected override async Task FetchCopyStateAsync()
+ {
+ await this.destFile.FetchAttributesAsync(
+ Utils.GenerateConditionWithCustomerCondition(this.TransferJob.Destination.AccessCondition),
+ Utils.GenerateFileRequestOptions(this.TransferJob.Destination.FileRequestOptions),
+ Utils.GenerateOperationContext(this.TransferContext),
+ this.CancellationToken);
+
+ return this.destFile.CopyState;
+ }
+ }
+}
diff --git a/lib/TransferControllers/ITransferController.cs b/lib/TransferControllers/ITransferController.cs
new file mode 100644
index 00000000..12f94fbf
--- /dev/null
+++ b/lib/TransferControllers/ITransferController.cs
@@ -0,0 +1,27 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers
+{
+ using System;
+ using System.Threading.Tasks;
+
+ internal interface ITransferController
+ {
+ bool HasWork
+ {
+ get;
+ }
+
+ bool IsFinished
+ {
+ get;
+ }
+
+ Task DoWorkAsync();
+
+ void CancelWork();
+ }
+}
diff --git a/lib/TransferControllers/SyncTransferController.cs b/lib/TransferControllers/SyncTransferController.cs
new file mode 100644
index 00000000..a6c7bfdf
--- /dev/null
+++ b/lib/TransferControllers/SyncTransferController.cs
@@ -0,0 +1,201 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+
+namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers
+{
+ using System;
+ using System.Collections.Concurrent;
+ using System.Globalization;
+ using System.Threading;
+ using System.Threading.Tasks;
+ using Microsoft.WindowsAzure.Storage.Blob;
+
+ internal class SyncTransferController : TransferControllerBase
+ {
+ private TransferReaderWriterBase reader;
+ private TransferReaderWriterBase writer;
+
+ public SyncTransferController(
+ TransferScheduler transferScheduler,
+ TransferJob transferJob,
+ CancellationToken userCancellationToken)
+ : base(transferScheduler, transferJob, userCancellationToken)
+ {
+ if (null == transferScheduler)
+ {
+ throw new ArgumentNullException("transferScheduler");
+ }
+
+ if (null == transferJob)
+ {
+ throw new ArgumentNullException("transferJob");
+ }
+
+ this.SharedTransferData = new SharedTransferData()
+ {
+ TransferJob = this.TransferJob,
+ AvailableData = new ConcurrentDictionary(),
+ };
+
+ if (null == transferJob.CheckPoint)
+ {
+ transferJob.CheckPoint = new SingleObjectCheckpoint();
+ }
+
+ reader = this.GetReader(transferJob.Source);
+ writer = this.GetWriter(transferJob.Destination);
+ }
+
+ public SharedTransferData SharedTransferData
+ {
+ get;
+ private set;
+ }
+
+ public bool ErrorOccurred
+ {
+ get;
+ private set;
+ }
+
+ public override bool HasWork
+ {
+ get
+ {
+ var hasWork = (!this.reader.PreProcessed && this.reader.HasWork) || (this.reader.PreProcessed && this.writer.HasWork) || (this.writer.PreProcessed && this.reader.HasWork);
+ return !this.ErrorOccurred && hasWork;
+ }
+ }
+
+ protected override async Task DoWorkInternalAsync()
+ {
+ if (!this.reader.PreProcessed && this.reader.HasWork)
+ {
+ await this.reader.DoWorkInternalAsync();
+ }
+ else if (this.reader.PreProcessed && this.writer.HasWork)
+ {
+ await this.writer.DoWorkInternalAsync();
+ }
+ else if (this.writer.PreProcessed && this.reader.HasWork)
+ {
+ await this.reader.DoWorkInternalAsync();
+ }
+
+ return this.ErrorOccurred || this.writer.IsFinished;
+ }
+
+ protected override void SetErrorState(Exception ex)
+ {
+ this.ErrorOccurred = true;
+ }
+
+ private TransferReaderWriterBase GetReader(TransferLocation sourceLocation)
+ {
+ switch (sourceLocation.TransferLocationType)
+ {
+ case TransferLocationType.Stream:
+ return new StreamedReader(this.Scheduler, this, this.CancellationToken);
+ case TransferLocationType.FilePath:
+ return new StreamedReader(this.Scheduler, this, this.CancellationToken);
+ case TransferLocationType.AzureBlob:
+ if (sourceLocation.Blob is CloudPageBlob)
+ {
+ return new PageBlobReader(this.Scheduler, this, this.CancellationToken);
+ }
+ else if (sourceLocation.Blob is CloudBlockBlob)
+ {
+ return new BlockBasedBlobReader(this.Scheduler, this, this.CancellationToken);
+ }
+ else if (sourceLocation.Blob is CloudAppendBlob)
+ {
+ return new BlockBasedBlobReader(this.Scheduler, this, this.CancellationToken);
+ }
+ else
+ {
+ throw new InvalidOperationException(
+ string.Format(
+ CultureInfo.CurrentCulture,
+ Resources.UnsupportedBlobTypeException,
+ sourceLocation.Blob.BlobType));
+ }
+ case TransferLocationType.AzureFile:
+ return new CloudFileReader(this.Scheduler, this, this.CancellationToken);
+ default:
+ throw new InvalidOperationException(
+ string.Format(
+ CultureInfo.CurrentCulture,
+ Resources.UnsupportedTransferLocationException,
+ sourceLocation.TransferLocationType));
+ }
+ }
+
+ private TransferReaderWriterBase GetWriter(TransferLocation destLocation)
+ {
+ switch (destLocation.TransferLocationType)
+ {
+ case TransferLocationType.Stream:
+ return new StreamedWriter(this.Scheduler, this, this.CancellationToken);
+ case TransferLocationType.FilePath:
+ return new StreamedWriter(this.Scheduler, this, this.CancellationToken);
+ case TransferLocationType.AzureBlob:
+ if (destLocation.Blob is CloudPageBlob)
+ {
+ return new PageBlobWriter(this.Scheduler, this, this.CancellationToken);
+ }
+ else if (destLocation.Blob is CloudBlockBlob)
+ {
+ return new BlockBlobWriter(this.Scheduler, this, this.CancellationToken);
+ }
+ else if (destLocation.Blob is CloudAppendBlob)
+ {
+ return new AppendBlobWriter(this.Scheduler, this, this.CancellationToken);
+ }
+ else
+ {
+ throw new InvalidOperationException(
+ string.Format(
+ CultureInfo.CurrentCulture,
+ Resources.UnsupportedBlobTypeException,
+ destLocation.Blob.BlobType));
+ }
+ case TransferLocationType.AzureFile:
+ return new CloudFileWriter(this.Scheduler, this, this.CancellationToken);
+ default:
+ throw new InvalidOperationException(
+ string.Format(
+ CultureInfo.CurrentCulture,
+ Resources.UnsupportedTransferLocationException,
+ destLocation.TransferLocationType));
+ }
+ }
+
+ protected override void Dispose(bool disposing)
+ {
+ base.Dispose(disposing);
+
+ if (disposing)
+ {
+ if (null != this.reader)
+ {
+ this.reader.Dispose();
+ }
+
+ if (null != this.writer)
+ {
+ this.writer.Dispose();
+ }
+
+ foreach(var transferData in this.SharedTransferData.AvailableData.Values)
+ {
+ transferData.Dispose();
+ }
+
+ this.SharedTransferData.AvailableData.Clear();
+ }
+ }
+ }
+}
diff --git a/lib/TransferControllers/TransferControllerBase.cs b/lib/TransferControllers/TransferControllerBase.cs
new file mode 100644
index 00000000..fe8ee5f0
--- /dev/null
+++ b/lib/TransferControllers/TransferControllerBase.cs
@@ -0,0 +1,336 @@
+//------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft Corporation
+//
+//------------------------------------------------------------------------------
+
+namespace Microsoft.WindowsAzure.Storage.DataMovement.TransferControllers
+{
+ using System;
+ using System.Globalization;
+ using System.Threading;
+ using System.Threading.Tasks;
+ using Microsoft.WindowsAzure.Storage.DataMovement;
+
+ internal abstract class TransferControllerBase : ITransferController, IDisposable
+ {
+ ///
+ /// Count of active tasks in this controller.
+ ///
+ private int activeTasks;
+
+ private volatile bool isFinished = false;
+
+ private object lockOnFinished = new object();
+
+ private int notifiedFinish;
+
+ private CancellationTokenSource cancellationTokenSource = new CancellationTokenSource();
+
+ private CancellationTokenRegistration transferSchedulerCancellationTokenRegistration;
+
+ private CancellationTokenRegistration userCancellationTokenRegistration;
+
+ protected TransferControllerBase(TransferScheduler transferScheduler, TransferJob transferJob, CancellationToken userCancellationToken)
+ {
+ if (null == transferScheduler)
+ {
+ throw new ArgumentNullException("transferScheduler");
+ }
+
+ if (null == transferJob)
+ {
+ throw new ArgumentNullException("transferJob");
+ }
+
+ this.Scheduler = transferScheduler;
+ this.TransferJob = transferJob;
+
+ this.transferSchedulerCancellationTokenRegistration =
+ this.Scheduler.CancellationTokenSource.Token.Register(this.CancelWork);
+
+ this.userCancellationTokenRegistration = userCancellationToken.Register(this.CancelWork);
+ this.TaskCompletionSource = new TaskCompletionSource