@@ -137,6 +137,30 @@ def list_share_files(cmd, client, directory_name=None, timeout=None, exclude_dir
137
137
return results
138
138
139
139
140
def storage_file_upload(client, local_file_path, content_settings=None,
                        metadata=None, validate_content=False, progress_callback=None, max_connections=2,
                        timeout=None):
    """Upload a single local file through a ShareFileClient-style *client*.

    The file is streamed from disk rather than read fully into memory, so
    arbitrarily large files can be uploaded without OOM risk. Returns the
    service response; when it carries a bytearray ``content_md5``, that value
    is rendered as a concatenation of ``hex()`` strings (existing CLI output
    format, preserved as-is).
    """
    options = dict(
        content_settings=content_settings,
        metadata=metadata,
        validate_content=validate_content,
        max_concurrency=max_connections,
        timeout=timeout,
    )
    if progress_callback:
        # Progress reporting is surfaced through the SDK's raw_response_hook.
        options['raw_response_hook'] = progress_callback

    # Pass a stream plus an explicit length so upload_file() can read the
    # data in batches instead of holding the whole file in memory.
    file_size = os.path.getsize(local_file_path)
    with open(local_file_path, 'rb') as stream:
        response = client.upload_file(data=stream, length=file_size, **options)

    if 'content_md5' in response:
        if isinstance(response['content_md5'], bytearray):
            # NOTE(review): hex(b) emits '0x..' per byte with no zero padding,
            # so this is not a conventional hex digest; kept byte-for-byte to
            # preserve the existing user-visible output format.
            response['content_md5'] = ''.join(hex(b) for b in response['content_md5'])

    return response
140
164
def storage_file_upload_batch(cmd, client, destination, source, destination_path=None, pattern=None, dryrun=False,
                              validate_content=False, content_settings=None, max_connections=1, metadata=None,
                              progress_callback=None):
    """Upload local files matching *pattern* under *source* to a file share.

    When ``dryrun`` is set, no upload happens: a list of the would-be
    destination URLs and guessed content types is returned instead.
    Otherwise each file is uploaded (creating destination directories as
    needed) and the list of resulting file URLs is returned.
    """
    from azure.cli.command_modules.storage.util import glob_files_locally, normalize_blob_file_path

    source_files = list(glob_files_locally(source, pattern))
    settings_class = cmd.get_models('_models#ContentSettings')

    if dryrun:
        logger.info('upload files to file share')
        logger.info(' account %s', client.account_name)
        logger.info(' share %s', destination)
        logger.info(' total %d', len(source_files))
        # BUG FIX: the previous code set ``dst = None`` and computed
        # dirname/basename once before the comprehension, which raises
        # TypeError on os.path.dirname(None) (and would otherwise report the
        # same URL for every file). Compute the path per file from the
        # comprehension's own ``dst``.
        return [{'File': create_file_url(client,
                                         dir_name=os.path.dirname(dst),
                                         file_name=os.path.basename(dst)),
                 'Type': guess_content_type(src, content_settings, settings_class).content_type}
                for src, dst in source_files]

    # TODO: Performance improvement
    # 1. Upload files in parallel
    def _upload_action(src, dst):
        # Resolve the final share-relative path for this file.
        dst = normalize_blob_file_path(destination_path, dst)
        dir_name = os.path.dirname(dst)
        file_name = os.path.basename(dst)

        # Ensure the target directory chain exists before uploading.
        _make_directory_in_files_share(client, destination, dir_name, V2=True)

        logger.warning('uploading %s', src)
        storage_file_upload(client.get_file_client(dst), src, content_settings, metadata, validate_content,
                            progress_callback, max_connections)

        return create_file_url(client, dir_name=dir_name, file_name=file_name)

    return list(_upload_action(src, dst) for src, dst in source_files)
181
209
@@ -384,7 +412,6 @@ def _create_file_and_directory_from_file(cmd, file_service, source_file_service,
384
412
def _make_directory_in_files_share (file_service , file_share , directory_path , existing_dirs = None , V2 = False ):
385
413
"""
386
414
Create directories recursively.
387
-
388
415
This method accept a existing_dirs set which serves as the cache of existing directory. If the
389
416
parameter is given, the method will search the set first to avoid repeatedly create directory
390
417
which already exists.
0 commit comments