1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
1326
1327
1328
1329
1330
1331
1332
1333
1334
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
1347
1348
1349
1350
1351
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
1384
1385
1386
1387
1388
1389
1390
1391
1392
1393
1394
1395
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405
1406
1407
1408
1409
1410
1411
1412
1413
1414
1415
1416
1417
1418
1419
1420
1421
1422
1423
1424
1425
1426
1427
1428
1429
1430
1431
1432
1433
1434
1435
1436
1437
1438
1439
1440
1441
1442
1443
1444
1445
1446
1447
1448
1449
1450
1451
1452
1453
1454
1455
1456
1457
1458
1459
1460
1461
1462
1463
1464
1465
1466
1467
1468
1469
1470
1471
1472
1473
1474
1475
1476
1477
1478
1479
1480
1481
1482
1483
1484
1485
1486
1487
1488
1489
1490
1491
1492
1493
1494
1495
1496
1497
1498
1499
1500
1501
1502
1503
1504
1505
1506
1507
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
# pylint: disable=too-few-public-methods, too-many-instance-attributes
# pylint: disable=super-init-not-called, too-many-lines
from enum import Enum
from typing import Any, Callable, Dict, List, Optional, Union, TYPE_CHECKING
from azure.core import CaseInsensitiveEnumMeta
from azure.core.paging import PageIterator
from azure.core.exceptions import HttpResponseError
from ._shared import decode_base64_to_bytes
from ._shared.response_handlers import return_context_and_deserialized, process_storage_error
from ._shared.models import DictMixin, get_enum_value
from ._generated.models import AccessPolicy as GenAccessPolicy
from ._generated.models import ArrowField
from ._generated.models import CorsRule as GeneratedCorsRule
from ._generated.models import Logging as GeneratedLogging
from ._generated.models import Metrics as GeneratedMetrics
from ._generated.models import RetentionPolicy as GeneratedRetentionPolicy
from ._generated.models import StaticWebsite as GeneratedStaticWebsite
if TYPE_CHECKING:
from datetime import datetime
from ._generated.models import PageList
# Parse a generated PageList into a single list of PageRange sorted by start.
def parse_page_list(page_list: "PageList") -> List["PageRange"]:
    """Parse a generated PageList into a single, start-sorted list of PageRange.

    Page ranges and clear ranges arrive as two separate (individually sorted)
    lists; they are merged here so callers see one chronological sequence.

    :param page_list: The generated PageList holding 'page_range' and 'clear_range' lists.
    :type page_list: ~azure.storage.blob._generated.models.PageList
    :returns: All ranges, each tagged with cleared=True/False, sorted by start offset.
    :rtype: List[PageRange]
    :raises ValueError: If either 'page_range' or 'clear_range' is None.
    """
    page_ranges = page_list.page_range
    clear_ranges = page_list.clear_range
    if page_ranges is None:
        raise ValueError("PageList's 'page_range' is malformed or None.")
    if clear_ranges is None:
        # Message previously referenced 'clear_ranges'; the attribute is 'clear_range'.
        raise ValueError("PageList's 'clear_range' is malformed or None.")

    ranges = []
    p_i, c_i = 0, 0

    # Standard two-pointer merge: take whichever list's head has the smaller start.
    while p_i < len(page_ranges) and c_i < len(clear_ranges):
        p, c = page_ranges[p_i], clear_ranges[c_i]
        if p.start < c.start:
            ranges.append(
                PageRange(start=p.start, end=p.end, cleared=False)
            )
            p_i += 1
        else:
            ranges.append(
                PageRange(start=c.start, end=c.end, cleared=True)
            )
            c_i += 1

    # Grab remaining elements in either list
    ranges += [PageRange(start=r.start, end=r.end, cleared=False) for r in page_ranges[p_i:]]
    ranges += [PageRange(start=r.start, end=r.end, cleared=True) for r in clear_ranges[c_i:]]
    return ranges
class BlobType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Specifies the type of a blob."""
    BLOCKBLOB = "BlockBlob"  #: Block blob.
    PAGEBLOB = "PageBlob"  #: Page blob.
    APPENDBLOB = "AppendBlob"  #: Append blob.
class BlockState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Block blob block types, indicating which set of blocks to operate on."""
    COMMITTED = 'Committed' #: Committed blocks.
    LATEST = 'Latest' #: Latest blocks.
    UNCOMMITTED = 'Uncommitted' #: Uncommitted blocks.
class StandardBlobTier(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """
    Specifies the blob tier to set the blob to. This is only applicable for
    block blobs on standard storage accounts.
    """
    ARCHIVE = 'Archive' #: Archive tier.
    COOL = 'Cool' #: Cool tier.
    COLD = 'Cold' #: Cold tier.
    HOT = 'Hot' #: Hot tier.
class PremiumPageBlobTier(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """
    Specifies the page blob tier to set the blob to. This is only applicable to page
    blobs on premium storage accounts. Please take a look at:
    https://learn.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets
    for detailed information on the corresponding IOPS and throughput per PageBlobTier.
    """
    P4 = 'P4' #: P4 Tier.
    P6 = 'P6' #: P6 Tier.
    P10 = 'P10' #: P10 Tier.
    P15 = 'P15' #: P15 Tier.
    P20 = 'P20' #: P20 Tier.
    P30 = 'P30' #: P30 Tier.
    P40 = 'P40' #: P40 Tier.
    P50 = 'P50' #: P50 Tier.
    P60 = 'P60' #: P60 Tier.
class QuickQueryDialect(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Specifies the quick query input/output dialect."""
    DELIMITEDTEXT = 'DelimitedTextDialect'  #: Delimited text dialect.
    DELIMITEDJSON = 'DelimitedJsonDialect'  #: Delimited JSON dialect.
    PARQUET = 'ParquetDialect'  #: Parquet dialect.
class SequenceNumberAction(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Sequence number actions. Indicates how the service should modify the blob's sequence number."""
    INCREMENT = 'increment'
    """
    Increments the value of the sequence number by 1. If specifying this option,
    do not include the x-ms-blob-sequence-number header.
    """
    MAX = 'max'
    """
    Sets the sequence number to be the higher of the value included with the
    request and the value currently stored for the blob.
    """
    UPDATE = 'update'
    """Sets the sequence number to the value included with the request."""
class PublicAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """
    Specifies whether data in the container may be accessed publicly and the level of access.
    Possible values: 'off', 'blob', 'container'.
    """
    OFF = 'off'
    """
    Specifies that there is no public read access for both the container and blobs within the container.
    Clients cannot enumerate the containers within the storage account as well as the blobs within the container.
    """
    BLOB = 'blob'
    """
    Specifies public read access for blobs. Blob data within this container can be read
    via anonymous request, but container data is not available. Clients cannot enumerate
    blobs within the container via anonymous request.
    """
    CONTAINER = 'container'
    """
    Specifies full public read access for container and blob data. Clients can enumerate
    blobs within the container via anonymous request, but cannot enumerate containers
    within the storage account.
    """
class BlobImmutabilityPolicyMode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """
    Specifies the immutability policy mode to set on the blob.
    "Mutable" can only be returned by service, don't set to "Mutable".
    """
    UNLOCKED = "Unlocked"  #: Unlocked mode; may be set by clients.
    LOCKED = "Locked"  #: Locked mode; may be set by clients.
    MUTABLE = "Mutable"  #: Returned by the service only; do not set.
class RetentionPolicy(GeneratedRetentionPolicy):
    """The retention policy which determines how long the associated data should
    persist.

    :param bool enabled:
        Indicates whether a retention policy is enabled for the storage service.
        The default value is False.
    :param Optional[int] days:
        Indicates the number of days that metrics or logging or
        soft-deleted data should be retained. All data older than this value will
        be deleted. If enabled=True, the number of days must be specified.
    """

    enabled: bool = False
    days: Optional[int] = None

    def __init__(self, enabled: bool = False, days: Optional[int] = None) -> None:
        super().__init__(enabled=enabled, days=days, allow_permanent_delete=None)
        # An enabled policy without a day count is not a valid configuration.
        if self.enabled and self.days is None:
            raise ValueError("If policy is enabled, 'days' must be specified.")

    @classmethod
    def _from_generated(cls, generated):
        # A missing generated policy maps to the disabled default.
        if not generated:
            return cls()
        return cls(enabled=generated.enabled, days=generated.days)
class BlobAnalyticsLogging(GeneratedLogging):
    """Azure Analytics Logging settings.

    :keyword str version:
        The version of Storage Analytics to configure. The default value is 1.0.
    :keyword bool delete:
        Indicates whether all delete requests should be logged. The default value is `False`.
    :keyword bool read:
        Indicates whether all read requests should be logged. The default value is `False`.
    :keyword bool write:
        Indicates whether all write requests should be logged. The default value is `False`.
    :keyword ~azure.storage.blob.RetentionPolicy retention_policy:
        Determines how long the associated data should persist. If not specified the retention
        policy will be disabled by default.
    """

    version: str = '1.0'
    """The version of Storage Analytics to configure."""
    delete: bool = False
    """Indicates whether all delete requests should be logged."""
    read: bool = False
    """Indicates whether all read requests should be logged."""
    write: bool = False
    """Indicates whether all write requests should be logged."""
    retention_policy: RetentionPolicy = RetentionPolicy()
    """Determines how long the associated data should persist."""

    def __init__(self, **kwargs: Any) -> None:
        self.version = kwargs.get('version', '1.0')
        # The three request-type flags share the same default and lookup pattern.
        for flag in ('delete', 'read', 'write'):
            setattr(self, flag, kwargs.get(flag, False))
        self.retention_policy = kwargs.get('retention_policy') or RetentionPolicy()

    @classmethod
    def _from_generated(cls, generated):
        if generated:
            return cls(
                version=generated.version,
                delete=generated.delete,
                read=generated.read,
                write=generated.write,
                retention_policy=RetentionPolicy._from_generated(generated.retention_policy)  # pylint: disable=protected-access
            )
        # No generated settings: everything stays at its disabled default.
        return cls()
class Metrics(GeneratedMetrics):
    """A summary of request statistics grouped by API in hour or minute aggregates
    for blobs.

    :keyword str version:
        The version of Storage Analytics to configure. The default value is 1.0.
    :keyword bool enabled:
        Indicates whether metrics are enabled for the Blob service.
        The default value is `False`.
    :keyword bool include_apis:
        Indicates whether metrics should generate summary statistics for called API operations.
    :keyword ~azure.storage.blob.RetentionPolicy retention_policy:
        Determines how long the associated data should persist. If not specified the retention
        policy will be disabled by default.
    """

    version: str = '1.0'
    """The version of Storage Analytics to configure."""
    enabled: bool = False
    """Indicates whether metrics are enabled for the Blob service."""
    include_apis: Optional[bool]
    """Indicates whether metrics should generate summary statistics for called API operations."""
    retention_policy: RetentionPolicy = RetentionPolicy()
    """Determines how long the associated data should persist."""

    def __init__(self, **kwargs: Any) -> None:
        self.version = kwargs.get('version', '1.0')
        self.enabled = kwargs.get('enabled', False)
        self.include_apis = kwargs.get('include_apis')
        # Fall back to a disabled retention policy when none was supplied.
        configured_policy = kwargs.get('retention_policy')
        self.retention_policy = configured_policy if configured_policy else RetentionPolicy()

    @classmethod
    def _from_generated(cls, generated):
        if generated:
            return cls(
                version=generated.version,
                enabled=generated.enabled,
                include_apis=generated.include_apis,
                retention_policy=RetentionPolicy._from_generated(generated.retention_policy)  # pylint: disable=protected-access
            )
        return cls()
class StaticWebsite(GeneratedStaticWebsite):
    """The properties that enable an account to host a static website.

    :keyword bool enabled:
        Indicates whether this account is hosting a static website.
        The default value is `False`.
    :keyword str index_document:
        The default name of the index page under each directory.
    :keyword str error_document404_path:
        The absolute path of the custom 404 page.
    :keyword str default_index_document_path:
        Absolute path of the default index page.
    """

    enabled: bool = False
    """Indicates whether this account is hosting a static website."""
    index_document: Optional[str]
    """The default name of the index page under each directory."""
    error_document404_path: Optional[str]
    """The absolute path of the custom 404 page."""
    default_index_document_path: Optional[str]
    """Absolute path of the default index page."""

    def __init__(self, **kwargs: Any) -> None:
        self.enabled = kwargs.get('enabled', False)
        # The document paths only carry meaning while static hosting is enabled.
        self.index_document = kwargs.get('index_document') if self.enabled else None
        self.error_document404_path = kwargs.get('error_document404_path') if self.enabled else None
        self.default_index_document_path = kwargs.get('default_index_document_path') if self.enabled else None

    @classmethod
    def _from_generated(cls, generated):
        if generated:
            return cls(
                enabled=generated.enabled,
                index_document=generated.index_document,
                error_document404_path=generated.error_document404_path,
                default_index_document_path=generated.default_index_document_path
            )
        return cls()
class CorsRule(GeneratedCorsRule):
    """CORS is an HTTP feature that enables a web application running under one
    domain to access resources in another domain. Web browsers implement a
    security restriction known as same-origin policy that prevents a web page
    from calling APIs in a different domain; CORS provides a secure way to
    allow one domain (the origin domain) to call APIs in another domain.

    :param list(str) allowed_origins:
        A list of origin domains that will be allowed via CORS, or "*" to allow
        all domains. The list of must contain at least one entry. Limited to 64
        origin domains. Each allowed origin can have up to 256 characters.
    :param list(str) allowed_methods:
        A list of HTTP methods that are allowed to be executed by the origin.
        The list of must contain at least one entry. For Azure Storage,
        permitted methods are DELETE, GET, HEAD, MERGE, POST, OPTIONS or PUT.
    :keyword list(str) allowed_headers:
        Defaults to an empty list. A list of headers allowed to be part of
        the cross-origin request. Limited to 64 defined headers and 2 prefixed
        headers. Each header can be up to 256 characters.
    :keyword list(str) exposed_headers:
        Defaults to an empty list. A list of response headers to expose to CORS
        clients. Limited to 64 defined headers and two prefixed headers. Each
        header can be up to 256 characters.
    :keyword int max_age_in_seconds:
        The number of seconds that the client/browser should cache a
        preflight response.
    """

    allowed_origins: str
    """The comma-delimited string representation of the list of origin domains that will be allowed via
    CORS, or "*" to allow all domains."""
    allowed_methods: str
    """The comma-delimited string representation of the list HTTP methods that are allowed to be executed
    by the origin."""
    exposed_headers: str
    """The comma-delimited string representation of the list of response headers to expose to CORS clients."""
    allowed_headers: str
    """The comma-delimited string representation of the list of headers allowed to be part of the cross-origin
    request."""
    max_age_in_seconds: int
    """The number of seconds that the client/browser should cache a pre-flight response."""

    def __init__(self, allowed_origins: List[str], allowed_methods: List[str], **kwargs: Any) -> None:
        # All list-valued settings are stored as comma-delimited strings.
        join = ','.join
        self.allowed_origins = join(allowed_origins)
        self.allowed_methods = join(allowed_methods)
        self.allowed_headers = join(kwargs.get('allowed_headers', []))
        self.exposed_headers = join(kwargs.get('exposed_headers', []))
        self.max_age_in_seconds = kwargs.get('max_age_in_seconds', 0)

    @staticmethod
    def _to_generated(rules: Optional[List["CorsRule"]]) -> Optional[List[GeneratedCorsRule]]:
        if rules is None:
            return rules
        return [
            GeneratedCorsRule(
                allowed_origins=rule.allowed_origins,
                allowed_methods=rule.allowed_methods,
                allowed_headers=rule.allowed_headers,
                exposed_headers=rule.exposed_headers,
                max_age_in_seconds=rule.max_age_in_seconds
            )
            for rule in rules
        ]

    @classmethod
    def _from_generated(cls, generated):
        # The generated model stores comma-delimited strings; wrap each in a
        # single-element list so __init__'s join round-trips the value.
        return cls(
            [generated.allowed_origins],
            [generated.allowed_methods],
            allowed_headers=[generated.allowed_headers],
            exposed_headers=[generated.exposed_headers],
            max_age_in_seconds=generated.max_age_in_seconds,
        )
class ContainerProperties(DictMixin):
    """Blob container's properties class.

    Returned ``ContainerProperties`` instances expose these values through a
    dictionary interface, for example: ``container_props["last_modified"]``.
    Additionally, the container name is available as ``container_props["name"]``."""

    name: str
    """Name of the container."""
    last_modified: "datetime"
    """A datetime object representing the last time the container was modified."""
    etag: str
    """The ETag contains a value that you can use to perform operations conditionally."""
    lease: "LeaseProperties"
    """Stores all the lease information for the container."""
    public_access: Optional[str]
    """Specifies whether data in the container may be accessed publicly and the level of access."""
    has_immutability_policy: bool
    """Represents whether the container has an immutability policy."""
    has_legal_hold: bool
    """Represents whether the container has a legal hold."""
    immutable_storage_with_versioning_enabled: bool
    """Represents whether immutable storage with versioning enabled on the container."""
    metadata: Dict[str, Any]
    """A dict with name-value pairs to associate with the container as metadata."""
    encryption_scope: Optional["ContainerEncryptionScope"]
    """The default encryption scope configuration for the container."""
    deleted: Optional[bool]
    """Whether this container was deleted."""
    version: Optional[str]
    """The version of a deleted container."""

    def __init__(self, **kwargs: Any) -> None:
        # Most values arrive as raw response headers keyed by header name.
        self.name = None  # type: ignore [assignment]
        self.last_modified = kwargs.get('Last-Modified')  # type: ignore [assignment]
        self.etag = kwargs.get('ETag')  # type: ignore [assignment]
        self.lease = LeaseProperties(**kwargs)
        self.public_access = kwargs.get('x-ms-blob-public-access')
        self.has_immutability_policy = kwargs.get('x-ms-has-immutability-policy')  # type: ignore [assignment]
        self.has_legal_hold = kwargs.get('x-ms-has-legal-hold')  # type: ignore [assignment]
        self.metadata = kwargs.get('metadata')  # type: ignore [assignment]
        self.immutable_storage_with_versioning_enabled = kwargs.get('x-ms-immutable-storage-with-versioning-enabled')  # type: ignore [assignment] # pylint: disable=name-too-long
        # Deleted-container fields are not populated from headers here.
        self.deleted = None
        self.version = None
        # Only build an encryption scope object when a default scope header is present.
        scope = kwargs.get('x-ms-default-encryption-scope')
        self.encryption_scope = ContainerEncryptionScope(
            default_encryption_scope=scope,
            prevent_encryption_scope_override=kwargs.get('x-ms-deny-encryption-scope-override', False)
        ) if scope else None

    @classmethod
    def _from_generated(cls, generated):
        gen_props = generated.properties
        props = cls()
        props.name = generated.name
        props.deleted = generated.deleted
        props.version = generated.version
        props.metadata = generated.metadata
        props.last_modified = gen_props.last_modified
        props.etag = gen_props.etag
        props.public_access = gen_props.public_access
        props.has_immutability_policy = gen_props.has_immutability_policy
        props.has_legal_hold = gen_props.has_legal_hold
        props.immutable_storage_with_versioning_enabled = gen_props.is_immutable_storage_with_versioning_enabled  # pylint: disable=line-too-long, name-too-long
        props.lease = LeaseProperties._from_generated(generated)  # pylint: disable=protected-access
        props.encryption_scope = ContainerEncryptionScope._from_generated(generated)  # pylint: disable=protected-access
        return props
class ContainerPropertiesPaged(PageIterator):
    """An Iterable of Container properties.

    :param Callable command: Function to retrieve the next page of items.
    :param Optional[str] prefix: Filters the results to return only containers whose names
        begin with the specified prefix.
    :param Optional[int] results_per_page: The maximum number of container names to retrieve per call.
    :param Optional[str] continuation_token: An opaque continuation token.
    """

    service_endpoint: Optional[str]
    """The service URL."""
    prefix: Optional[str]
    """A container name prefix being used to filter the list."""
    marker: Optional[str]
    """The continuation token of the current page of results."""
    results_per_page: Optional[int]
    """The maximum number of results retrieved per API call."""
    continuation_token: Optional[str]
    """The continuation token to retrieve the next page of results."""
    location_mode: Optional[str]
    """The location mode being used to list results."""
    current_page: List["ContainerProperties"]
    """The current page of listed results."""

    def __init__(
        self, command: Callable,
        prefix: Optional[str] = None,
        results_per_page: Optional[int] = None,
        continuation_token: Optional[str] = None
    ) -> None:
        super().__init__(
            get_next=self._get_next_cb,
            extract_data=self._extract_data_cb,
            continuation_token=continuation_token or ""
        )
        self._command = command
        self.prefix = prefix
        self.results_per_page = results_per_page
        # Populated lazily as pages are fetched.
        self.service_endpoint = None
        self.marker = None
        self.location_mode = None
        self.current_page = []

    def _get_next_cb(self, continuation_token):
        # Delegate to the injected command; translate service failures.
        try:
            return self._command(
                marker=continuation_token or None,
                maxresults=self.results_per_page,
                cls=return_context_and_deserialized,
                use_location=self.location_mode)
        except HttpResponseError as error:
            process_storage_error(error)

    def _extract_data_cb(self, get_next_return):
        self.location_mode, self._response = get_next_return
        response = self._response
        self.service_endpoint = response.service_endpoint
        self.prefix = response.prefix
        self.marker = response.marker
        self.results_per_page = response.max_results
        self.current_page = [self._build_item(container) for container in response.container_items]
        return response.next_marker or None, self.current_page

    @staticmethod
    def _build_item(item):
        return ContainerProperties._from_generated(item)  # pylint: disable=protected-access
class ImmutabilityPolicy(DictMixin):
    """Optional parameters for setting the immutability policy of a blob, blob snapshot or blob version.

    .. versionadded:: 12.10.0
        This was introduced in API version '2020-10-02'.

    :keyword ~datetime.datetime expiry_time:
        Specifies the date time when the blobs immutability policy is set to expire.
    :keyword str or ~azure.storage.blob.BlobImmutabilityPolicyMode policy_mode:
        Specifies the immutability policy mode to set on the blob.
        Possible values to set include: "Locked", "Unlocked".
        "Mutable" can only be returned by service, don't set to "Mutable".
    """

    expiry_time: Optional["datetime"] = None
    """Specifies the date time when the blobs immutability policy is set to expire."""
    policy_mode: Optional[str] = None
    """Specifies the immutability policy mode to set on the blob."""

    def __init__(self, **kwargs: Any) -> None:
        self.expiry_time = kwargs.pop('expiry_time', None)
        self.policy_mode = kwargs.pop('policy_mode', None)

    @classmethod
    def _from_generated(cls, generated):
        gen_props = generated.properties
        return cls(
            expiry_time=gen_props.immutability_policy_expires_on,
            policy_mode=gen_props.immutability_policy_mode,
        )
class FilteredBlob(DictMixin):
    """Blob info from a Filter Blobs API call."""

    name: str
    """Blob name"""
    container_name: Optional[str]
    """Container name."""
    tags: Optional[Dict[str, str]]
    """Key value pairs of blob tags."""

    def __init__(self, **kwargs: Any) -> None:
        # All fields default to None when absent from kwargs.
        self.name = kwargs.get('name')
        self.container_name = kwargs.get('container_name')
        self.tags = kwargs.get('tags')
class LeaseProperties(DictMixin):
    """Blob Lease Properties."""

    status: str
    """The lease status of the blob. Possible values: locked|unlocked"""
    state: str
    """Lease state of the blob. Possible values: available|leased|expired|breaking|broken"""
    duration: Optional[str]
    """When a blob is leased, specifies whether the lease is of infinite or fixed duration."""

    def __init__(self, **kwargs: Any) -> None:
        # Values come in as raw x-ms-* response headers; normalize enum values to strings.
        self.status = get_enum_value(kwargs.get('x-ms-lease-status'))
        self.state = get_enum_value(kwargs.get('x-ms-lease-state'))
        self.duration = get_enum_value(kwargs.get('x-ms-lease-duration'))

    @classmethod
    def _from_generated(cls, generated):
        gen_props = generated.properties
        lease = cls()
        lease.status = get_enum_value(gen_props.lease_status)
        lease.state = get_enum_value(gen_props.lease_state)
        lease.duration = get_enum_value(gen_props.lease_duration)
        return lease
class ContentSettings(DictMixin):
    """The content settings of a blob.

    :param Optional[str] content_type:
        The content type specified for the blob. If no content type was
        specified, the default content type is application/octet-stream.
    :param Optional[str] content_encoding:
        If the content_encoding has previously been set
        for the blob, that value is stored.
    :param Optional[str] content_language:
        If the content_language has previously been set
        for the blob, that value is stored.
    :param Optional[str] content_disposition:
        content_disposition conveys additional information about how to
        process the response payload, and also can be used to attach
        additional metadata. If content_disposition has previously been set
        for the blob, that value is stored.
    :param Optional[str] cache_control:
        If the cache_control has previously been set for
        the blob, that value is stored.
    :param Optional[bytearray] content_md5:
        If the content_md5 has been set for the blob, this response
        header is stored so that the client can check for message content
        integrity.
    """

    content_type: Optional[str] = None
    """The content type specified for the blob."""
    content_encoding: Optional[str] = None
    """The content encoding specified for the blob."""
    content_language: Optional[str] = None
    """The content language specified for the blob."""
    content_disposition: Optional[str] = None
    """The content disposition specified for the blob."""
    cache_control: Optional[str] = None
    """The cache control specified for the blob."""
    content_md5: Optional[bytearray] = None
    """The content md5 specified for the blob."""

    def __init__(
        self, content_type: Optional[str] = None,
        content_encoding: Optional[str] = None,
        content_language: Optional[str] = None,
        content_disposition: Optional[str] = None,
        cache_control: Optional[str] = None,
        content_md5: Optional[bytearray] = None,
        **kwargs: Any
    ) -> None:
        # Explicit arguments win; otherwise fall back to the raw response headers.
        self.content_type = content_type if content_type else kwargs.get('Content-Type')
        self.content_encoding = content_encoding if content_encoding else kwargs.get('Content-Encoding')
        self.content_language = content_language if content_language else kwargs.get('Content-Language')
        self.content_disposition = content_disposition if content_disposition else kwargs.get('Content-Disposition')
        self.cache_control = cache_control if cache_control else kwargs.get('Cache-Control')
        self.content_md5 = content_md5 if content_md5 else kwargs.get('Content-MD5')

    @classmethod
    def _from_generated(cls, generated):
        # Empty strings from the generated model are normalized to None.
        gen_props = generated.properties
        settings = cls()
        settings.content_type = gen_props.content_type or None
        settings.content_encoding = gen_props.content_encoding or None
        settings.content_language = gen_props.content_language or None
        settings.content_md5 = gen_props.content_md5 or None
        settings.content_disposition = gen_props.content_disposition or None
        settings.cache_control = gen_props.cache_control or None
        return settings
class CopyProperties(DictMixin):
    """Blob Copy Properties.

    All values are `None` if this blob has never been the destination of a
    Copy Blob operation, or if the blob was modified after a concluded Copy
    Blob operation (for example via Set Blob Properties, Upload Blob, or
    Commit Block List).
    """

    id: Optional[str]
    """String identifier of the last attempted Copy Blob operation that targeted this blob."""
    source: Optional[str]
    """Source URL (up to 2 KB in length) used in the last attempted Copy Blob
    operation where this blob was the destination."""
    status: Optional[str]
    """State of the copy operation identified by the copy id:
    'success' - copy completed successfully;
    'pending' - copy in progress (check status_description for intermittent,
    non-fatal errors that impede progress without causing failure);
    'aborted' - copy ended by Abort Copy Blob;
    'failed' - copy failed, see status_description for details."""
    progress: Optional[str]
    """Bytes copied out of total source bytes in the last attempted Copy Blob
    operation targeting this blob. Can show between 0 and Content-Length bytes copied."""
    completion_time: Optional["datetime"]
    """Conclusion time of the last attempted Copy Blob operation targeting this
    blob — the time of a completed, aborted, or failed copy attempt."""
    status_description: Optional[str]
    """Present only when x-ms-copy-status is 'failed' or 'pending'; describes
    the cause of a fatal or non-fatal copy failure."""
    incremental_copy: Optional[bool]
    """Whether the copy was an incremental copy of a page blob snapshot, where
    only the differential changes since the previously copied snapshot are
    transferred to the destination."""
    destination_snapshot: Optional["datetime"]
    """Included for incremental copy blobs/snapshots when x-ms-copy-status is
    'success': snapshot time of the last successful incremental copy snapshot."""

    def __init__(self, **kwargs: Any) -> None:
        # Populated directly from the raw x-ms-copy-* response headers.
        self.id = kwargs.get('x-ms-copy-id')
        self.source = kwargs.get('x-ms-copy-source')
        self.status = get_enum_value(kwargs.get('x-ms-copy-status'))
        self.progress = kwargs.get('x-ms-copy-progress')
        self.completion_time = kwargs.get('x-ms-copy-completion-time')
        self.status_description = kwargs.get('x-ms-copy-status-description')
        self.incremental_copy = kwargs.get('x-ms-incremental-copy')
        self.destination_snapshot = kwargs.get('x-ms-copy-destination-snapshot')

    @classmethod
    def _from_generated(cls, generated):
        """Internal: build from a generated-layer blob item, normalizing
        falsy values to None."""
        props = generated.properties
        parsed = cls()
        parsed.id = props.copy_id or None
        parsed.status = get_enum_value(props.copy_status) or None
        parsed.source = props.copy_source or None
        parsed.progress = props.copy_progress or None
        parsed.completion_time = props.copy_completion_time or None
        parsed.status_description = props.copy_status_description or None
        parsed.incremental_copy = props.incremental_copy or None
        parsed.destination_snapshot = props.destination_snapshot or None
        return parsed
class BlobBlock(DictMixin):
    """BlockBlob Block class.

    :param str block_id:
        Block id.
    :param BlockState state:
        Block state. Possible values: BlockState.COMMITTED | BlockState.UNCOMMITTED
    """

    block_id: str
    """Block id."""
    state: BlockState
    """Block state."""
    size: int
    """Block size."""

    def __init__(self, block_id: str, state: BlockState = BlockState.LATEST) -> None:
        # NOTE: the id is exposed as `id` (not `block_id`) for compatibility
        # with existing callers.
        self.id = block_id
        self.state = state
        self.size = None  # type: ignore [assignment]

    @classmethod
    def _from_generated(cls, generated):
        """Internal: build a BlobBlock from a generated-layer block entry."""
        try:
            block_id = decode_base64_to_bytes(generated.name).decode('utf-8')
        except UnicodeDecodeError:
            # Workaround: large blocks uploaded through upload_blob may have
            # block ids that were never base64 encoded (the service expects
            # base64). If decoding fails, the id was stored raw, so use the
            # returned value verbatim.
            block_id = generated.name
        block = cls(block_id)
        block.size = generated.size
        return block
class PageRange(DictMixin):
    """Page Range for page blob.

    :param int start:
        Start of page range in bytes.
    :param int end:
        End of page range in bytes.
    """

    start: Optional[int] = None
    """Start of page range in bytes."""
    end: Optional[int] = None
    """End of page range in bytes."""
    cleared: bool
    """Whether the range has been cleared."""

    def __init__(self, start: Optional[int] = None, end: Optional[int] = None, *, cleared: bool = False) -> None:
        self.cleared = cleared
        self.end = end
        self.start = start
class PageRangePaged(PageIterator):
    """A paged iterable of page ranges, fetched lazily from the service."""

    def __init__(self, command, results_per_page=None, continuation_token=None):
        super().__init__(
            get_next=self._get_next_cb,
            extract_data=self._extract_data_cb,
            continuation_token=continuation_token or ""
        )
        self._command = command
        self.results_per_page = results_per_page
        self.location_mode = None
        self.current_page = []

    def _get_next_cb(self, continuation_token):
        """Fetch the next page from the service, mapping HTTP failures to
        storage errors."""
        try:
            return self._command(
                marker=continuation_token or None,
                maxresults=self.results_per_page,
                cls=return_context_and_deserialized,
                use_location=self.location_mode)
        except HttpResponseError as exc:
            process_storage_error(exc)

    def _extract_data_cb(self, get_next_return):
        """Unpack a raw response into (next continuation token, page items)."""
        self.location_mode, self._response = get_next_return
        self.current_page = self._build_page(self._response)
        return self._response.next_marker or None, self.current_page

    @staticmethod
    def _build_page(response):
        """Parse one response into a list of page ranges; signal exhaustion
        for an empty response."""
        if not response:
            raise StopIteration
        return parse_page_list(response)
class ContainerSasPermissions(object):
    """ContainerSasPermissions class to be used with the
    :func:`~azure.storage.blob.generate_container_sas` function and
    for the AccessPolicies used with
    :func:`~azure.storage.blob.ContainerClient.set_container_access_policy`.

    :param bool read:
        Read the content, properties, metadata or block list of any blob in the
        container. Use any blob in the container as the source of a copy operation.
    :param bool write:
        For any blob in the container, create or write content, properties,
        metadata, or block list. Snapshot or lease the blob. Resize the blob
        (page blob only). Use the blob as the destination of a copy operation
        within the same account. Note: You cannot grant permissions to read or
        write container properties or metadata, nor to lease a container, with
        a container SAS. Use an account SAS instead.
    :param bool delete:
        Delete any blob in the container. Note: You cannot grant permissions to
        delete a container with a container SAS. Use an account SAS instead.
    :param bool delete_previous_version:
        Delete the previous blob version for the versioning enabled storage account.
    :param bool list:
        List blobs in the container.
    :param bool tag:
        Set or get tags on the blobs in the container.
    :keyword bool add:
        Add a block to an append blob.
    :keyword bool create:
        Write a new blob, snapshot a blob, or copy a blob to a new blob.
    :keyword bool permanent_delete:
        To enable permanent delete on the blob is permitted.
    :keyword bool filter_by_tags:
        To enable finding blobs by tags.
    :keyword bool move:
        Move a blob or a directory and its contents to a new location.
    :keyword bool execute:
        Get the system properties and, if the hierarchical namespace is enabled for the storage account,
        get the POSIX ACL of a blob.
    :keyword bool set_immutability_policy:
        To enable operations related to set/delete immutability policy.
        To get immutability policy, you just need read permission.
    """

    read: bool = False
    """The read permission for container SAS."""
    write: bool = False
    """The write permission for container SAS."""
    delete: bool = False
    """The delete permission for container SAS."""
    delete_previous_version: bool = False
    """Permission to delete previous blob version for versioning enabled
    storage accounts."""
    list: bool = False
    """The list permission for container SAS."""
    tag: bool = False
    """Set or get tags on the blobs in the container."""
    add: Optional[bool]
    """Add a block to an append blob."""
    create: Optional[bool]
    """Write a new blob, snapshot a blob, or copy a blob to a new blob."""
    permanent_delete: Optional[bool]
    """To enable permanent delete on the blob is permitted."""
    filter_by_tags: Optional[bool]
    """To enable finding blobs by tags."""
    move: Optional[bool]
    """Move a blob or a directory and its contents to a new location."""
    execute: Optional[bool]
    """Get the system properties and, if the hierarchical namespace is enabled for the storage account,
    get the POSIX ACL of a blob."""
    set_immutability_policy: Optional[bool]
    """To get immutability policy, you just need read permission."""

    def __init__(
        self, read: bool = False,
        write: bool = False,
        delete: bool = False,
        list: bool = False,
        delete_previous_version: bool = False,
        tag: bool = False,
        **kwargs: Any
    ) -> None:
        self.read = read
        self.add = kwargs.pop('add', False)
        self.create = kwargs.pop('create', False)
        self.write = write
        self.delete = delete
        self.delete_previous_version = delete_previous_version
        self.permanent_delete = kwargs.pop('permanent_delete', False)
        self.list = list
        self.tag = tag
        self.filter_by_tags = kwargs.pop('filter_by_tags', False)
        self.move = kwargs.pop('move', False)
        self.execute = kwargs.pop('execute', False)
        self.set_immutability_policy = kwargs.pop('set_immutability_policy', False)
        # Serialize the enabled flags in a fixed letter order; this order must
        # stay stable as it round-trips through from_string().
        self._str = ''.join(letter for letter, enabled in (
            ('r', self.read),
            ('a', self.add),
            ('c', self.create),
            ('w', self.write),
            ('d', self.delete),
            ('x', self.delete_previous_version),
            ('y', self.permanent_delete),
            ('l', self.list),
            ('t', self.tag),
            ('f', self.filter_by_tags),
            ('m', self.move),
            ('e', self.execute),
            ('i', self.set_immutability_policy),
        ) if enabled)

    def __str__(self):
        return self._str

    @classmethod
    def from_string(cls, permission: str) -> "ContainerSasPermissions":
        """Create a ContainerSasPermissions from a string.

        To specify read, write, delete, or list permissions you need only to
        include the first letter of the word in the string. E.g. For read and
        write permissions, you would provide a string "rw".

        :param str permission: The string which dictates the read, write, delete,
            and list permissions.
        :return: A ContainerSasPermissions object
        :rtype: ~azure.storage.blob.ContainerSasPermissions
        """
        return cls(
            read='r' in permission,
            write='w' in permission,
            delete='d' in permission,
            list='l' in permission,
            delete_previous_version='x' in permission,
            tag='t' in permission,
            add='a' in permission,
            create='c' in permission,
            permanent_delete='y' in permission,
            filter_by_tags='f' in permission,
            move='m' in permission,
            execute='e' in permission,
            set_immutability_policy='i' in permission,
        )
class AccessPolicy(GenAccessPolicy):
    """Access Policy class used by the set and get access policy methods in each service.

    A stored access policy can specify the start time, expiry time, and
    permissions for the Shared Access Signatures with which it's associated.
    Depending on how you want to control access to your resource, you can
    specify all of these parameters within the stored access policy, and omit
    them from the URL for the Shared Access Signature. Doing so permits you to
    modify the associated signature's behavior at any time, as well as to revoke
    it. Or you can specify one or more of the access policy parameters within
    the stored access policy, and the others on the URL. Finally, you can
    specify all of the parameters on the URL. In this case, you can use the
    stored access policy to revoke the signature, but not to modify its behavior.

    Together the Shared Access Signature and the stored access policy must
    include all fields required to authenticate the signature. If any required
    fields are missing, the request will fail. Likewise, if a field is specified
    both in the Shared Access Signature URL and in the stored access policy, the
    request will fail with status code 400 (Bad Request).

    :param permission:
        The permissions associated with the shared access signature. The
        user is restricted to operations allowed by the permissions.
        Required unless an id is given referencing a stored access policy
        which contains this field. This field must be omitted if it has been
        specified in an associated stored access policy.
    :type permission: Optional[Union[ContainerSasPermissions, str]]
    :param expiry:
        The time at which the shared access signature becomes invalid.
        Required unless an id is given referencing a stored access policy
        which contains this field. This field must be omitted if it has
        been specified in an associated stored access policy. Azure will always
        convert values to UTC. If a date is passed in without timezone info, it
        is assumed to be UTC.
    :paramtype expiry: Optional[Union[str, datetime]]
    :param start:
        The time at which the shared access signature becomes valid. If
        omitted, start time for this call is assumed to be the time when the
        storage service receives the request. Azure will always convert values
        to UTC. If a date is passed in without timezone info, it is assumed to
        be UTC.
    :paramtype start: Optional[Union[str, datetime]]
    """

    permission: Optional[Union[ContainerSasPermissions, str]]  # type: ignore [assignment]
    """The permissions associated with the shared access signature. The user is restricted to
    operations allowed by the permissions."""
    expiry: Optional[Union["datetime", str]]  # type: ignore [assignment]
    """The time at which the shared access signature becomes invalid."""
    start: Optional[Union["datetime", str]]  # type: ignore [assignment]
    """The time at which the shared access signature becomes valid."""

    def __init__(
        self, permission: Optional[Union["ContainerSasPermissions", str]] = None,
        expiry: Optional[Union[str, "datetime"]] = None,
        start: Optional[Union[str, "datetime"]] = None
    ) -> None:
        self.permission = permission
        self.expiry = expiry
        self.start = start
class BlobSasPermissions(object):
    """BlobSasPermissions class to be used with the
    :func:`~azure.storage.blob.generate_blob_sas` function.

    :param bool read:
        Read the content, properties, metadata and block list. Use the blob as
        the source of a copy operation.
    :param bool add:
        Add a block to an append blob.
    :param bool create:
        Write a new blob, snapshot a blob, or copy a blob to a new blob.
    :param bool write:
        Create or write content, properties, metadata, or block list. Snapshot
        or lease the blob. Resize the blob (page blob only). Use the blob as the
        destination of a copy operation within the same account.
    :param bool delete:
        Delete the blob.
    :param bool delete_previous_version:
        Delete the previous blob version for the versioning enabled storage account.
    :param bool tag:
        Set or get tags on the blob.
    :keyword bool permanent_delete:
        To enable permanent delete on the blob is permitted.
    :keyword bool move:
        Move a blob or a directory and its contents to a new location.
    :keyword bool execute:
        Get the system properties and, if the hierarchical namespace is enabled for the storage account,
        get the POSIX ACL of a blob.
    :keyword bool set_immutability_policy:
        To enable operations related to set/delete immutability policy.
        To get immutability policy, you just need read permission.
    """

    read: bool = False
    """The read permission for Blob SAS."""
    add: Optional[bool]
    """The add permission for Blob SAS."""
    create: Optional[bool]
    """Write a new blob, snapshot a blob, or copy a blob to a new blob."""
    write: bool = False
    """The write permission for Blob SAS."""
    delete: bool = False
    """The delete permission for Blob SAS."""
    delete_previous_version: bool = False
    """Permission to delete previous blob version for versioning enabled
    storage accounts."""
    tag: bool = False
    """Set or get tags on the blobs in the Blob."""
    permanent_delete: Optional[bool]
    """To enable permanent delete on the blob is permitted."""
    move: Optional[bool]
    """Move a blob or a directory and its contents to a new location."""
    execute: Optional[bool]
    """Get the system properties and, if the hierarchical namespace is enabled for the storage account,
    get the POSIX ACL of a blob."""
    set_immutability_policy: Optional[bool]
    """To get immutability policy, you just need read permission."""

    def __init__(
        self, read: bool = False,
        add: bool = False,
        create: bool = False,
        write: bool = False,
        delete: bool = False,
        delete_previous_version: bool = False,
        tag: bool = False,
        **kwargs: Any
    ) -> None:
        self.read = read
        self.add = add
        self.create = create
        self.write = write
        self.delete = delete
        self.delete_previous_version = delete_previous_version
        self.permanent_delete = kwargs.pop('permanent_delete', False)
        self.tag = tag
        self.move = kwargs.pop('move', False)
        self.execute = kwargs.pop('execute', False)
        self.set_immutability_policy = kwargs.pop('set_immutability_policy', False)
        # Serialize the enabled flags in the fixed letter order used for
        # blob SAS permission strings.
        self._str = ''.join(letter for letter, enabled in (
            ('r', self.read),
            ('a', self.add),
            ('c', self.create),
            ('w', self.write),
            ('d', self.delete),
            ('x', self.delete_previous_version),
            ('y', self.permanent_delete),
            ('t', self.tag),
            ('m', self.move),
            ('e', self.execute),
            ('i', self.set_immutability_policy),
        ) if enabled)

    def __str__(self):
        return self._str

    @classmethod
    def from_string(cls, permission: str) -> "BlobSasPermissions":
        """Create a BlobSasPermissions from a string.

        To specify read, add, create, write, or delete permissions you need only to
        include the first letter of the word in the string. E.g. For read and
        write permissions, you would provide a string "rw".

        :param str permission: The string which dictates the read, add, create,
            write, or delete permissions.
        :return: A BlobSasPermissions object
        :rtype: ~azure.storage.blob.BlobSasPermissions
        """
        return cls(
            read='r' in permission,
            add='a' in permission,
            create='c' in permission,
            write='w' in permission,
            delete='d' in permission,
            delete_previous_version='x' in permission,
            tag='t' in permission,
            permanent_delete='y' in permission,
            move='m' in permission,
            execute='e' in permission,
            set_immutability_policy='i' in permission,
        )
class CustomerProvidedEncryptionKey(object):
    """
    All data in Azure Storage is encrypted at-rest using an account-level encryption key.
    In versions 2018-06-17 and newer, you can manage the key used to encrypt blob contents
    and application metadata per-blob by providing an AES-256 encryption key in requests to the storage service.

    When you use a customer-provided key, Azure Storage does not manage or persist your key.
    When writing data to a blob, the provided key is used to encrypt your data before writing it to disk.
    A SHA-256 hash of the encryption key is written alongside the blob contents,
    and is used to verify that all subsequent operations against the blob use the same encryption key.
    This hash cannot be used to retrieve the encryption key or decrypt the contents of the blob.
    When reading a blob, the provided key is used to decrypt your data after reading it from disk.
    In both cases, the provided encryption key is securely discarded
    as soon as the encryption or decryption process completes.

    :param str key_value:
        Base64-encoded AES-256 encryption key value.
    :param str key_hash:
        Base64-encoded SHA256 of the encryption key.
    """

    key_value: str
    """Base64-encoded AES-256 encryption key value."""
    key_hash: str
    """Base64-encoded SHA256 of the encryption key."""
    algorithm: str
    """Specifies the algorithm to use when encrypting data using the given key. Must be AES256."""

    def __init__(self, key_value: str, key_hash: str) -> None:
        # AES256 is the only algorithm value this model ever sets.
        self.algorithm = 'AES256'
        self.key_value = key_value
        self.key_hash = key_hash
class ContainerEncryptionScope(object):
    """The default encryption scope configuration for a container.

    This scope is used implicitly for all future writes within the container,
    but can be overridden per blob operation.

    .. versionadded:: 12.2.0

    :param str default_encryption_scope:
        Specifies the default encryption scope to set on the container and use for
        all future writes.
    :param bool prevent_encryption_scope_override:
        If true, prevents any request from specifying a different encryption scope than the scope
        set on the container. Default value is false.
    """

    default_encryption_scope: str
    """Specifies the default encryption scope to set on the container and use for
    all future writes."""
    prevent_encryption_scope_override: bool
    """If true, prevents any request from specifying a different encryption scope than the scope
    set on the container."""

    def __init__(self, default_encryption_scope: str, **kwargs: Any) -> None:
        self.prevent_encryption_scope_override = kwargs.get('prevent_encryption_scope_override', False)
        self.default_encryption_scope = default_encryption_scope

    @classmethod
    def _from_generated(cls, generated):
        """Internal: build from a generated-layer container item; returns None
        when no default encryption scope is configured."""
        if not generated.properties.default_encryption_scope:
            return None
        return cls(
            generated.properties.default_encryption_scope,
            prevent_encryption_scope_override=generated.properties.prevent_encryption_scope_override or False
        )
class DelimitedJsonDialect(DictMixin):
    """Defines the input or output JSON serialization for a blob data query.

    :keyword str delimiter: The line separator character, default value is '\\\\n'.
    """

    def __init__(self, **kwargs: Any) -> None:
        # Newline is the default record separator.
        self.delimiter = kwargs.pop('delimiter', '\n')
class DelimitedTextDialect(DictMixin):
    """Defines the input or output delimited (CSV) serialization for a blob query request.

    :keyword str delimiter:
        Column separator, defaults to ','.
    :keyword str quotechar:
        Field quote, defaults to '"'.
    :keyword str lineterminator:
        Record separator, defaults to '\\\\n'.
    :keyword str escapechar:
        Escape char, defaults to empty.
    :keyword bool has_header:
        Whether the blob data includes headers in the first line. The default value is False, meaning that the
        data will be returned inclusive of the first line. If set to True, the data will be returned exclusive
        of the first line.
    """

    def __init__(self, **kwargs: Any) -> None:
        self.has_header = kwargs.pop('has_header', False)
        self.escapechar = kwargs.pop('escapechar', '')
        self.lineterminator = kwargs.pop('lineterminator', '\n')
        self.quotechar = kwargs.pop('quotechar', '"')
        self.delimiter = kwargs.pop('delimiter', ',')
class ArrowDialect(ArrowField):
    """A field of an arrow schema.

    All required parameters must be populated in order to send to Azure.

    :param ~azure.storage.blob.ArrowType type: Arrow field type.
    :keyword str name: The name of the field.
    :keyword int precision: The precision of the field.
    :keyword int scale: The scale of the field.
    """

    def __init__(self, type, **kwargs: Any) -> None:  # pylint: disable=redefined-builtin
        # Thin wrapper: forward everything to the generated ArrowField model.
        super().__init__(type=type, **kwargs)
class ArrowType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Arrow field types accepted for blob query Arrow serialization.

    NOTE(review): the metaclass name suggests values are looked up
    case-insensitively — confirm against CaseInsensitiveEnumMeta.
    """

    INT64 = "int64"
    BOOL = "bool"
    TIMESTAMP_MS = "timestamp[ms]"
    STRING = "string"
    DOUBLE = "double"
    DECIMAL = 'decimal'
class ObjectReplicationRule(DictMixin):
    """A single replication rule id and its status, as applied to a blob."""

    rule_id: str
    """Rule id."""
    status: str
    """The status of the rule. It could be "Complete" or "Failed" """

    def __init__(self, **kwargs: Any) -> None:
        self.status = kwargs.pop('status', None)  # type: ignore [assignment]
        self.rule_id = kwargs.pop('rule_id', None)  # type: ignore [assignment]
class ObjectReplicationPolicy(DictMixin):
    """A replication policy id and the rule ids under it, as applied to a blob."""

    policy_id: str
    """Policy id for the blob. A replication policy gets created (policy id) when creating a source/destination pair."""
    rules: List[ObjectReplicationRule]
    """Within each policy there may be multiple replication rules.
    e.g. rule 1= src/container/.pdf to dst/container2/; rule2 = src/container1/.jpg to dst/container3"""

    def __init__(self, **kwargs: Any) -> None:
        self.rules = kwargs.pop('rules', [])
        self.policy_id = kwargs.pop('policy_id', None)  # type: ignore [assignment]
class BlobProperties(DictMixin):
    """Blob Properties.

    Values are populated from raw service response header names (and related
    parsed values) passed in as keyword arguments; any key not present
    defaults to None.
    """

    name: str
    """The name of the blob."""
    container: str
    """The container in which the blob resides."""
    snapshot: Optional[str]
    """Datetime value that uniquely identifies the blob snapshot."""
    # Populated from 'x-ms-version-id' in __init__.
    version_id: Optional[str]
    # Populated from 'x-ms-is-current-version' in __init__.
    is_current_version: Optional[bool]
    blob_type: "BlobType"
    """String indicating this blob's type."""
    metadata: Dict[str, str]
    """Name-value pairs associated with the blob as metadata."""
    last_modified: "datetime"
    """A datetime object representing the last time the blob was modified."""
    etag: str
    """The ETag contains a value that you can use to perform operations
    conditionally."""
    size: int
    """The size of the content returned. If the entire blob was requested,
    the length of blob in bytes. If a subset of the blob was requested, the
    length of the returned subset."""
    content_range: Optional[str]
    """Indicates the range of bytes returned in the event that the client
    requested a subset of the blob."""
    append_blob_committed_block_count: Optional[int]
    """(For Append Blobs) Number of committed blocks in the blob."""
    is_append_blob_sealed: Optional[bool]
    """Indicate if the append blob is sealed or not."""
    page_blob_sequence_number: Optional[int]
    """(For Page Blobs) Sequence number for page blob used for coordinating
    concurrent writes."""
    server_encrypted: bool
    """Set to true if the blob is encrypted on the server."""
    copy: "CopyProperties"
    """Stores all the copy properties for the blob."""
    content_settings: ContentSettings
    """Stores all the content settings for the blob."""
    lease: LeaseProperties
    """Stores all the lease information for the blob."""
    blob_tier: Optional[StandardBlobTier]
    """Indicates the access tier of the blob. The hot tier is optimized
    for storing data that is accessed frequently. The cool storage tier
    is optimized for storing data that is infrequently accessed and stored
    for at least a month. The archive tier is optimized for storing
    data that is rarely accessed and stored for at least six months
    with flexible latency requirements."""
    rehydrate_priority: Optional[str]
    """Indicates the priority with which to rehydrate an archived blob"""
    blob_tier_change_time: Optional["datetime"]
    """Indicates when the access tier was last changed."""
    blob_tier_inferred: Optional[bool]
    """Indicates whether the access tier was inferred by the service.
    If false, it indicates that the tier was set explicitly."""
    deleted: Optional[bool]
    """Whether this blob was deleted."""
    deleted_time: Optional["datetime"]
    """A datetime object representing the time at which the blob was deleted."""
    remaining_retention_days: Optional[int]
    """The number of days that the blob will be retained before being permanently deleted by the service."""
    creation_time: "datetime"
    """Indicates when the blob was created, in UTC."""
    archive_status: Optional[str]
    """Archive status of blob."""
    encryption_key_sha256: Optional[str]
    """The SHA-256 hash of the provided encryption key."""
    encryption_scope: Optional[str]
    """A predefined encryption scope used to encrypt the data on the service. An encryption
    scope can be created using the Management API and referenced here by name. If a default
    encryption scope has been defined at the container, this value will override it if the
    container-level scope is configured to allow overrides. Otherwise an error will be raised."""
    request_server_encrypted: Optional[bool]
    """Whether this blob is encrypted."""
    object_replication_source_properties: Optional[List[ObjectReplicationPolicy]]
    """Only present for blobs that have policy ids and rule ids applied to them."""
    object_replication_destination_policy: Optional[str]
    """Represents the Object Replication Policy Id that created this blob."""
    last_accessed_on: Optional["datetime"]
    """Indicates when the last Read/Write operation was performed on a Blob."""
    tag_count: Optional[int]
    """Tags count on this blob."""
    tags: Optional[Dict[str, str]]
    """Key value pair of tags on this blob."""
    has_versions_only: Optional[bool]
    """A true value indicates the root blob is deleted"""
    immutability_policy: ImmutabilityPolicy
    """Specifies the immutability policy of a blob, blob snapshot or blob version."""
    has_legal_hold: Optional[bool]
    """Specified if a legal hold should be set on the blob.
    Currently this parameter of upload_blob() API is for BlockBlob only."""

    def __init__(self, **kwargs: Any) -> None:
        self.name = kwargs.get('name')  # type: ignore [assignment]
        # NOTE(review): container starts as None here; presumably assigned by
        # the owning client/deserializer — confirm.
        self.container = None  # type: ignore [assignment]
        self.snapshot = kwargs.get('x-ms-snapshot')
        self.version_id = kwargs.get('x-ms-version-id')
        self.is_current_version = kwargs.get('x-ms-is-current-version')
        # Only construct a BlobType enum when the header is actually present.
        self.blob_type = BlobType(kwargs['x-ms-blob-type']) if (
            kwargs.get('x-ms-blob-type')) else None  # type: ignore [assignment]
        self.metadata = kwargs.get('metadata')  # type: ignore [assignment]
        # NOTE(review): shape of 'encrypted_metadata' not evident from this
        # class; set by the encryption path elsewhere — confirm before use.
        self.encrypted_metadata = kwargs.get('encrypted_metadata')
        self.last_modified = kwargs.get('Last-Modified')  # type: ignore [assignment]
        self.etag = kwargs.get('ETag')  # type: ignore [assignment]
        self.size = kwargs.get('Content-Length')  # type: ignore [assignment]
        self.content_range = kwargs.get('Content-Range')
        self.append_blob_committed_block_count = kwargs.get('x-ms-blob-committed-block-count')
        self.is_append_blob_sealed = kwargs.get('x-ms-blob-sealed')
        self.page_blob_sequence_number = kwargs.get('x-ms-blob-sequence-number')
        self.server_encrypted = kwargs.get('x-ms-server-encrypted')  # type: ignore [assignment]
        # Composite property groups are each built from the same raw
        # header kwargs; they pick out the keys they understand.
        self.copy = CopyProperties(**kwargs)
        self.content_settings = ContentSettings(**kwargs)
        self.lease = LeaseProperties(**kwargs)
        self.blob_tier = kwargs.get('x-ms-access-tier')
        self.rehydrate_priority = kwargs.get('x-ms-rehydrate-priority')
        self.blob_tier_change_time = kwargs.get('x-ms-access-tier-change-time')
        self.blob_tier_inferred = kwargs.get('x-ms-access-tier-inferred')
        # Soft-delete fields default here; NOTE(review): presumably filled in
        # by list-blob deserialization elsewhere — confirm.
        self.deleted = False
        self.deleted_time = None
        self.remaining_retention_days = None
        self.creation_time = kwargs.get('x-ms-creation-time')  # type: ignore [assignment]
        self.archive_status = kwargs.get('x-ms-archive-status')
        self.encryption_key_sha256 = kwargs.get('x-ms-encryption-key-sha256')
        self.encryption_scope = kwargs.get('x-ms-encryption-scope')
        self.request_server_encrypted = kwargs.get('x-ms-server-encrypted')
        self.object_replication_source_properties = kwargs.get('object_replication_source_properties')
        self.object_replication_destination_policy = kwargs.get('x-ms-or-policy-id')
        self.last_accessed_on = kwargs.get('x-ms-last-access-time')
        self.tag_count = kwargs.get('x-ms-tag-count')
        # NOTE(review): tags start as None; presumably populated by a
        # separate get-tags request path — confirm.
        self.tags = None
        self.immutability_policy = ImmutabilityPolicy(expiry_time=kwargs.get('x-ms-immutability-policy-until-date'),
                                                      policy_mode=kwargs.get('x-ms-immutability-policy-mode'))
        self.has_legal_hold = kwargs.get('x-ms-legal-hold')
        self.has_versions_only = None
class BlobQueryError(object):
    """An error encountered while a quick query operation was processing."""

    error: Optional[str]
    """The name of the error."""
    is_fatal: bool
    """If true, this error prevents further query processing. More result data may be returned,
    but there is no guarantee that all of the original data will be processed.
    If false, this error does not prevent further query processing."""
    description: Optional[str]
    """A description of the error."""
    position: Optional[int]
    """The blob offset at which the error occurred."""

    def __init__(
        self, error: Optional[str] = None,
        is_fatal: bool = False,
        description: Optional[str] = None,
        position: Optional[int] = None
    ) -> None:
        self.position = position
        self.description = description
        self.is_fatal = is_fatal
        self.error = error
|