Skip to content

Commit 1f62acc

Browse files
williexu and derekbekoe
authored and committed
Storage preview (#156)
* readd storage preview extension setup * added codeowner for storage extension
1 parent ed05b4b commit 1f62acc

76 files changed

Lines changed: 40230 additions & 1 deletion

File tree

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

.github/CODEOWNERS

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,4 +22,6 @@
2222

2323
/src/signalr/ @zackliu
2424

25-
/src/eventgrid/ @kalyanaj
25+
/src/eventgrid/ @kalyanaj
26+
27+
/src/storage-preview/ @williexu
Lines changed: 187 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,187 @@
1+
# --------------------------------------------------------------------------------------------
2+
# Copyright (c) Microsoft Corporation. All rights reserved.
3+
# Licensed under the MIT License. See License.txt in the project root for license information.
4+
# --------------------------------------------------------------------------------------------
5+
6+
from azure.cli.core import AzCommandsLoader
7+
from azure.cli.core.profiles import ResourceType
8+
from azure.cli.core.commands import AzCommandGroup, AzArgumentContext
9+
10+
import azext_storage_preview._help # pylint: disable=unused-import
11+
12+
13+
class StorageCommandsLoader(AzCommandsLoader):
    """Command loader for the storage-preview CLI extension.

    Routes custom commands to this extension's ``custom`` module and installs
    storage-specific command-group and argument-context classes.
    """

    def __init__(self, cli_ctx=None):
        from azure.cli.core.commands import CliCommandType

        # All custom commands in this extension resolve against azext_storage_preview.custom.
        storage_custom = CliCommandType(operations_tmpl='azext_storage_preview.custom#{}')
        super(StorageCommandsLoader, self).__init__(cli_ctx=cli_ctx,
                                                    resource_type=ResourceType.DATA_STORAGE,
                                                    custom_command_type=storage_custom,
                                                    command_group_cls=StorageCommandGroup,
                                                    argument_context_cls=StorageArgumentContext)

    def load_command_table(self, args):
        """Populate and return the command table via the extension's commands module."""
        super(StorageCommandsLoader, self).load_command_table(args)
        from .commands import load_command_table
        load_command_table(self, args)
        return self.command_table

    def load_arguments(self, command):
        """Load argument metadata for *command* from the extension's _params module."""
        super(StorageCommandsLoader, self).load_arguments(command)
        from ._params import load_arguments
        load_arguments(self, command)
34+
35+
36+
class StorageArgumentContext(AzArgumentContext):
    """Argument context with helpers for registering recurring storage argument groups."""

    def register_sas_arguments(self):
        """Register the arguments shared by SAS-generation commands (ip, expiry, start, protocol)."""
        from ._validators import ipv4_range_type, get_datetime_type
        self.argument('ip', type=ipv4_range_type,
                      help='Specifies the IP address or range of IP addresses from which to accept requests. Supports '
                           'only IPv4 style addresses.')
        self.argument('expiry', type=get_datetime_type(True),
                      help='Specifies the UTC datetime (Y-m-d\'T\'H:M\'Z\') at which the SAS becomes invalid. Do not '
                           'use if a stored access policy is referenced with --id that specifies this value.')
        self.argument('start', type=get_datetime_type(True),
                      help='Specifies the UTC datetime (Y-m-d\'T\'H:M\'Z\') at which the SAS becomes valid. Do not use '
                           'if a stored access policy is referenced with --id that specifies this value. Defaults to '
                           'the time of the request.')
        self.argument('protocol', options_list=('--https-only',), action='store_const', const='https',
                      help='Only permit requests made with the HTTPS protocol. If omitted, requests from both the HTTP '
                           'and HTTPS protocol are permitted.')

    def register_content_settings_argument(self, settings_class, update, arg_group=None, guess_from_file=None):
        """Replace the SDK's ``content_settings`` parameter with individual content-* CLI options.

        The validator reassembles the individual options into a *settings_class*
        instance; *update* controls merge-vs-overwrite semantics in the validator.
        """
        from ._validators import get_content_setting_validator

        # The SDK-level aggregate parameter is hidden; the extras below feed the validator instead.
        self.ignore('content_settings')
        self.extra('content_type', default=None, help='The content MIME type.', arg_group=arg_group,
                   validator=get_content_setting_validator(settings_class, update, guess_from_file=guess_from_file))
        self.extra('content_encoding', default=None, help='The content encoding type.', arg_group=arg_group)
        self.extra('content_language', default=None, help='The content language.', arg_group=arg_group)
        self.extra('content_disposition', default=None, arg_group=arg_group,
                   help='Conveys additional information about how to process the response payload, and can also be '
                        'used to attach additional metadata.')
        self.extra('content_cache_control', default=None, help='The cache control string.', arg_group=arg_group)
        self.extra('content_md5', default=None, help='The content\'s MD5 hash.', arg_group=arg_group)

    def register_path_argument(self, default_file_param=None, options_list=None):
        """Register a single --path option replacing the SDK's directory_name/file_name pair.

        When *default_file_param* is given, --path becomes optional and the file
        name can be derived from that parameter by the validator.
        """
        from ._validators import get_file_path_validator
        from .completers import file_path_completer

        path_help = 'The path to the file within the file share.'
        if default_file_param:
            path_help = '{} If the file name is omitted, the source file name will be used.'.format(path_help)
        self.extra('path', options_list=options_list or ('--path', '-p'),
                   required=default_file_param is None, help=path_help,
                   validator=get_file_path_validator(default_file_param=default_file_param),
                   completer=file_path_completer)
        # The validator splits --path into these two SDK parameters.
        self.ignore('file_name')
        self.ignore('directory_name')

    def register_source_uri_arguments(self, validator, blob_only=False):
        """Register the 'Copy Source' argument group used by copy commands.

        With *blob_only* set, the file-share source options (path/share) are omitted.
        """
        self.argument('copy_source', options_list=('--source-uri', '-u'), validator=validator, required=False,
                      arg_group='Copy Source')
        self.extra('source_sas', default=None, arg_group='Copy Source',
                   help='The shared access signature for the source storage account.')
        self.extra('source_container', default=None, arg_group='Copy Source',
                   help='The container name for the source storage account.')
        self.extra('source_blob', default=None, arg_group='Copy Source',
                   help='The blob name for the source storage account.')
        self.extra('source_snapshot', default=None, arg_group='Copy Source',
                   help='The blob snapshot for the source storage account.')
        self.extra('source_account_name', default=None, arg_group='Copy Source',
                   help='The storage account name of the source blob.')
        self.extra('source_account_key', default=None, arg_group='Copy Source',
                   help='The storage account key of the source blob.')
        if not blob_only:
            self.extra('source_path', default=None, arg_group='Copy Source',
                       help='The file path for the source storage account.')
            self.extra('source_share', default=None, arg_group='Copy Source',
                       help='The share name for the source storage account.')

    def register_common_storage_account_options(self):
        """Register options shared by storage-account create/update commands."""
        from azure.cli.core.commands.parameters import get_three_state_flag, get_enum_type
        from ._validators import validate_encryption_services

        t_access_tier, t_sku_name, t_encryption_services = self.command_loader.get_models(
            'AccessTier', 'SkuName', 'EncryptionServices', resource_type=ResourceType.MGMT_STORAGE)

        self.argument('https_only', help='Allows https traffic only to storage service.',
                      arg_type=get_three_state_flag())
        self.argument('sku', help='The storage account SKU.', arg_type=get_enum_type(t_sku_name))
        self.argument('assign_identity', action='store_true', resource_type=ResourceType.MGMT_STORAGE,
                      min_api='2017-06-01',
                      help='Generate and assign a new Storage Account Identity for this storage account for use '
                           'with key management services like Azure KeyVault.')
        self.argument('access_tier', arg_type=get_enum_type(t_access_tier),
                      help='The access tier used for billing StandardBlob accounts. Cannot be set for StandardLRS, '
                           'StandardGRS, StandardRAGRS, or PremiumLRS account types. It is required for '
                           'StandardBlob accounts during creation')

        # EncryptionServices may be absent on older API profiles; only register when available.
        if t_encryption_services:
            encryption_choices = list(
                t_encryption_services._attribute_map.keys())  # pylint: disable=protected-access
            self.argument('encryption_services', arg_type=get_enum_type(encryption_choices),
                          resource_type=ResourceType.MGMT_STORAGE, min_api='2016-12-01', nargs='+',
                          validator=validate_encryption_services, help='Specifies which service(s) to encrypt.')
127+
128+
129+
class StorageCommandGroup(AzCommandGroup):
    """Command group that augments data-plane commands with storage-account arguments."""

    def storage_command(self, name, method_name=None, command_type=None, **kwargs):
        """ Registers an Azure CLI Storage Data Plane command. These commands always include the four parameters which
        can be used to obtain a storage client: account-name, account-key, connection-string, and sas-token. """
        if command_type:
            command_name = self.command(name, method_name, command_type=command_type, **kwargs)
        else:
            command_name = self.command(name, method_name, **kwargs)
        self._register_data_plane_account_arguments(command_name)

    def storage_custom_command(self, name, method_name, **kwargs):
        """Register a custom data-plane command and attach the storage-account arguments."""
        command_name = self.custom_command(name, method_name, **kwargs)
        self._register_data_plane_account_arguments(command_name)

    def get_handler_suppress_404(self):
        """Return an exception handler that turns a missing-resource (404) error into None."""

        # pylint: disable=inconsistent-return-statements
        def handler(ex):
            from azure.cli.core.profiles import get_sdk

            t_error = get_sdk(self.command_loader.cli_ctx,
                              ResourceType.DATA_STORAGE,
                              'common._error#AzureMissingResourceHttpError')
            if isinstance(ex, t_error):
                return None
            raise ex

        return handler

    def _register_data_plane_account_arguments(self, command_name):
        """ Add parameters required to create a storage client """
        from ._validators import validate_client_parameters
        command = self.command_loader.command_table.get(command_name, None)
        if not command:
            return

        group_name = 'Storage Account'
        command.add_argument('account_name', '--account-name', required=False, default=None,
                             arg_group=group_name,
                             help='Storage account name. Related environment variable: AZURE_STORAGE_ACCOUNT. Must be '
                                  'used in conjunction with either storage account key or a SAS token. If neither are '
                                  'present, the command will try to query the storage account key using the '
                                  'authenticated Azure account. If a large number of storage commands are executed the '
                                  'API quota may be hit')
        command.add_argument('account_key', '--account-key', required=False, default=None,
                             arg_group=group_name,
                             help='Storage account key. Must be used in conjunction with storage account name. '
                                  'Environment variable: AZURE_STORAGE_KEY')
        # The validator on connection_string resolves whichever credential combination was supplied.
        command.add_argument('connection_string', '--connection-string', required=False, default=None,
                             validator=validate_client_parameters, arg_group=group_name,
                             help='Storage account connection string. Environment variable: '
                                  'AZURE_STORAGE_CONNECTION_STRING')
        command.add_argument('sas_token', '--sas-token', required=False, default=None,
                             arg_group=group_name,
                             help='A Shared Access Signature (SAS). Must be used in conjunction with storage account '
                                  'name. Environment variable: AZURE_STORAGE_SAS_TOKEN')
185+
186+
187+
COMMAND_LOADER_CLS = StorageCommandsLoader
Lines changed: 131 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,131 @@
1+
# --------------------------------------------------------------------------------------------
2+
# Copyright (c) Microsoft Corporation. All rights reserved.
3+
# Licensed under the MIT License. See License.txt in the project root for license information.
4+
# --------------------------------------------------------------------------------------------
5+
6+
from azure.cli.core.commands.client_factory import get_mgmt_service_client, get_data_service_client
7+
from azure.cli.core.profiles import ResourceType, get_sdk
8+
9+
from .sdkutil import get_table_data_type
10+
11+
NO_CREDENTIALS_ERROR_MESSAGE = """
12+
No credentials specified to access storage service. Please provide any of the following:
13+
(1) account name and key (--account-name and --account-key options or
14+
set AZURE_STORAGE_ACCOUNT and AZURE_STORAGE_KEY environment variables)
15+
(2) connection string (--connection-string option or
16+
set AZURE_STORAGE_CONNECTION_STRING environment variable)
17+
(3) account name and SAS token (--sas-token option used with either the --account-name
18+
option or AZURE_STORAGE_ACCOUNT environment variable)
19+
"""
20+
21+
22+
def get_storage_data_service_client(cli_ctx, service, name=None, key=None, connection_string=None, sas_token=None,
                                    socket_timeout=None):
    """Create a storage data-plane client bound to the active cloud's storage endpoint suffix."""
    endpoint_suffix = cli_ctx.cloud.suffixes.storage_endpoint
    return get_data_service_client(cli_ctx, service, name, key, connection_string, sas_token,
                                   socket_timeout=socket_timeout,
                                   endpoint_suffix=endpoint_suffix)
27+
28+
29+
def generic_data_service_factory(cli_ctx, service, name=None, key=None, connection_string=None, sas_token=None,
                                 socket_timeout=None):
    """Create a data service client, mapping SDK credential errors to a friendly CLIError.

    A ValueError matching the SDK's missing-credentials text is replaced with
    NO_CREDENTIALS_ERROR_MESSAGE; any other ValueError text is surfaced verbatim.
    """
    try:
        return get_storage_data_service_client(cli_ctx, service, name, key, connection_string, sas_token,
                                               socket_timeout)
    except ValueError as ex:
        from knack.util import CLIError
        missing_info_text = get_sdk(cli_ctx, ResourceType.DATA_STORAGE,
                                    'common._error#_ERROR_STORAGE_MISSING_INFO')
        text = str(ex)
        if text == missing_info_text:
            text = NO_CREDENTIALS_ERROR_MESSAGE
        raise CLIError(text)
42+
43+
44+
def storage_client_factory(cli_ctx, **_):
    """Return the storage management (control-plane) client."""
    return get_mgmt_service_client(cli_ctx, ResourceType.MGMT_STORAGE)
46+
47+
48+
def file_data_service_factory(cli_ctx, kwargs):
    """Build a FileService data-plane client from parsed command kwargs (consumed via pop)."""
    t_file_svc = get_sdk(cli_ctx, ResourceType.DATA_STORAGE, 'file#FileService')
    account_name = kwargs.pop('account_name', None)
    account_key = kwargs.pop('account_key', None)
    connection_string = kwargs.pop('connection_string', None)
    sas_token = kwargs.pop('sas_token', None)
    return generic_data_service_factory(cli_ctx, t_file_svc, account_name, account_key,
                                        connection_string=connection_string, sas_token=sas_token)
54+
55+
56+
def page_blob_service_factory(cli_ctx, kwargs):
    """Build a PageBlobService data-plane client from parsed command kwargs (consumed via pop)."""
    t_page_blob_service = get_sdk(cli_ctx, ResourceType.DATA_STORAGE, 'blob.pageblobservice#PageBlobService')
    account_name = kwargs.pop('account_name', None)
    account_key = kwargs.pop('account_key', None)
    connection_string = kwargs.pop('connection_string', None)
    sas_token = kwargs.pop('sas_token', None)
    return generic_data_service_factory(cli_ctx, t_page_blob_service, account_name, account_key,
                                        connection_string=connection_string, sas_token=sas_token)
62+
63+
64+
def blob_data_service_factory(cli_ctx, kwargs):
    """Build a blob data-plane client whose class matches the requested blob type.

    Falls back to the block-blob service when no (or an unknown) blob_type is given.
    """
    from .sdkutil import get_blob_service_by_type
    requested_type = kwargs.get('blob_type')
    service_cls = get_blob_service_by_type(cli_ctx, requested_type) or get_blob_service_by_type(cli_ctx, 'block')

    account_name = kwargs.pop('account_name', None)
    account_key = kwargs.pop('account_key', None)
    connection_string = kwargs.pop('connection_string', None)
    sas_token = kwargs.pop('sas_token', None)
    socket_timeout = kwargs.pop('socket_timeout', None)
    return generic_data_service_factory(cli_ctx, service_cls, account_name, account_key,
                                        connection_string=connection_string,
                                        sas_token=sas_token,
                                        socket_timeout=socket_timeout)
74+
75+
76+
def table_data_service_factory(cli_ctx, kwargs):
    """Build a TableService data-plane client from parsed command kwargs (consumed via pop)."""
    t_table_service = get_table_data_type(cli_ctx, 'table', 'TableService')
    account_name = kwargs.pop('account_name', None)
    account_key = kwargs.pop('account_key', None)
    connection_string = kwargs.pop('connection_string', None)
    sas_token = kwargs.pop('sas_token', None)
    return generic_data_service_factory(cli_ctx, t_table_service, account_name, account_key,
                                        connection_string=connection_string, sas_token=sas_token)
83+
84+
85+
def queue_data_service_factory(cli_ctx, kwargs):
    """Build a QueueService data-plane client from parsed command kwargs (consumed via pop)."""
    t_queue_service = get_sdk(cli_ctx, ResourceType.DATA_STORAGE, 'queue#QueueService')
    account_name = kwargs.pop('account_name', None)
    account_key = kwargs.pop('account_key', None)
    connection_string = kwargs.pop('connection_string', None)
    sas_token = kwargs.pop('sas_token', None)
    return generic_data_service_factory(cli_ctx, t_queue_service, account_name, account_key,
                                        connection_string=connection_string, sas_token=sas_token)
93+
94+
95+
def cloud_storage_account_service_factory(cli_ctx, kwargs):
    """Create a CloudStorageAccount from account name/key and SAS token.

    The connection_string kwarg, if present, is consumed (popped) but not used
    by this factory.
    """
    t_cloud_storage_account = get_sdk(cli_ctx, ResourceType.DATA_STORAGE, 'common#CloudStorageAccount')
    account_name = kwargs.pop('account_name', None)
    account_key = kwargs.pop('account_key', None)
    sas_token = kwargs.pop('sas_token', None)
    kwargs.pop('connection_string', None)  # consumed so it is not forwarded elsewhere
    return t_cloud_storage_account(account_name, account_key, sas_token)
102+
103+
104+
def multi_service_properties_factory(cli_ctx, kwargs):
    """Create multiple data services properties instance based on the services option"""
    from .services_wrapper import ServiceProperties

    t_blob, t_file, t_queue = get_sdk(cli_ctx, ResourceType.DATA_STORAGE,
                                      'blob.baseblobservice#BaseBlobService',
                                      'file#FileService', 'queue#QueueService')
    t_table = get_table_data_type(cli_ctx, 'table', 'TableService')

    account_name = kwargs.pop('account_name', None)
    account_key = kwargs.pop('account_key', None)
    connection_string = kwargs.pop('connection_string', None)
    sas_token = kwargs.pop('sas_token', None)
    services = kwargs.pop('services', [])

    # Map each one-letter service selector to its (name, service type) pair.
    selector_map = {'b': ('blob', t_blob), 'f': ('file', t_file),
                    'q': ('queue', t_queue), 't': ('table', t_table)}

    def build(selector):
        service_name, service_type = selector_map[selector]
        return ServiceProperties(cli_ctx, service_name, service_type, account_name, account_key,
                                 connection_string, sas_token)

    return [build(s) for s in services]
128+
129+
130+
def cf_sa(cli_ctx, _):
    """Client factory for storage-account (control-plane) operations."""
    return storage_client_factory(cli_ctx).storage_accounts

0 commit comments

Comments
 (0)