# Databricks CLI
# Copyright 2017 Databricks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"), except
# that the use of services to which certain application programming
# interfaces (each, an "API") connect requires that the user first obtain
# a license for the use of the APIs from Databricks, Inc. ("Databricks"),
# by creating an account at www.databricks.com and agreeing to either (a)
# the Community Edition Terms of Service, (b) the Databricks Terms of
# Service, or (c) another written agreement between Licensee and Databricks
# for the use of the APIs.
#
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

class AutoScale(dict):
    """Autoscaling range (min_workers to max_workers) for a cluster."""
    def __init__(self, min_workers=None, max_workers=None):
        super(AutoScale, self).__init__()
        if min_workers is not None:
            self['min_workers'] = min_workers
        if max_workers is not None:
            self['max_workers'] = max_workers

class AwsAttributes(dict):
    """AWS-specific cluster options: spot/on-demand mix, zone, instance profile, and EBS volumes."""
    def __init__(self, first_on_demand=None, availability=None, zone_id=None,
                 instance_profile_arn=None, spot_bid_price_percent=None, ebs_volume_type=None,
                 ebs_volume_count=None, ebs_volume_size=None):
        super(AwsAttributes, self).__init__()
        if first_on_demand is not None:
            self['first_on_demand'] = first_on_demand
        if availability is not None:
            self['availability'] = availability
        if zone_id is not None:
            self['zone_id'] = zone_id
        if instance_profile_arn is not None:
            self['instance_profile_arn'] = instance_profile_arn
        if spot_bid_price_percent is not None:
            self['spot_bid_price_percent'] = spot_bid_price_percent
        if ebs_volume_type is not None:
            self['ebs_volume_type'] = ebs_volume_type
        if ebs_volume_count is not None:
            self['ebs_volume_count'] = ebs_volume_count
        if ebs_volume_size is not None:
            self['ebs_volume_size'] = ebs_volume_size

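# Usage sketch (illustrative only -- the instance counts, zone, and volume
# settings below are assumptions, not values taken from this file):
#
#   aws = AwsAttributes(first_on_demand=1, availability='SPOT_WITH_FALLBACK',
#                       zone_id='us-west-2a', spot_bid_price_percent=100,
#                       ebs_volume_type='GENERAL_PURPOSE_SSD',
#                       ebs_volume_count=1, ebs_volume_size=100)
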
class ClusterLogConf(dict):
    """Destination (DBFS or S3) for delivering cluster logs."""
    def __init__(self, dbfs=None, s3=None):
        super(ClusterLogConf, self).__init__()
        if dbfs is not None:
            # Validate before assigning so a bad value never lands in the dict.
            if not isinstance(dbfs, dict):
                raise TypeError('Expected databricks.DbfsStorageInfo() or dict for field dbfs')
            self['dbfs'] = dbfs
        if s3 is not None:
            if not isinstance(s3, dict):
                raise TypeError('Expected databricks.S3StorageInfo() or dict for field s3')
            self['s3'] = s3

class DbfsStorageInfo(dict):
    """A DBFS destination (e.g. a dbfs:/ path) for log delivery."""
    def __init__(self, destination=None):
        super(DbfsStorageInfo, self).__init__()
        if destination is not None:
            self['destination'] = destination

class S3StorageInfo(dict):
    """An S3 destination plus options (region/endpoint, encryption, canned ACL) for log delivery."""
    def __init__(self, destination=None, region=None, endpoint=None, enable_encryption=None,
                 encryption_type=None, kms_key=None, canned_acl=None):
        super(S3StorageInfo, self).__init__()
        if destination is not None:
            self['destination'] = destination
        if region is not None:
            self['region'] = region
        if endpoint is not None:
            self['endpoint'] = endpoint
        if enable_encryption is not None:
            self['enable_encryption'] = enable_encryption
        if encryption_type is not None:
            self['encryption_type'] = encryption_type
        if kms_key is not None:
            self['kms_key'] = kms_key
        if canned_acl is not None:
            self['canned_acl'] = canned_acl

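# Usage sketch (illustrative; the destinations are made up): cluster logs go
# to exactly one destination, DBFS or S3, wrapped in the matching storage-info
# object:
#
#   log_conf = ClusterLogConf(dbfs=DbfsStorageInfo(destination='dbfs:/cluster-logs'))
#   log_conf = ClusterLogConf(s3=S3StorageInfo(destination='s3://my-bucket/logs',
#                                              region='us-west-2'))
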
class ClusterTag(dict):
    """A single custom tag (key/value) applied to cluster resources."""
    def __init__(self, key=None, value=None):
        super(ClusterTag, self).__init__()
        if key is not None:
            self['key'] = key
        if value is not None:
            self['value'] = value

class SparkConfPair(dict):
    """A single Spark configuration key/value pair."""
    def __init__(self, key=None, value=None):
        super(SparkConfPair, self).__init__()
        if key is not None:
            self['key'] = key
        if value is not None:
            self['value'] = value

class SparkEnvPair(dict):
    """A single Spark environment-variable key/value pair."""
    def __init__(self, key=None, value=None):
        super(SparkEnvPair, self).__init__()
        if key is not None:
            self['key'] = key
        if value is not None:
            self['value'] = value

class NewCluster(dict):
    """Specification for a new cluster; size it with either num_workers or autoscale."""
    def __init__(self, cluster_name=None, spark_version=None, spark_conf=None,
                 aws_attributes=None, node_type_id=None, driver_node_type_id=None,
                 ssh_public_keys=None, custom_tags=None, cluster_log_conf=None,
                 spark_env_vars=None, autotermination_minutes=None, enable_elastic_disk=None,
                 num_workers=None, autoscale=None):
        super(NewCluster, self).__init__()
        if cluster_name is not None:
            self['cluster_name'] = cluster_name
        if spark_version is not None:
            self['spark_version'] = spark_version
        if spark_conf is not None:
            self['spark_conf'] = spark_conf
        if aws_attributes is not None:
            if not isinstance(aws_attributes, dict):
                raise TypeError('Expected databricks.AwsAttributes() or dict for field aws_attributes')
            self['aws_attributes'] = aws_attributes
        if node_type_id is not None:
            self['node_type_id'] = node_type_id
        if driver_node_type_id is not None:
            self['driver_node_type_id'] = driver_node_type_id
        if ssh_public_keys is not None:
            self['ssh_public_keys'] = ssh_public_keys
        if custom_tags is not None:
            self['custom_tags'] = custom_tags
        if cluster_log_conf is not None:
            if not isinstance(cluster_log_conf, dict):
                raise TypeError('Expected databricks.ClusterLogConf() or dict for field cluster_log_conf')
            self['cluster_log_conf'] = cluster_log_conf
        if spark_env_vars is not None:
            self['spark_env_vars'] = spark_env_vars
        if autotermination_minutes is not None:
            self['autotermination_minutes'] = autotermination_minutes
        if enable_elastic_disk is not None:
            self['enable_elastic_disk'] = enable_elastic_disk
        if num_workers is not None:
            self['num_workers'] = num_workers
        if autoscale is not None:
            if not isinstance(autoscale, dict):
                raise TypeError('Expected databricks.AutoScale() or dict for field autoscale')
            self['autoscale'] = autoscale

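# Usage sketch (illustrative; the node type and Spark version are assumptions):
# NewCluster composes the helpers above; num_workers and autoscale are
# alternative ways to size the cluster, so pass only one of them:
#
#   cluster = NewCluster(cluster_name='nightly-etl',
#                        spark_version='2.1.x-scala2.11',
#                        node_type_id='r3.xlarge',
#                        aws_attributes=AwsAttributes(zone_id='us-west-2a'),
#                        autoscale=AutoScale(min_workers=2, max_workers=8),
#                        autotermination_minutes=60)
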
class CronSchedule(dict):
    """A Quartz cron schedule for a job; both fields are required."""
    def __init__(self, quartz_cron_expression, timezone_id):
        super(CronSchedule, self).__init__()
        if quartz_cron_expression is not None:
            self['quartz_cron_expression'] = quartz_cron_expression
        if timezone_id is not None:
            self['timezone_id'] = timezone_id

class JobSettings(dict):
    """Settings for a Databricks job: cluster, task, schedule, notifications, and retry policy."""
    def __init__(self, name=None, existing_cluster_id=None, new_cluster=None, libraries=None,
                 email_notifications=None, timeout_seconds=None, max_retries=None,
                 min_retry_interval_millis=None, retry_on_timeout=None, schedule=None,
                 notebook_task=None, spark_jar_task=None, max_concurrent_runs=None):
        super(JobSettings, self).__init__()
        if name is not None:
            self['name'] = name
        if existing_cluster_id is not None:
            self['existing_cluster_id'] = existing_cluster_id
        if new_cluster is not None:
            if not isinstance(new_cluster, dict):
                raise TypeError('Expected databricks.NewCluster() or dict for field new_cluster')
            self['new_cluster'] = new_cluster
        if libraries is not None:
            self['libraries'] = libraries
        if email_notifications is not None:
            if not isinstance(email_notifications, dict):
                raise TypeError('Expected databricks.JobEmailNotifications() or dict for field email_notifications')
            self['email_notifications'] = email_notifications
        if timeout_seconds is not None:
            self['timeout_seconds'] = timeout_seconds
        if max_retries is not None:
            self['max_retries'] = max_retries
        if min_retry_interval_millis is not None:
            self['min_retry_interval_millis'] = min_retry_interval_millis
        if retry_on_timeout is not None:
            self['retry_on_timeout'] = retry_on_timeout
        if schedule is not None:
            if not isinstance(schedule, dict):
                raise TypeError('Expected databricks.CronSchedule() or dict for field schedule')
            self['schedule'] = schedule
        if notebook_task is not None:
            if not isinstance(notebook_task, dict):
                raise TypeError('Expected databricks.NotebookTask() or dict for field notebook_task')
            self['notebook_task'] = notebook_task
        if spark_jar_task is not None:
            if not isinstance(spark_jar_task, dict):
                raise TypeError('Expected databricks.SparkJarTask() or dict for field spark_jar_task')
            self['spark_jar_task'] = spark_jar_task
        if max_concurrent_runs is not None:
            self['max_concurrent_runs'] = max_concurrent_runs

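# Usage sketch (illustrative; the cluster id, notebook path, and cron fields
# are made up): a scheduled notebook job on an existing cluster. NotebookTask
# is defined further down in this module:
#
#   settings = JobSettings(name='daily-report',
#                          existing_cluster_id='1201-abcd123',
#                          notebook_task=NotebookTask('/Users/me/report'),
#                          schedule=CronSchedule('0 0 7 * * ?', 'US/Pacific'),
#                          max_retries=1,
#                          timeout_seconds=3600)
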
class JobEmailNotifications(dict):
    """Lists of email addresses to notify when a run starts, succeeds, or fails."""
    def __init__(self, on_start=None, on_success=None, on_failure=None):
        super(JobEmailNotifications, self).__init__()
        if on_start is not None:
            self['on_start'] = on_start
        if on_success is not None:
            self['on_success'] = on_success
        if on_failure is not None:
            self['on_failure'] = on_failure

class NotebookTask(dict):
    """Task that runs a notebook; notebook_path is required."""
    def __init__(self, notebook_path, base_parameters=None):
        super(NotebookTask, self).__init__()
        if notebook_path is not None:
            self['notebook_path'] = notebook_path
        if base_parameters is not None:
            self['base_parameters'] = base_parameters

class ParamPair(dict):
    """A single key/value parameter pair."""
    def __init__(self, key=None, value=None):
        super(ParamPair, self).__init__()
        if key is not None:
            self['key'] = key
        if value is not None:
            self['value'] = value

class SparkJarTask(dict):
    """Task that runs a JAR's main class with the given parameters."""
    def __init__(self, jar_uri=None, main_class_name=None, parameters=None):
        super(SparkJarTask, self).__init__()
        if jar_uri is not None:
            self['jar_uri'] = jar_uri
        if main_class_name is not None:
            self['main_class_name'] = main_class_name
        if parameters is not None:
            self['parameters'] = parameters

class EggSpecification(dict):
    """URI of a Python egg to install."""
    def __init__(self, uri=None):
        super(EggSpecification, self).__init__()
        if uri is not None:
            self['uri'] = uri

class JarSpecification(dict):
    """URI of a JAR to install."""
    def __init__(self, uri=None):
        super(JarSpecification, self).__init__()
        if uri is not None:
            self['uri'] = uri

class MavenSpecification(dict):
    """Placeholder for a Maven artifact specification; carries no fields."""
    def __init__(self):
        super(MavenSpecification, self).__init__()

class PipSpecification(dict):
    """A pip package name plus an optional version specifier."""
    def __init__(self, package_name=None, version_specifier=None):
        super(PipSpecification, self).__init__()
        if package_name is not None:
            self['package_name'] = package_name
        if version_specifier is not None:
            self['version_specifier'] = version_specifier

class Library(dict):
    """A single library dependency: one of jar, egg, pypi, or maven."""
    def __init__(self, jar=None, egg=None, pypi=None, maven=None):
        super(Library, self).__init__()
        if jar is not None:
            self['jar'] = jar
        if egg is not None:
            self['egg'] = egg
        if pypi is not None:
            if not isinstance(pypi, dict):
                raise TypeError('Expected databricks.PythonPyPiLibrary() or dict for field pypi')
            self['pypi'] = pypi
        if maven is not None:
            if not isinstance(maven, dict):
                raise TypeError('Expected databricks.MavenLibrary() or dict for field maven')
            self['maven'] = maven

class MavenLibrary(dict):
    """Maven coordinates (group:artifact:version) with an optional repo and exclusions; coordinates are required."""
    def __init__(self, coordinates, repo=None, exclusions=None):
        super(MavenLibrary, self).__init__()
        if coordinates is not None:
            self['coordinates'] = coordinates
        if repo is not None:
            self['repo'] = repo
        if exclusions is not None:
            self['exclusions'] = exclusions

class PythonPyPiLibrary(dict):
    """A PyPI package (optionally pinned) with an optional index repo; package is required."""
    def __init__(self, package, repo=None):
        super(PythonPyPiLibrary, self).__init__()
        if package is not None:
            self['package'] = package
        if repo is not None:
            self['repo'] = repo
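
# Usage sketch (illustrative; the package names and coordinates are
# assumptions): each Library wraps exactly one artifact kind, so a job that
# needs several dependencies passes a list of them:
#
#   libraries = [Library(pypi=PythonPyPiLibrary('simplejson')),
#                Library(maven=MavenLibrary('com.databricks:spark-csv_2.10:1.5.0')),
#                Library(jar='dbfs:/mnt/libraries/my-lib.jar')]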