
Commit a5683be

Merge branch 'release/6.0'

2 parents 050aae3 + 3b01c7e

File tree

12 files changed: +618 −59 lines

HISTORY.txt

Lines changed: 5 additions & 0 deletions
@@ -1,6 +1,11 @@
 Changelog
 ==========
 
+5.1.0 (2019-03-01)
+------------------
+
+* Initial release for DSS 5.1
+
 5.0.0 (2018-07-26)
 ------------------

dataikuapi/dss/admin.py

Lines changed: 5 additions & 3 deletions
@@ -681,14 +681,16 @@ def start(self):
             raise Exception('Cluster operation failed : %s' % (json.dumps(resp.get('messages', {}).get('messages', {}))))
         return resp
 
-    def stop(self):
+    def stop(self, terminate=True):
         """
         Stops or detaches the cluster
 
         This operation is only valid for a managed cluster.
+        :param boolean terminate: whether to delete the cluster after stopping it
         """
         resp = self.client._perform_json(
-            "POST", "/admin/clusters/%s/actions/stop" % (self.cluster_id))
+            "POST", "/admin/clusters/%s/actions/stop" % (self.cluster_id),
+            params = {'terminate':terminate})
         if resp is None:
             raise Exception('Env update returned no data')
         if resp.get('messages', {}).get('error', False):

@@ -728,7 +730,7 @@ def save(self):
             "PUT", "/admin/clusters/%s" % (self.cluster_id), body=self.settings)
 
 class DSSClusterStatus(object):
-    def __init__(self, client, cluster_id, settings):
+    def __init__(self, client, cluster_id, status):
         """Do not call directly, use :meth:`DSSCluster.get_Status`"""
         self.client = client
         self.cluster_id = cluster_id

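For illustration, a minimal sketch of how the new terminate flag might be called through the public client; the host, API key and cluster id below are placeholders, not part of the commit.

import dataikuapi

# Placeholder connection details, for illustration only
client = dataikuapi.DSSClient("http://dss.example.com:11200", "SOME_API_KEY")

# Retrieve a managed cluster by a placeholder id
cluster = client.get_cluster("my-managed-cluster")

# Stop the cluster but keep it provisioned so it can be restarted later;
# the default (terminate=True) deletes it after stopping
cluster.stop(terminate=False)
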
dataikuapi/dss/apideployer.py

Lines changed: 22 additions & 4 deletions
@@ -2,7 +2,8 @@
 from .future import DSSFuture
 
 class DSSAPIDeployer(object):
-    """Handle to interact with the API Deployer.
+    """
+    Handle to interact with the API Deployer.
 
     Do not create this directly, use :meth:`dataikuapi.dss.DSSClient.get_apideployer`
     """

@@ -70,12 +71,29 @@ def list_infras(self, as_objects = True):
         else:
             return l
 
+    def create_infra(self, infra_id, stage, type):
+        """
+        Creates a new infrastructure on the API Deployer and returns the handle to interact with it.
+
+        :param str infra_id: Unique Identifier of the infra to create
+        :param str stage: Infrastructure stage. Stages are configurable on each API Deployer
+        :param str type: STATIC or KUBERNETES
+        :rtype: :class:`DSSAPIDeployerInfra`
+        """
+        settings = {
+            "id": infra_id,
+            "stage": stage,
+            "type": type,
+        }
+        self.client._perform_json("POST", "/api-deployer/infras", body=settings)
+        return self.get_infra(infra_id)
+
     def get_infra(self, infra_id):
         """
         Returns a handle to interact with a single deployment infra, as a :class:`DSSAPIDeployerInfra`
 
         :param str infra_id: Identifier of the infra to get
-        :rtype: :class:`DSSAPIDeployerDeployment`
+        :rtype: :class:`DSSAPIDeployerInfra`
         """
         return DSSAPIDeployerInfra(self.client, infra_id)

@@ -235,7 +253,7 @@ def start_update(self):
 
         :returns: a :class:`dataikuapi.dss.future.DSSFuture` tracking the progress of the update. Call
                   :meth:`~dataikuapi.dss.future.DSSFuture.wait_for_result` on the returned object
-                   to wait for completion (or failure)
+                  to wait for completion (or failure)
         """
         future_response = self.client._perform_json(
             "POST", "/api-deployer/deployments/%s/actions/update" % (self.deployment_id))

@@ -453,4 +471,4 @@ def get_raw(self):
         Gets the raw status information. This returns a dictionary with various information about the service,
         :rtype: dict
         """
-        return self.light_status
+        return self.light_status

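As a usage sketch of the new create_infra method: the connection details, infra id and stage name below are placeholders, and get_apideployer comes from the docstring shown above.

import dataikuapi

# Placeholder connection details, for illustration only
client = dataikuapi.DSSClient("http://dss.example.com:11200", "SOME_API_KEY")
deployer = client.get_apideployer()

# Create a Kubernetes deployment infrastructure in a hypothetical "Production" stage;
# the returned object is a DSSAPIDeployerInfra handle, per the new docstring
infra = deployer.create_infra("k8s-prod", "Production", "KUBERNETES")
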
dataikuapi/dss/dataset.py

Lines changed: 48 additions & 0 deletions
@@ -254,3 +254,51 @@ def get_object_discussions(self):
         :rtype: :class:`dataikuapi.discussion.DSSObjectDiscussions`
         """
         return DSSObjectDiscussions(self.client, self.project_key, "DATASET", self.dataset_name)
+
+class DSSManagedDatasetCreationHelper(object):
+
+    def __init__(self, project, dataset_name):
+        self.project = project
+        self.dataset_name = dataset_name
+        self.creation_settings = { "specificSettings" : {} }
+
+    def get_creation_settings(self):
+        return self.creation_settings
+
+    def with_store_into(self, connection, type_option_id = None, format_option_id = None):
+        """
+        Sets the connection into which to store the new managed dataset
+        :param str connection: Name of the connection to store into
+        :param str type_option_id: If the connection accepts several types of datasets, the type
+        :param str format_option_id: Optional identifier of a file format option
+        :return: self
+        """
+        self.creation_settings["connectionId"] = connection
+        if type_option_id is not None:
+            self.creation_settings["typeOptionId"] = type_option_id
+        if format_option_id is not None:
+            self.creation_settings["specificSettings"]["formatOptionId"] = format_option_id
+        return self
+
+    def with_copy_partitioning_from(self, dataset_ref):
+        """
+        Sets the new managed dataset to use the same partitioning as an existing dataset_name
+
+        :param str dataset_ref: Name of the dataset to copy partitioning from
+        :return: self
+        """
+        self.creation_settings["partitioningOptionId"] = "copy:%s" % dataset_ref
+        return self
+
+    def create(self):
+        """
+        Executes the creation of the managed dataset according to the selected options
+
+        :return: The :class:`DSSDataset` corresponding to the newly created dataset
+        """
+        self.project.client._perform_json("POST", "/projects/%s/datasets/managed" % self.project.project_key,
+            body = {
+                "name": self.dataset_name,
+                "creationSettings": self.creation_settings
+            })
+        return DSSDataset(self.project.client, self.project.project_key, self.dataset_name)

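A short sketch of how the new builder could be driven end to end. The project key, dataset names and connection name are placeholders, and the helper is instantiated directly here for clarity.

import dataikuapi
from dataikuapi.dss.dataset import DSSManagedDatasetCreationHelper

# Placeholder connection details, for illustration only
client = dataikuapi.DSSClient("http://dss.example.com:11200", "SOME_API_KEY")
project = client.get_project("MYPROJECT")

# Store the new managed dataset on a hypothetical "filesystem_managed" connection,
# copying the partitioning scheme of an existing dataset, then create it
dataset = (DSSManagedDatasetCreationHelper(project, "my_new_dataset")
           .with_store_into("filesystem_managed")
           .with_copy_partitioning_from("existing_partitioned_dataset")
           .create())
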
dataikuapi/dss/future.py

Lines changed: 9 additions & 0 deletions
@@ -10,6 +10,15 @@ def __init__(self, client, job_id, state=None):
         self.state = state
         self.state_is_peek = True
 
+    @classmethod
+    def get_result_wait_if_needed(cls, client, ret):
+        if 'jobId' in ret:
+            future = DSSFuture(client, ret["jobId"], ret)
+            future.wait_for_result()
+            return future.get_result()
+        else:
+            return ret['result']
+
     def abort(self):
         """
         Abort the future

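For reference, a minimal sketch of the two response shapes the new classmethod handles; the payloads below are illustrative, not real API output.

from dataikuapi.dss.future import DSSFuture

# Synchronous response: the result is already inline, no future is created,
# so no client is needed for this branch
sync_ret = {"result": {"ok": True}}  # illustrative payload
print(DSSFuture.get_result_wait_if_needed(None, sync_ret))  # -> {'ok': True}

# Asynchronous response: a jobId is present, so the helper builds a DSSFuture,
# waits for it and returns its result (requires a live client)
# async_ret = {"jobId": "abc123"}  # illustrative payload
# result = DSSFuture.get_result_wait_if_needed(client, async_ret)
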
dataikuapi/dss/plugin.py

Lines changed: 98 additions & 30 deletions
@@ -7,6 +7,29 @@
 from .apiservice import DSSAPIService
 import sys
 
+class DSSPluginSettings(object):
+    """
+    The settings of a plugin.
+    """
+
+    def __init__(self, client, plugin_id, settings):
+        """Do not call this directly, use :meth:`DSSPlugin.get_settings`"""
+        self.client = client
+        self.plugin_id = plugin_id
+        self.settings = settings
+
+    def get_raw(self):
+        """Returns the raw settings object"""
+        return self.settings
+
+    def set_code_env(self, code_env_name):
+        """Sets the name of the code env to use for this plugin"""
+        self.settings["codeEnvName"] = code_env_name
+
+    def save(self):
+        """Saves the settings to DSS"""
+        self.client._perform_empty("POST", "/plugins/%s/settings" % (self.plugin_id), body=self.settings)
+
 class DSSPlugin(object):
     """
     A plugin on the DSS instance

@@ -16,65 +39,110 @@ def __init__(self, client, plugin_id):
         self.plugin_id = plugin_id
 
     ########################################################
-    # plugin upload/update as zip
+    # Settings
     ########################################################
 
-    def upload(self, file_path):
+    def get_settings(self):
+        """Return the plugin-level settings
+
+        :return: a :class:`DSSPluginSettings`
         """
-        Upload the given file as a plugin
+        settings = self.client._perform_json("GET", "/plugins/%s/settings" % (self.plugin_id))
+        return DSSPluginSettings(self.client, self.plugin_id, settings)
 
-        Note: this call requires an API key with admin rights
-
-        :param: file_path : the path to the zip file of the plugin
+    ########################################################
+    # Code env
+    ########################################################
+
+    def create_code_env(self, python_interpreter=None, conda=False):
         """
-        with open(file_path, 'rb') as f:
-            return self.client._perform_json_upload("POST", "/plugins/%s/upload" % (self.plugin_id), 'plugin.zip', f).text
+        Starts the creation of the code env of the plugin
 
-    def update(self, file_path):
+        :return: a :class:`dataikuapi.dssfuture.DSSFuture`
         """
-        Update the plugin with the given file
+        ret = self.client._perform_json("POST", "/plugins/%s/code-env/actions/create" % (self.plugin_id), body={
+            "deploymentMode" : "PLUGIN_MANAGED",
+            "conda": conda,
+            "pythonInterpreter": python_interpreter
+        })
+        return self.client.get_future(ret["jobId"])
 
-        Note: this call requires an API key with admin rights
-
-        :param: file_path : the path to the zip file of the plugin
+
+    def update_code_env(self):
+        """
+        Starts an update of the code env of the plugin
+
+        :return: a :class:`dataikuapi.dss.future.DSSFuture`
+        """
+        ret = self.client._perform_json("POST", "/plugins/%s/code-env/actions/update" % (self.plugin_id))
+        return self.client.get_future(ret["jobId"])
+
+
+    ########################################################
+    # Plugin update
+    ########################################################
+
+    def update_from_zip(self, fp):
+        """
+        Updates the plugin from a plugin archive (as a file object)
+
+        :param object fp: A file-like object pointing to a plugin archive zip
+        """
+        files = {'file': fp }
+        self.client._perform_json("POST", "/plugins/%s/actions/updateFromZip" % (self.plugin_id), files=files)
+
+    def update_from_store(self):
+        """
+        Updates the plugin from the Dataiku plugin store
+
+        :return: a :class:`~dataikuapi.dss.future.DSSFuture`
+        """
+        ret = self.client._perform_json("POST", "/plugins/%s/actions/updateFromStore" % (self.plugin_id))
+        return self.client.get_future(ret["jobId"])
+
+    def update_from_git(self, repository_url, checkout = "master", subpath=None):
+        """
+        Updates the plugin from a Git repository. DSS must be setup to allow access to the repository.
+
+        :param str repository_url: URL of a Git remote
+        :param str checkout: branch/tag/SHA1 to commit. For example "master"
+        :param str subpath: Optional, path within the repository to use as plugin. Should contain a 'plugin.json' file
+        :return: a :class:`~dataikuapi.dss.future.DSSFuture`
         """
-        with open(file_path, 'rb') as f:
-            return self.client._perform_json_upload("POST", "/plugins/%s/update" % (self.plugin_id), 'plugin.zip', f).text
+        ret = self.client._perform_json("POST", "/plugins/%s/actions/updateFromGit" % (self.plugin_id), body={
+            "gitRepositoryUrl": repository_url,
+            "gitCheckout" : checkout,
+            "gitSubpath": subpath
+        })
+        return self.client.get_future(ret["jobId"])
 
     ########################################################
     # Managing the dev plugin's contents
     ########################################################
 
     def list_files(self):
         """
-        Get the hierarchy of files in the plugin
-
-        Returns:
-            the plugins's contents
+        Get the hierarchy of files in the plugin (dev plugins only)
         """
         return self.client._perform_json("GET", "/plugins/%s/contents" % (self.plugin_id))
 
     def get_file(self, path):
         """
-        Get a file from the plugin folder
-
-        Args:
-            path: the name of the file, from the root of the plugin
+        Get a file from the plugin folder (dev plugins only)
 
-        Returns:
-            the file's content, as a stream
+        :param str path: the path of the file, relative to the root of the plugin
+
+        :return: a file-like object containing the file's content
         """
         return self.client._perform_raw("GET", "/plugins/%s/contents/%s" % (self.plugin_id, path)).raw
 
     def put_file(self, path, f):
         """
-        Update a file in the plugin folder
+        Update a file in the plugin folder (dev plugins only)
 
-        Args:
-            f: the file contents, as a stream
-            path: the name of the file, from the root of the plugin
+        :param file-like f: the file contents, as a file-like object
+        :param str path: the path of the file, relative ot the root of the plugin
         """
-
         file_name = path.split('/')[-1]
         data = f.read() # eat it all, because making it work with a path variable and a MultifilePart in swing looks complicated
        return self.client._perform_empty("POST", "/plugins/%s/contents/%s" % (self.plugin_id, path), raw_body=data)

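To tie the new plugin APIs together, a hedged sketch of reading and saving plugin settings and pushing an updated archive. The plugin id, code env name and file path are placeholders, and get_plugin is assumed to return a DSSPlugin handle.

import dataikuapi

# Placeholder connection details, for illustration only
client = dataikuapi.DSSClient("http://dss.example.com:11200", "SOME_API_KEY")
plugin = client.get_plugin("my-plugin")  # placeholder plugin id

# Point the plugin at a specific code env, then persist the change
settings = plugin.get_settings()
settings.set_code_env("plugin_my-plugin_managed")  # placeholder code env name
settings.save()

# Update the plugin from a local archive (dev workflow); update_from_store()
# or update_from_git(...) could be used instead, as added in this commit
with open("/path/to/plugin.zip", "rb") as fp:  # placeholder path
    plugin.update_from_zip(fp)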