
Commit 3039d5b (1 parent: d0cdc20)

fixing up some formatting and adding a couple more doc strings

File tree: 1 file changed (+34, -16)

dataikuapi/dss/project.py

Lines changed: 34 additions & 16 deletions
@@ -69,7 +69,7 @@ def rec(pf):
     def move_to_folder(self, folder):
         """
         Moves this project to a project folder
-        :param folder :class:`dataikuapi.dss.projectfolder.DSSProjectFolder
+        :param folder :class:`dataikuapi.dss.projectfolder.DSSProjectFolder`
         """
         current_folder = self.get_project_folder()
         current_folder.move_project_to(self.project_key, folder)
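
For context, a minimal usage sketch of the method touched above (not part of the commit). It assumes the usual DSSClient entry point and that the root project folder can be fetched via get_root_project_folder(); host, API key and project key are placeholders, and the project handle defined here is reused in the later sketches.

    import dataikuapi

    # Placeholder connection details for a Design node
    client = dataikuapi.DSSClient("http://localhost:11200", "my-api-key")
    project = client.get_project("MYPROJECT")

    # Assumption: the client exposes the root project folder as a DSSProjectFolder
    root_folder = client.get_root_project_folder()
    project.move_to_folder(root_folder)
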
@@ -823,12 +823,11 @@ def start_job(self, definition):
         """
         Create a new job, and return a handle to interact with it

-        :param: dict definition: The definition should contain:
+        :param dict definition: The definition should contain:

            * the type of job (RECURSIVE_BUILD, NON_RECURSIVE_FORCED_BUILD, RECURSIVE_FORCED_BUILD, RECURSIVE_MISSING_ONLY_BUILD)
            * a list of outputs to build from the available types: (DATASET, MANAGED_FOLDER, SAVED_MODEL, STREAMING_ENDPOINT)
-           * (Optional) a refreshHiveMetastore field (True or False) to specify whether to re-synchronize the Hive metastore for recomputed
-             HDFS datasets.
+           * (Optional) a refreshHiveMetastore field (True or False) to specify whether to re-synchronize the Hive metastore for recomputed HDFS datasets.

         :returns: A :class:`dataikuapi.dss.job.DSSJob` job handle
         """
@@ -839,12 +838,11 @@ def start_job_and_wait(self, definition, no_fail=False):
         """
         Starts a new job and waits for it to complete.

-        :param: dict definition: The definition should contain:
+        :param dict definition: The definition should contain:

            * the type of job (RECURSIVE_BUILD, NON_RECURSIVE_FORCED_BUILD, RECURSIVE_FORCED_BUILD, RECURSIVE_MISSING_ONLY_BUILD)
            * a list of outputs to build from the available types: (DATASET, MANAGED_FOLDER, SAVED_MODEL, STREAMING_ENDPOINT)
-           * (Optional) a refreshHiveMetastore field (True or False) to specify whether to re-synchronize the Hive metastore for recomputed
-             HDFS datasets.
+           * (Optional) a refreshHiveMetastore field (True or False) to specify whether to re-synchronize the Hive metastore for recomputed HDFS datasets.
         """
         job_def = self.client._perform_json("POST", "/projects/%s/jobs/" % self.project_key, body = definition)
         job = DSSJob(self.client, self.project_key, job_def['id'])
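
The blocking variant takes the same definition; a one-line sketch reusing the dict from the previous example, with no_fail left at its default.

    # Starts the job and waits for completion instead of returning a handle
    project.start_job_and_wait(definition)
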
@@ -884,8 +882,8 @@ def list_jupyter_notebooks(self, active=False, as_type="object"):
         :param bool as_type: How to return the list. Supported values are "listitems" and "objects".
         :param bool active: if True, only return currently running jupyter notebooks.

-        :returns: The list of the notebooks. If "as_type" is "listitems", each one as a :class:`dataikuapi.dss.notebook.DSSJupyterNotebookListItem`,
-          if "as_type" is "objects", each one as a :class:`dataikuapi.dss.notebook.DSSJupyterNotebook`
+        :returns: The list of the notebooks. If "as_type" is "listitems", each one as a :class:`dataikuapi.dss.notebook.DSSJupyterNotebookListItem`, if "as_type" is "objects", each one as a :class:`dataikuapi.dss.notebook.DSSJupyterNotebook`
+
         :rtype: list of :class:`dataikuapi.dss.notebook.DSSJupyterNotebook` or list of :class:`dataikuapi.dss.notebook.DSSJupyterNotebookListItem`
         """
         notebook_items = self.client._perform_json("GET", "/projects/%s/jupyter-notebooks/" % self.project_key, params={"active": active})
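
A sketch of the two as_type modes described above; the diff does not show which fields a DSSJupyterNotebookListItem exposes, so the loop simply prints the items as-is.

    # Lightweight list items for every notebook in the project
    for item in project.list_jupyter_notebooks(as_type="listitems"):
        print(item)

    # Full notebook handles, restricted to currently running notebooks
    running = project.list_jupyter_notebooks(active=True, as_type="objects")
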
@@ -969,7 +967,7 @@ def set_variables(self, obj):
         WARNING: if executed from a python recipe, the changes made by `set_variables` will not be "seen" in that recipe.
         Use the internal API dataiku.get_custom_variables() instead if this behavior is needed

-        @param dict obj: must be a modified version of the object returned by get_variables
+        :param dict obj: must be a modified version of the object returned by get_variables
         """
         if not "standard" in obj:
             raise ValueError("Missing 'standard' key in argument")
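
The get-modify-set round trip the docstring describes, as a sketch; "standard" is the section the method itself checks for, and my_var is a placeholder variable name.

    variables = project.get_variables()              # dict with at least a "standard" section
    variables["standard"]["my_var"] = "some value"   # placeholder variable
    project.set_variables(variables)                 # must be a modified copy of get_variables()
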
@@ -1036,17 +1034,28 @@ def get_api_service(self, service_id):
     ########################################################

     def list_exported_bundles(self):
+        """
+        :returns: A dictionary of all bundles for a project on the Design node.
+        """
         return self.client._perform_json("GET",
                 "/projects/%s/bundles/exported" % self.project_key)

     def export_bundle(self, bundle_id):
+        """
+        Creates a new project bundle on the Design node
+
+        :param str bundle_id: bundle id tag
+        """
         return self.client._perform_json("PUT",
                 "/projects/%s/bundles/exported/%s" % (self.project_key, bundle_id))

     def get_exported_bundle_archive_stream(self, bundle_id):
         """
         Download a bundle archive that can be deployed in a DSS automation Node, as a binary stream.
-        Warning: this stream will monopolize the DSSClient until closed.
+
+        .. warning::
+
+            this stream will monopolize the DSSClient until closed.
         """
         return self.client._perform_raw("GET",
                 "/projects/%s/bundles/exported/%s/archive" % (self.project_key, bundle_id))
@@ -1055,7 +1064,7 @@ def download_exported_bundle_archive_to_file(self, bundle_id, path):
         """
         Download a bundle archive that can be deployed in a DSS automation Node into the given output file.

-        :param path if "-", will write to /dev/stdout
+        :param string path: if "-", will write to /dev/stdout
         """
         if path == "-":
             path= "/dev/stdout"
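
Downloading the exported archive to a file (or to stdout with "-") is a single call; a sketch with placeholder paths.

    # Write the archive for bundle "v1" to a local zip file
    project.download_exported_bundle_archive_to_file("v1", "/tmp/myproject-bundle-v1.zip")
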
@@ -1092,14 +1101,14 @@ def publish_bundle(self, bundle_id, published_project_key=None):

     def list_imported_bundles(self):
         """
-        :returns: a dict of bundles objects for the project.
+        :returns: a dict containing bundle imports for a project, on the Automation node.
         """
         return self.client._perform_json("GET",
                 "/projects/%s/bundles/imported" % self.project_key)

     def import_bundle_from_archive(self, archive_path):
         """
-        Imports a bundle from a path to a zip bundle archive.
+        Imports a bundle from a zip archive path on the Automation node.

         :param str archive_path: A full path to a zip archive, for example `/home/dataiku/my-bundle-v1.zip`
         """
@@ -1109,7 +1118,7 @@ def import_bundle_from_archive(self, archive_path):

     def import_bundle_from_stream(self, fp):
         """
-        Imports a bundle from a file stream
+        Imports a bundle from a file stream, on the Automation node.

         :param file-like fp: file handler. Usage example:
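
The stream variant takes a file-like handle rather than a server-side path; a sketch with a local file (placeholder path).

    with open("/path/to/my-bundle-v1.zip", "rb") as fp:
        automation_project.import_bundle_from_stream(fp)
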
@@ -1140,6 +1149,11 @@ def activate_bundle(self, bundle_id, scenarios_to_enable=None):
                 "/projects/%s/bundles/imported/%s/actions/activate" % (self.project_key, bundle_id), body=options)

     def preload_bundle(self, bundle_id):
+        """
+        Preloads a bundle that has been imported on the Automation node
+
+        :param str bundle_id: the bundle_id for an existing imported bundle
+        """
         return self.client._perform_json("POST",
                 "/projects/%s/bundles/imported/%s/actions/preload" % (self.project_key, bundle_id))
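After an import, a sketch of preloading and then activating the bundle; whether preload is strictly required before activation is not stated in this diff.

    automation_project.preload_bundle("v1")   # prepare the imported bundle
    automation_project.activate_bundle("v1")  # activate it (scenarios_to_enable left at default)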

@@ -1220,8 +1234,9 @@ def list_recipes(self, as_type="listitems"):
     def get_recipe(self, recipe_name):
         """
         Gets a :class:`dataikuapi.dss.recipe.DSSRecipe` handle to interact with a recipe
+
         :param str recipe_name: The name of the recipe
-        :rtype :class:`dataikuapi.dss.recipe.DSSRecipe`
+        :rtype: :class:`dataikuapi.dss.recipe.DSSRecipe`
         """
         return DSSRecipe(self.client, self.project_key, recipe_name)
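A one-line sketch of the recipe getter, with a placeholder recipe name.

    recipe = project.get_recipe("compute_my_output_dataset")  # DSSRecipe handle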

@@ -1315,6 +1330,9 @@ def new_recipe(self, type, name=None):
     ########################################################

     def get_flow(self):
+        """
+        :rtype: A :class:`dataikuapi.dss.flow.DSSProjectFlow`
+        """
         return DSSProjectFlow(self.client, self)

     ########################################################
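
And the flow getter documented in the last hunk, as a sketch.

    flow = project.get_flow()  # DSSProjectFlow handle for this project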
