@@ -26,7 +26,7 @@ class DSSProject(object):
2626 """
2727 A handle to interact with a project on the DSS instance.
2828
29- Do not create this class directly, instead use :meth:`dataikuapi.DSSClient.get_project``
29+ Do not create this class directly, instead use :meth:`dataikuapi.DSSClient.get_project`
3030 """
3131 def __init__(self, client, project_key):
3232 self.client = client
@@ -69,7 +69,7 @@ def rec(pf):
6969 def move_to_folder (self , folder ):
7070 """
7171 Moves this project to a project folder
72- :param folder :class:`dataikuapi.dss.projectfolder.DSSProjectFolder
72+ :param folder: the :class:`dataikuapi.dss.projectfolder.DSSProjectFolder` where the project should be moved
7373 """
7474 current_folder = self.get_project_folder()
7575 current_folder.move_project_to(self.project_key, folder)
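A minimal usage sketch (project keys are illustrative; the target folder is simply taken from another project's current location, reusing the ``get_project_folder()`` call shown above):

.. code-block:: python

    project = client.get_project('MY_PROJECT')
    # reuse the folder that currently contains another (hypothetical) project
    target_folder = client.get_project('OTHER_PROJECT').get_project_folder()
    project.move_to_folder(target_folder)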
@@ -823,14 +823,13 @@ def start_job(self, definition):
823823 """
824824 Create a new job, and return a handle to interact with it
825825
826- Args:
827- definition: the definition for the job to create. The definition must contain the type of job (RECURSIVE_BUILD,
828- NON_RECURSIVE_FORCED_BUILD, RECURSIVE_FORCED_BUILD, RECURSIVE_MISSING_ONLY_BUILD) and a list of outputs to build.
829- Optionally, a refreshHiveMetastore field can specify whether to re-synchronize the Hive metastore for recomputed
830- HDFS datasets.
826+ :param dict definition: The definition should contain:
827+
828+ * the type of job (RECURSIVE_BUILD, NON_RECURSIVE_FORCED_BUILD, RECURSIVE_FORCED_BUILD, RECURSIVE_MISSING_ONLY_BUILD)
829+ * a list of outputs to build, each identifying the object to build and its type (DATASET, MANAGED_FOLDER, SAVED_MODEL or STREAMING_ENDPOINT)
830+ * optionally, a refreshHiveMetastore field (True or False) specifying whether to re-synchronize the Hive metastore for recomputed HDFS datasets
831831
832- Returns:
833- A :class:`dataikuapi.dss.job.DSSJob` job handle
832+ :returns: A :class:`dataikuapi.dss.job.DSSJob` job handle
834833 """
835834 job_def = self.client._perform_json("POST", "/projects/%s/jobs/" % self.project_key, body=definition)
836835 return DSSJob(self.client, self.project_key, job_def['id'])
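For illustration, a definition following the fields above might look like this (project key and dataset name are placeholders):

.. code-block:: python

    project = client.get_project('MY_PROJECT')
    definition = {
        "type": "NON_RECURSIVE_FORCED_BUILD",
        "outputs": [
            {"id": "my_dataset", "type": "DATASET", "projectKey": "MY_PROJECT"}
        ],
        "refreshHiveMetastore": False
    }
    job = project.start_job(definition)   # DSSJob handle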
@@ -839,11 +838,11 @@ def start_job_and_wait(self, definition, no_fail=False):
839838 """
840839 Starts a new job and waits for it to complete.
841840
842- Args :
843- definition: the definition for the job to create. The definition must contain the type of job (RECURSIVE_BUILD,
844- NON_RECURSIVE_FORCED_BUILD, RECURSIVE_FORCED_BUILD, RECURSIVE_MISSING_ONLY_BUILD) and a list of outputs to build.
845- Optionally, a refreshHiveMetastore field can specify whether to re-synchronize the Hive metastore for recomputed
846- HDFS datasets.
841+ :param dict definition: The definition should contain:
842+
843+ * the type of job (RECURSIVE_BUILD, NON_RECURSIVE_FORCED_BUILD, RECURSIVE_FORCED_BUILD, RECURSIVE_MISSING_ONLY_BUILD)
843+ * a list of outputs to build, each identifying the object to build and its type (DATASET, MANAGED_FOLDER, SAVED_MODEL or STREAMING_ENDPOINT)
844+ * optionally, a refreshHiveMetastore field (True or False) specifying whether to re-synchronize the Hive metastore for recomputed HDFS datasets
847846 """
848847 job_def = self.client._perform_json("POST", "/projects/%s/jobs/" % self.project_key, body=definition)
849848 job = DSSJob(self.client, self.project_key, job_def['id'])
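A sketch of the blocking variant with ``no_fail`` (object names are illustrative; ``no_fail=True`` is assumed, from its name, to suppress the exception otherwise raised when the job fails):

.. code-block:: python

    project.start_job_and_wait({
        "type": "RECURSIVE_BUILD",
        "outputs": [{"id": "my_folder", "type": "MANAGED_FOLDER", "projectKey": "MY_PROJECT"}]
    }, no_fail=True)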
@@ -883,8 +882,8 @@ def list_jupyter_notebooks(self, active=False, as_type="object"):
883882 :param str as_type: How to return the list. Supported values are "listitems" and "objects".
884883 :param bool active: if True, only return currently running jupyter notebooks.
885884
886- :returns: The list of the notebooks. If "as_type" is "listitems", each one as a :class:`dataikuapi.dss.notebook.DSSJupyterNotebookListItem`,
887- if "as_type" is "objects", each one as a :class:`dataikuapi.dss.notebook.DSSJupyterNotebook`
885+ :returns: The list of the notebooks. If "as_type" is "listitems", each one is a :class:`dataikuapi.dss.notebook.DSSJupyterNotebookListItem`; if "as_type" is "objects", each one is a :class:`dataikuapi.dss.notebook.DSSJupyterNotebook`
886+
888887 :rtype: list of :class:`dataikuapi.dss.notebook.DSSJupyterNotebook` or list of :class:`dataikuapi.dss.notebook.DSSJupyterNotebookListItem`
889888 """
890889 notebook_items = self.client._perform_json("GET", "/projects/%s/jupyter-notebooks/" % self.project_key, params={"active": active})
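A sketch of the two return modes (only the documented parameters are used):

.. code-block:: python

    # lightweight descriptors of every Jupyter notebook in the project
    items = project.list_jupyter_notebooks(as_type="listitems")

    # full handles, restricted to the notebooks currently running
    running = project.list_jupyter_notebooks(active=True, as_type="objects")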
@@ -968,7 +967,7 @@ def set_variables(self, obj):
968967 WARNING: if executed from a python recipe, the changes made by `set_variables` will not be "seen" in that recipe.
969968 Use the internal API dataiku.get_custom_variables() instead if this behavior is needed
970969
971- @ param dict obj: must be a modified version of the object returned by get_variables
970+ :param dict obj: must be a modified version of the object returned by get_variables
972971 """
973972 if not "standard" in obj:
974973 raise ValueError("Missing 'standard' key in argument")
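The intended read-modify-write pattern, sketched with a hypothetical variable name:

.. code-block:: python

    variables = project.get_variables()
    variables["standard"]["data_date"] = "2021-01-01"   # hypothetical variable
    project.set_variables(variables)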
@@ -1035,17 +1034,39 @@ def get_api_service(self, service_id):
10351034 ########################################################
10361035
10371036 def list_exported_bundles(self):
1037+ """
1038+ :returns: A dictionary of all bundles for a project on the Design node.
1039+ """
10381040 return self.client._perform_json("GET",
10391041 "/projects/%s/bundles/exported" % self.project_key)
10401042
10411043 def export_bundle(self, bundle_id):
1044+ """
1045+ Creates a new project bundle on the Design node
1046+
1047+ :param str bundle_id: identifier of the bundle to create
1048+ """
10421049 return self.client._perform_json("PUT",
10431050 "/projects/%s/bundles/exported/%s" % (self.project_key, bundle_id))
10441051
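A short sketch of exporting a bundle on the Design node and checking that it is listed (the bundle id is arbitrary):

.. code-block:: python

    project = client.get_project('MY_PROJECT')
    project.export_bundle('v1')
    bundles = project.list_exported_bundles()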
10451052 def get_exported_bundle_archive_stream(self, bundle_id):
10461053 """
10471054 Download a bundle archive that can be deployed in a DSS automation Node, as a binary stream.
1048- Warning: this stream will monopolize the DSSClient until closed.
1055+
1056+ .. warning::
1057+
1058+ The stream must be closed after use. Use a ``with`` statement so that it is closed automatically at the end of the block. For example:
1059+
1060+ .. code-block:: python
1061+
1062+ with project.get_exported_bundle_archive_stream('v1') as fp:
1063+ # use fp
1064+
1065+ # or explicitly close the stream after use
1066+ fp = project.get_exported_bundle_archive_stream('v1')
1067+ # use fp, then close
1068+ fp.close()
1069+
10491070 """
10501071 return self.client._perform_raw("GET",
10511072 "/projects/%s/bundles/exported/%s/archive" % (self.project_key, bundle_id))
@@ -1054,7 +1075,7 @@ def download_exported_bundle_archive_to_file(self, bundle_id, path):
10541075 """
10551076 Download a bundle archive that can be deployed in a DSS automation Node into the given output file.
10561077
1057- :param path if "-", will write to /dev/stdout
1078+ :param str path: output file path; if "-", the archive is written to /dev/stdout
10581079 """
10591080 if path == "-":
10601081 path = "/dev/stdout"
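For example, to save a previously exported bundle to disk (path and bundle id are illustrative):

.. code-block:: python

    project.download_exported_bundle_archive_to_file('v1', '/tmp/my-project-v1.zip')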
@@ -1090,15 +1111,34 @@ def publish_bundle(self, bundle_id, published_project_key=None):
10901111 ########################################################
10911112
10921113 def list_imported_bundles(self):
1114+ """
1115+ :returns: A dict describing the bundles imported into this project, on the Automation node.
1116+ """
10931117 return self.client._perform_json("GET",
10941118 "/projects/%s/bundles/imported" % self.project_key)
10951119
10961120 def import_bundle_from_archive(self, archive_path):
1121+ """
1122+ Imports a bundle from a zip archive path on the Automation node.
1123+
1124+ :param str archive_path: A full path to a zip archive, for example `/home/dataiku/my-bundle-v1.zip`
1125+ """
10971126 return self.client._perform_json("POST",
10981127 "/projects/%s/bundles/imported/actions/importFromArchive" % (self.project_key),
10991128 params={"archivePath": osp.abspath(archive_path)})
11001129
11011130 def import_bundle_from_stream(self, fp):
1131+ """
1132+ Imports a bundle from a file stream, on the Automation node.
1133+
1134+ :param file-like fp: file object pointing to the bundle archive (opened in binary mode). Usage example:
1135+
1136+ .. code-block:: python
1137+
1138+ project = client.get_project('MY_PROJECT')
1139+ with open('/home/dataiku/my-bundle-v1.zip', 'rb') as f:
1140+ project.import_bundle_from_stream(f)
1141+ """
11021142 files = {'file': fp}
11031143 return self.client._perform_empty("POST",
11041144 "/projects/%s/bundles/imported/actions/importFromStream" % (self.project_key),
@@ -1120,6 +1160,11 @@ def activate_bundle(self, bundle_id, scenarios_to_enable=None):
11201160 "/projects/%s/bundles/imported/%s/actions/activate" % (self .project_key , bundle_id ), body = options )
11211161
11221162 def preload_bundle(self, bundle_id):
1163+ """
1164+ Preloads a bundle that has been imported on the Automation node
1165+
1166+ :param str bundle_id: the bundle_id for an existing imported bundle
1167+ """
11231168 return self.client._perform_json("POST",
11241169 "/projects/%s/bundles/imported/%s/actions/preload" % (self.project_key, bundle_id))
11251170
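A plausible end-to-end sketch on the Automation node; the exact sequence (in particular when preloading is needed) may vary with the bundle contents:

.. code-block:: python

    project = client.get_project('MY_PROJECT')
    project.import_bundle_from_archive('/home/dataiku/my-bundle-v1.zip')
    project.preload_bundle('v1')     # preload the imported bundle
    project.activate_bundle('v1')    # make it the active version of the project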
@@ -1200,8 +1245,9 @@ def list_recipes(self, as_type="listitems"):
12001245 def get_recipe(self, recipe_name):
12011246 """
12021247 Gets a :class:`dataikuapi.dss.recipe.DSSRecipe` handle to interact with a recipe
1248+
12031249 :param str recipe_name: The name of the recipe
1204- :rtype :class:`dataikuapi.dss.recipe.DSSRecipe`
1250+ :rtype: :class:`dataikuapi.dss.recipe.DSSRecipe`
12051251 """
12061252 return DSSRecipe(self.client, self.project_key, recipe_name)
12071253
@@ -1295,6 +1341,9 @@ def new_recipe(self, type, name=None):
12951341 ########################################################
12961342
12971343 def get_flow(self):
1344+ """
1345+ :rtype: :class:`dataikuapi.dss.flow.DSSProjectFlow`
1346+ """
12981347 return DSSProjectFlow(self.client, self)
12991348
13001349 ########################################################
@@ -1681,6 +1730,11 @@ def with_refresh_metastore(self, refresh_metastore):
16821731 def with_output(self, name, object_type=None, object_project_key=None, partition=None):
16821731 """
16831732 Adds an item to build in this job
1733+
1734+ :param str name: name of the output object to build
1735+ :param str object_type: type of the object to build: DATASET, MANAGED_FOLDER, SAVED_MODEL or STREAMING_ENDPOINT
1736+ :param str object_project_key: PROJECT_KEY of the project that contains the object to build
1737+ :param str partition: identifier of the partition to build, for partitioned outputs
16841738 """
16851739 self.definition['outputs'].append({'type': object_type, 'id': name, 'projectKey': object_project_key, 'partition': partition})
16861740 return self
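A sketch of the builder in use, assuming it is obtained from ``DSSProject.new_job()`` and started with ``start()``, as in current versions of dataikuapi (neither call is shown in this diff):

.. code-block:: python

    builder = project.new_job('NON_RECURSIVE_FORCED_BUILD')
    builder.with_output('my_dataset', object_type='DATASET')
    job = builder.start()   # returns a DSSJob handle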