@@ -26,7 +26,7 @@ class DSSProject(object):
2626 """
2727 A handle to interact with a project on the DSS instance.
2828
29- Do not create this class directly, instead use :meth:`dataikuapi.DSSClient.get_project``
29+ Do not create this class directly, instead use :meth:`dataikuapi.DSSClient.get_project`
3030 """
3131     def __init__(self, client, project_key):
3232         self.client = client
@@ -205,7 +205,7 @@ def get_metadata(self):
205205 Get the metadata attached to this project. The metadata contains label, description
206206 checklists, tags and custom metadata of the project.
207207
208-         For more information on available metadata, please see https://doc.dataiku.com/dss/api/6.0/rest/
208+         For more information on available metadata, please see https://doc.dataiku.com/dss/api/latest/rest/
209209
210210 :returns: a dict object containing the project metadata.
211211 :rtype: dict
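For illustration, a minimal sketch of reading this metadata from a client handle (the host, API key and project key below are placeholders, not part of this change):

```
from dataikuapi import DSSClient

client = DSSClient("http://localhost:11200", "my-api-key")  # placeholder host and key
project = client.get_project("MYPROJECT")                   # placeholder project key
metadata = project.get_metadata()
print(metadata["label"], metadata["tags"])                   # fields described above
```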
@@ -823,14 +823,14 @@ def start_job(self, definition):
823823 """
824824 Create a new job, and return a handle to interact with it
825825
826- Args:
827- definition: the definition for the job to create. The definition must contain the type of job (RECURSIVE_BUILD,
828- NON_RECURSIVE_FORCED_BUILD, RECURSIVE_FORCED_BUILD, RECURSIVE_MISSING_ONLY_BUILD) and a list of outputs to build.
829- Optionally, a refreshHiveMetastore field can specify whether to re-synchronize the Hive metastore for recomputed
826+         :param dict definition: The definition should contain:
827+
828+ * the type of job (RECURSIVE_BUILD, NON_RECURSIVE_FORCED_BUILD, RECURSIVE_FORCED_BUILD, RECURSIVE_MISSING_ONLY_BUILD)
829+ * a list of outputs to build (DATASET, MANAGED_FOLDER, SAVED_MODEL, STREAMING_ENDPOINT)
830+ * (Optional) a refreshHiveMetastore field (True or False) to specify whether to re-synchronize the Hive metastore for recomputed
830831 HDFS datasets.
831832
832- Returns:
833- A :class:`dataikuapi.dss.job.DSSJob` job handle
833+ :returns: A :class:`dataikuapi.dss.job.DSSJob` job handle
834834 """
835835         job_def = self.client._perform_json("POST", "/projects/%s/jobs/" % self.project_key, body=definition)
836836         return DSSJob(self.client, self.project_key, job_def['id'])
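As a hedged example of the definition dict described above (the dataset name is a placeholder; the keys of each `outputs` entry mirror what `with_output` appends further down):

```
definition = {
    "type": "NON_RECURSIVE_FORCED_BUILD",
    "refreshHiveMetastore": False,
    "outputs": [{"id": "mydataset", "type": "DATASET"}]  # placeholder dataset name
}
job = project.start_job(definition)  # returns a DSSJob handle
```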
@@ -839,10 +839,11 @@ def start_job_and_wait(self, definition, no_fail=False):
839839 """
840840 Starts a new job and waits for it to complete.
841841
842- Args:
843- definition: the definition for the job to create. The definition must contain the type of job (RECURSIVE_BUILD,
844- NON_RECURSIVE_FORCED_BUILD, RECURSIVE_FORCED_BUILD, RECURSIVE_MISSING_ONLY_BUILD) and a list of outputs to build.
845- Optionally, a refreshHiveMetastore field can specify whether to re-synchronize the Hive metastore for recomputed
842+         :param dict definition: The definition should contain:
843+
844+ * the type of job (RECURSIVE_BUILD, NON_RECURSIVE_FORCED_BUILD, RECURSIVE_FORCED_BUILD, RECURSIVE_MISSING_ONLY_BUILD)
845+ * a list of outputs to build (DATASET, MANAGED_FOLDER, SAVED_MODEL, STREAMING_ENDPOINT)
846+ * (Optional) a refreshHiveMetastore field (True or False) to specify whether to re-synchronize the Hive metastore for recomputed
846847 HDFS datasets.
847848 """
848849         job_def = self.client._perform_json("POST", "/projects/%s/jobs/" % self.project_key, body=definition)
@@ -1090,15 +1091,32 @@ def publish_bundle(self, bundle_id, published_project_key=None):
10901091 ########################################################
10911092
10921093     def list_imported_bundles(self):
1094+ """
1095+         Returns the list of bundles that have been imported into this project.
1096+ """
10931097         return self.client._perform_json("GET",
10941098             "/projects/%s/bundles/imported" % self.project_key)
10951099
10961100     def import_bundle_from_archive(self, archive_path):
1101+ """
1102+         Imports a bundle from a zip bundle archive located at the given path.
1103+
1104+ :param archive_path: A full path to a zip archive, for example `/home/dataiku/DKU_HAIKU_STARTER_v1.zip`
1105+ """
10971106         return self.client._perform_json("POST",
10981107             "/projects/%s/bundles/imported/actions/importFromArchive" % (self.project_key),
10991108             params={"archivePath": osp.abspath(archive_path)})
11001109
11011110     def import_bundle_from_stream(self, fp):
1111+ """
1112+         Imports a bundle from a file stream.
1113+
1114+         :param fp: file-like object pointing to a zip bundle archive, for example:
1115+
1116+         ```
1117+         with open("/path/to/bundle.zip", "rb") as f: project.import_bundle_from_stream(f)
1118+         ```
1119+ """
11021120         files = {'file': fp}
11031121         return self.client._perform_empty("POST",
11041122             "/projects/%s/bundles/imported/actions/importFromStream" % (self.project_key),
@@ -1120,6 +1138,9 @@ def activate_bundle(self, bundle_id, scenarios_to_enable=None):
11201138             "/projects/%s/bundles/imported/%s/actions/activate" % (self.project_key, bundle_id), body=options)
11211139
11221140     def preload_bundle(self, bundle_id):
1141+ """
1142+         Preloads a bundle that has been imported on this project.
1143+ """
11231144         return self.client._perform_json("POST",
11241145             "/projects/%s/bundles/imported/%s/actions/preload" % (self.project_key, bundle_id))
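Taken together, the bundle methods above support a simple import-and-activate sequence; a sketch, with the archive path and bundle id as placeholders:

```
project.import_bundle_from_archive("/home/dataiku/DKU_HAIKU_STARTER_v1.zip")
print(project.list_imported_bundles())   # the new bundle should appear here
project.preload_bundle("v1")             # placeholder bundle id
project.activate_bundle("v1")
```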
11251146
@@ -1681,6 +1702,11 @@ def with_refresh_metastore(self, refresh_metastore):
16811702 def with_output (self , name , object_type = None , object_project_key = None , partition = None ):
16821703 """
16831704 Adds an item to build in this job
1705+
1706+ :param name: name of the output object
1707+         :param object_type: type of the object to build: DATASET, MANAGED_FOLDER, SAVED_MODEL or STREAMING_ENDPOINT
1708+         :param object_project_key: PROJECT_KEY for the project that contains the object to build
1709+         :param partition: identifier of the partition to build, for partitioned outputs
16841710 """
16851711         self.definition['outputs'].append({'type':object_type, 'id':name, 'projectKey':object_project_key, 'partition':partition})
16861712 return self
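Since with_output returns self, calls can be combined on the job definition builder; a hypothetical sketch, assuming the builder is obtained from the project (for instance via new_job, which is not part of this diff):

```
builder = project.new_job("RECURSIVE_BUILD")              # assumed entry point, not shown in this diff
builder.with_refresh_metastore(True)                      # method from the hunk above
builder.with_output("mydataset", object_type="DATASET")   # placeholder dataset name
```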