Skip to content

Commit bceb57b

Browse files
committed
style: update docstring to match mutable args changes
1 parent 6c23ac5 commit bceb57b

File tree

5 files changed

+37
-24
lines changed

5 files changed

+37
-24
lines changed

dataikuapi/dss/macro.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -37,9 +37,9 @@ def run(self, params=None, admin_params=None, wait=True):
3737
"""
3838
Run the macro from the project
3939
40-
:param params: parameters to the macro run
41-
:param admin_params: admin parameters to the macro run (if the authentication of
42-
the api client does not cover admin rights, they are ignored)
40+
:param dict params: parameters to the macro run (defaults to `{}`)
41+
:param dict admin_params: admin parameters to the macro run (if the authentication of
42+
the api client does not cover admin rights, they are ignored; defaults to empty)
4343
:param wait: if True, the call blocks until the run is finished
4444
:returns: a run identifier to use to abort or retrieve results
4545
"""

dataikuapi/dss/ml.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1259,7 +1259,7 @@ def ensemble(self, model_ids=None, method=None):
12591259
"""
12601260
Create an ensemble model of a set of models
12611261
1262-
:param list model_ids: A list of model identifiers
1262+
:param list model_ids: A list of model identifiers (defaults to `[]`)
12631263
:param str method: the ensembling method. One of: AVERAGE, PROBA_AVERAGE, MEDIAN, VOTE, LINEAR_MODEL, LOGISTIC_MODEL
12641264
12651265
This method waits for the ensemble train to complete. If you want to train asynchronously, use :meth:`start_ensembling` and :meth:`wait_train_complete`
@@ -1302,7 +1302,7 @@ def start_ensembling(self, model_ids=None, method=None):
13021302
"""
13031303
Creates asynchronously a new ensemble models of a set of models.
13041304
1305-
:param list model_ids: A list of model identifiers
1305+
:param list model_ids: A list of model identifiers (defaults to `[]`)
13061306
:param str method: the ensembling method (AVERAGE, PROBA_AVERAGE, MEDIAN, VOTE, LINEAR_MODEL, LOGISTIC_MODEL)
13071307
13081308
This returns immediately, before train is complete. To wait for train to complete, use :meth:`wait_train_complete`

dataikuapi/dss/project.py

Lines changed: 19 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@ def get_export_stream(self, options=None):
5454
Return a stream of the exported project
5555
You need to close the stream after download. Failure to do so will result in the DSSClient becoming unusable.
5656
57-
:param dict options: Dictionary of export options. The following options are available:
57+
:param dict options: Dictionary of export options (defaults to `{}`). The following options are available:
5858
5959
* exportUploads (boolean): Exports the data of Uploaded datasets - default False
6060
* exportManagedFS (boolean): Exports the data of managed Filesystem datasets - default False
@@ -80,6 +80,18 @@ def export_to_file(self, path, options=None):
8080
Export the project to a file
8181
8282
:param str path: the path of the file in which the exported project should be saved
83+
:param dict options: Dictionary of export options (defaults to `{}`). The following options are available:
84+
85+
* exportUploads (boolean): Exports the data of Uploaded datasets - default False
86+
* exportManagedFS (boolean): Exports the data of managed Filesystem datasets - default False
87+
* exportAnalysisModels (boolean): Exports the models trained in analysis - default False
88+
* exportSavedModels (boolean): Exports the models trained in saved models - default False
89+
* exportManagedFolders (boolean): Exports the data of managed folders - default False
90+
* exportAllInputDatasets (boolean): Exports the data of all input datasets - default False
91+
* exportAllDatasets (boolean): Exports the data of all datasets - default False
92+
* exportAllInputManagedFolders (boolean): Exports the data of all input managed folders - default False
93+
* exportGitRepository (boolean): Exports the Git repository history - default False
94+
* exportInsightsData (boolean): Exports the data of static insights - default False
8395
"""
8496
if options is None:
8597
options = {}
@@ -114,7 +126,7 @@ def duplicate(self, target_project_key,
114126
:param bool export_saved_models:
115127
:param bool export_git_repository:
116128
:param bool export_insights_data:
117-
:param dict remapping: dict of connections to be remapped for the new project
129+
:param dict remapping: dict of connections to be remapped for the new project (defaults to `{}`)
118130
:param target_project_folder: the project folder where to put the duplicated project
119131
:type target_project_folder: A :class:`dataikuapi.dss.projectfolder.DSSProjectFolder`
120132
:returns: A dict containing the original and duplicated project's keys
@@ -230,9 +242,9 @@ def create_dataset(self, dataset_name, type,
230242
231243
:param string dataset_name: the name for the new dataset
232244
:param string type: the type of the dataset
233-
:param dict params: the parameters for the type, as a JSON object
245+
:param dict params: the parameters for the type, as a JSON object (defaults to `{}`)
234246
:param string formatType: an optional format to create the dataset with (only for file-oriented datasets)
235-
:param string formatParams: the parameters to the format, as a JSON object (only for file-oriented datasets)
247+
:param dict formatParams: the parameters to the format, as a JSON object (only for file-oriented datasets; defaults to empty)
236248
237249
Returns:
238250
A :class:`dataikuapi.dss.dataset.DSSDataset` dataset handle
@@ -692,8 +704,8 @@ def create_scenario(self, scenario_name, type, definition=None):
692704
:param str scenario_name: The name for the new scenario. This does not need to be unique
693705
(although this is strongly recommended)
694706
:param str type: The type of the scenario. Must be one of 'step_based' or 'custom_python'
695-
:param object definition: the JSON definition of the scenario. Use ``get_definition(with_status=False)`` on an
696-
existing ``DSSScenario`` object in order to get a sample definition object
707+
:param dict definition: the JSON definition of the scenario. Use ``get_definition(with_status=False)`` on an
708+
existing ``DSSScenario`` object in order to get a sample definition object (defaults to `{}`)
697709
698710
:returns: a :class:`.scenario.DSSScenario` handle to interact with the newly-created scenario
699711
"""
@@ -804,7 +816,7 @@ def get_tags(self):
804816
def set_tags(self, tags=None):
805817
"""
806818
Set the tags of this project.
807-
@param obj: must be a modified version of the object returned by list_tags
819+
:param dict tags: must be a modified version of the object returned by list_tags (defaults to `{}`)
808820
"""
809821
if tags is None:
810822
tags = {}

dataikuapi/dss/scenario.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ def run(self, params=None):
2424
"""
2525
Requests a run of the scenario, which will start after a few seconds.
2626
27-
:params dict params: additional parameters that will be passed to the scenario through trigger params
27+
:param dict params: additional parameters that will be passed to the scenario through trigger params (defaults to `{}`)
2828
"""
2929
if params is None:
3030
params = {}
@@ -53,11 +53,9 @@ def run_and_wait(self, params=None, no_fail=False):
5353
"""
5454
Requests a run of the scenario, which will start after a few seconds. Wait the end of the run to complete.
5555
56-
Args:
57-
params: additional parameters that will be passed to the scenario through trigger params
56+
:param dict params: additional parameters that will be passed to the scenario through trigger params (defaults to `{}`)
5857
59-
Returns:
60-
A :class:`dataikuapi.dss.admin.DSSScenarioRun` run handle
58+
:return: A :class:`dataikuapi.dss.admin.DSSScenarioRun` run handle
6159
"""
6260
if params is None:
6361
params = {}

dataikuapi/dssclient.py

Lines changed: 10 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -302,7 +302,7 @@ def create_user(self, login, password, display_name='', source_type='LOCAL', gro
302302
:param str password: the password of the new user
303303
:param str display_name: the displayed name for the new user
304304
:param str source_type: the type of new user. Admissible values are 'LOCAL' or 'LDAP'
305-
:param list groups: the names of the groups the new user belongs to
305+
:param list groups: the names of the groups the new user belongs to (defaults to `[]`)
306306
:param str profile: The profile for the new user, can be one of READER, DATA_ANALYST or DATA_SCIENTIST
307307
308308
:return: A :class:`dataikuapi.dss.admin.DSSUser` user handle
@@ -398,11 +398,11 @@ def create_connection(self, name, type, params=None, usable_by='ALL', allowed_gr
398398
399399
:param name: the name of the new connection
400400
:param type: the type of the new connection
401-
:param params: the parameters of the new connection, as a JSON object
401+
:param dict params: the parameters of the new connection, as a JSON object (defaults to `{}`)
402402
:param usable_by: the type of access control for the connection. Either 'ALL' (=no access control)
403403
or 'ALLOWED' (=access restricted to users of a list of groups)
404-
:param allowed_groups: when using access control (that is, setting usable_by='ALLOWED'), the list
405-
of names of the groups whose users are allowed to use the new connection
404+
:param list allowed_groups: when using access control (that is, setting usable_by='ALLOWED'), the list
405+
of names of the groups whose users are allowed to use the new connection (defaults to `[]`)
406406
407407
:returns: A :class:`dataikuapi.dss.admin.DSSConnection` connection handle
408408
"""
@@ -671,8 +671,8 @@ def log_custom_audit(self, custom_type, custom_params=None):
671671
"""
672672
Log a custom entry to the audit trail
673673
674-
:param str custom_type value for customMsgType in audit trail item
675-
:param dict custom_params value for customMsgParams in audit trail item
674+
:param str custom_type: value for customMsgType in audit trail item
675+
:param dict custom_params: value for customMsgParams in audit trail item (defaults to `{}`)
676676
"""
677677
if custom_params is None:
678678
custom_params = {}
@@ -793,6 +793,9 @@ def get_apideployer(self):
793793
def catalog_index_connections(self, connection_names=None, all_connections=False, indexing_mode="FULL"):
794794
"""
795795
Triggers an indexing of multiple connections in the data catalog
796+
797+
:param list connection_names: list of connections to index, ignored if `all_connections=True` (defaults to `[]`)
798+
:param bool all_connections: index all connections (defaults to `False`)
796799
"""
797800
if connection_names is None:
798801
connection_names = []
@@ -955,7 +958,7 @@ def execute(self, settings=None):
955958
"""
956959
Executes the import with provided settings.
957960
958-
:param dict settings: Dict of import settings. The following settings are available:
961+
:param dict settings: Dict of import settings (defaults to `{}`). The following settings are available:
959962
960963
* targetProjectKey (string): Key to import under. Defaults to the original project key
961964
* remapping (dict): Dictionary of connection and code env remapping settings.

0 commit comments

Comments
 (0)