Edit docstrings (#672)

pollfly 2022-05-11 10:45:40 +03:00 committed by GitHub
parent 1f8726e539
commit 3474d70afb
6 changed files with 21 additions and 20 deletions

View File

@@ -904,7 +904,7 @@ class Task(IdObjectBase, AccessMixin, SetupUploadMixin):
- ``False`` - ClearML does not copy the Task's label enumeration from the input model.
:param name: Model section name to be stored on the Task (unrelated to the model object name itself)
- Default: the the model weight filename is used (excluding file extension)
+ Default: the model weight filename is used (excluding file extension)
"""
if model_id is None and not model_name:
raise ValueError('Expected one of [model_id, model_name]')
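
For reference, the docstring above documents attaching an input model to a Task by id or by name. A minimal sketch of the call, assuming this is Task.set_input_model (the project, experiment, and id values are illustrative):

    from clearml import Task

    task = Task.init(project_name="examples", task_name="attach input model")  # illustrative names
    # Attach a previously registered model by its ClearML id; if `name` is omitted,
    # the section name defaults to the weight filename without its extension.
    task.set_input_model(model_id="<model-id>", name="backbone")  # placeholder id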
@@ -1183,7 +1183,7 @@ class Task(IdObjectBase, AccessMixin, SetupUploadMixin):
def delete_parameter(self, name):
# type: (str) -> bool
"""
- Delete a parameter byt it's full name Section/name.
+ Delete a parameter by its full name Section/name.
:param name: Parameter name in full, i.e. Section/name. For example, 'Args/batch_size'
:return: True if the parameter was deleted successfully
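
Usage of the method above is a one-liner, given a `task` obtained from Task.init() or Task.current_task(); the "Args/batch_size" form comes straight from the docstring:

    # Delete a hyperparameter by its full "Section/name" path; returns True on success.
    removed = task.delete_parameter("Args/batch_size")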
@@ -1317,7 +1317,7 @@ class Task(IdObjectBase, AccessMixin, SetupUploadMixin):
def _add_artifacts(self, artifacts_list):
# type: (Sequence[tasks.Artifact]) -> Optional[List[tasks.Artifact]]
"""
- List of artifacts (tasks.Artifact) to add to the the task
+ List of artifacts (tasks.Artifact) to add to the task
If an artifact by the same name already exists it will overwrite the existing artifact.
:param list artifacts_list: list of artifacts (type tasks.Artifact)

View File

@@ -269,7 +269,7 @@ class EventTrainsWriter(object):
:param str tag:
:param int num_split_parts:
:param str split_char: a character to split the tag on
- :param str join_char: a character to join the the splits
+ :param str join_char: a character to join the splits
:param str default_title: variant to use in case no variant can be inferred automatically
:param str logdir_header: if 'series_last' then series=header: series, if 'series then series=series :header,
if 'title_last' then title=header title, if 'title' then title=title header
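
This helper is internal to the TensorBoard binding. Purely as an illustration of what split_char/join_char/num_split_parts style arguments control (not ClearML's actual implementation), a tag could be reduced to a title/series pair like this:

    tag = "train/loss/epoch"            # illustrative TensorBoard tag
    split_char, join_char, num_split_parts = "/", "/", 2
    parts = tag.split(split_char)
    # Keep the trailing parts as the series and rejoin the rest as the title,
    # falling back to a default title when nothing is left over.
    series = join_char.join(parts[-num_split_parts:])                    # "loss/epoch"
    title = join_char.join(parts[:-num_split_parts]) or "default title"  # "train"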

View File

@@ -82,7 +82,7 @@ def cli():
add.add_argument('--id', type=str, required=False,
help='Previously created dataset id. Default: previously created/accessed dataset')
add.add_argument('--dataset-folder', type=str, default=None,
- help='Dataset base folder top add the files to (default: Dataset root)')
+ help='Dataset base folder to add the files to (default: Dataset root)')
add.add_argument('--files', type=str, nargs='*',
help='Files / folders to add (support for wildcard selection). '
'Example: ~/data/*.jpg ~/data/jsons')
@@ -104,7 +104,7 @@ def cli():
sync.add_argument('--id', type=str, required=False,
help='Previously created dataset id. Default: previously created/accessed dataset')
sync.add_argument('--dataset-folder', type=str, default=None,
- help='Dataset base folder top add the files to (default: Dataset root)')
+ help='Dataset base folder to add the files to (default: Dataset root)')
sync.add_argument('--folder', type=str, required=True,
help='Local folder to sync (support for wildcard selection). '
'Example: ~/data/*.jpg')
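
Both hunks fix the same --dataset-folder help string of the clearml-data CLI. The rough SDK equivalents, assuming the Dataset class and its add_files/sync_folder methods with a dataset_path destination argument, look like this (all names and paths are illustrative):

    from clearml import Dataset

    ds = Dataset.create(dataset_project="examples", dataset_name="images")  # illustrative names
    # clearml-data add --files ~/data/*.jpg --dataset-folder raw
    ds.add_files(path="./data", wildcard="*.jpg", dataset_path="raw")
    # clearml-data sync --folder ~/data --dataset-folder raw
    ds.sync_folder(local_path="./data", dataset_path="raw")
    ds.upload()
    ds.finalize()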

View File

@@ -1133,7 +1133,7 @@ class Logger(object):
def get_default_debug_sample_history(self):
# type: () -> int
"""
- Return the the default max debug sample history when reporting media/debug samples.
+ Return the default max debug sample history when reporting media/debug samples.
If value was not set specifically, the functions returns the configuration file default value.
:return: default number of samples (files) to store on a unique set of title/series being reported
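
A usage sketch for the getter above, assuming the logger is taken from the current Task:

    logger = task.get_logger()
    # Falls back to the configuration-file default if no value was set explicitly.
    max_history = logger.get_default_debug_sample_history()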

View File

@@ -944,7 +944,7 @@ class InputModel(Model):
- Imported models (InputModel objects created using the :meth:`Logger.import_model` method).
- Models whose metadata is already in the ClearML platform, meaning the InputModel object is instantiated
- from the ``InputModel`` class specifying the the model's ClearML Id as an argument.
+ from the ``InputModel`` class specifying the model's ClearML Id as an argument.
- Models whose origin is not ClearML that are used to create an InputModel object. For example,
models created using TensorFlow models.
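
A sketch of the second bullet, instantiating an InputModel from metadata already stored in ClearML; the model id, project, and experiment names are placeholders:

    from clearml import InputModel, Task

    task = Task.init(project_name="examples", task_name="use pretrained model")  # illustrative names
    model = InputModel(model_id="<existing-model-id>")  # placeholder ClearML model id
    task.connect(model)                 # register it as the Task's input model
    weights_path = model.get_weights()  # fetch a local copy of the weights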

View File

@@ -94,7 +94,7 @@ class Task(_Task):
configuration, label enumeration, models, and other artifacts.
The term "main execution Task" refers to the Task context for current running experiment. Python experiment scripts
- can create one, and only one, main execution Task. It is a traceable, and after a script runs and ClearML stores
+ can create one, and only one, main execution Task. It is traceable, and after a script runs and ClearML stores
the Task in the **ClearML Server** (backend), it is modifiable, reproducible, executable by a worker, and you
can duplicate it for further experimentation.
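
The single main execution Task described above is created with Task.init; a minimal sketch with illustrative names:

    from clearml import Task

    # One, and only one, main execution Task per running script.
    task = Task.init(project_name="examples", task_name="my experiment")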
@@ -303,7 +303,7 @@ class Task(_Task):
The values are:
- - ``True`` - Continue the the last Task ID.
+ - ``True`` - Continue the last Task ID.
specified explicitly by reuse_last_task_id or implicitly with the same logic as reuse_last_task_id
- ``False`` - Overwrite the execution of previous Task (default).
- A string - You can also specify a Task ID (string) to be continued.
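
This hunk belongs to the continue_last_task argument of Task.init (the parameter name is implied by the surrounding docstring); a sketch of both accepted forms, with illustrative names and a placeholder id:

    from clearml import Task

    # Continue logging into the previously executed Task instead of overwriting it:
    task = Task.init(project_name="examples", task_name="my experiment", continue_last_task=True)
    # ...or continue a specific Task by passing its id as a string:
    task = Task.init(project_name="examples", task_name="my experiment", continue_last_task="<task-id>")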
@@ -393,7 +393,7 @@ class Task(_Task):
auto_connect_frameworks={'tensorboard': {'report_hparams': False}}
:param bool auto_resource_monitoring: Automatically create machine resource monitoring plots
- These plots appear in in the **ClearML Web-App (UI)**, **RESULTS** tab, **SCALARS** sub-tab,
+ These plots appear in the **ClearML Web-App (UI)**, **RESULTS** tab, **SCALARS** sub-tab,
with a title of **:resource monitor:**.
The values are:
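
Putting the two arguments from this part of the docstring together (the dictionary form is taken verbatim from the text above; project and experiment names are illustrative):

    from clearml import Task

    task = Task.init(
        project_name="examples",
        task_name="my experiment",
        # Keep TensorBoard auto-logging but skip hyperparameter reporting.
        auto_connect_frameworks={'tensorboard': {'report_hparams': False}},
        # Resource plots appear in the SCALARS sub-tab under ":resource monitor:".
        auto_resource_monitoring=True,
    )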
@@ -926,7 +926,7 @@ class Task(_Task):
If ``task_ids`` specified, then ``project_name`` and ``task_name`` are ignored.
:param str project_name: The project name of the Tasks to get. To get the experiment
in all projects, use the default value of ``None``. (Optional)
- Use a list of string for multiple optional project names.
+ Use a list of strings for multiple optional project names.
:param str task_name: The full name or partial name of the Tasks to match within the specified
``project_name`` (or all projects if ``project_name`` is ``None``).
This method supports regular expressions for name matching. (Optional)
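
A sketch of the query described above, assuming this is Task.get_tasks (project and task names are illustrative):

    from clearml import Task

    tasks = Task.get_tasks(
        project_name=["project A", "project B"],  # a list of strings, or None for all projects
        task_name="train",                        # partial name; regular expressions are supported
    )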
@@ -984,7 +984,7 @@ class Task(_Task):
:param str project_name: The project name of the Tasks to get. To get the experiment
in all projects, use the default value of ``None``. (Optional)
- Use a list of string for multiple optional project names.
+ Use a list of strings for multiple optional project names.
:param str task_name: The full name or partial name of the Tasks to match within the specified
``project_name`` (or all projects if ``project_name`` is ``None``).
This method supports regular expressions for name matching. (Optional)
@@ -1149,7 +1149,7 @@ class Task(_Task):
- If ``parent`` is not specified, then ``parent`` is set to ``source_task.parent``.
- If ``parent`` is not specified and ``source_task.parent`` is not available, then
- ``parent`` set to to ``source_task``.
+ ``parent`` set to ``source_task``.
:param str project: The Id of the project in which to create the new Task.
If ``None``, the new task inherits the original Task's project. (Optional)
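
A sketch of the cloning call this docstring belongs to, assuming the Task.clone classmethod and its source_task/name/project arguments (the task id is a placeholder):

    from clearml import Task

    source = Task.get_task(task_id="<task-id>")
    # `parent` is omitted, so it falls back to source.parent, or to the source task itself.
    cloned = Task.clone(source_task=source, name="cloned experiment", project=None)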
@@ -1691,7 +1691,7 @@ class Task(_Task):
):
# type: (bool, bool, bool, Callable[[str, str], bool]) -> bool
"""
- Delete the task as well as it's output models and artifacts.
+ Delete the task as well as its output models and artifacts.
Models and artifacts are deleted from their storage locations, each using its URI.
Note: in order to delete models and artifacts using their URI, make sure the proper storage credentials are
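
A hedged sketch of the delete call, given a `task` instance; the keyword names below are assumptions based on current ClearML releases, only the described behaviour comes from the docstring:

    # Remove the Task together with its output models and artifacts
    # (requires storage credentials that allow deleting by URI).
    ok = task.delete(delete_artifacts_and_models=True, raise_on_error=False)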
@@ -1720,7 +1720,7 @@ class Task(_Task):
def register_artifact(self, name, artifact, metadata=None, uniqueness_columns=True):
# type: (str, pandas.DataFrame, Dict, Union[bool, Sequence[str]]) -> None
"""
- Register (add) an artifact for the current Task. Registered artifacts are dynamically sychronized with the
+ Register (add) an artifact for the current Task. Registered artifacts are dynamically synchronized with the
**ClearML Server** (backend). If a registered artifact is updated, the update is stored in the
**ClearML Server** (backend). Registered artifacts are primarily used for Data Auditing.
@@ -1730,7 +1730,7 @@ class Task(_Task):
.. note::
ClearML also supports uploaded artifacts which are one-time uploads of static artifacts that are not
- dynamically sychronized with the **ClearML Server** (backend). These static artifacts include
+ dynamically synchronized with the **ClearML Server** (backend). These static artifacts include
additional object types. For more information, see :meth:`Task.upload_artifact`.
:param str name: The name of the artifact.
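
The full signature appears a few lines up (register_artifact(name, artifact, metadata=None, uniqueness_columns=True), with a pandas DataFrame as the artifact); a short sketch, given a `task` from Task.init():

    import pandas as pd

    df = pd.DataFrame({"id": [1, 2, 3], "label": ["cat", "dog", "cat"]})
    # The registered DataFrame stays synchronized with the ClearML Server as it changes.
    task.register_artifact(name="training_samples", artifact=df, uniqueness_columns=["id"])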
@@ -2037,6 +2037,7 @@ class Task(_Task):
"""
Get user properties for this task.
Returns a dictionary mapping user property name to user property details dict.
:param value_only: If True, returned user property details will be a string representing the property value.
"""
if not Session.check_min_api_version("2.9"):
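
A sketch of the call, including the value_only flag documented above, given a `task` instance:

    props = task.get_user_properties()                  # property name -> details dict
    values = task.get_user_properties(value_only=True)  # property name -> value string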
@@ -2366,11 +2367,11 @@ class Task(_Task):
:param func: A function to execute remotely as a single Task.
On the remote executed Task the entry-point and the environment are copied from this
- calling process, only this function call redirect the the execution flow to the called func,
+ calling process, only this function call redirect the execution flow to the called func,
alongside the passed arguments
:param func_name: A unique identifier of the function. Default the function name without the namespace.
For example Class.foo() becomes 'foo'
- :param task_name: The newly create Task name. Default: the calling Task name + function name
+ :param task_name: The newly created Task name. Default: the calling Task name + function name
:param kwargs: name specific arguments for the target function.
These arguments will appear under the configuration, "Function" section
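
This is the func parameter of Task.create_function_task (assuming that method name); a sketch, given a `task` from Task.init(), in which every argument value is illustrative:

    def prepare_data(dataset_id, test_fraction=0.2):
        # standalone logic that can run as its own remote Task
        return dataset_id, test_fraction

    # Creates a new Task whose execution flow is redirected into prepare_data(...).
    func_task = task.create_function_task(
        prepare_data,                    # func
        func_name="prepare_data",        # default: the bare function name
        task_name="prepare data step",   # default: calling Task name + function name
        dataset_id="<dataset-id>",       # kwargs land under the "Function" configuration section
        test_fraction=0.25,
    )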
@@ -2546,7 +2547,7 @@ class Task(_Task):
def import_offline_session(cls, session_folder_zip, previous_task_id=None, iteration_offset=0):
# type: (str, Optional[str], Optional[int]) -> (Optional[str])
"""
- Upload an off line session (execution) of a Task.
+ Upload an offline session (execution) of a Task.
Full Task execution includes repository details, installed packages, artifacts, logs, metric and debug samples.
This function may also be used to continue a previously executed task with a task executed offline.
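
A sketch of importing such an offline session, matching the signature shown above (the archive path is a placeholder):

    from clearml import Task

    new_task_id = Task.import_offline_session(
        session_folder_zip="/path/to/offline_session.zip",  # placeholder path
        previous_task_id=None,   # or an existing Task id to continue
        iteration_offset=0,
    )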