Project
Project Objects
class Project()
Access project-related data and manipulate the project.
project_hash
@property
def project_hash() -> str
Get the project hash (i.e. the Project ID).
title
@property
def title() -> str
Get the title of the project.
description
@property
def description() -> str
Get the description of the project.
created_at
@property
def created_at() -> datetime.datetime
Get the time the project was created.
last_edited_at
@property
def last_edited_at() -> datetime.datetime
Get the time the project was last edited.
ontology
@property
@deprecated(version="0.1.95", alternative=".ontology_structure")
def ontology() -> Dict[str, Any]
Get the ontology of the project.
DEPRECATED: Prefer using encord.Project.ontology_structure instead.
This method returns the same structure as encord.Project.ontology_structure, just in raw Python dictionary format.
ontology_hash
@property
def ontology_hash() -> str
Get the ontology hash of the project’s ontology.
ontology_structure
@property
def ontology_structure() -> OntologyStructure
Get the ontology structure of the project’s ontology.
datasets
@property
@deprecated(version="0.1.117", alternative=".list_datasets")
def datasets() -> List[Dict[str, Any]]
DEPRECATED: Prefer using the encord.project.list_datasets method to work with the data.
Get information about the datasets associated with this project.
project_type
@property
def project_type() -> ProjectType
Get the project type.
label_rows
@property
@deprecated(version="0.1.104", alternative=".list_label_rows_v2")
def label_rows() -> dict
Get the label rows.
DEPRECATED: Prefer using the list_label_rows_v2() method and the LabelRowV2 class to work with the data.
.. code::

    from encord.orm.label_row import LabelRowMetadata

    project = user_client.get_project("[project_hash]")
    label_rows = LabelRowMetadata.from_list(project.label_rows)
refetch_data
def refetch_data() -> None
The Project class will only fetch its properties once. Use this function if you suspect the state of those properties to be dirty.
refetch_ontology
def refetch_ontology() -> None
Update the ontology for the project to reflect changes on the backend.
get_project
def get_project() -> OrmProject
This function is exposed for convenience. You are encouraged to use the property accessors instead.
workflow
@property
def workflow() -> Workflow
Get the workflow of the project.
Available only for workflow projects.
list_label_rows_v2
def list_label_rows_v2(
data_hashes: Optional[Union[List[str], List[UUID]]] = None,
label_hashes: Optional[Union[List[str], List[UUID]]] = None,
edited_before: Optional[Union[str, datetime.datetime]] = None,
edited_after: Optional[Union[str, datetime.datetime]] = None,
label_statuses: Optional[List[AnnotationTaskStatus]] = None,
shadow_data_state: Optional[ShadowDataState] = None,
data_title_eq: Optional[str] = None,
data_title_like: Optional[str] = None,
workflow_graph_node_title_eq: Optional[str] = None,
workflow_graph_node_title_like: Optional[str] = None,
include_workflow_graph_node: bool = True,
include_client_metadata: bool = False,
include_images_data: bool = False,
include_all_label_branches: bool = False) -> List[LabelRowV2]
List label rows with various filtering options.
Arguments:
data_hashes
- List of data hashes to filter by.
label_hashes
- List of label hashes to filter by.
edited_before
- Optionally filter to only rows last edited before the specified time.
edited_after
- Optionally filter to only rows last edited after the specified time.
label_statuses
- Optionally filter to only those label rows that have one of the specified encord.orm.label_row.AnnotationTaskStatus values.
shadow_data_state
- Optionally filter by data type in Benchmark QA projects. See encord.orm.label_row.ShadowDataState.
data_title_eq
- Optionally filter by exact title match.
data_title_like
- Optionally filter by fuzzy title match; SQL syntax.
workflow_graph_node_title_eq
- Optionally filter by exact match with workflow node title.
workflow_graph_node_title_like
- Optionally filter by fuzzy match with workflow node title; SQL syntax.
include_workflow_graph_node
- Include workflow graph node metadata in all the results. True by default.
include_client_metadata
- Optionally include client metadata in the result of this query.
include_images_data
- Optionally include image group metadata in the result of this query.
include_all_label_branches
- Optionally include all label branches. They will be included as separate label row objects.
Returns:
A list of encord.objects.LabelRowV2 instances for all the matching label rows.
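The sketch below shows a typical call; it assumes user_client is an already-authenticated EncordUserClient, and the project hash, date, and workflow stage title are placeholders.
.. code::

    project = user_client.get_project("<project_hash>")

    # Restrict to rows edited this year in a specific workflow stage
    label_rows = project.list_label_rows_v2(
        edited_after="2024-01-01",
        workflow_graph_node_title_eq="Annotate",
    )
    for label_row in label_rows:
        label_row.initialise_labels()  # download the label content before reading or editing it
        print(label_row.data_title)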
add_users
def add_users(user_emails: List[str],
user_role: ProjectUserRole) -> List[ProjectUser]
Add users to the project.
Arguments:
user_emails
- List of user emails to be added.
user_role
- The user role to assign to all users.
Returns:
List[ProjectUser]
- A list of ProjectUser objects representing the added users.
Raises:
AuthorisationError
- If the project API key is invalid.
ResourceNotFoundError
- If no project exists by the specified project EntityId.
UnknownError
- If an error occurs while adding the users to the project.
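For example (a minimal sketch; the email addresses are placeholders, and the import path shown is how ProjectUserRole is commonly exposed by the SDK):
.. code::

    from encord.utilities.project_user import ProjectUserRole

    added_users = project.add_users(
        ["annotator@example.com", "reviewer@example.com"],
        ProjectUserRole.ANNOTATOR,
    )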
list_groups
def list_groups() -> Iterable[ProjectGroup]
List all groups that have access to a particular project.
Returns:
Iterable[ProjectGroup]
- An iterable of ProjectGroup objects.
add_group
def add_group(group_hash: Union[List[UUID], UUID], user_role: ProjectUserRole)
Add a group to the project.
Arguments:
group_hash
- List of group hashes or a single group hash to be added.
user_role
- User role that the group will be given.
Returns:
None
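A minimal sketch; the group hash below is a placeholder UUID:
.. code::

    from uuid import UUID

    from encord.utilities.project_user import ProjectUserRole

    project.add_group(
        UUID("00000000-0000-0000-0000-000000000000"),
        ProjectUserRole.ANNOTATOR,
    )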
remove_group
def remove_group(group_hash: Union[List[UUID], UUID])
Remove a group from the project.
Arguments:
group_hash
- List of group hashes or a single group hash to be removed.
Returns:
None
copy_project
def copy_project(copy_datasets: Union[bool, CopyDatasetOptions] = False,
copy_collaborators=False,
copy_models=False,
*,
copy_labels: Optional[CopyLabelsOptions] = None,
new_title: Optional[str] = None,
new_description: Optional[str] = None) -> str
Copy the current project into a new one with copied contents including settings, datasets, and users. Labels and models are optional.
Arguments:
copy_datasets
- If True, the datasets of the existing project are copied over, and new tasks are created from those datasets.
copy_collaborators
- If True, all users of the existing project are copied over with their current roles. If label and/or annotator reviewer mapping is set, this will also be copied over.
copy_models
- If True, all models with their training information will be copied into the new project.
copy_labels
- Options for copying labels, defined in CopyLabelsOptions.
new_title
- When provided, will be used as the title for the new project.
new_description
- When provided, will be used as the description for the new project.
Returns:
str
- The EntityId of the newly created project.
Raises:
AuthorisationError
- If the project API key is invalid.
ResourceNotFoundError
- If no project exists by the specified project EntityId.
UnknownError
- If an error occurs while copying the project.
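A sketch of a simple copy; the new title is illustrative, and labels and models are left out:
.. code::

    new_project_hash = project.copy_project(
        copy_datasets=True,
        copy_collaborators=True,
        new_title="My project (copy)",
    )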
submit_label_row_for_review
def submit_label_row_for_review(uid: str)
Submit a label row for review.
Note: This method is not supported for workflow-based projects. See the workflows documentation.
Arguments:
uid
- A label_hash (uid) string.
Returns:
bool
- True if the submission was successful, False otherwise.
Raises:
AuthenticationError
- If the project API key is invalid.
AuthorisationError
- If access to the specified resource is restricted.
UnknownError
- If an error occurs while submitting for review.
OperationNotAllowed
- If the write operation is not allowed by the API key.
add_datasets
def add_datasets(dataset_hashes: List[str]) -> bool
Add datasets to the project.
Arguments:
dataset_hashes
- List of dataset hashes of the datasets to be added.
Returns:
bool
- True if the datasets were successfully added, False otherwise.
Raises:
AuthenticationError
- If the project API key is invalid.
AuthorisationError
- If access to the specified resource is restricted.
ResourceNotFoundError
- If one or more datasets don't exist by the specified dataset_hashes.
UnknownError
- If an error occurs while adding the datasets to the project.
OperationNotAllowed
- If the write operation is not allowed by the API key.
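For example (the dataset hashes are placeholders):
.. code::

    success = project.add_datasets(["<dataset_hash_1>", "<dataset_hash_2>"])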
remove_datasets
def remove_datasets(dataset_hashes: List[str]) -> bool
Remove datasets from the project.
Arguments:
dataset_hashes
- List of dataset hashes of the datasets to be removed.
Returns:
bool
- True if the datasets were successfully removed, False otherwise.
Raises:
AuthenticationError
- If the project API key is invalid.
AuthorisationError
- If access to the specified resource is restricted.
ResourceNotFoundError
- If no dataset exists by the specified dataset_hash (uid).
UnknownError
- If an error occurs while removing the datasets from the project.
OperationNotAllowed
- If the operation is not allowed by the API key.
get_project_ontology
@deprecated(version="0.1.95", alternative=".ontology_structure")
def get_project_ontology() -> LegacyOntology
DEPRECATED: Prefer using the ontology_structure
property accessor instead.
Returns:
LegacyOntology
- The project’s ontology.
add_object
@deprecated("0.1.102", alternative="encord.ontology.Ontology class")
def add_object(name: str, shape: ObjectShape) -> bool
DEPRECATED: Prefer using the encord.ontology.Ontology class to manipulate the ontology.
Add an object to an ontology.
ATTENTION: This legacy method will affect all the projects sharing the same ontology.
Arguments:
name
- The name of the object.
shape
- The shape of the object (BOUNDING_BOX, POLYGON, POLYLINE, or KEY_POINT).
Returns:
bool
- True if the object was added successfully, False otherwise.
Raises:
AuthenticationError
- If the project API key is invalid.
AuthorisationError
- If access to the specified resource is restricted.
UnknownError
- If an error occurs while adding the object to the project ontology.
OperationNotAllowed
- If the operation is not allowed by the API key.
ValueError
- If invalid arguments are supplied in the function call.
add_classification
@deprecated("0.1.102", alternative="encord.ontology.Ontology class")
def add_classification(name: str,
classification_type: ClassificationType,
required: bool,
options: Optional[Iterable[str]] = None)
DEPRECATED: Prefer using the encord.ontology.Ontology class to manipulate the ontology.
Add a classification to an ontology.
ATTENTION: This legacy method will affect all the projects sharing the same ontology.
Arguments:
name
- The name of the classification.
classification_type
- The classification type (RADIO, TEXT, or CHECKLIST).
required
- Whether this classification is required by the annotator.
options
- The list of options for the classification (to be set to None for texts).
Raises:
AuthenticationError
- If the project API key is invalid.
AuthorisationError
- If access to the specified resource is restricted.
UnknownError
- If an error occurs while adding the classification to the project ontology.
OperationNotAllowed
- If the operation is not allowed by the API key.
ValueError
- If invalid arguments are supplied in the function call.
list_models
def list_models() -> List[ModelConfiguration]
List all models that are associated with the project. Use encord.project.Project.get_training_metadata to get more metadata about each training instance.
Example:
.. code::

    from encord.utilities.project_utilities import get_all_model_iteration_uids

    project = client_instance.get_project("[project_hash]")

    model_configurations = project.list_models()
    all_model_iteration_uids = get_all_model_iteration_uids(model_configurations)
    training_metadata = project.get_training_metadata(
        all_model_iteration_uids,
        get_model_training_labels=True,
    )
Returns:
List[ModelConfiguration]
- A list of ModelConfiguration objects representing the models associated with the project.
get_training_metadata
def get_training_metadata(
model_iteration_uids: Iterable[str],
get_created_at: bool = False,
get_training_final_loss: bool = False,
get_model_training_labels: bool = False) -> List[TrainingMetadata]
Given a list of model_iteration_uids, get metadata around each model_iteration.
Arguments:
model_iteration_uids
- The model iteration uids.
get_created_at
- Whether the created_at field should be retrieved.
get_training_final_loss
- Whether the training_final_loss field should be retrieved.
get_model_training_labels
- Whether the model_training_labels field should be retrieved.
Returns:
List[TrainingMetadata]
- A list of TrainingMetadata objects containing the requested metadata.
create_model_row
def create_model_row(title: str, description: str, features: List[str],
model: Union[AutomationModels, str]) -> str
Create a model row.
Arguments:
title
- Model title.
description
- Model description.
features
- List of feature_node_hashes which are IDs of ontology objects or classifications to be included in the model.
model
- The model type to be used. For backwards compatibility purposes, strings corresponding to the values of the AutomationModels enum are also allowed.
Returns:
str
- The uid of the added model row.
Raises:
AuthenticationError
- If the project API key is invalid.
AuthorisationError
- If access to the specified resource is restricted.
ModelFeaturesInconsistentError
- If a feature type is different from what is supported by the model (e.g. if creating a classification model using a bounding box).
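A minimal sketch; the feature hash is a placeholder, and the import path and model value are assumptions based on the SDK's automation-model constants:
.. code::

    from encord.constants.model import AutomationModels  # assumed import path

    model_hash = project.create_model_row(
        title="Car detector",
        description="Bounding box model for cars",
        features=["<feature_node_hash>"],
        model=AutomationModels.FASTER_RCNN,  # assumed enum member; pick the model type you need
    )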
model_delete
def model_delete(uid: str) -> bool
Delete a model created on the platform.
Arguments:
uid
- A model_hash (uid) string.
Returns:
bool
- True if the model was successfully deleted, False otherwise.
Raises:
AuthenticationError
- If the project API key is invalid.
AuthorisationError
- If access to the specified resource is restricted.
ResourceNotFoundError
- If no model exists by the specified model_hash (uid).
UnknownError
- If an error occurs during deletion.
model_inference
def model_inference(uid: str,
file_paths: Optional[List[str]] = None,
base64_strings: Optional[List[bytes]] = None,
conf_thresh: float = 0.6,
iou_thresh: float = 0.3,
device: Device = Device.CUDA,
detection_frame_range: Optional[List[int]] = None,
allocation_enabled: bool = False,
data_hashes: Optional[List[str]] = None,
rdp_thresh: float = 0.005)
Run inference with a model trained on the platform.
The image(s)/video(s) can be provided either as local file paths, base64 strings, or as data hashes if the data is already uploaded on the Encord platform.
Arguments:
uid
- A model_iteration_hash (uid) string.
file_paths
- List of local file paths to image(s) or video(s) - if running inference on files.
base64_strings
- List of base64 strings of image(s) or video(s) - if running inference on base64 strings.
conf_thresh
- Confidence threshold (default 0.6).
iou_thresh
- Intersection over union threshold (default 0.3).
device
- Device (CPU or CUDA, default is CUDA).
detection_frame_range
- Detection frame range (for videos).
allocation_enabled
- Object UID allocation (tracking) enabled (disabled by default).
data_hashes
- List of hashes of the videos/image_groups you'd like to run inference on.
rdp_thresh
- Parameter specifying the polygon coarseness to be used while running inference. The higher the value, the fewer points in the segmented image.
Returns:
dict
- A dictionary of inference results.
Raises:
AuthenticationError
- If the project API key is invalid.
AuthorisationError
- If access to the specified resource is restricted.
ResourceNotFoundError
- If no model exists by the specified model_iteration_hash (uid).
UnknownError
- If an error occurs while running inference.
FileTypeNotSupportedError
- If the file type is not supported for inference (has to be an image or video).
FileSizeNotSupportedError
- If the file size is too big to be supported.
DetectionRangeInvalidError
- If a detection range is invalid for video inference.
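A sketch of running inference on local image files; the model iteration hash and file paths are placeholders:
.. code::

    inference_results = project.model_inference(
        "<model_iteration_hash>",
        file_paths=["./images/frame_001.jpg", "./images/frame_002.jpg"],
        conf_thresh=0.5,
    )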
model_train_start
def model_train_start(model_hash: Union[str, UUID],
label_rows: List[Union[str, UUID]],
epochs: int,
weights: ModelTrainingWeights,
batch_size: int = 24,
device: Device = Device.CUDA) -> UUID
This method initializes model training in Encord’s backend. Once the training_hash (UUID) is returned, you can exit the terminal while the job continues uninterrupted.
You can check the job status at any point using the model_train_get_result method.
This can be done in a separate Python session from the one where the job was initialized.
Arguments:
model_hash
- A unique identifier (UUID) for the model. The format is a string.
label_rows
- List of label row uids (hashes) for training.
epochs
- Number of passes through the training dataset.
weights
- Model weights.
batch_size
- Number of training examples utilized in one iteration.
device
- Device (CPU or CUDA, default is CUDA).
Returns:
UUID
- A model iteration training_hash.
Raises:
AuthorisationError
- If access to the specified resource is restricted.
ModelWeightsInconsistentError
- If the passed model weights are incompatible with the selected model.
ResourceNotFoundError
- If no model exists by the specified model_hash (uid).
model_train_get_result
def model_train_get_result(model_hash: Union[str, UUID],
training_hash: Union[str, UUID],
timeout_seconds: int = 7 * 24 * 60 * 60) -> dict
Fetch the model training status, performing a long-polling process for timeout_seconds.
Arguments:
model_hash:
A unique identifier (UUID) for the model.
training_hash:
A unique identifier (UUID) for the model iteration. This ID enables the user to track the job progress using the SDK or the web app.
timeout_seconds:
Number of seconds the method waits for a response.
If timeout_seconds == 0, only a single status-checking request is performed and the response is returned immediately.
Returns:
Response containing details about job status, errors, and progress.
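A sketch tying model_train_start and model_train_get_result together; the hashes, label rows, and training parameters are placeholders, and selected_weights is assumed to be a ModelTrainingWeights value chosen beforehand (presets are typically exposed via encord.constants.model_weights):
.. code::

    training_hash = project.model_train_start(
        model_hash="<model_hash>",
        label_rows=["<label_hash_1>", "<label_hash_2>"],
        epochs=500,
        weights=selected_weights,  # assumed: a ModelTrainingWeights value selected earlier
        batch_size=24,
    )

    # Later, possibly in a separate Python session:
    result = project.model_train_get_result("<model_hash>", training_hash)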
object_interpolation
def object_interpolation(key_frames, objects_to_interpolate)
Run object interpolation algorithm on project labels (requires an editor ontology and feature uids).
Interpolation is supported for bounding box, polygon, and keypoint.
Arguments:
key_frames
- Labels for frames to be interpolated. Key frames are consumed in the form::

    {
        "[frame_number]": {
            "objects": [
                {
                    "objectHash": "[object_hash]",
                    "featureHash": "[feature_hash]",
                    "polygon": {
                        "0": {"x": x1, "y": y1},
                        "1": {"x": x2, "y": y2},
                        # ...,
                    },
                },
                # ...
            ]
        },
        # ...,
    }

objects_to_interpolate
- List of object uids (hashes) of objects to interpolate.
Returns:
dict
- Full set of filled frames including interpolated objects.
Raises:
AuthenticationError
- If the project API key is invalid.
AuthorisationError
- If access to the specified resource is restricted.
UnknownError
- If an error occurs while running interpolation.
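A sketch of interpolating a single object between two key frames; the hashes and coordinates are placeholders:
.. code::

    key_frames = {
        "0": {
            "objects": [
                {
                    "objectHash": "<object_hash>",
                    "featureHash": "<feature_hash>",
                    "polygon": {
                        "0": {"x": 0.10, "y": 0.10},
                        "1": {"x": 0.20, "y": 0.10},
                        "2": {"x": 0.20, "y": 0.20},
                    },
                }
            ]
        },
        "10": {
            # Second key frame for the same object, in the same shape
            "objects": [
                {
                    "objectHash": "<object_hash>",
                    "featureHash": "<feature_hash>",
                    "polygon": {
                        "0": {"x": 0.30, "y": 0.30},
                        "1": {"x": 0.40, "y": 0.30},
                        "2": {"x": 0.40, "y": 0.40},
                    },
                }
            ]
        },
    }

    interpolated_frames = project.object_interpolation(key_frames, ["<object_hash>"])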
fitted_bounding_boxes
def fitted_bounding_boxes(frames: dict, video: dict)
Fit bounding boxes to the given frames of a video.
Arguments:
frames
- Labels for frames to be fitted. Frames are consumed in the form::

    {
        "[frame_number]": {
            "objects": [
                {
                    "objectHash": "[object_hash]",
                    "featureHash": "[feature_hash]",
                    "polygon": {
                        "0": {"x": x1, "y": y1},
                        "1": {"x": x2, "y": y2},
                        # ...,
                    },
                },
                # ...
            ]
        },
        # ...,
    }

video
- Metadata of the video for which bounding box fitting needs to be run::

    {
        "width": w,
        "height": h,
    }
Returns:
dict
- Full set of filled frames including fitted objects.
Raises:
AuthenticationError
- If the project API key is invalid.
AuthorisationError
- If access to the specified resource is restricted.
UnknownError
- If an error occurs while running interpolation.
get_data
def get_data(
data_hash: str,
get_signed_url: bool = False
) -> Tuple[Optional[Video], Optional[List[Image]]]
Retrieve information about a video or image group.
Arguments:
data_hash
- The uid of the data object.
get_signed_url
- Optionally return signed URLs for timed public access to that resource (default False).
Returns:
A tuple consisting of the video (if it exists) and a list of individual images (if they exist).
Raises:
AuthenticationError
- If the project API key is invalid.
AuthorisationError
- If access to the specified resource is restricted.
UnknownError
- If an error occurs while retrieving the object.
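For example (a minimal sketch; the data hash is a placeholder):
.. code::

    video, images = project.get_data("<data_hash>", get_signed_url=True)
    if video is not None:
        print(video)        # video metadata, including a signed URL when requested
    if images is not None:
        print(len(images))  # number of images in the image group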
get_label_logs
def get_label_logs(user_hash: Optional[str] = None,
data_hash: Optional[str] = None,
from_unix_seconds: Optional[int] = None,
to_unix_seconds: Optional[int] = None,
after: Optional[datetime.datetime] = None,
before: Optional[datetime.datetime] = None,
user_email: Optional[str] = None) -> List[LabelLog]
Get label logs, which represent the actions taken in the UI to create labels.
All arguments can be left as None if no filtering should be applied.
Arguments:
user_hash
- Filter the label logs by the user.
data_hash
- Filter the label logs by the data_hash.
from_unix_seconds
- Filter the label logs to only include labels after this timestamp. Deprecated: use the parameter after instead.
to_unix_seconds
- Filter the label logs to only include labels before this timestamp. Deprecated: use the parameter before instead.
after
- Filter the label logs to only include labels after the specified time.
before
- Filter the label logs to only include labels before the specified time.
user_email
- Filter by the annotator email.
Returns:
List of label logs.
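A sketch of pulling recent logs for one data unit; the data hash is a placeholder and the printed attribute names are assumptions about the LabelLog fields:
.. code::

    import datetime

    logs = project.get_label_logs(
        data_hash="<data_hash>",
        after=datetime.datetime(2024, 1, 1),
    )
    for log in logs:
        print(log.user_hash, log.created_at)  # assumed LabelLog attributes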
get_cloud_integrations
def get_cloud_integrations() -> List[CloudIntegration]
Get the list of cloud integrations.
Returns:
List of CloudIntegration objects.
list_label_rows
@deprecated(version="0.1.104", alternative=".list_label_rows_v2")
def list_label_rows(
edited_before: Optional[Union[str, datetime.datetime]] = None,
edited_after: Optional[Union[str, datetime.datetime]] = None,
label_statuses: Optional[List[AnnotationTaskStatus]] = None,
shadow_data_state: Optional[ShadowDataState] = None,
*,
include_uninitialised_labels=False,
label_hashes: Optional[List[str]] = None,
data_hashes: Optional[List[str]] = None) -> List[LabelRowMetadata]
DEPRECATED - use list_label_rows_v2 to manage label rows instead.
Arguments:
edited_before
- Optionally filter to only rows last edited before the specified time.
edited_after
- Optionally filter to only rows last edited after the specified time.
label_statuses
- Optionally filter to only those label rows that have one of the specified AnnotationTaskStatus values.
shadow_data_state
- Optionally filter by data type in Benchmark QA projects. See ShadowDataState.
include_uninitialised_labels
- Whether to return only label rows that are "created" and have a label_hash (default). If set to True, this will return all label rows, including those that do not have a label_hash.
data_hashes
- List of data hashes to filter by.
label_hashes
- List of label hashes to filter by.
Returns:
A list of LabelRowMetadata instances for all the matching label rows.
Raises:
UnknownError
- If an error occurs while retrieving the data.
set_label_status
def set_label_status(label_hash: str, label_status: LabelStatus) -> bool
Set the label status for a label row to a desired value.
Arguments:
label_hash
- Unique identifier of the label row whose status is to be updated.
label_status
- The new status that needs to be set.
Returns:
True if the label status was successfully updated, False otherwise.
Raises:
AuthorisationError
- If the label_hash provided is invalid or not a member of the project.
UnknownError
- If an error occurs while updating the status.
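For example (a minimal sketch; the label hash is a placeholder, and the import path and enum value are assumptions about where LabelStatus lives in the SDK):
.. code::

    from encord.orm.label_row import LabelStatus  # assumed import path

    updated = project.set_label_status("<label_hash>", LabelStatus.LABEL_IN_PROGRESS)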
get_label_row
@deprecated(version="0.1.123", alternative=".list_label_rows_v2")
def get_label_row(uid: str,
get_signed_url: bool = True,
*,
include_object_feature_hashes: Optional[Set[str]] = None,
include_classification_feature_hashes: Optional[
Set[str]] = None,
include_reviews: bool = False) -> LabelRow
DEPRECATED: Prefer using the list_label_rows_v2 function to interact with label rows.
Retrieve label row. If you need to retrieve multiple label rows, prefer using get_label_rows instead.
Arguments:
uid
- A label_hash (uid) string.
get_signed_url
- Whether to generate signed urls to the data asset. Generating these should be disabled if the signed urls are not used, to speed up the request.
include_object_feature_hashes
- If None, all the objects will be included. Otherwise, only object labels whose feature_hash has been added will be included.
include_classification_feature_hashes
- If None, all the classifications will be included. Otherwise, only classification labels whose feature_hash has been added will be included.
include_reviews
- Whether to request read-only information about the reviews of the label row.
Returns:
LabelRow
- A label row instance.
Raises:
AuthenticationError
- If the project API key is invalid.
AuthorisationError
- If access to the specified resource is restricted.
ResourceNotFoundError
- If no label exists by the specified label_hash (uid).
UnknownError
- If an error occurs while retrieving the label.
OperationNotAllowed
- If the read operation is not allowed by the API key.
get_label_rows
@deprecated(version="0.1.123", alternative=".list_label_rows_v2")
def get_label_rows(uids: List[str],
get_signed_url: bool = True,
*,
include_object_feature_hashes: Optional[Set[str]] = None,
include_classification_feature_hashes: Optional[
Set[str]] = None,
include_reviews: bool = False) -> List[LabelRow]
DEPRECATED: Prefer using the list_label_rows_v2 function to interact with label rows.
Retrieve a list of label rows. Duplicates will be dropped. The result will come back in a random order.
Arguments:
uids
- A list of label_hash (uid) strings.
get_signed_url
- Whether to generate signed urls to the data asset. Generating these should be disabled if the signed urls are not used, to speed up the request.
include_object_feature_hashes
- If None, all the objects will be included. Otherwise, only object labels whose feature_hash has been added will be included.
include_classification_feature_hashes
- If None, all the classifications will be included. Otherwise, only classification labels whose feature_hash has been added will be included.
include_reviews
- Whether to request read-only information about the reviews of the label row.
Returns:
List of LabelRow instances.
Raises:
MultiLabelLimitError
- If too many labels were requested. Check the error's maximum_labels_allowed field to read the most up to date error limit.
AuthenticationError
- If the project API key is invalid.
AuthorisationError
- If access to the specified resource is restricted.
ResourceNotFoundError
- If no label exists by the specified label_hash (uid).
UnknownError
- If an error occurs while retrieving the label.
OperationNotAllowed
- If the read operation is not allowed by the API key.
save_label_row
@deprecated(version="0.1.123", alternative=".list_label_rows_v2")
def save_label_row(uid, label, validate_before_saving: bool = False)
DEPRECATED: Prefer using the list_label_rows_v2 function to interact with label rows.
Save an existing label row.
If you have a series of frame labels and have not updated answer dictionaries, call the construct_answer_dictionaries utility function to do so prior to saving labels.
Arguments:
uid
- A label_hash (uid) string.
label
- A label row instance.
validate_before_saving
- Enable stricter server-side integrity checks. Boolean, False by default.
Returns:
bool
- True if the label row is successfully saved, False otherwise.
Raises:
AuthenticationError
- If the project API key is invalid.
AuthorisationError
- If access to the specified resource is restricted.
ResourceNotFoundError
- If no label exists by the specified label_hash (uid).
UnknownError
- If an error occurs while saving the label.
OperationNotAllowed
- If the write operation is not allowed by the API key.
AnswerDictionaryError
- If an object or classification instance is missing in answer dictionaries.
CorruptedLabelError
- If a blurb is corrupted (e.g., if the frame labels have more frames than the video).
create_label_row
@deprecated(version="0.1.123", alternative=".list_label_rows_v2")
def create_label_row(uid: str)
DEPRECATED: Prefer using the list_label_rows_v2 function to interact with label rows.
Create a label row (for data in a project not previously labeled).
Arguments:
uid
- The data_hash (uid) of the data unit being labeled. Available in client.get_project().get('label_rows') where label_status is NOT_LABELLED.
Returns:
LabelRow
- A label row instance.
Raises:
AuthenticationError
- If the project API key is invalid.
AuthorisationError
- If access to the specified resource is restricted.
UnknownError
- If an error occurs while saving the label.
OperationNotAllowed
- If the write operation is not allowed by the API key.
AnswerDictionaryError
- If an object or classification instance is missing in answer dictionaries.
CorruptedLabelError
- If a blurb is corrupted (e.g., if the frame labels have more frames than the video).
ResourceExistsError
- If a label row already exists for this project data. Avoids overriding existing work.
create_bundle
def create_bundle(bundle_size: Optional[int] = None) -> Bundle
Initializes a bundle to reduce the number of network calls performed by the Encord SDK.
See the encord.http.bundle.Bundle documentation for more details.
Arguments:
bundle_size
- Maximum number of items bundled. If more actions are provided to the bundle, they will be automatically split into separate API calls.
Returns:
Bundle
- An instance of the Bundle class.
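A sketch of batching label-row operations through a bundle; it assumes the label rows come from list_label_rows_v2:
.. code::

    label_rows = project.list_label_rows_v2()

    with project.create_bundle() as bundle:
        for label_row in label_rows:
            # Calls passed a bundle are queued and sent in batched requests
            label_row.initialise_labels(bundle=bundle)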
list_collaborator_timers
def list_collaborator_timers(
after: datetime.datetime,
before: Optional[datetime.datetime] = None,
group_by_data_unit: bool = True) -> Iterable[CollaboratorTimer]
Provides information about time spent by each collaborator who has worked on the project within a specified range of dates.
Arguments:
after
- The beginning of the period of interest.
before
- The end of the period of interest.
group_by_data_unit
- If True, time spent by a collaborator for each data unit is provided separately. If False, all time spent in the scope of the project is aggregated together.
Yields:
CollaboratorTimer
- Information about the time spent by each collaborator.
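A sketch of aggregating time per collaborator over the last week; the attribute names on the yielded objects are assumptions about the CollaboratorTimer fields:
.. code::

    import datetime

    week_ago = datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta(days=7)
    for timer in project.list_collaborator_timers(after=week_ago, group_by_data_unit=False):
        print(timer.user_email, timer.time_seconds)  # assumed CollaboratorTimer attributes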
list_datasets
def list_datasets() -> Iterable[ProjectDataset]
List all datasets associated with the project.
Returns:
Iterable[ProjectDataset]
- An iterable of ProjectDataset instances.
import_coco_labels
def import_coco_labels(
        labels_dict: Dict[str, Any],
        category_id_to_feature_hash: Dict[CategoryID, str],
        image_id_to_frame_index: Dict[ImageID, FrameIndex]) -> None
Import labels in COCO format into your Encord project.
Arguments:
labels_dict
Dict[str, Any] - Raw label dictionary conforming to the Encord format.
category_id_to_feature_hash
Dict[CategoryID, str] - Dictionary mapping the category_id as used in the COCO data to the feature hash for the corresponding element in this Ontology.
image_id_to_frame_index
Dict[ImageID, FrameIndex] - Dictionary mapping int to FrameIndex(data_hash, frame_offset), which is used to identify the corresponding frame in the Encord setting.
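A sketch of wiring the arguments together; the file name, hashes, and IDs are placeholders, and the FrameIndex import path is an assumption (the exact module may differ):
.. code::

    import json

    from encord.utilities.coco.datastructure import FrameIndex  # assumed import path

    with open("coco_annotations.json") as f:
        labels_dict = json.load(f)

    project.import_coco_labels(
        labels_dict,
        category_id_to_feature_hash={1: "<feature_hash>"},
        image_id_to_frame_index={42: FrameIndex("<data_hash>", 0)},
    )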