Source code for supervisely.api.image_api

# coding: utf-8
"""Download/upload images from/to Supervisely."""

# docs
from __future__ import annotations

import io
import json
import re
import urllib.parse
from time import sleep
from typing import (
    Any,
    Callable,
    Dict,
    Generator,
    Iterator,
    List,
    NamedTuple,
    Optional,
    Tuple,
    Union,
)

import numpy as np
from requests_toolbelt import MultipartDecoder, MultipartEncoder
from tqdm import tqdm

from supervisely._utils import (
    abs_url,
    batched,
    compress_image_url,
    generate_free_name,
    get_bytes_hash,
    is_development,
)
from supervisely.annotation.annotation import Annotation
from supervisely.annotation.tag import Tag
from supervisely.annotation.tag_meta import TagMeta, TagValueType
from supervisely.api.entity_annotation.figure_api import FigureApi
from supervisely.api.entity_annotation.tag_api import TagApi
from supervisely.api.module_api import (
    ApiField,
    RemoveableBulkModuleApi,
    _get_single_item,
)
from supervisely.imaging import image as sly_image
from supervisely.io.fs import (
    ensure_base_path,
    get_file_ext,
    get_file_hash,
    get_file_name,
    get_file_name_with_ext,
    list_files,
    list_files_recursively,
)
from supervisely.project.project_type import (
    _MULTISPECTRAL_TAG_NAME,
    _MULTIVIEW_TAG_NAME,
)
from supervisely.sly_logger import logger


[docs]class ImageInfo(NamedTuple): """ Object with image parameters from Supervisely. :Example: .. code-block:: python ImageInfo( id=770915, name='IMG_3861.jpeg', link=None, hash='ZdpMD+ZMJx0R8BgsCzJcqM7qP4M8f1AEtoYc87xZmyQ=', mime='image/jpeg', ext='jpeg', size=148388, width=1067, height=800, labels_count=4, dataset_id=2532, created_at='2021-03-02T10:04:33.973Z', updated_at='2021-03-02T10:04:33.973Z', meta={}, path_original='/h5un6l2bnaz1vj8a9qgms4-public/images/original/7/h/Vo/...jpg', full_storage_url='http://app.supervise.ly/h5un6l2bnaz1vj8a9qgms4-public/images/original/7/h/Vo/...jpg'), tags=[] ) """ #: :class:`int`: Image ID in Supervisely. id: int #: :class:`str`: Image filename. name: str #: :class:`str`: Use link as ID for images that are expected to be stored at remote server. #: e.g. "http://your-server/image1.jpg". link: str #: :class:`str`: Image hash obtained by base64(sha256(file_content)). #: Use hash for files that are expected to be stored at Supervisely or your deployed agent. hash: str #: :class:`str`: Image MIME type. mime: str #: :class:`str`: Image file extension. ext: str #: :class:`int`: Image size (in bytes). size: int #: :class:`int`: Image width. width: int #: :class:`int`: Image height. height: int #: :class:`int`: Number of :class:`Labels<supervisely.annotation.label.Label>` in the Image. labels_count: int #: :class:`int`: :class:`Dataset<supervisely.project.project.Dataset>` ID in Supervisely. dataset_id: int #: :class:`str`: Image creation time. e.g. "2019-02-22T14:59:53.381Z". created_at: str #: :class:`str`: Time of last image update. e.g. "2019-02-22T14:59:53.381Z". updated_at: str #: :class:`dict`: Custom additional image info. meta: dict #: :class:`str`: Relative storage URL to image. e.g. #: "/h5un6l2bnaz1vj8a9qgms4-public/images/original/7/h/Vo/...jpg". path_original: str #: :class:`str`: Full storage URL to image. e.g. #: "http://app.supervise.ly/h5un6l2bnaz1vj8a9qgms4-public/images/original/7/h/Vo/...jpg". full_storage_url: str #: :class:`str`: Image :class:`Tags<supervisely.annotation.tag.Tag>` list. #: e.g. "[{'entityId': 2836466, 'tagId': 345022, 'id': 2224609, 'labelerLogin': 'admin', #: 'createdAt': '2021-03-05T14:15:39.923Z', 'updatedAt': '2021-03-05T14:15:39.923Z'}, {...}]". tags: List[Dict] @property def preview_url(self): """ Get Image preview URL. :return: Image preview URL. :rtype: :class:`str` """ res = self.full_storage_url if is_development(): res = abs_url(res) res = compress_image_url(url=res) return res
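The ``preview_url`` property above resolves a compressed preview link suitable for UI widgets. A minimal reading sketch (the image id below is a placeholder and is assumed to exist in your workspace):

.. code-block:: python

    import supervisely as sly

    api = sly.Api.from_env()
    info = api.image.get_info_by_id(770918)  # placeholder image id
    print(info.name, info.width, info.height)
    # Compressed preview URL; resolved to an absolute URL in development mode.
    print(info.preview_url)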
[docs]class ImageApi(RemoveableBulkModuleApi): """ API for working with :class:`Image<supervisely.imaging.image>`. :class:`ImageApi<ImageApi>` object is immutable. :param api: API connection to the server :type api: Api :Usage example: .. code-block:: python import os from dotenv import load_dotenv import supervisely as sly # Load secrets and create API object from .env file (recommended) # Learn more here: https://developer.supervisely.com/getting-started/basics-of-authentication if sly.is_development(): load_dotenv(os.path.expanduser("~/supervisely.env")) api = sly.Api.from_env() # Pass values into the API constructor (optional, not recommended) # api = sly.Api(server_address="https://app.supervise.ly", token="4r47N...xaTatb") image_info = api.image.get_info_by_id(image_id) # api usage example """ def __init__(self, api): super().__init__(api) self.figure = FigureApi(api) # @TODO: rename to object like in labeling UI self.tag = TagApi(api)
[docs] @staticmethod def info_sequence(): """ Get list of all :class:`ImageInfo<ImageInfo>` field names. :return: List of :class:`ImageInfo<ImageInfo>` field names. :rtype: :class:`List[str]` """ return [ ApiField.ID, ApiField.NAME, ApiField.LINK, ApiField.HASH, ApiField.MIME, ApiField.EXT, ApiField.SIZE, ApiField.WIDTH, ApiField.HEIGHT, ApiField.LABELS_COUNT, ApiField.DATASET_ID, ApiField.CREATED_AT, ApiField.UPDATED_AT, ApiField.META, ApiField.PATH_ORIGINAL, ApiField.FULL_STORAGE_URL, ApiField.TAGS, ]
[docs] @staticmethod def info_tuple_name(): """ Get string name of :class:`ImageInfo<ImageInfo>` NamedTuple. :return: NamedTuple name. :rtype: :class:`str` """ return "ImageInfo"
def get_list_generator( self, dataset_id: int, filters: Optional[List[Dict[str, str]]] = None, sort: Optional[str] = "id", sort_order: Optional[str] = "asc", limit: Optional[int] = None, force_metadata_for_links: Optional[bool] = False, batch_size: Optional[int] = None, ) -> Iterator[List[ImageInfo]]: data = { ApiField.DATASET_ID: dataset_id, ApiField.FILTER: filters or [], ApiField.SORT: sort, ApiField.SORT_ORDER: sort_order, ApiField.FORCE_METADATA_FOR_LINKS: force_metadata_for_links, ApiField.PAGINATION_MODE: ApiField.TOKEN, } if batch_size is not None: data[ApiField.PER_PAGE] = batch_size else: # use default value on instance (20k) # #tag/Images/paths/~1images.list/get pass return self.get_list_all_pages_generator( "images.list", data, limit=limit, return_first_response=False, )
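``get_list_generator`` pages through a dataset with token-based pagination and yields one batch of :class:`ImageInfo` objects per iteration, which keeps memory usage bounded on large datasets. A short usage sketch (``dataset_id`` is a placeholder):

.. code-block:: python

    import supervisely as sly

    api = sly.Api.from_env()
    dataset_id = 2532  # placeholder dataset id
    # Each item is a list of ImageInfo objects (one page of results).
    for page in api.image.get_list_generator(dataset_id, batch_size=1000):
        print(f"received {len(page)} images")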
[docs] def get_list( self, dataset_id: int, filters: Optional[List[Dict[str, str]]] = None, sort: Optional[str] = "id", sort_order: Optional[str] = "asc", limit: Optional[int] = None, force_metadata_for_links: Optional[bool] = True, return_first_response: Optional[bool] = False, ) -> List[ImageInfo]: """ List of Images in the given :class:`Dataset<supervisely.project.project.Dataset>`. :param dataset_id: :class:`Dataset<supervisely.project.project.Dataset>` ID in which the Images are located. :type dataset_id: :class:`int` :param filters: List of params to sort output Images. :type filters: :class:`List[Dict]`, optional :param sort: Field name to sort. One of {'id' (default), 'name', 'description', 'labelsCount', 'createdAt', 'updatedAt'} :type sort: :class:`str`, optional :param sort_order: Sort order. One of {'asc' (default), 'desc'} :type sort_order: :class:`str`, optional :param limit: Max number of list elements. No limit if None (default). :type limit: :class:`int`, optional :param force_metadata_for_links: If True, updates meta for images with remote storage links when listing. :type force_metadata_for_links: bool, optional :param return_first_response: If True, returns first response without waiting for all pages. :type return_first_response: bool, optional :return: Objects with image information from Supervisely. :rtype: :class:`List[ImageInfo]<ImageInfo>` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() # Get list of Images with width = 1067 img_infos = api.image.get_list(dataset_id, filters=[{ 'field': 'width', 'operator': '=', 'value': '1067' }]) print(img_infos) # Output: [ImageInfo(id=770915, # name='IMG_3861.jpeg', # link=None, # hash='ZdpMD+ZMJx0R8BgsCzJcqM7qP4M8f1AEtoYc87xZmyQ=', # mime='image/jpeg', # ext='jpeg', # size=148388, # width=1067, # height=800, # labels_count=4, # dataset_id=2532, # created_at='2021-03-02T10:04:33.973Z', # updated_at='2021-03-02T10:04:33.973Z', # meta={}, # path_original='/h5un6l2bnaz1vj8a9qgms4-public/images/original/7/h/Vo/...jpg', # full_storage_url='http://app.supervise.ly/h5un6l2bnaz1vj8a9qgms4-public/images/original/7/h/Vo/...jpg'), # tags=[], # ImageInfo(id=770916, # name='IMG_1836.jpeg', # link=None, # hash='YZKQrZH5C0rBvGGA3p7hjWahz3/pV09u5m30Bz8GeYs=', # mime='image/jpeg', # ext='jpeg', # size=140222, # width=1067, # height=800, # labels_count=3, # dataset_id=2532, # created_at='2021-03-02T10:04:33.973Z', # updated_at='2021-03-02T10:04:33.973Z', # meta={}, # path_original='/h5un6l2bnaz1vj8a9qgms4-public/images/original/C/Y/Hq/...jpg', # full_storage_url='http://app.supervise.ly/h5un6l2bnaz1vj8a9qgms4-public/images/original/C/Y/Hq/...jpg'), # tags=[] # ] """ return self.get_list_all_pages( "images.list", { ApiField.DATASET_ID: dataset_id, ApiField.FILTER: filters or [], ApiField.SORT: sort, ApiField.SORT_ORDER: sort_order, ApiField.FORCE_METADATA_FOR_LINKS: force_metadata_for_links, }, limit=limit, return_first_response=return_first_response, )
[docs] def get_filtered_list( self, dataset_id: int, filters: Optional[List[Dict]] = None, sort: Optional[str] = "id", sort_order: Optional[str] = "asc", force_metadata_for_links: Optional[bool] = True, limit: Optional[int] = None, return_first_response: Optional[bool] = False, ) -> List[ImageInfo]: """ List of filtered Images in the given :class:`Dataset<supervisely.project.project.Dataset>`. Differs in a more flexible filter format from the get_list() method. :param dataset_id: :class:`Dataset<supervisely.project.project.Dataset>` ID in which the Images are located. :type dataset_id: :class:`int` :param filters: List of params to sort output Images. :type filters: :class:`List[Dict]`, optional :param sort: Field name to sort. One of {'id' (default), 'name', 'description', 'labelsCount', 'createdAt', 'updatedAt'}. :type sort: :class:`str`, optional :param sort_order: Sort order. One of {'asc' (default), 'desc'} :type sort_order: :class:`str`, optional :return: Objects with image information from Supervisely. :rtype: :class:`List[ImageInfo]<ImageInfo>` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() # Get list of Images with names containing subsequence '2008' img_infos = api.image.get_filtered_list(dataset_id, filters=[{ 'type': 'images_filename', 'data': { 'value': '2008' } }]) """ if filters is None or not filters: return self.get_list( dataset_id, sort=sort, sort_order=sort_order, limit=limit, force_metadata_for_links=force_metadata_for_links, return_first_response=return_first_response, ) if not all(["type" in filter.keys() for filter in filters]): raise ValueError("'type' field not found in filter") if not all(["data" in filter.keys() for filter in filters]): raise ValueError("'data' field not found in filter") allowed_filter_types = [ "images_filename", "images_tag", "objects_tag", "objects_class", "objects_annotator", "tagged_by_annotator", "issues_count", ] if not all([filter["type"] in allowed_filter_types for filter in filters]): raise ValueError(f"'type' field must be one of: {allowed_filter_types}") return self.get_list_all_pages( "images.list", { ApiField.DATASET_ID: dataset_id, ApiField.FILTERS: filters, ApiField.SORT: sort, ApiField.SORT_ORDER: sort_order, ApiField.FORCE_METADATA_FOR_LINKS: force_metadata_for_links, }, limit=limit, return_first_response=return_first_response, )
[docs] def get_info_by_id(self, id: int, force_metadata_for_links=True) -> ImageInfo: """ Get Image information by ID. :param id: Image ID in Supervisely. :type id: int :return: Object with image information from Supervisely. :rtype: :class:`ImageInfo<ImageInfo>` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() # You can get Image ID by listing all images in the Dataset as shown in get_list # Or you can open certain image in Supervisely Annotation Tool UI and get last digits of the URL img_info = api.image.get_info_by_id(770918) """ return self._get_info_by_id( id, "images.info", fields={ApiField.FORCE_METADATA_FOR_LINKS: force_metadata_for_links}, )
def _get_info_by_filters(self, parent_id, filters, force_metadata_for_links): """_get_info_by_filters""" items = self.get_list(parent_id, filters, force_metadata_for_links=force_metadata_for_links) return _get_single_item(items)
[docs] def get_info_by_name( self, dataset_id: int, name: str, force_metadata_for_links: Optional[bool] = True ) -> ImageInfo: """Returns image info by image name from given dataset id. :param dataset_id: Dataset ID in Supervisely, where Image is located. :type dataset_id: int :param name: Image name in Supervisely. :type name: str :param force_metadata_for_links: If True, returns full_storage_url and path_original fields in ImageInfo. :type force_metadata_for_links: bool, optional :return: Object with image information from Supervisely. :rtype: :class:`ImageInfo<ImageInfo>` """ return self._get_info_by_name( get_info_by_filters_fn=lambda module_name: self._get_info_by_filters( dataset_id, module_name, force_metadata_for_links ), name=name, )
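A minimal lookup-by-name sketch (the dataset id and file name are placeholders; when nothing matches, the result is expected to be ``None``):

.. code-block:: python

    import supervisely as sly

    api = sly.Api.from_env()
    dataset_id = 2532  # placeholder dataset id
    img_info = api.image.get_info_by_name(dataset_id, "IMG_0748.jpeg")  # placeholder name
    if img_info is not None:
        print(img_info.id, img_info.hash)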
# @TODO: reimplement to new method images.bulk.info
[docs] def get_info_by_id_batch( self, ids: List[int], progress_cb: Optional[Union[tqdm, Callable]] = None, force_metadata_for_links=True, ) -> List[ImageInfo]: """ Get Images information by IDs. :param ids: Images IDs in Supervisely. :type ids: List[int] :param progress_cb: Function for tracking the progress. :type progress_cb: tqdm or callable, optional :return: Objects with image information from Supervisely. :rtype: :class:`List[ImageInfo]` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() img_ids = [376728, 376729, 376730, 376731, 376732, 376733] img_infos = api.image.get_info_by_id_batch(img_ids) """ results = [] if len(ids) == 0: return results dataset_id = self.get_info_by_id(ids[0], force_metadata_for_links=False).dataset_id for batch in batched(ids): filters = [{"field": ApiField.ID, "operator": "in", "value": batch}] results.extend( self.get_list_all_pages( "images.list", { ApiField.DATASET_ID: dataset_id, ApiField.FILTER: filters, ApiField.FORCE_METADATA_FOR_LINKS: force_metadata_for_links, }, ) ) if progress_cb is not None: progress_cb(len(batch)) temp_map = {info.id: info for info in results} ordered_results = [temp_map[id] for id in ids] return ordered_results
def _download(self, id, is_stream=False): """ :param id: int :param is_stream: bool :return: Response object containing the content of the image with the given id """ response = self._api.post("images.download", {ApiField.ID: id}, stream=is_stream) return response
[docs] def download_np(self, id: int, keep_alpha: Optional[bool] = False) -> np.ndarray: """ Download Image with given id in numpy format. :param id: Image ID in Supervisely. :type id: int :param keep_alpha: If True keeps alpha mask for image, otherwise don't. :type keep_alpha: bool, optional :return: Image in RGB numpy matrix format :rtype: :class:`np.ndarray` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() image_np = api.image.download_np(770918) """ response = self._download(id) img = sly_image.read_bytes(response.content, keep_alpha) return img
[docs] def download_path(self, id: int, path: str) -> None: """ Downloads Image from Dataset to local path by ID. :param id: Image ID in Supervisely. :type id: int :param path: Local save path for Image. :type path: str :return: None :rtype: :class:`NoneType` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() img_info = api.image.get_info_by_id(770918) save_path = os.path.join("/home/admin/work/projects/lemons_annotated/ds1/test_imgs/", img_info.name) api.image.download_path(770918, save_path) """ response = self._download(id, is_stream=True) ensure_base_path(path) with open(path, "wb") as fd: for chunk in response.iter_content(chunk_size=1024 * 1024): fd.write(chunk)
[docs] def download(self, id: int, path: str) -> None: """ Downloads Image from Dataset to local path by ID. :param id: Image ID in Supervisely. :type id: int :param path: Local save path for Image. :type path: str :return: None :rtype: :class:`NoneType` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() img_info = api.image.get_info_by_id(770918) save_path = os.path.join("/home/admin/work/projects/lemons_annotated/ds1/test_imgs/", img_info.name) api.image.download(770918, save_path) """ self.download_path(id=id, path=path)
def _download_batch( self, dataset_id: int, ids: List[int], progress_cb: Optional[Union[tqdm, Callable]] = None ): """ Yield pairs of image id and image content for the given dataset and list of image ids. """ for batch_ids in batched(ids): response = self._api.post( "images.bulk.download", {ApiField.DATASET_ID: dataset_id, ApiField.IMAGE_IDS: batch_ids}, ) decoder = MultipartDecoder.from_response(response) for part in decoder.parts: content_utf8 = part.headers[b"Content-Disposition"].decode("utf-8") # Find name="1245" preceded by a whitespace, semicolon or beginning of line. # The regex has 2 capture groups: one for the prefix and one for the actual name value. img_id = int(re.findall(r'(^|[\s;])name="(\d*)"', content_utf8)[0][1]) if progress_cb is not None: progress_cb(1) yield img_id, part
[docs] def download_paths( self, dataset_id: int, ids: List[int], paths: List[str], progress_cb: Optional[Union[tqdm, Callable]] = None, ) -> None: """ Download Images with given ids and saves them for the given paths. :param dataset_id: Dataset ID in Supervisely, where Images are located. :type dataset_id: :class:`int` :param ids: List of Image IDs in Supervisely. :type ids: :class:`List[int]` :param paths: Local save paths for Images. :type paths: :class:`List[str]` :param progress_cb: Function for tracking download progress. :type progress_cb: tqdm or callable, optional :raises: :class:`ValueError` if len(ids) != len(paths) :return: None :rtype: :class:`NoneType` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() local_save_dir = "/home/admin/work/projects/lemons_annotated/ds1/test_imgs" save_paths = [] image_ids = [771755, 771756, 771757, 771758, 771759, 771760] img_infos = api.image.get_info_by_id_batch(image_ids) p = tqdm(desc="Images downloaded: ", total=len(img_infos)) for img_info in img_infos: save_paths.append(os.path.join(local_save_dir, img_info.name)) api.image.download_paths(2573, image_ids, save_paths, progress_cb=p) # Progress: # {"message": "progress", "event_type": "EventType.PROGRESS", "subtask": "Images downloaded: ", "current": 0, "total": 6, "timestamp": "2021-03-15T19:47:15.406Z", "level": "info"} # {"message": "progress", "event_type": "EventType.PROGRESS", "subtask": "Images downloaded: ", "current": 1, "total": 6, "timestamp": "2021-03-15T19:47:16.366Z", "level": "info"} # {"message": "progress", "event_type": "EventType.PROGRESS", "subtask": "Images downloaded: ", "current": 2, "total": 6, "timestamp": "2021-03-15T19:47:16.367Z", "level": "info"} # {"message": "progress", "event_type": "EventType.PROGRESS", "subtask": "Images downloaded: ", "current": 3, "total": 6, "timestamp": "2021-03-15T19:47:16.367Z", "level": "info"} # {"message": "progress", "event_type": "EventType.PROGRESS", "subtask": "Images downloaded: ", "current": 4, "total": 6, "timestamp": "2021-03-15T19:47:16.367Z", "level": "info"} # {"message": "progress", "event_type": "EventType.PROGRESS", "subtask": "Images downloaded: ", "current": 5, "total": 6, "timestamp": "2021-03-15T19:47:16.368Z", "level": "info"} # {"message": "progress", "event_type": "EventType.PROGRESS", "subtask": "Images downloaded: ", "current": 6, "total": 6, "timestamp": "2021-03-15T19:47:16.368Z", "level": "info"} """ if len(ids) == 0: return if len(ids) != len(paths): raise ValueError('Can not match "ids" and "paths" lists, len(ids) != len(paths)') id_to_path = {id: path for id, path in zip(ids, paths)} for img_id, resp_part in self._download_batch(dataset_id, ids, progress_cb): with open(id_to_path[img_id], "wb") as w: w.write(resp_part.content)
[docs] def download_bytes( self, dataset_id: int, ids: List[int], progress_cb: Optional[Union[tqdm, Callable]] = None ) -> List[bytes]: """ Download Images with given IDs from Dataset in Binary format. :param dataset_id: Dataset ID in Supervisely, where Images are located. :type dataset_id: int :param ids: List of Image IDs in Supervisely. :type ids: List[int] :param progress_cb: Function for tracking download progress. :type progress_cb: tqdm or callable, optional :return: List of Images in binary format :rtype: :class:`List[bytes]` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() img_bytes = api.image.download_bytes(dataset_id, [770918]) print(img_bytes) # Output: [b'\xff\xd8\xff\xe0\x00\x10JFIF\x00\x01\x01\...] """ if len(ids) == 0: return [] id_to_img = {} for img_id, resp_part in self._download_batch(dataset_id, ids, progress_cb): id_to_img[img_id] = resp_part.content return [id_to_img[id] for id in ids]
[docs] def download_nps( self, dataset_id: int, ids: List[int], progress_cb: Optional[Union[tqdm, Callable]] = None, keep_alpha: Optional[bool] = False, ) -> List[np.ndarray]: """ Download Images with given IDs in numpy format. :param dataset_id: Dataset ID in Supervisely, where Images are located. :type dataset_id: int :param ids: List of Images IDs in Supervisely. :type ids: List[int] :param progress_cb: Function for tracking download progress. :type progress_cb: tqdm or callable, optional :param keep_alpha: If True keeps alpha mask for Image, otherwise don't. :type keep_alpha: bool, optional :return: List of Images in RGB numpy matrix format :rtype: :class:`List[np.ndarray]` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() image_ids = [770918, 770919, 770920] image_nps = api.image.download_nps(dataset_id, image_ids) """ return [ sly_image.read_bytes(img_bytes, keep_alpha) for img_bytes in self.download_bytes( dataset_id=dataset_id, ids=ids, progress_cb=progress_cb ) ]
def download_nps_generator( self, dataset_id: int, ids: List[int], progress_cb: Optional[Union[tqdm, Callable]] = None, keep_alpha: Optional[bool] = False, ) -> Generator[Tuple[int, np.ndarray], None, None]: for img_id, img_part in self._download_batch(dataset_id, ids, progress_cb): img_bytes = img_part.content yield img_id, sly_image.read_bytes(img_bytes, keep_alpha)
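``download_nps_generator`` decodes images one by one as the multipart response streams in, instead of materializing the whole list in memory like ``download_nps``. A sketch of iterating it (dataset and image ids are placeholders):

.. code-block:: python

    import supervisely as sly

    api = sly.Api.from_env()
    dataset_id = 2532  # placeholder dataset id
    image_ids = [770918, 770919, 770920]  # placeholder image ids
    for img_id, img_np in api.image.download_nps_generator(dataset_id, image_ids):
        print(img_id, img_np.shape)  # e.g. (height, width, 3) for an RGB image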
[docs] def check_existing_hashes( self, hashes: List[str], progress_cb: Optional[Union[tqdm, Callable]] = None ) -> List[str]: """ Checks which of the given hashes already exist on the server. :param hashes: List of hashes. :type hashes: List[str] :param progress_cb: Function for tracking progress of checking. :type progress_cb: tqdm or callable, optional :return: List of existing hashes :rtype: :class:`List[str]` :Usage example: Check out the detailed example `here <https://app.supervise.ly/explore/notebooks/guide-10-check-existing-images-and-upload-only-the-new-ones-1545/overview>`_ (you must be logged into your Supervisely account) .. code-block:: python # This method is helpful when your upload was interrupted # You can check which images have been successfully uploaded by their hashes and which have not # And continue uploading the rest of the images from that point import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() # Find project project = api.project.get_info_by_id(PROJECT_ID) # Get paths of all images in a directory images_paths = sly.fs.list_files('images_to_upload') # Calculate hashes for all image paths hash_to_image = {} images_hashes = [] for idx, item in enumerate(images_paths): item_hash = sly.fs.get_file_hash(item) images_hashes.append(item_hash) hash_to_image[item_hash] = item # Get hashes that are already on server remote_hashes = api.image.check_existing_hashes(images_hashes) already_uploaded_images = {hh: hash_to_image[hh] for hh in remote_hashes} """ results = [] if len(hashes) == 0: return results for hashes_batch in batched(hashes, batch_size=900): response = self._api.post("images.internal.hashes.list", hashes_batch) results.extend(response.json()) if progress_cb is not None: progress_cb(len(hashes_batch)) return results
[docs] def check_image_uploaded(self, hash: str) -> bool: """ Checks if Image with the given hash has been uploaded. :param hash: Image hash in Supervisely. :type hash: str :return: True if Image with given hash exists, otherwise False :rtype: :class:`bool` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() image_check_uploaded = api.image.check_image_uploaded("YZKQrZH5C0rBvGGA3p7hjWahz3/pV09u5m30Bz8GeYs=") print(image_check_uploaded) # Output: True """ response = self._api.post("images.internal.hashes.list", [hash]) results = response.json() if len(results) == 0: return False else: return True
def _upload_uniq_images_single_req(self, func_item_to_byte_stream, hashes_items_to_upload): """ Upload images (binary data) to server with a single request. Expects unique images that don't already exist on the server. :param func_item_to_byte_stream: converter for "item" to byte stream :param hashes_items_to_upload: list of pairs (hash, item) :return: list of hashes for successfully uploaded items """ content_dict = {} for idx, (_, item) in enumerate(hashes_items_to_upload): content_dict["{}-file".format(idx)] = ( str(idx), func_item_to_byte_stream(item), "image/*", ) encoder = MultipartEncoder(fields=content_dict) resp = self._api.post("images.bulk.upload", encoder) # close all opened files for value in content_dict.values(): from io import BufferedReader if isinstance(value[1], BufferedReader): value[1].close() resp_list = json.loads(resp.text) remote_hashes = [d["hash"] for d in resp_list if "hash" in d] if len(remote_hashes) != len(hashes_items_to_upload): problem_items = [ (hsh, item, resp["errors"]) for (hsh, item), resp in zip(hashes_items_to_upload, resp_list) if resp.get("errors") ] logger.warn( "Not all images were uploaded within request.", extra={ "total_cnt": len(hashes_items_to_upload), "ok_cnt": len(remote_hashes), "items": problem_items, }, ) return remote_hashes def _upload_data_bulk( self, func_item_to_byte_stream, items_hashes, retry_cnt=3, progress_cb=None ): """ Upload images (binary data) to server. Works with already existing or duplicating images. :param func_item_to_byte_stream: converter for "item" to byte stream :param items_hashes: iterable of pairs (item, hash) where "item" is some descriptor (e.g. image file path) for image data, and "hash" is a hash of the image binary data :param retry_cnt: int, number of retries to send the whole set of items :param progress_cb: callback or tqdm object to report progress (in number of items) """ # count all items to adjust progress_cb and create hash to item mapping with unique hashes items_count_total = 0 hash_to_items = {} for item, i_hash in items_hashes: hash_to_items[i_hash] = item items_count_total += 1 unique_hashes = set(hash_to_items.keys()) remote_hashes = set( self.check_existing_hashes(list(unique_hashes)) ) # existing -- from server if progress_cb: progress_cb(len(remote_hashes)) pending_hashes = unique_hashes - remote_hashes # @TODO: some correlation with sly.io.network_exceptions. Should we perform retries here? for retry_idx in range(retry_cnt): # single attempt to upload all data which is not uploaded yet for hashes in batched(list(pending_hashes)): pending_hashes_items = [(h, hash_to_items[h]) for h in hashes] hashes_rcv = self._upload_uniq_images_single_req( func_item_to_byte_stream, pending_hashes_items ) pending_hashes -= set(hashes_rcv) if set(hashes_rcv) - set(hashes): logger.warn( "Hash inconsistency in images bulk upload.", extra={"sent": hashes, "received": hashes_rcv}, ) if progress_cb: progress_cb(len(hashes_rcv)) if not pending_hashes: if progress_cb is not None: progress_cb(items_count_total - len(unique_hashes)) return warning_items = [] for h in pending_hashes: item_data = hash_to_items[h] if isinstance(item_data, (bytes, bytearray)): item_data = "some bytes ..." warning_items.append((h, item_data)) logger.warn( "Unable to upload images (data).", extra={ "retry_idx": retry_idx, "items": warning_items, }, ) # retry, in case it was a transient server/connection error raise ValueError( "Unable to upload images (data). " "Please check if images are in supported format and if ones aren't corrupted." )
[docs] def upload_path( self, dataset_id: int, name: str, path: str, meta: Optional[Dict] = None ) -> ImageInfo: """ Uploads Image with given name from given local path to Dataset. :param dataset_id: Dataset ID in Supervisely. :type dataset_id: int :param name: Image name with extension. :type name: str :param path: Local Image path. :type path: str :param meta: Image metadata. :type meta: dict, optional :return: Information about Image. See :class:`info_sequence<info_sequence>` :rtype: :class:`ImageInfo` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() img_info = api.image.upload_path(dataset_id, name="7777.jpeg", path="/home/admin/Downloads/7777.jpeg") """ metas = None if meta is None else [meta] return self.upload_paths(dataset_id, [name], [path], metas=metas)[0]
[docs] def upload_paths( self, dataset_id: int, names: List[str], paths: List[str], progress_cb: Optional[Union[tqdm, Callable]] = None, metas: Optional[List[Dict]] = None, ) -> List[ImageInfo]: """ Uploads Images with given names from given local paths to Dataset. :param dataset_id: Dataset ID in Supervisely. :type dataset_id: int :param names: List of Images names with extension. :type names: List[str] :param paths: List of local Images paths. :type paths: List[str] :param progress_cb: Function for tracking the progress of uploading. :type progress_cb: tqdm or callable, optional :param metas: Images metadata. :type metas: List[dict], optional :raises: :class:`ValueError` if len(names) != len(paths) :return: List with information about Images. See :class:`info_sequence<info_sequence>` :rtype: :class:`List[ImageInfo]` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() img_names = ["7777.jpeg", "8888.jpeg", "9999.jpeg"] img_paths = ["/home/admin/Downloads/img/770918.jpeg", "/home/admin/Downloads/img/770919.jpeg", "/home/admin/Downloads/img/770920.jpeg"] img_infos = api.image.upload_paths(dataset_id, names=img_names, paths=img_paths) """ def path_to_bytes_stream(path): return open(path, "rb") hashes = [get_file_hash(x) for x in paths] self._upload_data_bulk(path_to_bytes_stream, zip(paths, hashes), progress_cb=progress_cb) return self.upload_hashes(dataset_id, names, hashes, metas=metas)
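``upload_paths`` hashes the files locally, lets ``_upload_data_bulk`` skip binaries the server already stores, and then attaches the images to the dataset by hash. A rough sketch of performing that deduplication step manually (paths and dataset id are placeholders):

.. code-block:: python

    import supervisely as sly
    from supervisely.io.fs import get_file_hash, get_file_name_with_ext

    api = sly.Api.from_env()
    dataset_id = 452984  # placeholder dataset id
    paths = ["/data/img_001.jpg", "/data/img_002.jpg"]  # placeholder paths
    names = [get_file_name_with_ext(p) for p in paths]
    hashes = [get_file_hash(p) for p in paths]

    # Hashes already known to the server can be attached without re-sending the binary data.
    existing = set(api.image.check_existing_hashes(hashes))
    reusable = [(n, h) for n, h in zip(names, hashes) if h in existing]
    if reusable:
        api.image.upload_hashes(dataset_id, [n for n, _ in reusable], [h for _, h in reusable])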
[docs] def upload_np( self, dataset_id: int, name: str, img: np.ndarray, meta: Optional[Dict] = None ) -> ImageInfo: """ Upload given Image in numpy format with given name to Dataset. :param dataset_id: Dataset ID in Supervisely. :type dataset_id: int :param name: Image name with extension. :type name: str :param img: image in RGB format(numpy matrix) :type img: np.ndarray :param meta: Image metadata. :type meta: dict, optional :return: Information about Image. See :class:`info_sequence<info_sequence>` :rtype: :class:`ImageInfo` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() img_np = sly.image.read("/home/admin/Downloads/7777.jpeg") img_info = api.image.upload_np(dataset_id, name="7777.jpeg", img=img_np) """ metas = None if meta is None else [meta] return self.upload_nps(dataset_id, [name], [img], metas=metas)[0]
[docs] def upload_nps( self, dataset_id: int, names: List[str], imgs: List[np.ndarray], progress_cb: Optional[Union[tqdm, Callable]] = None, metas: Optional[List[Dict]] = None, ) -> List[ImageInfo]: """ Upload given Images in numpy format with given names to Dataset. :param dataset_id: Dataset ID in Supervisely. :type dataset_id: int :param names: Images names with extension. :type names: List[str] :param imgs: Images in RGB numpy matrix format :type imgs: List[np.ndarray] :param progress_cb: Function for tracking the progress of uploading. :type progress_cb: tqdm or callable, optional :param metas: Images metadata. :type metas: List[dict], optional :return: List with information about Images. See :class:`info_sequence<info_sequence>` :rtype: :class:`List[ImageInfo]` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() img_np_1 = sly.image.read("/home/admin/Downloads/7777.jpeg") img_np_2 = sly.image.read("/home/admin/Downloads/8888.jpeg") img_np_3 = sly.image.read("/home/admin/Downloads/9999.jpeg") img_names = ["7777.jpeg", "8888.jpeg", "9999.jpeg"] img_nps = [img_np_1, img_np_2, img_np_3] img_infos = api.image.upload_nps(dataset_id, names=img_names, imgs=img_nps) """ def img_to_bytes_stream(item): img, name = item[0], item[1] img_bytes = sly_image.write_bytes(img, get_file_ext(name)) return io.BytesIO(img_bytes) def img_to_hash(item): img, name = item[0], item[1] return sly_image.get_hash(img, get_file_ext(name)) img_name_list = list(zip(imgs, names)) hashes = [img_to_hash(x) for x in img_name_list] self._upload_data_bulk( img_to_bytes_stream, zip(img_name_list, hashes), progress_cb=progress_cb ) return self.upload_hashes(dataset_id, names, hashes, metas=metas)
[docs] def upload_hash( self, dataset_id: int, name: str, hash: str, meta: Optional[Dict] = None ) -> ImageInfo: """ Upload Image from given hash to Dataset. :param dataset_id: Dataset ID in Supervisely. :type dataset_id: int :param name: Image name with extension. :type name: str :param hash: Image hash. :type hash: str :param meta: Image metadata. :type meta: dict, optional :return: Information about Image. See :class:`info_sequence<info_sequence>` :rtype: :class:`ImageInfo` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() dst_dataset_id = 452984 im_info = api.image.get_info_by_id(193940090) hash = im_info.hash # It is necessary to upload image with the same name(extention) as in src dataset name = im_info.name meta = {1: 'meta_example'} new_in_info = api.image.upload_hash(dst_dataset_id, name, hash, meta) print(json.dumps(new_in_info, indent=4)) # Output: [ # 196793586, # "IMG_0748.jpeg", # null, # "NEjmnmdd7DOzaFAKK/nCIl5CtcwZeMkhW3CHe875p9g=", # "image/jpeg", # "jpeg", # 66885, # 600, # 500, # 0, # 452984, # "2021-03-16T09:09:45.587Z", # "2021-03-16T09:09:45.587Z", # { # "1": "meta_example" # }, # "/h5un6l2bnaz1vj8a9qgms4-public/images/original/P/a/kn/W2mzMQg435d6wG0.jpg", # "https://app.supervise.ly/h5un6l2bnaz1vj8a9qgms4-public/images/original/P/a/kn/W2mzMQg435hiHJAPgMU.jpg" # ] """ metas = None if meta is None else [meta] return self.upload_hashes(dataset_id, [name], [hash], metas=metas)[0]
[docs] def upload_hashes( self, dataset_id: int, names: List[str], hashes: List[str], progress_cb: Optional[Union[tqdm, Callable]] = None, metas: Optional[List[Dict]] = None, batch_size: Optional[int] = 50, skip_validation: Optional[bool] = False, ) -> List[ImageInfo]: """ Upload images from given hashes to Dataset. :param dataset_id: Dataset ID in Supervisely. :type dataset_id: int :param names: Images names with extension. :type names: List[str] :param hashes: Images hashes. :type hashes: List[str] :param progress_cb: Function for tracking the progress of uploading. :type progress_cb: tqdm or callable, optional :param metas: Images metadata. :type metas: List[dict], optional :param batch_size: Number of images to upload in one batch. :type batch_size: int, optional :param skip_validation: Skips validation for images, can result in invalid images being uploaded. :type skip_validation: bool, optional :return: List with information about Images. See :class:`info_sequence<info_sequence>` :rtype: :class:`List[ImageInfo]` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() src_dataset_id = 447130 hashes = [] names = [] metas = [] imgs_info = api.image.get_list(src_dataset_id) # Create lists of hashes, images names and meta information for each image for im_info in imgs_info: hashes.append(im_info.hash) # It is necessary to upload images with the same names(extentions) as in src dataset names.append(im_info.name) metas.append({im_info.name: im_info.size}) dst_dataset_id = 452984 progress = sly.Progress("Images upload: ", len(hashes)) new_imgs_info = api.image.upload_hashes(dst_dataset_id, names, hashes, progress.iters_done_report, metas) # Output: # {"message": "progress", "event_type": "EventType.PROGRESS", "subtask": "Images downloaded: ", "current": 0, "total": 10, "timestamp": "2021-03-16T11:59:07.444Z", "level": "info"} # {"message": "progress", "event_type": "EventType.PROGRESS", "subtask": "Images downloaded: ", "current": 10, "total": 10, "timestamp": "2021-03-16T11:59:07.644Z", "level": "info"} """ return self._upload_bulk_add( lambda item: (ApiField.HASH, item), dataset_id, names, hashes, progress_cb, metas=metas, batch_size=batch_size, skip_validation=skip_validation, )
[docs] def upload_id( self, dataset_id: int, name: str, id: int, meta: Optional[Dict] = None ) -> ImageInfo: """ Upload Image by ID to Dataset. :param dataset_id: Destination Dataset ID in Supervisely. :type dataset_id: int :param name: Image name with extension. :type name: str :param id: Source image ID in Supervisely. :type id: int :param meta: Image metadata. :type meta: dict, optional :return: Information about Image. See :class:`info_sequence<info_sequence>` :rtype: :class:`ImageInfo` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() dst_dataset_id = 452984 im_info = api.image.get_info_by_id(193940090) id = im_info.id # It is necessary to upload image with the same name(extention) as in src dataset name = im_info.name meta = {1: 'meta_example'} new_in_info = api.image.upload_id(dst_dataset_id, name, id, meta) print(json.dumps(new_in_info, indent=4)) # Output: [ # 196793605, # "IMG_0748.jpeg", # null, # "NEjmnmdd7DOzaFAKK/nCIl5CtcwZeMkhW3CHe875p9g=", # "image/jpeg", # "jpeg", # 66885, # 600, # 500, # 0, # 452984, # "2021-03-16T09:27:12.620Z", # "2021-03-16T09:27:12.620Z", # { # "1": "meta_example" # }, # "/h5un6l2bnaz1vj8a9qgms4-public/images/original/P/a/kn/W2mzMQg435d6wG0AJGJTOsL1FqMUNOPqu4VdzFAN36LqtGwBIE4AmLOQ1BAxuIyB0bHJAPgMU.jpg", # "https://app.supervise.ly/h5un6l2bnaz1vj8a9qgms4-public/images/original/P/a/kn/iEaDEkejnfnb1Tz56ka0hiHJAPgMU.jpg" # ] """ metas = None if meta is None else [meta] return self.upload_ids(dataset_id, [name], [id], metas=metas)[0]
[docs] def upload_ids( self, dataset_id: int, names: List[str], ids: List[int], progress_cb: Optional[Union[tqdm, Callable]] = None, metas: Optional[List[Dict]] = None, batch_size: Optional[int] = 50, force_metadata_for_links: bool = True, infos: List[ImageInfo] = None, skip_validation: Optional[bool] = False, ) -> List[ImageInfo]: """ Upload Images by IDs to Dataset. :param dataset_id: Destination Dataset ID in Supervisely. :type dataset_id: int :param names: Source image names with extension. :type names: List[str] :param ids: Images IDs. :type ids: List[int] :param progress_cb: Function for tracking the progress of uploading. :type progress_cb: tqdm or callable, optional :param metas: Images metadata. :type metas: List[dict], optional :param batch_size: Number of images to upload in one batch. :type batch_size: int, optional :param force_metadata_for_links: Calculate metadata for links. If False, metadata will be empty. :type force_metadata_for_links: bool, optional :param infos: List of ImageInfo objects. If None, will be requested from server. :type infos: List[ImageInfo], optional :param skip_validation: Skips validation for images, can result in invalid images being uploaded. :type skip_validation: bool, optional :return: List with information about Images. See :class:`info_sequence<info_sequence>` :rtype: :class:`List[ImageInfo]` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() src_dataset_id = 447130 ids = [] names = [] metas = [] imgs_info = api.image.get_list(src_dataset_id) # Create lists of ids, images names and meta information for each image for im_info in imgs_info: ids.append(im_info.id) # It is necessary to upload images with the same names (extensions) as in src dataset names.append(im_info.name) metas.append({im_info.name: im_info.size}) dst_dataset_id = 452984 progress = sly.Progress("Images upload: ", len(ids)) new_imgs_info = api.image.upload_ids(dst_dataset_id, names, ids, progress.iters_done_report, metas) # Output: # {"message": "progress", "event_type": "EventType.PROGRESS", "subtask": "Images downloaded: ", "current": 0, "total": 10, "timestamp": "2021-03-16T12:31:36.550Z", "level": "info"} # {"message": "progress", "event_type": "EventType.PROGRESS", "subtask": "Images downloaded: ", "current": 10, "total": 10, "timestamp": "2021-03-16T12:31:37.119Z", "level": "info"} """ if metas is None: metas = [{}] * len(names) if infos is None: infos = self.get_info_by_id_batch( ids, force_metadata_for_links=force_metadata_for_links ) # prev implementation # hashes = [info.hash for info in infos] # return self.upload_hashes(dataset_id, names, hashes, progress_cb, metas=metas) links, links_names, links_order, links_metas = [], [], [], [] hashes, hashes_names, hashes_order, hashes_metas = [], [], [], [] for idx, (name, info, meta) in enumerate(zip(names, infos, metas)): if info.link is not None: links.append(info.link) links_names.append(name) links_order.append(idx) links_metas.append(meta) else: hashes.append(info.hash) hashes_names.append(name) hashes_order.append(idx) hashes_metas.append(meta) result = [None] * len(names) if len(links) > 0: res_infos_links = self.upload_links( dataset_id, links_names, links, progress_cb, metas=links_metas, batch_size=batch_size, force_metadata_for_links=force_metadata_for_links, skip_validation=skip_validation, ) for info, pos in zip(res_infos_links, links_order): result[pos] = info if len(hashes) > 0: res_infos_hashes = self.upload_hashes( dataset_id, hashes_names, hashes, progress_cb, metas=hashes_metas, batch_size=batch_size, skip_validation=skip_validation, ) for info, pos in zip(res_infos_hashes, hashes_order): result[pos] = info return result
def _upload_bulk_add( self, func_item_to_kv, dataset_id, names, items, progress_cb=None, metas=None, batch_size=50, force_metadata_for_links=True, skip_validation=False, ): """ """ results = [] if len(names) == 0: return results if len(names) != len(items): raise ValueError('Can not match "names" and "items" lists, len(names) != len(items)') if metas is None: metas = [{}] * len(names) else: if len(names) != len(metas): raise ValueError('Can not match "names" and "metas" len(names) != len(metas)') for batch in batched(list(zip(names, items, metas)), batch_size=batch_size): images = [] for name, item, meta in batch: item_tuple = func_item_to_kv(item) # @TODO: 'title' -> ApiField.NAME image_data = {"title": name, item_tuple[0]: item_tuple[1]} if len(meta) != 0 and type(meta) == dict: image_data[ApiField.META] = meta images.append(image_data) response = self._api.post( "images.bulk.add", { ApiField.DATASET_ID: dataset_id, ApiField.IMAGES: images, ApiField.FORCE_METADATA_FOR_LINKS: force_metadata_for_links, ApiField.SKIP_VALIDATION: skip_validation, }, ) if progress_cb is not None: progress_cb(len(images)) for info_json in response.json(): info_json_copy = info_json.copy() if info_json.get(ApiField.MIME, None) is not None: info_json_copy[ApiField.EXT] = info_json[ApiField.MIME].split("/")[1] # results.append(self.InfoType(*[info_json_copy[field_name] for field_name in self.info_sequence()])) results.append(self._convert_json_info(info_json_copy)) # name_to_res = {img_info.name: img_info for img_info in results} # ordered_results = [name_to_res[name] for name in names] return results # ordered_results # @TODO: reimplement def _convert_json_info(self, info: dict, skip_missing=True): """ """ if info is None: return None temp_ext = None field_values = [] for field_name in self.info_sequence(): if field_name == ApiField.EXT: continue if skip_missing is True: val = info.get(field_name, None) else: val = info[field_name] field_values.append(val) if field_name == ApiField.MIME: if val is not None: temp_ext = val.split("/")[1] else: temp_ext = None field_values.append(temp_ext) for idx, field_name in enumerate(self.info_sequence()): if field_name == ApiField.NAME: cur_ext = get_file_ext(field_values[idx]).replace(".", "").lower() if not cur_ext: field_values[idx] = "{}.{}".format(field_values[idx], temp_ext) break if temp_ext is None: break if temp_ext == "jpeg" and cur_ext in ["jpg", "jpeg", "mpo"]: break if temp_ext != cur_ext: field_values[idx] = "{}.{}".format(field_values[idx], temp_ext) break res = self.InfoType(*field_values) d = res._asdict() return ImageInfo(**d) def _remove_batch_api_method_name(self): """ """ return "images.bulk.remove" def _remove_batch_field_name(self): """ """ return ApiField.IMAGE_IDS
[docs] def copy_batch( self, dst_dataset_id: int, ids: List[int], change_name_if_conflict: Optional[bool] = False, with_annotations: Optional[bool] = False, progress_cb: Optional[Union[tqdm, Callable]] = None, ) -> List[ImageInfo]: """ Copies Images with given IDs to Dataset. :param dst_dataset_id: Destination Dataset ID in Supervisely. :type dst_dataset_id: int :param ids: Images IDs in Supervisely. :type ids: List[int] :param change_name_if_conflict: If True adds suffix to the end of Image name when Dataset already contains an Image with identical name, If False and images with the identical names already exist in Dataset raises error. :type change_name_if_conflict: bool, optional :param with_annotations: If True Image will be copied to Dataset with annotations, otherwise only Images without annotations. :type with_annotations: bool, optional :param progress_cb: Function for tracking the progress of copying. :type progress_cb: tqdm or callable, optional :raises: :class:`TypeError` if type of ids is not list :raises: :class:`ValueError` if images ids are from the destination Dataset :return: List with information about Images. See :class:`info_sequence<info_sequence>` :rtype: :class:`List[ImageInfo]` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() ds_lemon_id = 1780 ds_lemon_img_infos = api.image.get_list(ds_lemon_id) lemons_img_ids = [lemon_img_info.id for lemon_img_info in ds_lemon_img_infos] ds_fruit_id = 2574 ds_fruit_img_infos = api.image.copy_batch(ds_fruit_id, lemons_img_ids, with_annotations=True) """ if type(ids) is not list: raise TypeError( "ids parameter has type {!r}. but has to be of type {!r}".format(type(ids), list) ) if len(ids) == 0: return existing_images = self.get_list(dst_dataset_id) existing_names = {image.name for image in existing_images} ids_info = self.get_info_by_id_batch(ids, force_metadata_for_links=False) temp_ds_ids = [] for info in ids_info: if info.dataset_id not in temp_ds_ids: temp_ds_ids.append(info.dataset_id) if len(temp_ds_ids) > 1: raise ValueError("Images ids have to be from the same dataset") if change_name_if_conflict: new_names = [ generate_free_name(existing_names, info.name, with_ext=True, extend_used_names=True) for info in ids_info ] else: new_names = [info.name for info in ids_info] names_intersection = existing_names.intersection(set(new_names)) if len(names_intersection) != 0: raise ValueError( "Images with the same names already exist in destination dataset. " 'Please, use argument "change_name_if_conflict=True" to automatically resolve ' "names intersection" ) new_images = self.upload_ids(dst_dataset_id, new_names, ids, progress_cb) new_ids = [new_image.id for new_image in new_images] if with_annotations: src_project_id = self._api.dataset.get_info_by_id(ids_info[0].dataset_id).project_id dst_project_id = self._api.dataset.get_info_by_id(dst_dataset_id).project_id self._api.project.merge_metas(src_project_id, dst_project_id) self._api.annotation.copy_batch(ids, new_ids) return new_images
[docs] def copy_batch_optimized( self, src_dataset_id: int, src_image_infos: List[ImageInfo], dst_dataset_id: int, with_annotations: Optional[bool] = True, progress_cb: Optional[Union[tqdm, Callable]] = None, dst_names: List[ImageInfo] = None, batch_size: Optional[int] = 500, skip_validation: Optional[bool] = False, save_source_date: Optional[bool] = True, ) -> List[ImageInfo]: """ Copies Images with given IDs to Dataset. :param src_dataset_id: Source Dataset ID in Supervisely. :type src_dataset_id: int :param src_image_infos: ImageInfo objects of images to copy. :type src_image_infos: List [ :class:`ImageInfo` ] :param dst_dataset_id: Destination Dataset ID in Supervisely. :type dst_dataset_id: int :param with_annotations: If True Image will be copied to Dataset with annotations, otherwise only Images without annotations. :type with_annotations: bool, optional :param progress_cb: Function for tracking the progress of copying. :type progress_cb: tqdm or callable, optional :param dst_names: ImageInfo list with existing items in destination dataset. :type dst_names: List [ :class:`ImageInfo` ], optional :param batch_size: Number of elements to copy for each request. :type batch_size: int, optional :param skip_validation: Flag for skipping additinal validations. :type skip_validation: bool, optional :param save_source_date: Save source annotation dates (creation and modification) or create a new date. :type save_source_date: bool, optional :raises: :class:`TypeError` if type of src_image_infos is not list :return: List with information about Images. See :class:`info_sequence<info_sequence>` :rtype: :class:`List[ImageInfo]` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() src_ds_id = 2231 img_infos = api.image.get_list(src_ds_id) dest_ds_id = 2574 dest_img_infos = api.image.copy_batch_optimized(src_ds_id, img_infos, dest_ds_id) """ if type(src_image_infos) is not list: raise TypeError( "src_image_infos parameter has type {!r}. but has to be of type {!r}".format( type(src_image_infos), list ) ) if len(src_image_infos) == 0: return if dst_names is None: existing_images = self.get_list(dst_dataset_id, force_metadata_for_links=False) existing_names = {image.name for image in existing_images} new_names = [ generate_free_name(existing_names, info.name, with_ext=True, extend_used_names=True) for info in src_image_infos ] else: if len(dst_names) != len(src_image_infos): raise RuntimeError("len(dst_names) != len(src_image_infos)") new_names = dst_names src_ids = [info.id for info in src_image_infos] new_images = self.upload_ids( dst_dataset_id, new_names, src_ids, progress_cb, batch_size=batch_size, force_metadata_for_links=False, infos=src_image_infos, skip_validation=skip_validation, ) new_ids = [new_image.id for new_image in new_images] if with_annotations: src_project_id = self._api.dataset.get_info_by_id(src_dataset_id).project_id dst_project_id = self._api.dataset.get_info_by_id(dst_dataset_id).project_id self._api.project.merge_metas(src_project_id, dst_project_id) self._api.annotation.copy_batch_by_ids( src_ids, new_ids, batch_size=batch_size, save_source_date=save_source_date, ) return new_images
[docs] def move_batch( self, dst_dataset_id: int, ids: List[int], change_name_if_conflict: Optional[bool] = False, with_annotations: Optional[bool] = False, progress_cb: Optional[Union[tqdm, Callable]] = None, ) -> List[ImageInfo]: """ Moves Images with given IDs to Dataset. :param dst_dataset_id: Destination Dataset ID in Supervisely. :type dst_dataset_id: int :param ids: Images IDs in Supervisely. :type ids: List[int] :param change_name_if_conflict: If True adds suffix to the end of Image name when Dataset already contains an Image with identical name, If False and images with the identical names already exist in Dataset raises error. :type change_name_if_conflict: bool, optional :param with_annotations: If True Image will be copied to Dataset with annotations, otherwise only Images without annotations. :type with_annotations: bool, optional :param progress_cb: Function for tracking the progress of moving. :type progress_cb: tqdm or callable, optional :raises: :class:`TypeError` if type of ids is not list :raises: :class:`ValueError` if images ids are from the destination Dataset :return: List with information about Images. See :class:`info_sequence<info_sequence>` :rtype: :class:`List[ImageInfo]` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() ds_lemon_id = 1780 ds_kiwi_id = 1233 ds_lemon_img_infos = api.image.get_list(ds_lemon_id) ds_kiwi_img_infos = api.image.get_list(ds_kiwi_id) fruit_img_ids = [] for lemon_img_info, kiwi_img_info in zip(ds_lemon_img_infos, ds_kiwi_img_infos): fruit_img_ids.append(lemon_img_info.id) fruit_img_ids.append(kiwi_img_info.id) ds_fruit_id = 2574 ds_fruit_img_infos = api.image.move_batch(ds_fruit_id, fruit_img_ids, with_annotations=True) """ new_images = self.copy_batch( dst_dataset_id, ids, change_name_if_conflict, with_annotations, progress_cb ) self.remove_batch(ids) return new_images
[docs] def move_batch_optimized( self, src_dataset_id: int, src_image_infos: List[ImageInfo], dst_dataset_id: int, with_annotations: Optional[bool] = True, progress_cb: Optional[Union[tqdm, Callable]] = None, dst_names: List[ImageInfo] = None, batch_size: Optional[int] = 500, skip_validation: Optional[bool] = False, save_source_date: Optional[bool] = True, ) -> List[ImageInfo]: """ Moves Images with given IDs to Dataset. :param src_dataset_id: Source Dataset ID in Supervisely. :type src_dataset_id: int :param src_image_infos: ImageInfo objects of images to move. :type src_image_infos: List [ :class:`ImageInfo` ] :param dst_dataset_id: Destination Dataset ID in Supervisely. :type dst_dataset_id: int :param with_annotations: If True Image will be copied to Dataset with annotations, otherwise only Images without annotations. :type with_annotations: bool, optional :param progress_cb: Function for tracking the progress of moving. :type progress_cb: tqdm or callable, optional :param dst_names: ImageInfo list with existing items in destination dataset. :type dst_names: List [ :class:`ImageInfo` ], optional :param batch_size: Number of elements to copy for each request. :type batch_size: int, optional :param skip_validation: Flag for skipping additinal validations. :type skip_validation: bool, optional :param save_source_date: Save source annotation dates (creation and modification) or create a new date. :type save_source_date: bool, optional :raises: :class:`TypeError` if type of src_image_infos is not list :return: List with information about Images. See :class:`info_sequence<info_sequence>` :rtype: :class:`List[ImageInfo]` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() src_ds_id = 2231 img_infos = api.image.get_list(src_ds_id) dest_ds_id = 2574 dest_img_infos = api.image.move_batch_optimized(src_ds_id, img_infos, dest_ds_id) """ new_images = self.copy_batch_optimized( src_dataset_id, src_image_infos, dst_dataset_id, with_annotations=with_annotations, progress_cb=progress_cb, dst_names=dst_names, batch_size=batch_size, skip_validation=skip_validation, save_source_date=save_source_date, ) src_ids = [info.id for info in src_image_infos] self.remove_batch(src_ids, batch_size=batch_size) return new_images
[docs] def copy( self, dst_dataset_id: int, id: int, change_name_if_conflict: Optional[bool] = False, with_annotations: Optional[bool] = False, ) -> ImageInfo: """ Copies Image with given ID to destination Dataset. :param dst_dataset_id: Destination Dataset ID in Supervisely. :type dst_dataset_id: int :param id: Image ID in Supervisely. :type id: int :param change_name_if_conflict: If True adds suffix to the end of Image name when Dataset already contains an Image with identical name, If False and images with the identical names already exist in Dataset raises error. :type change_name_if_conflict: bool, optional :param with_annotations: If True Image will be copied to Dataset with annotations, otherwise only Images without annotations. :type with_annotations: bool, optional :return: Information about Image. See :class:`info_sequence<info_sequence>` :rtype: :class:`ImageInfo` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() dst_ds_id = 365184 img_id = 121236920 img_info = api.image.copy(dst_ds_id, img_id, with_annotations=True) """ return self.copy_batch(dst_dataset_id, [id], change_name_if_conflict, with_annotations)[0]
[docs]    def move(
        self,
        dst_dataset_id: int,
        id: int,
        change_name_if_conflict: Optional[bool] = False,
        with_annotations: Optional[bool] = False,
    ) -> ImageInfo:
        """
        Moves Image with given ID to destination Dataset.

        :param dst_dataset_id: Destination Dataset ID in Supervisely.
        :type dst_dataset_id: int
        :param id: Image ID in Supervisely.
        :type id: int
        :param change_name_if_conflict: If True adds suffix to the end of Image name when Dataset already contains an Image with identical name, If False and images with the identical names already exist in Dataset raises error.
        :type change_name_if_conflict: bool, optional
        :param with_annotations: If True Image will be moved to Dataset with annotations, otherwise only Images without annotations.
        :type with_annotations: bool, optional
        :return: Information about Image. See :class:`info_sequence<info_sequence>`
        :rtype: :class:`ImageInfo`
        :Usage example:

         .. code-block:: python

            import supervisely as sly

            os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly'
            os.environ['API_TOKEN'] = 'Your Supervisely API Token'
            api = sly.Api.from_env()

            dst_ds_id = 365484
            img_id = 533336920
            img_info = api.image.move(dst_ds_id, img_id, with_annotations=True)
        """
        return self.move_batch(dst_dataset_id, [id], change_name_if_conflict, with_annotations)[0]
[docs]    def url(
        self,
        team_id: int,
        workspace_id: int,
        project_id: int,
        dataset_id: int,
        image_id: int,
    ) -> str:
        """
        Gets Image URL by ID.

        :param team_id: Team ID in Supervisely.
        :type team_id: int
        :param workspace_id: Workspace ID in Supervisely.
        :type workspace_id: int
        :param project_id: Project ID in Supervisely.
        :type project_id: int
        :param dataset_id: Dataset ID in Supervisely.
        :type dataset_id: int
        :param image_id: Image ID in Supervisely.
        :type image_id: int
        :return: Image URL
        :rtype: :class:`str`
        :Usage example:

         .. code-block:: python

            import supervisely as sly

            os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly'
            os.environ['API_TOKEN'] = 'Your Supervisely API Token'
            api = sly.Api.from_env()

            team_id = 16087
            workspace_id = 23821
            project_id = 53939
            dataset_id = 254737
            image_id = 121236920
            img_url = api.image.url(team_id, workspace_id, project_id, dataset_id, image_id)
            print(img_url)
            # Output: https://app.supervise.ly/app/images/16087/23821/53939/254737#image-121236920
        """
        result = urllib.parse.urljoin(
            self._api.server_address,
            "app/images/{}/{}/{}/{}#image-{}".format(
                team_id, workspace_id, project_id, dataset_id, image_id
            ),
        )

        return result
    def _download_batch_by_hashes(
        self, hashes, retry_attempts=4
    ) -> Generator[Tuple[str, Any, bool], None, None]:
        """
        Downloads Images with given hashes from Supervisely server in batches.

        :param hashes: List of images hashes in Supervisely.
        :type hashes: List[str]
        :param retry_attempts: Number of retry attempts for hashes that failed to download.
        :type retry_attempts: int, optional
        :return: Generator with images hashes, images data and verification status.
        :rtype: :class:`Generator[Tuple[str, Any, bool], None, None]`
        """
        for batch_hashes in batched(hashes):
            for attempt in range(retry_attempts + 1):  # the first attempt is not counted as a retry
                if len(batch_hashes) == 0:
                    break
                successful_hashes = []
                response = self._api.post(
                    "images.bulk.download-by-hash", {ApiField.HASHES: batch_hashes}
                )
                decoder = MultipartDecoder.from_response(response)
                for part in decoder.parts:
                    content_utf8 = part.headers[b"Content-Disposition"].decode("utf-8")
                    # The Content-Disposition header looks like 'form-data; name="<hash>"'.
                    # Strip the prefix and the trailing quote to extract the image hash.
                    h = content_utf8.replace('form-data; name="', "")[:-1]
                    image_data = io.BytesIO(part.content).getvalue()
                    image_hash = get_bytes_hash(image_data)
                    verified = False
                    if image_hash == h:
                        successful_hashes.append(h)
                        verified = True
                    yield h, part, verified
                batch_hashes = [h for h in batch_hashes if h not in successful_hashes]
                if len(batch_hashes) > 0:
                    if attempt == retry_attempts:
                        logger.warning(
                            "Failed to download images with hashes: %s. Skipping.", batch_hashes
                        )
                        break
                    else:
                        next_attempt = attempt + 1
                        logger.warning(
                            "Failed to download images with hashes: %s. Retrying (%d/%d).",
                            batch_hashes,
                            next_attempt,
                            retry_attempts,
                        )
                        sleep(2 ** (next_attempt))
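    # Illustrative sketch (not part of the API) of how the private generator above is
    # consumed; the hash values are hypothetical, and the real public entry points are
    # download_paths_by_hashes() and download_nps_by_hashes() below.
    #
    #   for img_hash, resp_part, verified in api.image._download_batch_by_hashes(["<hash1>", "<hash2>"]):
    #       if verified:
    #           print(f"downloaded {len(resp_part.content)} bytes for {img_hash}")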
[docs]    def download_paths_by_hashes(
        self,
        hashes: List[str],
        paths: List[str],
        progress_cb: Optional[Union[tqdm, Callable]] = None,
    ) -> None:
        """
        Downloads Images with given hashes from Supervisely server and saves them to the given paths.

        :param hashes: List of images hashes in Supervisely.
        :type hashes: List[str]
        :param paths: List of paths to save images.
        :type paths: List[str]
        :param progress_cb: Function for tracking download progress.
        :type progress_cb: tqdm or callable, optional
        :raises: :class:`ValueError` if len(hashes) != len(paths)
        :return: None
        :rtype: :class:`NoneType`
        :Usage example:

         .. code-block:: python

            import supervisely as sly

            os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly'
            os.environ['API_TOKEN'] = 'Your Supervisely API Token'
            api = sly.Api.from_env()

            dataset_id = 447130
            dir_for_save = '/home/admin/Downloads/img'
            hashes = []
            paths = []
            imgs_info = api.image.get_list(dataset_id)
            for im_info in imgs_info:
                hashes.append(im_info.hash)
                # It is necessary to save images with the same names (extensions) as on the server
                paths.append(os.path.join(dir_for_save, im_info.name))
            api.image.download_paths_by_hashes(hashes, paths)
        """
        if len(hashes) == 0:
            return
        if len(hashes) != len(paths):
            raise ValueError('Can not match "hashes" and "paths" lists, len(hashes) != len(paths)')

        h_to_path = {h: path for h, path in zip(hashes, paths)}
        for h, resp_part, verified in self._download_batch_by_hashes(list(set(hashes))):
            ensure_base_path(h_to_path[h])
            with open(h_to_path[h], "wb") as w:
                w.write(resp_part.content)
            if progress_cb is not None:
                if verified:
                    progress_cb(1)
    def download_nps_by_hashes_generator(
        self,
        hashes: List[str],
        keep_alpha: bool = False,
        progress_cb: Optional[Union[tqdm, Callable]] = None,
    ) -> Generator[Tuple[str, np.ndarray], None, None]:
        """
        Downloads Images with given hashes from Supervisely server and yields them
        one by one as (hash, numpy array) pairs. See :func:`download_nps_by_hashes`
        for the list-returning version.
        """
        if len(hashes) == 0:
            return
        if len(hashes) != len(set(hashes)):
            logger.warning("Found nonunique hashes in download task")
        for im_hash, resp_part, verified in self._download_batch_by_hashes(hashes):
            yield im_hash, sly_image.read_bytes(resp_part.content, keep_alpha)
            if progress_cb is not None:
                if verified:
                    progress_cb(1)
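    # Minimal usage sketch for the generator above; the hash values are hypothetical.
    # The generator form is useful when images should be processed one by one without
    # keeping all of them in memory at once.
    #
    #   api = sly.Api.from_env()
    #   for img_hash, img_np in api.image.download_nps_by_hashes_generator(["<hash1>", "<hash2>"]):
    #       print(img_hash, img_np.shape)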
[docs]    def download_nps_by_hashes(
        self,
        hashes: List[str],
        keep_alpha: bool = False,
        progress_cb: Optional[Union[tqdm, Callable]] = None,
    ) -> List[np.ndarray]:
        """
        Downloads Images with given hashes from Supervisely server in numpy format.

        :param hashes: List of images hashes in Supervisely.
        :type hashes: List[str]
        :param keep_alpha: If True, keeps the alpha channel of the images.
        :type keep_alpha: bool, optional
        :param progress_cb: Function for tracking download progress.
        :type progress_cb: tqdm or callable, optional
        :return: List of images
        :rtype: :class:`List[np.ndarray]`
        :Usage example:

         .. code-block:: python

            import supervisely as sly

            os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly'
            os.environ['API_TOKEN'] = 'Your Supervisely API Token'
            api = sly.Api.from_env()

            image_ids = [770918, 770919, 770920]
            image_hashes = []

            for img_id in image_ids:
                img_info = api.image.get_info_by_id(img_id)
                image_hashes.append(img_info.hash)

            image_nps = api.image.download_nps_by_hashes(image_hashes)
        """
        return [
            img
            for _, img in self.download_nps_by_hashes_generator(
                hashes,
                keep_alpha,
                progress_cb,
            )
        ]
[docs] def get_project_id(self, image_id: int) -> int: """ Gets Project ID by Image ID. :param image_id: Image ID in Supervisely. :type image_id: int :return: Project ID where Image is located. :rtype: :class:`int` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() img_id = 121236920 img_project_id = api.image.get_project_id(img_id) print(img_project_id) # Output: 53939 """ dataset_id = self.get_info_by_id(image_id, force_metadata_for_links=False).dataset_id project_id = self._api.dataset.get_info_by_id(dataset_id).project_id return project_id
    @staticmethod
    def _get_free_name(exist_check_fn, name):
        """
        Adds a numeric suffix to the file name (before the extension) until
        ``exist_check_fn`` returns False, e.g. "img.jpg" -> "img_001.jpg".
        """
        res_title = name
        suffix = 1

        name_without_ext = get_file_name(name)
        ext = get_file_ext(name)

        while exist_check_fn(res_title):
            res_title = "{}_{:03d}{}".format(name_without_ext, suffix, ext)
            suffix += 1
        return res_title
[docs] def storage_url(self, path_original: str) -> str: """ Get full Image URL link in Supervisely server. :param path_original: Original Image path in Supervisely server. :type path_original: str :return: Full Image URL link in Supervisely server :rtype: :class:`str` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() image_id = 376729 img_info = api.image.get_info_by_id(image_id) img_storage_url = api.image.storage_url(img_info.path_original) """ return path_original
[docs]    def preview_url(
        self,
        url: str,
        width: Optional[int] = None,
        height: Optional[int] = None,
        quality: Optional[int] = 70,
    ) -> str:
        """
        Returns URL of the Image preview with the given resolution parameters.

        :param url: Full Image storage URL.
        :type url: str
        :param width: Preview Image width.
        :type width: int
        :param height: Preview Image height.
        :type height: int
        :param quality: Preview Image quality.
        :type quality: int
        :return: New URL with resized Image
        :rtype: :class:`str`
        :Usage example:

         .. code-block:: python

            import supervisely as sly

            os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly'
            os.environ['API_TOKEN'] = 'Your Supervisely API Token'
            api = sly.Api.from_env()

            image_id = 376729
            img_info = api.image.get_info_by_id(image_id)
            img_preview_url = api.image.preview_url(img_info.full_storage_url, width=512, height=256)
        """
        # @TODO: if both width and height are defined and they are not proportional to the original
        # image resolution, then the image will be cropped from the center
        if width is None:
            width = ""
        if height is None:
            height = ""
        return url.replace(
            "/image-converter",
            f"/previews/{width}x{height},jpeg,q{quality}/image-converter",
        )
[docs]    def update_meta(self, id: int, meta: Dict) -> Dict:
        """
        Updates Image metadata by ID. It is possible to add custom JSON data to every image
        to store some additional information. Metadata is visible in the Labeling Tool.
        Supervisely also has two apps for working with metadata: import metadata and export metadata.

        :param id: Image ID in Supervisely.
        :type id: int
        :param meta: Image metadata.
        :type meta: dict
        :raises: :class:`TypeError` if meta type is not dict
        :return: Image information in dict format with new meta
        :rtype: :class:`dict`
        :Usage example:

         .. code-block:: python

            import os
            import json
            import supervisely as sly

            os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly'
            os.environ['API_TOKEN'] = 'Your Supervisely API Token'
            api = sly.Api.from_env()

            image_info = api.image.get_info_by_id(id=3212008)
            print(image_info.meta)
            # Output: {}

            new_meta = {'Camera Make': 'Canon', 'Color Space': 'sRGB', 'Focal Length': '16 mm'}

            new_image_info = api.image.update_meta(id=3212008, meta=new_meta)

            image_info = api.image.get_info_by_id(id=3212008)
            print(json.dumps(obj=image_info.meta, indent=4))
            # Output: {
            #     "Camera Make": "Canon",
            #     "Color Space": "sRGB",
            #     "Focal Length": "16 mm"
            # }
        """
        if type(meta) is not dict:
            raise TypeError("Meta must be dict, not {}".format(type(meta)))
        response = self._api.post("images.editInfo", {ApiField.ID: id, ApiField.META: meta})
        return response.json()
[docs] def add_tag(self, image_id: int, tag_id: int, value: Optional[Union[str, int]] = None) -> None: """ Add tag with given ID to Image by ID. :param image_id: Image ID in Supervisely. :type image_id: int :param tag_id: Tag ID in Supervisely. :type tag_id: int :param value: Tag value. :type value: int or str or None, optional :return: :class:`None<None>` :rtype: :class:`NoneType<NoneType>` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() image_id = 2389126 tag_id = 277083 api.image.add_tag(image_id, tag_id) """ self.add_tag_batch([image_id], tag_id, value)
[docs] def add_tag_batch( self, image_ids: List[int], tag_id: int, value: Optional[Union[str, int]] = None, progress_cb: Optional[Union[tqdm, Callable]] = None, batch_size: Optional[int] = 100, tag_meta: Optional[TagMeta] = None, ) -> None: """ Add tag with given ID to Images by IDs. :param image_ids: List of Images IDs in Supervisely. :type image_ids: List[int] :param tag_id: Tag ID in Supervisely. :type tag_id: int :param value: Tag value. :type value: int or str or None, optional :param progress_cb: Function for tracking progress of adding tag. :type progress_cb: tqdm or callable, optional :param batch_size: Batch size :type batch_size: int, optional :param tag_meta: Tag Meta. Needed for value validation, omit to skip validation :type tag_meta: TagMeta, optional :return: :class:`None<None>` :rtype: :class:`NoneType<NoneType>` :Usage example: .. code-block:: python import supervisely as sly os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly' os.environ['API_TOKEN'] = 'Your Supervisely API Token' api = sly.Api.from_env() image_ids = [2389126, 2389127] tag_id = 277083 api.image.add_tag_batch(image_ids, tag_id) """ if tag_meta: if not (tag_meta.sly_id == tag_id): raise ValueError("tag_meta.sly_id and tag_id should be same") if not tag_meta.is_valid_value(value): raise ValueError("Tag {} can not have value {}".format(tag_meta.name, value)) else: # No value validation pass for batch_ids in batched(image_ids, batch_size): data = {ApiField.TAG_ID: tag_id, ApiField.IDS: batch_ids} if value is not None: data[ApiField.VALUE] = value self._api.post("image-tags.bulk.add-to-image", data) if progress_cb is not None: progress_cb(len(batch_ids))
[docs]    def remove_batch(
        self,
        ids: List[int],
        progress_cb: Optional[Union[tqdm, Callable]] = None,
        batch_size: Optional[int] = 50,
    ):
        """
        Removes Images from Supervisely by IDs.

        :param ids: List of Images IDs in Supervisely.
        :type ids: List[int]
        :param progress_cb: Function for tracking progress of removing.
        :type progress_cb: tqdm or callable, optional
        :param batch_size: Number of images to remove in each request.
        :type batch_size: int, optional
        :return: :class:`None<None>`
        :rtype: :class:`NoneType<NoneType>`
        :Usage example:

         .. code-block:: python

            import supervisely as sly

            os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly'
            os.environ['API_TOKEN'] = 'Your Supervisely API Token'
            api = sly.Api.from_env()

            image_ids = [2389126, 2389127]
            api.image.remove_batch(image_ids)
        """
        super(ImageApi, self).remove_batch(ids, progress_cb=progress_cb, batch_size=batch_size)
[docs]    def remove(self, image_id: int):
        """
        Removes Image from Supervisely by ID.

        :param image_id: Image ID in Supervisely.
        :type image_id: int
        :return: :class:`None<None>`
        :rtype: :class:`NoneType<NoneType>`
        :Usage example:

         .. code-block:: python

            import supervisely as sly

            os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly'
            os.environ['API_TOKEN'] = 'Your Supervisely API Token'
            api = sly.Api.from_env()

            image_id = 2389126
            api.image.remove(image_id)
        """
        super(ImageApi, self).remove(image_id)
[docs] def exists(self, parent_id: int, name: str) -> bool: """Check if image with given name exists in dataset with given id. :param parent_id: Dataset ID in Supervisely. :type parent_id: int :param name: Image name in Supervisely. :type name: str :return: True if image exists, False otherwise. :rtype: bool""" return self.get_info_by_name(parent_id, name, force_metadata_for_links=False) is not None
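    # Usage sketch for exists(); the dataset ID and image name below are hypothetical.
    #
    #   api = sly.Api.from_env()
    #   if not api.image.exists(parent_id=452984, name="IMG_0748.jpeg"):
    #       print("Image is not in the dataset yet")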
[docs]    def upload_multispectral(
        self,
        dataset_id: int,
        image_name: str,
        channels: Optional[List[np.ndarray]] = None,
        rgb_images: Optional[List[str]] = None,
        progress_cb: Optional[Union[tqdm, Callable]] = None,
    ) -> List[ImageInfo]:
        """Uploads multispectral image to Supervisely. If channels are provided, they will be uploaded as separate images.
        If rgb_images are provided, they will be uploaded without splitting into channels as RGB images.

        :param dataset_id: dataset ID to upload images to
        :type dataset_id: int
        :param image_name: name of the image with extension.
        :type image_name: str
        :param channels: list of numpy arrays with image channels
        :type channels: List[np.ndarray], optional
        :param rgb_images: list of paths to RGB images which will be uploaded as is
        :type rgb_images: List[str], optional
        :param progress_cb: function for tracking upload progress
        :type progress_cb: tqdm or callable, optional
        :return: list of uploaded images infos
        :rtype: List[ImageInfo]

        :Usage example:

         .. code-block:: python

            import os

            import cv2
            from dotenv import load_dotenv

            import supervisely as sly

            os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly'
            os.environ['API_TOKEN'] = 'Your Supervisely API Token'

            # Load secrets and create API object from .env file (recommended)
            # Learn more here: https://developer.supervisely.com/getting-started/basics-of-authentication
            load_dotenv(os.path.expanduser("~/supervisely.env"))
            api = sly.Api.from_env()

            dataset_id = 123456
            image_name = "demo1.png"
            image = cv2.imread(f"demo_data/{image_name}")

            # Extract channels as 2d numpy arrays: channels = [a, b, c]
            channels = [image[:, :, i] for i in range(image.shape[2])]

            image_infos = api.image.upload_multispectral(dataset_id, image_name, channels)
        """
        group_tag_meta = TagMeta(_MULTISPECTRAL_TAG_NAME, TagValueType.ANY_STRING)
        group_tag = Tag(meta=group_tag_meta, value=image_name)
        image_basename = get_file_name(image_name)

        nps_for_upload = []
        if rgb_images is not None:
            for rgb_image in rgb_images:
                nps_for_upload.append(sly_image.read(rgb_image))

        if channels is not None:
            for channel in channels:
                nps_for_upload.append(channel)

        anns = []
        names = []

        for i, np_for_upload in enumerate(nps_for_upload):
            anns.append(Annotation(np_for_upload.shape).add_tag(group_tag))
            names.append(f"{image_basename}_{i}.png")

        image_infos = self.upload_nps(dataset_id, names, nps_for_upload, progress_cb=progress_cb)
        image_ids = [image_info.id for image_info in image_infos]

        self._api.annotation.upload_anns(image_ids, anns)

        return image_infos
[docs]    def upload_multiview_images(
        self,
        dataset_id: int,
        group_name: str,
        paths: List[str],
        metas: Optional[List[Dict]] = None,
        progress_cb: Optional[Union[tqdm, Callable]] = None,
    ) -> List[ImageInfo]:
        """
        Uploads images to Supervisely and tags them as a multi-view group with the given group name.

        :param dataset_id: Dataset ID in Supervisely.
        :type dataset_id: int
        :param group_name: Group name. All uploaded images will be tagged with this group name.
        :type group_name: str
        :param paths: List of paths to images.
        :type paths: List[str]
        :param metas: List of image metas (optional)
        :type metas: Optional[List[Dict]]
        :param progress_cb: Function for tracking upload progress.
        :type progress_cb: Optional[Union[tqdm, Callable]]
        :return: List of uploaded images infos
        :rtype: List[ImageInfo]
        :raises RuntimeError: if any of the given images has an unsupported extension
        :raises Exception: if tag does not exist in project or tag is not of type ANY_STRING

        :Usage example:

         .. code-block:: python

            import os
            from dotenv import load_dotenv

            import supervisely as sly

            os.environ['SERVER_ADDRESS'] = 'https://app.supervise.ly'
            os.environ['API_TOKEN'] = 'Your Supervisely API Token'

            # Load secrets and create API object from .env file (recommended)
            # Learn more here: https://developer.supervisely.com/getting-started/basics-of-authentication
            load_dotenv(os.path.expanduser("~/supervisely.env"))
            api = sly.Api.from_env()

            dataset_id = 123456
            paths = ['path/to/audi_01.png', 'path/to/audi_02.png']
            group_name = 'audi'

            image_infos = api.image.upload_multiview_images(dataset_id, group_name, paths)
        """
        group_tag_meta = TagMeta(_MULTIVIEW_TAG_NAME, TagValueType.ANY_STRING)
        group_tag = Tag(meta=group_tag_meta, value=group_name)

        for path in paths:
            if get_file_ext(path) not in sly_image.SUPPORTED_IMG_EXTS:
                raise RuntimeError(
                    f"Image {path} has unsupported extension. Supported extensions: {sly_image.SUPPORTED_IMG_EXTS}"
                )

        names = [get_file_name(path) for path in paths]
        image_infos = self.upload_paths(
            dataset_id=dataset_id,
            names=names,
            paths=paths,
            progress_cb=progress_cb,
            metas=metas,
        )
        anns = [Annotation((info.height, info.width)).add_tag(group_tag) for info in image_infos]
        image_ids = [image_info.id for image_info in image_infos]
        self._api.annotation.upload_anns(image_ids, anns)

        uploaded_image_infos = self.get_list(
            dataset_id, filters=[{"field": "id", "operator": "in", "value": image_ids}]
        )

        return uploaded_image_infos
[docs] def get_free_names(self, dataset_id: int, names: List[str]) -> List[str]: """ Returns list of free names for given dataset. :param dataset_id: Dataset ID in Supervisely. :type dataset_id: int :param names: List of names to check. :type names: List[str] :return: List of free names. :rtype: List[str] """ images_in_dataset = self.get_list(dataset_id, force_metadata_for_links=False) used_names = {image_info.name for image_info in images_in_dataset} new_names = [ generate_free_name(used_names, name, with_ext=True, extend_used_names=True) for name in names ] return new_names
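    # Usage sketch for get_free_names(); the dataset ID and names below are hypothetical.
    #
    #   api = sly.Api.from_env()
    #   names = api.image.get_free_names(452984, ["car.jpg", "person.jpg"])
    #   # names now holds conflict-free versions of the requested names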
[docs] def raise_name_intersections_if_exist( self, dataset_id: int, names: List[str], message: str = None ): """ Raises error if images with given names already exist in dataset. Default error message: "Images with the following names already exist in dataset [ID={dataset_id}]: {name_intersections}. Please, rename images and try again or set change_name_if_conflict=True to rename automatically on upload." :param dataset_id: Dataset ID in Supervisely. :type dataset_id: int :param names: List of names to check. :type names: List[str] :param message: Error message. :type message: str, optional :return: None :rtype: None """ images_in_dataset = self.get_list(dataset_id) used_names = {image_info.name for image_info in images_in_dataset} name_intersections = used_names.intersection(set(names)) if message is None: message = f"Images with the following names already exist in dataset [ID={dataset_id}]: {name_intersections}. Please, rename images and try again or set change_name_if_conflict=True to rename automatically on upload." if len(name_intersections) > 0: raise ValueError(f"{message}")
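    # Usage sketch for raise_name_intersections_if_exist(); the dataset ID and names are hypothetical.
    #
    #   api = sly.Api.from_env()
    #   try:
    #       api.image.raise_name_intersections_if_exist(452984, ["car.jpg", "person.jpg"])
    #   except ValueError as e:
    #       print(e)  # some of the names are already taken in the dataset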
[docs] def upload_dir( self, dataset_id: int, dir_path: str, recursive: Optional[bool] = True, change_name_if_conflict: Optional[bool] = True, progress_cb: Optional[Union[tqdm, Callable]] = None, ) -> List[ImageInfo]: """ Uploads all images with supported extensions from given directory to Supervisely. Optionally, uploads images from subdirectories of given directory. :param dataset_id: Dataset ID in Supervisely. :type dataset_id: int :param dir_path: Path to directory with images. :type dir_path: str :param recursive: If True uploads images from subdirectories of given directory recursively, otherwise only images from given directory. :type recursive: bool, optional :param change_name_if_conflict: If True adds suffix to the end of Image name when Dataset already contains an Image with identical name, If False and images with the identical names already exist in Dataset raises error. :type change_name_if_conflict: bool, optional :param progress_cb: Function for tracking upload progress. :type progress_cb: Optional[Union[tqdm, Callable]] :return: List of uploaded images infos :rtype: List[ImageInfo] """ if recursive: paths = list_files_recursively(dir_path, filter_fn=sly_image.is_valid_format) else: paths = list_files(dir_path, filter_fn=sly_image.is_valid_format) names = [get_file_name_with_ext(path) for path in paths] if change_name_if_conflict is True: names = self.get_free_names(dataset_id, names) else: self.raise_name_intersections_if_exist(dataset_id, names) image_infos = self.upload_paths(dataset_id, names, paths, progress_cb=progress_cb) return image_infos
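    # Usage sketch for upload_dir(); the dataset ID and directory path below are hypothetical.
    #
    #   api = sly.Api.from_env()
    #   infos = api.image.upload_dir(452984, "/home/admin/images", recursive=True)
    #   print(f"Uploaded {len(infos)} images")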
[docs] def upload_dirs( self, dataset_id: int, dir_paths: List[str], recursive: Optional[bool] = True, change_name_if_conflict: Optional[bool] = True, progress_cb: Optional[Union[tqdm, Callable]] = None, ) -> List[ImageInfo]: """ Uploads all images with supported extensions from given directories to Supervisely. Optionally, uploads images from subdirectories of given directories. :param dataset_id: Dataset ID in Supervisely. :type dataset_id: int :param dir_paths: List of paths to directories with images. :type dir_paths: List[str] :param recursive: If True uploads images from subdirectories of given directories recursively, otherwise only images from given directories. :type recursive: bool, optional :param change_name_if_conflict: If True adds suffix to the end of Image name when Dataset already contains an Image with identical name, If False and images with the identical names already exist in Dataset raises error. :type change_name_if_conflict: bool, optional :param progress_cb: Function for tracking upload progress. :type progress_cb: Optional[Union[tqdm, Callable]] :return: List of uploaded images infos :rtype: List[ImageInfo] """ image_infos = [] for dir_path in dir_paths: image_infos.extend( self.upload_dir( dataset_id, dir_path, recursive, change_name_if_conflict, progress_cb ) ) return image_infos
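    # Usage sketch for upload_dirs(); the dataset ID and directory paths below are hypothetical.
    #
    #   api = sly.Api.from_env()
    #   infos = api.image.upload_dirs(452984, ["/data/batch1", "/data/batch2"])
    #   print(f"Uploaded {len(infos)} images in total")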