from uuid import UUID
from typing import List, Dict, Tuple, Optional
from typing_extensions import Self
from math import cos, sin, radians

from PIL import Image, ImageDraw

import numpy as np

from .bbox import BBox
from .classes_format import ImageDatasetClasses
from ....codable import Codable, KeyDescriptor

SegmentationType = List[int]


def toPoly(segmentation: List[int]) -> List[Tuple[int, int]]:
    # convert a flat [x1, y1, x2, y2, ...] list into a list of (x, y) points
    points: List[Tuple[int, int]] = []

    for index in range(0, len(segmentation) - 1, 2):
        points.append((segmentation[index], segmentation[index + 1]))

    return points
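
# Example (illustrative, not part of the original module): a flat [x1, y1, x2, y2, ...]
# segmentation becomes the list of (x, y) tuples that PIL's ImageDraw.polygon expects:
#   toPoly([0, 0, 10, 0, 10, 10]) == [(0, 0), (10, 0), (10, 10)]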

class CoretexSegmentationInstance(Codable):

    """
        Segmentation Instance class

        Properties
        ----------
        classId : UUID
            uuid of class
        bbox : BBox
            Bounding Box as a python class
        segmentations : List[SegmentationType]
            list of segmentations that define the precise boundaries of the object
    """

    classId: UUID
    bbox: BBox
    segmentations: List[SegmentationType]

    @classmethod
    def _keyDescriptors(cls) -> Dict[str, KeyDescriptor]:
        descriptors = super()._keyDescriptors()

        return descriptors
    @classmethod
    def create(cls, classId: UUID, bbox: BBox, segmentations: List[SegmentationType]) -> Self:
        """
            Creates CoretexSegmentationInstance object with provided parameters

            Parameters
            ----------
            classId : UUID
                uuid of class
            bbox : BBox
                Bounding Box as a python class
            segmentations : List[SegmentationType]
                list of segmentations that define the precise boundaries of the object

            Returns
            -------
            The created CoretexSegmentationInstance object
        """

        obj = cls()

        obj.classId = classId
        obj.bbox = bbox
        obj.segmentations = segmentations

        return obj
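    # Illustrative call (sketch only): `someClassId` and `someBBox` stand in for an
    # existing UUID and BBox, and the geometry values below are made up.
    #   instance = CoretexSegmentationInstance.create(
    #       someClassId,
    #       someBBox,
    #       [[10, 10, 50, 10, 50, 50, 10, 50]]  # one square polygon as a flat list
    #   )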
    def extractSegmentationMask(self, width: int, height: int) -> np.ndarray:
        """
            Generates segmentation mask based on provided
            width and height of image
            Pixels covered by a segmentation are set to 1, the rest to 0

            Parameters
            ----------
            width : int
                width of image in pixels
            height : int
                height of image in pixels

            Returns
            -------
            np.ndarray -> segmentation mask represented as np.ndarray

            Raises
            ------
            ValueError -> if segmentation has less than 4 values
        """

        image = Image.new("L", (width, height))

        for segmentation in self.segmentations:
            if len(segmentation) < 4:
                raise ValueError(f">> [Coretex] Segmentation has too few values ({len(segmentation)}. Minimum: 4)")

            draw = ImageDraw.Draw(image)
            draw.polygon(toPoly(segmentation), fill = 1)

        return np.array(image)
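    # Illustrative note: for an instance whose only segmentation is the square
    # [10, 10, 50, 10, 50, 50, 10, 50], extractSegmentationMask(64, 64) returns a
    # 64x64 uint8 array that is 1 inside the square and 0 everywhere else.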
    def extractBinarySegmentationMask(self, width: int, height: int) -> np.ndarray:
        """
            Works the same way as the extractSegmentationMask function
            Values that are > 0 are capped to 1

            Parameters
            ----------
            width : int
                width of image in pixels
            height : int
                height of image in pixels

            Returns
            -------
            np.ndarray -> binary segmentation mask represented as np.ndarray
        """

        binaryMask = self.extractSegmentationMask(width, height)
        binaryMask[binaryMask > 0] = 1

        return binaryMask
    def centroid(self) -> Tuple[int, int]:
        """
            Calculates centroid of segmentations

            Returns
            -------
            Tuple[int, int] -> x, y coordinates of centroid
        """

        flattenedSegmentations = [element for sublist in self.segmentations for element in sublist]

        listCX = [value for index, value in enumerate(flattenedSegmentations) if index % 2 == 0]
        centerX = sum(listCX) // len(listCX)

        listCY = [value for index, value in enumerate(flattenedSegmentations) if index % 2 != 0]
        centerY = sum(listCY) // len(listCY)

        return centerX, centerY
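    # Worked example (illustrative): for segmentations == [[10, 10, 50, 10, 50, 50, 10, 50]]
    # the even-indexed values are the x coordinates (10, 50, 50, 10) and the odd-indexed
    # values are the y coordinates, so centroid() returns (30, 30).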
    def centerSegmentations(self, newCentroid: Tuple[int, int]) -> None:
        """
            Centers segmentations to the specified center point

            Parameters
            ----------
            newCentroid : Tuple[int, int]
                x, y coordinates of the new centroid
        """

        newCenterX, newCenterY = newCentroid
        oldCenterX, oldCenterY = self.centroid()

        modifiedSegmentations: List[List[int]] = []

        for segmentation in self.segmentations:
            modifiedSegmentation: List[int] = []

            for i in range(0, len(segmentation), 2):
                x = segmentation[i] + (newCenterX - oldCenterX)
                y = segmentation[i + 1] + (newCenterY - oldCenterY)

                modifiedSegmentation.append(x)
                modifiedSegmentation.append(y)

            modifiedSegmentations.append(modifiedSegmentation)

        self.segmentations = modifiedSegmentations
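    # Illustrative effect: for the square segmentation [10, 10, 50, 10, 50, 50, 10, 50]
    # (centroid (30, 30)), centerSegmentations((32, 32)) shifts every point by (+2, +2),
    # giving [[12, 12, 52, 12, 52, 52, 12, 52]].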
    def rotateSegmentations(self, degrees: int, origin: Optional[Tuple[int, int]] = None) -> None:
        """
            Rotates segmentations of CoretexSegmentationInstance object

            Parameters
            ----------
            degrees : int
                angle of rotation in degrees
            origin : Optional[Tuple[int, int]]
                point around which the segmentations are rotated,
                defaults to the centroid of the segmentations
        """

        if origin is None:
            origin = self.centroid()

        rotatedSegmentations: List[List[int]] = []
        centerX, centerY = origin

        # the angle is negated so that the polygon rotation matches the direction
        # in which the corresponding image is rotated
        theta = radians(-degrees)
        cosang, sinang = cos(theta), sin(theta)

        for segmentation in self.segmentations:
            rotatedSegmentation: List[int] = []

            for i in range(0, len(segmentation), 2):
                # translate the point to the origin, rotate it, translate it back
                x = segmentation[i] - centerX
                y = segmentation[i + 1] - centerY

                newX = int(x * cosang - y * sinang) + centerX
                newY = int(x * sinang + y * cosang) + centerY

                rotatedSegmentation.append(newX)
                rotatedSegmentation.append(newY)

            rotatedSegmentations.append(rotatedSegmentation)

        self.segmentations = rotatedSegmentations
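
# A small usage sketch, not part of the SDK source: it assumes a valid class UUID and
# a BBox are already available, and the geometry and image size below are made up.
def _segmentationInstanceExample(classId: UUID, bbox: BBox) -> np.ndarray:
    # one square segmentation given as a flat [x1, y1, x2, y2, ...] list
    instance = CoretexSegmentationInstance.create(classId, bbox, [[10, 10, 50, 10, 50, 50, 10, 50]])

    # move the square's centroid to the image center, then rotate it by 45 degrees
    instance.centerSegmentations((32, 32))
    instance.rotateSegmentations(45)

    # rasterize to a 64x64 mask containing only zeros and ones
    return instance.extractBinarySegmentationMask(64, 64)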

class CoretexImageAnnotation(Codable):

    """
        Image Annotation class

        Properties
        ----------
        name : str
            name of annotation class
        width : int
            width of image in pixels
        height : int
            height of image in pixels
        instances : List[CoretexSegmentationInstance]
            list of SegmentationInstance objects
    """

    name: str
    width: int
    height: int
    instances: List[CoretexSegmentationInstance]

    @classmethod
    def _keyDescriptors(cls) -> Dict[str, KeyDescriptor]:
        descriptors = super()._keyDescriptors()
        descriptors["instances"] = KeyDescriptor("instances", CoretexSegmentationInstance, list)

        return descriptors
    @classmethod
    def create(
        cls,
        name: str,
        width: int,
        height: int,
        instances: List[CoretexSegmentationInstance]
    ) -> Self:
        """
            Creates CoretexImageAnnotation object with provided parameters

            Parameters
            ----------
            name : str
                name of annotation class
            width : int
                width of image in pixels
            height : int
                height of image in pixels
            instances : List[CoretexSegmentationInstance]
                list of SegmentationInstance objects

            Returns
            -------
            The created CoretexImageAnnotation object
        """

        obj = cls()

        obj.name = name
        obj.width = width
        obj.height = height
        obj.instances = instances

        return obj
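    # Illustrative call (sketch only; `instance` stands in for a previously created
    # CoretexSegmentationInstance, and the image size is made up):
    #   annotation = CoretexImageAnnotation.create("example", 64, 64, [instance])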
    def extractSegmentationMask(self, classes: ImageDatasetClasses) -> np.ndarray:
        """
            Generates segmentation mask for the provided ImageDatasetClasses object

            Parameters
            ----------
            classes : ImageDatasetClasses
                list of dataset classes

            Returns
            -------
            np.ndarray -> segmentation mask represented as np.ndarray
        """

        image = Image.new("L", (self.width, self.height))

        for instance in self.instances:
            labelId = classes.labelIdForClassId(instance.classId)
            if labelId is None:
                continue

            for segmentation in instance.segmentations:
                if len(segmentation) < 4:
                    raise ValueError(f">> [Coretex] Segmentation has too few values ({len(segmentation)}. Minimum: 4)")

                draw = ImageDraw.Draw(image)
                draw.polygon(toPoly(segmentation), fill = labelId + 1)

        return np.asarray(image)
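
# A small usage sketch, not part of the SDK source. Constructing ImageDatasetClasses is
# dataset-specific, so an existing object is taken as a parameter here; only its
# labelIdForClassId method (already used above) is relied on.
def _imageAnnotationExample(
    instances: List[CoretexSegmentationInstance],
    classes: ImageDatasetClasses
) -> np.ndarray:
    # wrap the instances into an annotation for a made-up 64x64 image
    annotation = CoretexImageAnnotation.create("example", 64, 64, instances)

    # pixels covered by an instance get the value labelId + 1, the background stays 0
    return annotation.extractSegmentationMask(classes)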