from typing import List, Set
from pathlib import Path

import os

from PIL import Image
from shapely.geometry import Polygon

import numpy as np
import skimage.measure

from ..base_converter import BaseConverter
from ...annotation import CoretexImageAnnotation, CoretexSegmentationInstance, ImageDatasetClass, BBox


class HumanSegmentationConverter(BaseConverter):

    def __init__(self, datasetName: str, projectId: int, datasetPath: str) -> None:
        super().__init__(datasetName, projectId, datasetPath)

        self.__imagesPath = os.path.join(datasetPath, "images")
        self.__annotationsPath = os.path.join(datasetPath, "annotations")

        # Only jpg files are treated as dataset images
        self.__imageNames = list(filter(
            lambda path: path.endswith("jpg"),
            os.listdir(self.__imagesPath)
        ))

    @property
    def __backgroundClass(self) -> ImageDatasetClass:
        coretexClass = self._dataset.classByName("background")
        if coretexClass is None:
            raise ValueError(">> [Coretex] Class: (background) is not a part of dataset")

        return coretexClass

    @property
    def __humanClass(self) -> ImageDatasetClass:
        coretexClass = self._dataset.classByName("human")
        if coretexClass is None:
            raise ValueError(">> [Coretex] Class: (human) is not a part of dataset")

        return coretexClass

    def _dataSource(self) -> List[str]:
        return self.__imageNames

    def _extractLabels(self) -> Set[str]:
        return set(["background", "human"])

    def __extractPolygons(self, annotationPath: str, imageWidth: int, imageHeight: int) -> List[List[int]]:
        imageFile = Image.open(annotationPath)

        maskImage = imageFile.resize((imageWidth, imageHeight), Image.Resampling.LANCZOS)
        # np.array copies the pixel data so the border pixels below can be modified
        subMaskArray = np.array(maskImage, dtype = np.uint8)

        # Zero out the border so contours touching the image edge are closed
        subMaskArray[:, 0] = 0
        subMaskArray[0, :] = 0
        subMaskArray[:, -1] = 0
        subMaskArray[-1, :] = 0

        contours = skimage.measure.find_contours(subMaskArray, 0.5)

        segmentations: List[List[int]] = []
        for contour in contours:
            # find_contours returns (row, col) pairs - convert them to (x, y)
            for i in range(len(contour)):
                row, col = contour[i]
                contour[i] = (col - 1, row - 1)

            # Build a polygon from the contour and simplify it
            poly = Polygon(contour)
            poly = poly.simplify(1.0, preserve_topology = False)

            if poly.geom_type == 'MultiPolygon':
                # Take the smallest convex polygon containing all the points in the object
                poly = poly.convex_hull

            # Ignore degenerate geometries (points, lines)
            if poly.geom_type == 'Polygon':
                segmentation = np.array(poly.exterior.coords).ravel().tolist()
                segmentations.append(segmentation)

        # Longest polygon first - it is treated as the main human silhouette
        segmentations.sort(key = len, reverse = True)

        return segmentations

    def __extractInstances(self, annotationPath: str, imageWidth: int, imageHeight: int) -> List[CoretexSegmentationInstance]:
        polygons = self.__extractPolygons(annotationPath, imageWidth, imageHeight)
        if len(polygons) == 0:
            return []

        instances: List[CoretexSegmentationInstance] = []

        # The largest polygon is assumed to be the human silhouette
        largestBBox = BBox.fromPoly(polygons[0])
        firstPolygon = Polygon(np.array(polygons[0]).reshape((-1, 2)))

        # Background instance covers the whole image
        backgroundBoundingBox = BBox(0, 0, imageWidth, imageHeight)
        instances.append(CoretexSegmentationInstance.create(
            self.__backgroundClass.classIds[0],
            backgroundBoundingBox,
            [backgroundBoundingBox.polygon]
        ))

        # Human instance built from the largest polygon
        instances.append(CoretexSegmentationInstance.create(
            self.__humanClass.classIds[0],
            largestBBox,
            [polygons[0]]
        ))

        # Remaining polygons: one that intersects the main silhouette is a hole
        # inside it (background), otherwise it is another human instance
        for index in range(1, len(polygons)):
            currentBBox = BBox.fromPoly(polygons[index])
            currentPolygon = Polygon(np.array(polygons[index]).reshape((-1, 2)))

            instances.append(CoretexSegmentationInstance.create(
                self.__backgroundClass.classIds[0] if currentPolygon.intersects(firstPolygon) else self.__humanClass.classIds[0],
                currentBBox,
                [polygons[index]]
            ))

        return instances

    def _extractSingleAnnotation(self, imageName: str) -> None:
        imagePath = os.path.join(self.__imagesPath, imageName)
        annotationPath = os.path.join(self.__annotationsPath, f"{Path(imagePath).stem}.png")

        image = Image.open(imagePath)
        instances = self.__extractInstances(annotationPath, image.width, image.height)

        coretexAnnotation = CoretexImageAnnotation.create(imageName, image.width, image.height, instances)
        self._saveImageAnnotationPair(imagePath, coretexAnnotation)
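
# Usage sketch (illustrative assumption, not part of this module): the converter is
# expected to be driven through BaseConverter's public conversion entry point. The
# method name ("convert"), project id and dataset path below are placeholders:
#
#   converter = HumanSegmentationConverter("human-seg", 1023, "/path/to/dataset")
#   converter.convert()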