@@ -1258,6 +1258,7 @@ def dict_to_store_semantic_segmentor(
     """
     preds = patch_output["predictions"]
+    preds = preds[0]
     layer_list = np.unique(preds)
     layer_list = np.delete(layer_list, np.where(layer_list == 0))
     layer_info_dict = {}
@@ -1273,76 +1274,32 @@ def dict_to_store_semantic_segmentor(
             cv2.RETR_TREE,
             cv2.CHAIN_APPROX_NONE,
         )
-        for layer in contours:
-            coords = layer[:, 0, :]
+        for layer_ in contours:
+            coords = layer_[:, 0, :]
             layer_info_dict[count] = {
                 "contours": coords,
-                "type": class_dict[type_class],
+                "type": "mask",
             }
             count += 1

             origin = (0, 0)
-
-            annotations.append(
-                Annotation(
-                    geometry=make_valid_poly(
-                        feature2geometry(
+            scaled_coords = np.array([scale_factor * coords])
+            feature_geom = feature2geometry(
                 {
                     "type": "Polygon",
-                    "coordinates": scale_factor * coords,
+                    "coordinates": scaled_coords,
                 },
-                ),
+            )
+            annotations.append(
+                Annotation(
+                    geometry=make_valid_poly(
+                        feature_geom,
                         origin=origin,
                     ),
-                    properties={},
+                    properties={"type": "mask"},
                 )
             )

-    # return layer_info_dict
-
-    # if "coordinates" not in patch_output:
-    #     # we cant create annotations without coordinates
-    #     msg = "Patch output must contain coordinates."
-    #     raise ValueError(msg)
-    #
-    # # get relevant keys
-    # class_probs = get_zarr_array(patch_output.get("probabilities", []))
-    # preds = get_zarr_array(patch_output.get("predictions", []))
-    #
-    # patch_coords = np.array(patch_output.get("coordinates", []))
-    # if not np.all(np.array(scale_factor) == 1):
-    #     patch_coords = patch_coords * (np.tile(scale_factor, 2))  # to baseline mpp
-    # patch_coords = patch_coords.astype(float)
-    # labels = patch_output.get("labels", [])
-    # # get classes to consider
-    # if len(class_probs) == 0:
-    #     classes_predicted = np.unique(preds).tolist()
-    # else:
-    #     classes_predicted = range(len(class_probs[0]))
-    #
-    # if class_dict is None:
-    #     # if no class dict create a default one
-    #     if len(class_probs) == 0:
-    #         class_dict = {i: i for i in np.unique(np.append(preds, labels)).tolist()}
-    #     else:
-    #         class_dict = {i: i for i in range(len(class_probs[0]))}
-    #
-    # # find what keys we need to save
-    # keys = ["predictions"]
-    # keys = keys + [key for key in ["probabilities", "labels"] if key in patch_output]
-    #
-    # # put patch predictions into a store
-    # annotations = patch_predictions_as_annotations(
-    #     preds,
-    #     keys,
-    #     class_dict,
-    #     class_probs,
-    #     patch_coords,
-    #     classes_predicted,
-    #     labels,
-    # )
-    #
-    # store = SQLiteStore()
     _ = store.append_many(annotations, [str(i) for i in range(len(annotations))])

     # # if a save director is provided, then dump store into a file
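
For reference, below is a minimal, self-contained sketch of what the reworked new-side loop computes. It substitutes shapely's Polygon and make_valid for the module-internal feature2geometry and make_valid_poly helpers (an assumption made only so the snippet runs standalone); the synthetic preds array and the scale_factor value are placeholders, not values from this diff.

import cv2
import numpy as np
from shapely.geometry import Polygon
from shapely.validation import make_valid

# Synthetic stand-in for patch_output["predictions"][0]: one labelled region.
preds = np.zeros((64, 64), dtype=np.uint8)
preds[10:30, 10:30] = 1
scale_factor = 2.0  # placeholder for the resolution scaling used in the diff

# Drop the background class (0), as in the first hunk above.
layer_list = np.unique(preds)
layer_list = np.delete(layer_list, np.where(layer_list == 0))

annotations = []
for type_class in layer_list:
    mask = (preds == type_class).astype(np.uint8)
    contours, _ = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
    for layer_ in contours:
        coords = layer_[:, 0, :]  # (N, 2) contour vertices
        # Mirrors scaled_coords / feature_geom / make_valid_poly in the diff:
        # scale the contour to baseline coordinates, build a polygon, repair it.
        poly = make_valid(Polygon(scale_factor * coords))
        annotations.append((poly, {"type": "mask"}))

print(len(annotations), annotations[0][0].bounds)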