segmenter: bugfixes

Romain Bazile 2020-10-01 15:56:09 +02:00
parent fbacf21016
commit a35b70e0fb
2 changed files with 48 additions and 24 deletions


@@ -264,7 +264,9 @@ class ImagerProcess(multiprocessing.Process):
             nodered_metadata = last_message["config"]
             # Definition of the few important metadata
             local_metadata = {
-                "process_datetime": datetime.datetime.now().isoformat(),
+                "process_datetime": datetime.datetime.now()
+                .isoformat()
+                .split(".")[0],
                 "acq_camera_resolution": self.__resolution,
                 "acq_camera_iso": self.__iso,
                 "acq_camera_shutter_speed": self.__shutter_speed,
@@ -350,6 +352,7 @@ class ImagerProcess(multiprocessing.Process):
                 # We only keep the date '2020-09-25T15:25:21.079769'
                 self.__global_metadata["process_datetime"].split("T")[0],
                 str(self.__global_metadata["sample_id"]),
+                str(self.__global_metadata["acq_id"]),
             )
             if not os.path.exists(self.__export_path):
                 # create the path!
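
The export directory gains one more level, the acquisition id, so two acquisitions of the same sample on the same day no longer share a folder. A rough sketch of the resulting layout, with made-up values and an assumed base path (the base is not shown in this hunk):

    import os

    export_base = "/home/pi/PlanktonScope/img"   # assumed base export directory
    export_path = os.path.join(
        export_base,
        "2020-09-25",      # date part of process_datetime
        "station_12",      # sample_id (made up)
        "1",               # acq_id (made up), new in this commit
    )
    # '/home/pi/PlanktonScope/img/2020-09-25/station_12/1'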
@@ -374,7 +377,7 @@ class ImagerProcess(multiprocessing.Process):
                 json.dumps(
                     {
                         "action": "move",
-                        "direction": "BACKWARD",
+                        "direction": "FORWARD",
                         "volume": self.__pump_volume,
                         "flowrate": 2,
                     }
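
The pump command published between captures now asks for FORWARD flow instead of BACKWARD; the payload shape itself is unchanged. Serialized, the message looks like this (the volume value is made up):

    import json

    payload = json.dumps(
        {"action": "move", "direction": "FORWARD", "volume": 1, "flowrate": 2}
    )
    # '{"action": "move", "direction": "FORWARD", "volume": 1, "flowrate": 2}'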
@@ -399,6 +402,7 @@ class ImagerProcess(multiprocessing.Process):
         filename_path = os.path.join(self.__export_path, filename)
         logger.info(f"Capturing an image to {filename_path}")
+        # TODO Insert here a delay to stabilize the flow before we image
         # Capture an image with the proper filename
         self.__camera.capture(filename_path)
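
The TODO only marks where a settling delay would go; nothing is implemented yet. One possible shape for it, with a purely illustrative delay value and a hypothetical helper name:

    import time

    FLOW_SETTLE_DELAY = 0.5  # seconds, illustrative only

    def capture_when_settled(camera, filename_path, settle_s=FLOW_SETTLE_DELAY):
        # Hypothetical helper: wait for the flow to stabilize, then capture.
        time.sleep(settle_s)
        camera.capture(filename_path)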
@@ -409,7 +413,7 @@ class ImagerProcess(multiprocessing.Process):
         # Publish the name of the image to via MQTT to Node-RED
         self.imager_client.client.publish(
             "status/imager",
-            f'{{"status":"{filename} has been imaged."}}',
+            f'{{"status":"{self.__img_done + 1}/{self.__img_goal} has been imaged to {filename}."}}',
         )
         # Increment the counter
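
The status message now reports progress as done/goal. The doubled braces are the f-string escape for literal braces, so the published string stays valid JSON; for example, with made-up values:

    filename = "01_23_45_678912.jpg"   # made-up capture name
    img_done, img_goal = 4, 10
    f'{{"status":"{img_done + 1}/{img_goal} has been imaged to {filename}."}}'
    # '{"status":"5/10 has been imaged to 01_23_45_678912.jpg."}'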


@@ -174,7 +174,7 @@ class SegmenterProcess(multiprocessing.Process):
         # Define the name of each object
         object_fn = morphocut.str.Format(
-            os.path.join("/home/pi/PlanktonScope/", "OBJECTS", "{name}.jpg"),
+            os.path.join(self.__working_path, "objects", "{name}.jpg"),
             name=object_id,
         )
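
Extracted objects are now written under the folder being segmented rather than a fixed /home/pi/PlanktonScope/OBJECTS directory. morphocut.str.Format fills {name} for each object at stream time; the plain-Python equivalent for a single object would be roughly this (both values made up):

    import os

    working_path = "/home/pi/PlanktonScope/img/2020-09-25/station_12/1"  # assumed
    object_id = "01_23_45_678912_0"                                      # made up
    os.path.join(working_path, "objects", "{name}.jpg").format(name=object_id)
    # '/home/pi/PlanktonScope/img/2020-09-25/station_12/1/objects/01_23_45_678912_0.jpg'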
@@ -242,31 +242,51 @@ class SegmenterProcess(multiprocessing.Process):
         self.segmenter_client.client.publish(
             "status/segmenter", '{"status":"Started"}'
         )
         img_paths = [x[0] for x in os.walk(self.__img_path)]
         logger.info(f"The pipeline will be run in {len(img_paths)} directories")
+        logger.debug(f"The pipeline will be run in these directories {img_paths}")
         for path in img_paths:
-            logger.info(f"Loading the metadata file for {path}")
-            with open(os.path.join(path, "metadata.json"), "r") as config_file:
-                self.__global_metadata = json.load(config_file)
-                logger.debug(f"Configuration loaded is {self.__global_metadata}")
-            # Define the name of the .zip file that will contain the images and the .tsv table for EcoTaxa
-            self.__archive_fn = os.path.join(
-                self.__ecotaxa_path,
-                # filename includes project name, timestamp and sample id
-                f"export_{self.__global_metadata['sample_project']}_{self.__global_metadata['process_datetime']}_{self.__global_metadata['sample_id']}.zip",
-            )
-            logger.info(f"Starting the pipeline in {path}")
-            # Start the MorphoCut Pipeline on the found path
-            self.__working_path = path
-            try:
-                self.__pipe.run()
-            except Exception as e:
-                logger.exception(f"There was an error in the pipeline {e}")
-            logger.info(f"Pipeline has been run for {path}")
+            logger.info("Checking for the presence of metadata.json")
+            if os.path.exists(os.path.join(path, "metadata.json")):
+                # The file exists, let's run the pipe!
+                logger.info(f"Loading the metadata file for {path}")
+                with open(os.path.join(path, "metadata.json"), "r") as config_file:
+                    self.__global_metadata = json.load(config_file)
+                    logger.debug(
+                        f"Configuration loaded is {self.__global_metadata}"
+                    )
+                project = self.__global_metadata["sample_project"].replace(" ", "_")
+                date = self.__global_metadata["process_datetime"]
+                sample = self.__global_metadata["sample_id"]
+                # Define the name of the .zip file that will contain the images and the .tsv table for EcoTaxa
+                self.__archive_fn = os.path.join(
+                    self.__ecotaxa_path,
+                    # filename includes project name, timestamp and sample id
+                    f"export_{project}_{date}_{sample}.zip",
+                )
+                self.__working_path = path
+                # Create the objects path
+                if not os.path.exists(os.path.join(self.__working_path, "objects")):
+                    # create the path!
+                    os.makedirs(os.path.join(self.__working_path, "objects"))
+                logger.debug(f"The archive folder is {self.__archive_fn}")
+                self.__create_morphocut_pipeline()
+                logger.info(f"Starting the pipeline in {path}")
+                # Start the MorphoCut Pipeline on the found path
+                try:
+                    self.__pipe.run()
+                except Exception as e:
+                    logger.exception(f"There was an error in the pipeline {e}")
+                logger.info(f"Pipeline has been run for {path}")
+            else:
+                logger.info("Moving to the next folder, this one's empty")
         # remove directory
         # shutil.rmtree(import_path)
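
run() still walks every directory under the image path, but it now skips folders without a metadata.json instead of crashing on the open(), creates the objects/ sub-folder, and rebuilds the MorphoCut pipeline for each folder it processes. As a reminder of what the walk yields, a small sketch with made-up paths:

    import os

    img_path = "/home/pi/PlanktonScope/img"          # assumed acquisition root
    img_paths = [x[0] for x in os.walk(img_path)]
    # the root itself plus every sub-directory, e.g.
    # ['/home/pi/PlanktonScope/img',
    #  '/home/pi/PlanktonScope/img/2020-09-25',
    #  '/home/pi/PlanktonScope/img/2020-09-25/station_12',
    #  '/home/pi/PlanktonScope/img/2020-09-25/station_12/1']
    # only the entries containing a metadata.json are segmented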
@@ -320,7 +340,7 @@ class SegmenterProcess(multiprocessing.Process):
         )
         # Instantiate the morphocut pipeline
-        self.__create_morphocut_pipeline()
+        # self.__create_morphocut_pipeline()
         # Publish the status "Ready" to via MQTT to Node-RED
         self.segmenter_client.client.publish("status/segmenter", '{"status":"Ready"}')