From 24aeee4c46ef40cbaa688ed6e9ae36d9eb590daa Mon Sep 17 00:00:00 2001 From: Romain Bazile Date: Fri, 18 Jun 2021 11:19:58 +0200 Subject: [PATCH] segmenter: first pass, not really working as intended (cherry picked from commit e4d7843a35c0c50875bb1e0fe3d29ad3551105e5) --- flows/main.json | 1026 ++++++++++++++++++++++------- scripts/planktoscope/segmenter.py | 984 ++++++++++++++++++++------- 2 files changed, 1557 insertions(+), 453 deletions(-) diff --git a/flows/main.json b/flows/main.json index 5bc15f3..bb3a3ec 100644 --- a/flows/main.json +++ b/flows/main.json @@ -671,33 +671,6 @@ "width": "10", "collapse": false }, - { - "id": "b001a150.faa548", - "type": "ui_spacer", - "name": "spacer", - "group": "46be9c86.dea684", - "order": 3, - "width": 3, - "height": 1 - }, - { - "id": "e900ba8d.100b6", - "type": "ui_spacer", - "name": "spacer", - "group": "46be9c86.dea684", - "order": 5, - "width": 3, - "height": 1 - }, - { - "id": "a082e7cf.54863", - "type": "ui_spacer", - "name": "spacer", - "group": "46be9c86.dea684", - "order": 7, - "width": 10, - "height": 1 - }, { "id": "52d1b77.28369c8", "type": "ui_group", @@ -771,80 +744,6 @@ "disabled": false, "hidden": false }, - { - "id": "6be36295.0ab324", - "type": "ui_group", - "z": "", - "name": "Settings", - "tab": "b0fb559a.6966a8", - "order": 1, - "disp": false, - "width": "6", - "collapse": false - }, - { - "id": "a748d93b.3e49e8", - "type": "ui_spacer", - "name": "spacer", - "group": "4322c187.e73e5", - "order": 8, - "width": 10, - "height": 1 - }, - { - "id": "4916e0fe.8e26f8", - "type": "ui_spacer", - "name": "spacer", - "group": "b5d61bc7.54fe48", - "order": 2, - "width": 2, - "height": 1 - }, - { - "id": "366cefec.a908d8", - "type": "ui_spacer", - "name": "spacer", - "group": "b5d61bc7.54fe48", - "order": 4, - "width": 2, - "height": 1 - }, - { - "id": "635fb8ab.69d218", - "type": "ui_spacer", - "name": "spacer", - "group": "b5d61bc7.54fe48", - "order": 5, - "width": 2, - "height": 1 - }, - { - "id": "213ca819.bc3248", - "type": "ui_spacer", - "name": "spacer", - "group": "b5d61bc7.54fe48", - "order": 6, - "width": 2, - "height": 1 - }, - { - "id": "2b17d559.4543ca", - "type": "ui_spacer", - "name": "spacer", - "group": "b5d61bc7.54fe48", - "order": 7, - "width": 2, - "height": 1 - }, - { - "id": "2d0a5067.ac6518", - "type": "ui_spacer", - "name": "spacer", - "group": "b5d61bc7.54fe48", - "order": 8, - "width": 2, - "height": 1 - }, { "id": "bf163cd3.8abe38", "type": "ui_spacer", @@ -993,6 +892,143 @@ "width": 8, "height": 1 }, + { + "id": "10cbc233.b5ddf6", + "type": "ui_spacer", + "name": "spacer", + "group": "1be83144.4fe4bf", + "order": 3, + "width": 1, + "height": 1 + }, + { + "id": "24270875.aa257", + "type": "ui_spacer", + "name": "spacer", + "group": "1be83144.4fe4bf", + "order": 6, + "width": 1, + "height": 1 + }, + { + "id": "fe98ada7.23f03", + "type": "ui_spacer", + "name": "spacer", + "group": "1be83144.4fe4bf", + "order": 7, + "width": 12, + "height": 1 + }, + { + "id": "b5d2fa87.dec9f", + "type": "ui_spacer", + "name": "spacer", + "group": "1be83144.4fe4bf", + "order": 8, + "width": 12, + "height": 1 + }, + { + "id": "8a8ac899.77d62", + "type": "ui_spacer", + "name": "spacer", + "group": "1be83144.4fe4bf", + "order": 9, + "width": 3, + "height": 1 + }, + { + "id": "99833ac1.3576e", + "type": "ui_spacer", + "name": "spacer", + "group": "1be83144.4fe4bf", + "order": 11, + "width": 3, + "height": 1 + }, + { + "id": "dbd0aacd.462f78", + "type": "ui_spacer", + "name": "spacer", + "group": "1be83144.4fe4bf", + "order": 12, + 
"width": 4, + "height": 1 + }, + { + "id": "61749739.bde6a", + "type": "ui_spacer", + "name": "spacer", + "group": "1be83144.4fe4bf", + "order": 14, + "width": 4, + "height": 1 + }, + { + "id": "1ce5fab4.b4ba3d", + "type": "ui_spacer", + "name": "spacer", + "group": "1be83144.4fe4bf", + "order": 15, + "width": 12, + "height": 1 + }, + { + "id": "67214b13.71efb4", + "type": "ui_spacer", + "name": "spacer", + "group": "1be83144.4fe4bf", + "order": 16, + "width": 12, + "height": 1 + }, + { + "id": "a13796ac.04361", + "type": "ui_spacer", + "name": "spacer", + "group": "1be83144.4fe4bf", + "order": 18, + "width": 8, + "height": 1 + }, + { + "id": "7fdc961c.837a1", + "type": "ui_spacer", + "name": "spacer", + "group": "46be9c86.dea684", + "order": 3, + "width": 3, + "height": 1 + }, + { + "id": "ff9fd243.02bfd", + "type": "ui_spacer", + "name": "spacer", + "group": "46be9c86.dea684", + "order": 5, + "width": 3, + "height": 1 + }, + { + "id": "6f51c860.a27ff", + "type": "ui_spacer", + "name": "spacer", + "group": "46be9c86.dea684", + "order": 7, + "width": 10, + "height": 1 + }, + { + "id": "cfe2288f.a8862", + "type": "ui_group", + "z": "", + "name": "GPS Status", + "tab": "d9cd733b.ab73d", + "order": 8, + "disp": true, + "width": "6", + "collapse": false + }, { "id": "4e78af2d.90be7", "type": "ui_ui_control", @@ -1617,7 +1653,7 @@ "initialize": "", "finalize": "", "x": 800, - "y": 380, + "y": 400, "wires": [ [] ] @@ -2335,8 +2371,8 @@ "payload": "BACKWARD", "payloadType": "str", "topic": "actuator/pump", - "x": 180, - "y": 220, + "x": 160, + "y": 260, "wires": [ [ "3cb96380.e575ec" @@ -2361,46 +2397,14 @@ "payload": "FORWARD", "payloadType": "str", "topic": "actuator/pump", - "x": 180, - "y": 260, + "x": 160, + "y": 300, "wires": [ [ "3cb96380.e575ec" ] ] }, - { - "id": "961d27e7.da65c8", - "type": "ui_switch", - "z": "bccd1f23.87219", - "name": "light_state", - "label": "Light", - "tooltip": "", - "group": "4248342d.e55fac", - "order": 1, - "width": 0, - "height": 0, - "passthru": true, - "decouple": "false", - "topic": "", - "style": "", - "onvalue": "true", - "onvalueType": "bool", - "onicon": "", - "oncolor": "", - "offvalue": "false", - "offvalueType": "bool", - "officon": "", - "offcolor": "", - "x": 280, - "y": 40, - "wires": [ - [ - "cbb8afed.0a026" - ] - ], - "icon": "font-awesome/fa-lightbulb-o" - }, { "id": "cc966678.da8d08", "type": "ui_dropdown", @@ -2410,7 +2414,7 @@ "tooltip": "", "place": "Select option", "group": "4248342d.e55fac", - "order": 2, + "order": 3, "width": 0, "height": 0, "passthru": true, @@ -2458,7 +2462,7 @@ "z": "bccd1f23.87219", "group": "4248342d.e55fac", "name": "Magnification", - "order": 3, + "order": 4, "width": 0, "height": 0, "format": "
\n Magnification : X\n \n \n
", @@ -2516,7 +2520,7 @@ "z": "bccd1f23.87219", "group": "4248342d.e55fac", "name": "process_pixel", - "order": 4, + "order": 5, "width": 0, "height": 0, "format": "
\n Pixel resolution :\n \n μm\n
", @@ -2536,7 +2540,7 @@ "z": "bccd1f23.87219", "group": "4248342d.e55fac", "name": "min_size", - "order": 5, + "order": 6, "width": 0, "height": 0, "format": "
\n Smallest cells to explore :\n \n μm\n
", @@ -2555,7 +2559,7 @@ "z": "bccd1f23.87219", "group": "4248342d.e55fac", "name": "max_size", - "order": 6, + "order": 7, "width": 0, "height": 0, "format": "
\n Biggest cells to explore :\n \n μm\n
", @@ -2774,8 +2778,8 @@ "noerr": 0, "initialize": "", "finalize": "", - "x": 370, - "y": 240, + "x": 350, + "y": 280, "wires": [ [ "bdc8ce57.de1f08" @@ -2806,8 +2810,8 @@ "raw": false, "topic": "", "name": "", - "x": 570, - "y": 240, + "x": 550, + "y": 280, "wires": [ [] ] @@ -2821,8 +2825,8 @@ "qos": "", "retain": "", "broker": "8dc3722c.06efa8", - "x": 550, - "y": 180, + "x": 530, + "y": 220, "wires": [] }, { @@ -3029,57 +3033,13 @@ "once": true, "onceDelay": 0.1, "topic": "", - "payload": "false", - "payloadType": "bool", - "x": 110, - "y": 40, + "payload": "{\"action\":\"off\"}", + "payloadType": "str", + "x": 150, + "y": 160, "wires": [ [ - "961d27e7.da65c8" - ] - ] - }, - { - "id": "16aa0238.209276", - "type": "ui_slider", - "z": "bccd1f23.87219", - "name": "Iso slider", - "label": "ISO", - "tooltip": "Possible values are 100, 200, 320, 400, 500, 640, 800. 500 by default.", - "group": "8c38a81e.9897a8", - "order": 2, - "width": 0, - "height": 0, - "passthru": true, - "outs": "end", - "topic": "imager/image", - "min": "100", - "max": "800", - "step": "20", - "x": 360, - "y": 720, - "wires": [ - [ - "bb090334.1e21a8" - ] - ] - }, - { - "id": "bb090334.1e21a8", - "type": "function", - "z": "bccd1f23.87219", - "name": "round iso", - "func": "// Iso should be one of 60, 100, 200, 320, 400, 500, 640, 800\n\nif (msg.payload <= 80){\n msg.payload = 60;\n return msg;\n}\n\nif (msg.payload <= 150){\n msg.payload = 100;\n return msg;\n}\n\nif (msg.payload <= 260){\n msg.payload = 200;\n return msg;\n}\n\nif (msg.payload <= 360){\n msg.payload = 320;\n return msg;\n}\n\nif (msg.payload <= 450){\n msg.payload = 400;\n return msg;\n}\n\nif (msg.payload <= 565){\n msg.payload = 500;\n return msg;\n}\n\nif (msg.payload <= 700){\n msg.payload = 640;\n return msg;\n}\n\n\nif (700 < msg.payload){\n msg.payload = 800;\n return msg;\n}\n", - "outputs": 1, - "noerr": 0, - "initialize": "", - "finalize": "", - "x": 520, - "y": 720, - "wires": [ - [ - "16aa0238.209276", - "8ea9dc9a.c7d87" + "f0775525.cf806" ] ] }, @@ -3109,16 +3069,17 @@ "label": "Shutter Speed", "tooltip": "In microseconds, up to 1000µs, 125µs by default", "group": "8c38a81e.9897a8", - "order": 1, + "order": 5, "width": 0, "height": 0, "passthru": true, "outs": "end", "topic": "imager/image", + "topicType": "str", "min": "125", "max": "1000", "step": "1", - "x": 390, + "x": 400, "y": 800, "wires": [ [ @@ -3203,7 +3164,7 @@ "y": 720, "wires": [ [ - "16aa0238.209276" + "87f8732c.caf648" ] ] }, @@ -3263,7 +3224,7 @@ "z": "cb95299c.2817c8", "name": "Start segmentation", "group": "abeb6dad.635a2", - "order": 2, + "order": 4, "width": 5, "height": 1, "passthru": false, @@ -3275,6 +3236,7 @@ "payload": "{\"action\":\"segment\"}", "payloadType": "json", "topic": "segmenter/segment", + "topicType": "str", "x": 370, "y": 300, "wires": [ @@ -3289,7 +3251,7 @@ "z": "cb95299c.2817c8", "name": "Stop segmentation", "group": "abeb6dad.635a2", - "order": 3, + "order": 5, "width": 5, "height": 1, "passthru": true, @@ -4019,8 +3981,8 @@ "label": "GPS Status:", "format": "{{msg.payload}}", "layout": "row-center", - "x": 740, - "y": 780, + "x": 730, + "y": 840, "wires": [] }, { @@ -4066,8 +4028,8 @@ "payload": "{\"action\":\"stop\"}", "payloadType": "json", "topic": "actuator/pump", - "x": 170, - "y": 180, + "x": 150, + "y": 220, "wires": [ [ "bdc8ce57.de1f08" @@ -4985,7 +4947,7 @@ "label": "WB: Red", "tooltip": "from 1.0 to 8.0", "group": "8c38a81e.9897a8", - "order": 4, + "order": 3, "width": 8, "height": 1, "passthru": true, @@ -5029,7 +4991,7 @@ "label": 
"WB: Blue", "tooltip": "from 1.0 to 8.0", "group": "8c38a81e.9897a8", - "order": 5, + "order": 4, "width": 8, "height": 1, "passthru": true, @@ -5098,7 +5060,7 @@ "label": "Auto White Balance", "tooltip": "", "group": "8c38a81e.9897a8", - "order": 3, + "order": 2, "width": 2, "height": 2, "passthru": true, @@ -5238,8 +5200,8 @@ "noerr": 0, "initialize": "", "finalize": "", - "x": 560, - "y": 200, + "x": 760, + "y": 240, "wires": [ [] ] @@ -5587,13 +5549,13 @@ "id": "33c28dc1.238002", "type": "function", "z": "cb95299c.2817c8", - "name": "reset obj_counter", - "func": "global.set('obj_counter', 0);\n\nreturn msg;global.set('img_counter', 0);\nglobal.set('obj_counter', 0);\n", + "name": "prepare segmentation", + "func": "global.set('obj_counter', 0);\n\nvar segmentation_list = flow.get('segmentation_list')\nif (segmentation_list !== undefined && segmentation_list !== \"\") {\n msg.payload['path'] = segmentation_list\n}\n\nreturn msg;", "outputs": 1, "noerr": 0, "initialize": "", "finalize": "", - "x": 590, + "x": 600, "y": 300, "wires": [ [ @@ -6091,7 +6053,7 @@ "z": "9daf9e2b.019fc", "name": "", "group": "1be83144.4fe4bf", - "order": 13, + "order": 17, "width": 2, "height": 1, "passthru": false, @@ -6137,7 +6099,7 @@ "z": "9daf9e2b.019fc", "name": "", "group": "1be83144.4fe4bf", - "order": 15, + "order": 19, "width": 2, "height": 1, "passthru": false, @@ -6223,7 +6185,7 @@ "z": "9daf9e2b.019fc", "name": "Restart Python", "group": "1be83144.4fe4bf", - "order": 11, + "order": 13, "width": 4, "height": 1, "passthru": true, @@ -6616,7 +6578,7 @@ "z": "9daf9e2b.019fc", "name": "Update", "group": "1be83144.4fe4bf", - "order": 8, + "order": 10, "width": 6, "height": 1, "passthru": false, @@ -6642,7 +6604,7 @@ "type": "exec", "z": "9daf9e2b.019fc", "command": "bash /home/pi/PlanktonScope/scripts/bash/update.sh", - "addpay": false, + "addpay": true, "append": "", "useSpawn": "true", "timer": "", @@ -6677,8 +6639,8 @@ "fwdInMessages": true, "resendOnRefresh": true, "templateScope": "local", - "x": 410, - "y": 200, + "x": 150, + "y": 260, "wires": [ [] ] @@ -8121,8 +8083,8 @@ "z": "cb95299c.2817c8", "name": "", "env": [], - "x": 210, - "y": 200, + "x": 410, + "y": 240, "wires": [ [ "3b72d11c.86e9e6" @@ -8139,8 +8101,8 @@ "noerr": 0, "initialize": "", "finalize": "", - "x": 380, - "y": 200, + "x": 580, + "y": 240, "wires": [ [ "c8749cbb.55254" @@ -8238,7 +8200,7 @@ "topic": "", "payload": "start", "payloadType": "str", - "x": 1110, + "x": 1190, "y": 720, "wires": [ [ @@ -8435,8 +8397,8 @@ "noerr": 0, "initialize": "", "finalize": "", - "x": 480, - "y": 840, + "x": 490, + "y": 900, "wires": [ [ "4fb4e0ad.c417c", @@ -9019,6 +8981,236 @@ ] ] }, + { + "id": "3ea12061.ce62c", + "type": "ui_list", + "z": "cb95299c.2817c8", + "group": "abeb6dad.635a2", + "name": "", + "order": 3, + "width": 10, + "height": 11, + "lineType": "two", + "actionType": "check", + "allowHTML": false, + "outputs": 1, + "topic": "", + "x": 650, + "y": 140, + "wires": [ + [ + "cb3b87b5.63c4", + "739ba213.584e3c" + ] + ] + }, + { + "id": "8bd8fb2c.53fa4", + "type": "dir2files", + "z": "cb95299c.2817c8", + "name": "", + "dirname": "/home/pi/data/img/", + "pathRegex": ".*", + "isRecursive": true, + "findDir": true, + "isArray": true, + "x": 460, + "y": 140, + "wires": [ + [ + "3ea12061.ce62c" + ] + ] + }, + { + "id": "127d4ee.f8ad1b1", + "type": "ui_button", + "z": "cb95299c.2817c8", + "name": "Refresh", + "group": "abeb6dad.635a2", + "order": 2, + "width": 0, + "height": 0, + "passthru": false, + "label": "Update acquisition's folder 
list", + "tooltip": "Refresh the list of previous acquisitions", + "color": "", + "bgcolor": "", + "icon": "mi-find_replace", + "payload": "", + "payloadType": "date", + "topic": "update", + "topicType": "str", + "x": 260, + "y": 140, + "wires": [ + [ + "8bd8fb2c.53fa4", + "56f845f5.e7c054" + ] + ] + }, + { + "id": "946ce9ee.092cf", + "type": "inject", + "z": "cb95299c.2817c8", + "name": "Init", + "props": [ + { + "p": "payload" + } + ], + "repeat": "", + "crontab": "", + "once": true, + "onceDelay": 0.1, + "topic": "", + "payload": "", + "payloadType": "date", + "x": 270, + "y": 100, + "wires": [ + [ + "8bd8fb2c.53fa4", + "56f845f5.e7c054" + ] + ] + }, + { + "id": "87f8732c.caf648", + "type": "ui_multistate_switch", + "z": "bccd1f23.87219", + "name": "ISO selector", + "group": "8c38a81e.9897a8", + "order": 1, + "width": 10, + "height": 1, + "label": "ISO", + "stateField": "payload", + "enableField": "enable", + "rounded": true, + "useThemeColors": true, + "hideSelectedLabel": false, + "options": [ + { + "label": "100", + "value": "100", + "valueType": "str", + "color": "#009933" + }, + { + "label": "200", + "value": "200", + "valueType": "str", + "color": "#999999" + }, + { + "label": "320", + "value": "320", + "valueType": "str", + "color": "#ff6666" + }, + { + "label": "400", + "value": "400", + "valueType": "str", + "color": "#009999" + }, + { + "label": "500", + "value": "500", + "valueType": "str", + "color": "#cccc00" + }, + { + "label": "640", + "value": "640", + "valueType": "str", + "color": "#ff33cc" + }, + { + "label": "800", + "value": "800", + "valueType": "str", + "color": "#cc6600" + } + ], + "x": 370, + "y": 720, + "wires": [ + [ + "8ea9dc9a.c7d87" + ] + ] + }, + { + "id": "f0775525.cf806", + "type": "ui_multistate_switch", + "z": "bccd1f23.87219", + "name": "light_state", + "group": "4248342d.e55fac", + "order": 1, + "width": 0, + "height": 0, + "label": "Light ", + "stateField": "payload", + "enableField": "enable", + "rounded": true, + "useThemeColors": true, + "hideSelectedLabel": false, + "options": [ + { + "label": "Off", + "value": "{\"action\":\"off\"}", + "valueType": "str", + "color": "#009933" + }, + { + "label": "On", + "value": "{\"action\":\"on\"}", + "valueType": "str", + "color": "#999999" + } + ], + "x": 310, + "y": 160, + "wires": [ + [ + "99ae4886.8d43c" + ] + ] + }, + { + "id": "99ae4886.8d43c", + "type": "function", + "z": "bccd1f23.87219", + "name": "Encapsulate settings", + "func": "msg.topic = \"light\"\nreturn msg;", + "outputs": 1, + "noerr": 0, + "initialize": "", + "finalize": "", + "x": 500, + "y": 160, + "wires": [ + [ + "d31fcead.7e2ef" + ] + ] + }, + { + "id": "d31fcead.7e2ef", + "type": "mqtt out", + "z": "bccd1f23.87219", + "name": "", + "topic": "", + "qos": "", + "retain": "", + "broker": "8dc3722c.06efa8", + "x": 670, + "y": 160, + "wires": [] + }, { "id": "83c5a708.a5715", "type": "exec", @@ -9106,8 +9298,8 @@ "payload": "", "topic": "branch", "topicType": "str", - "x": 1320, - "y": 340, + "x": 1020, + "y": 260, "wires": [ [ "2d2ef1fd.40e6e6" @@ -9158,6 +9350,29 @@ ] ] }, + { + "id": "d037a624.60bea8", + "type": "exec", + "z": "9daf9e2b.019fc", + "d": true, + "command": "git --git-dir=/home/pi/PlanktonScope/.git checkout", + "addpay": true, + "append": "", + "useSpawn": "false", + "timer": "", + "oldrc": false, + "name": "git checkout branch", + "x": 1230, + "y": 260, + "wires": [ + [], + [ + "d334d264.8a7728", + "83c5a708.a5715" + ], + [] + ] + }, { "id": "af2b8d95.195bb8", "type": "ui_text", @@ -9175,8 +9390,353 @@ "wires": [] }, { - 
"id": "aa2dbb6.4cd9948", - "type": "link out", + "id": "cb3b87b5.63c4", + "type": "function", + "z": "cb95299c.2817c8", + "name": "update segmentation_list", + "func": "var segmentation_list = flow.get('segmentation_list');\n\nif (segmentation_list === undefined || segmentation_list === \"\") {\n segmentation_list = []\n console.log(\"error\")\n}\n\nif (msg.payload.isChecked){\n if (segmentation_list.includes(msg.payload.title) === false){\n segmentation_list.push(msg.payload.title)\n }\n // Element already in list, don't push it more than once\n //segmentation_list.push(msg.payload[\"title\"])\n}\nelse {\n var pos = segmentation_list.indexOf('machin')\n segmentation_list.splice(pos, 1)\n}\n\nflow.set('segmentation_list', segmentation_list)", + "outputs": 1, + "noerr": 0, + "initialize": "", + "finalize": "", + "x": 910, + "y": 140, + "wires": [ + [] + ] + }, + { + "id": "56f845f5.e7c054", + "type": "change", + "z": "cb95299c.2817c8", + "name": "Reset segmentation_list", + "rules": [ + { + "t": "set", + "p": "segmentation_list", + "pt": "flow", + "to": "[]", + "tot": "json" + } + ], + "action": "", + "property": "", + "from": "", + "to": "", + "reg": false, + "x": 510, + "y": 100, + "wires": [ + [] + ] + }, + { + "id": "7fc72364.8f038c", + "type": "ui_template", + "z": "cb95299c.2817c8", + "group": "abeb6dad.635a2", + "name": "Update message", + "order": 1, + "width": 10, + "height": "3", + "format": "
You can choose here in which folder(s) you want the segmentation script to run. A few details though:\n
The segmentation is run recursively in all folders. So if you select a top level folder, the segmentation will be run in all subfolders.\n
Also, you will be able to choose whether to force the segmentation for folders in which it has already run.
", + "storeOutMessages": true, + "fwdInMessages": true, + "resendOnRefresh": true, + "templateScope": "local", + "x": 270, + "y": 40, + "wires": [ + [] + ] + }, + { + "id": "6c7f0cc5.ea30d4", + "type": "gpsd", + "z": "1371dec5.76e671", + "name": "", + "hostname": "localhost", + "port": "2947", + "tpv": true, + "sky": false, + "info": false, + "device": false, + "gst": false, + "att": false, + "x": 170, + "y": 1360, + "wires": [ + [ + "906072b1.42c7c8", + "cd3b05d6.5a3e2", + "9b29e0e2.475e08", + "96fefa8b.b7d538", + "49a98fe9.9450d8", + "f2a62d0.f5e135" + ] + ] + }, + { + "id": "96bc0179.5c37", + "type": "ui_text", + "z": "1371dec5.76e671", + "group": "cfe2288f.a8862", + "order": 1, + "width": 6, + "height": 1, + "name": "GPS Status Display", + "label": "GPS Status:", + "format": "{{msg.payload}}", + "layout": "row-center", + "x": 650, + "y": 1200, + "wires": [] + }, + { + "id": "8c731f52.2b5f3", + "type": "ui_text", + "z": "1371dec5.76e671", + "group": "cfe2288f.a8862", + "order": 5, + "width": 3, + "height": 1, + "name": "Latitude", + "label": "Latitude", + "format": "{{msg.payload.lat.deg}}°{{msg.payload.lat.min}}'{{msg.payload.lat.sec}}{{msg.payload.lat.dir}}", + "layout": "col-center", + "x": 620, + "y": 1240, + "wires": [] + }, + { + "id": "34e4b5ea.27b4a2", + "type": "ui_text", + "z": "1371dec5.76e671", + "group": "cfe2288f.a8862", + "order": 6, + "width": 3, + "height": 1, + "name": "Longitude", + "label": "Longitude", + "format": "{{msg.payload.lon.deg}}°{{msg.payload.lon.min}}'{{msg.payload.lon.sec}}{{msg.payload.lon.dir}}", + "layout": "col-center", + "x": 620, + "y": 1280, + "wires": [] + }, + { + "id": "906072b1.42c7c8", + "type": "function", + "z": "1371dec5.76e671", + "name": "Convert DD to DMS", + "func": "function ConvertDDToDMS(D, lng){\n // from https://stackoverflow.com/a/5786281/2108279\n return {\n dir : D<0?lng?'W':'S':lng?'E':'N',\n deg : 0|(D<0?D=-D:D),\n min : 0|D%1*60,\n sec :(0|D*60%1*6000)/100\n };\n}\n\nmsg.payload = {\n \"lat\":ConvertDDToDMS(msg.payload.lat, false),\n \"lon\":ConvertDDToDMS(msg.payload.lon, true)\n};\nreturn msg;", + "outputs": 1, + "noerr": 0, + "initialize": "", + "finalize": "", + "x": 410, + "y": 1260, + "wires": [ + [ + "8c731f52.2b5f3", + "34e4b5ea.27b4a2" + ] + ] + }, + { + "id": "4584c817.0be238", + "type": "ui_text", + "z": "1371dec5.76e671", + "group": "cfe2288f.a8862", + "order": 3, + "width": 3, + "height": 1, + "name": "Speed", + "label": "Speed", + "format": "{{msg.payload}} kts", + "layout": "col-center", + "x": 610, + "y": 1320, + "wires": [] + }, + { + "id": "cd3b05d6.5a3e2", + "type": "ui_text", + "z": "1371dec5.76e671", + "group": "cfe2288f.a8862", + "order": 4, + "width": 3, + "height": 1, + "name": "Direction", + "label": "Direction", + "format": "{{msg.payload.track}} °", + "layout": "col-center", + "x": 620, + "y": 1360, + "wires": [] + }, + { + "id": "9b29e0e2.475e08", + "type": "function", + "z": "1371dec5.76e671", + "name": "GPS Mode", + "func": "switch (msg.payload.mode){\n case 1:msg.payload = \"No Fix\"; break\n case 2:msg.payload = \"2D Fix\"; break\n case 3:msg.payload = \"3D Fix\"; break\n default: msg.payload = \"No info\"\n}\nreturn msg;", + "outputs": 1, + "noerr": 0, + "initialize": "", + "finalize": "", + "x": 390, + "y": 1200, + "wires": [ + [ + "96bc0179.5c37" + ] + ] + }, + { + "id": "96fefa8b.b7d538", + "type": "function", + "z": "1371dec5.76e671", + "name": "Speed conversion", + "func": "msg.payload = (0|msg.payload.speed) * 1.9438\nreturn msg;", + "outputs": 1, + "noerr": 0, + "initialize": "", + 
"finalize": "", + "x": 410, + "y": 1320, + "wires": [ + [ + "4584c817.0be238" + ] + ] + }, + { + "id": "49a98fe9.9450d8", + "type": "ui_text", + "z": "1371dec5.76e671", + "group": "cfe2288f.a8862", + "order": 2, + "width": 6, + "height": 1, + "name": "GPS UTC datetime", + "label": "GPS UTC datetime", + "format": "{{msg.payload.time}}", + "layout": "col-center", + "x": 650, + "y": 1400, + "wires": [] + }, + { + "id": "3c3f334e.d691ac", + "type": "ui_button", + "z": "1371dec5.76e671", + "name": "", + "group": "cfe2288f.a8862", + "order": 6, + "width": 0, + "height": 0, + "passthru": false, + "label": "Force local clock update", + "tooltip": "", + "color": "", + "bgcolor": "", + "icon": "", + "payload": "", + "payloadType": "str", + "topic": "topic", + "topicType": "msg", + "x": 670, + "y": 1480, + "wires": [ + [ + "c60030d4.317418" + ] + ] + }, + { + "id": "c98d3495.b04278", + "type": "comment", + "z": "9daf9e2b.019fc", + "name": "", + "info": "Ajouter ici un update en fait vers la nouvelle branche (il faut qu'on éteigne et rallume les scripts et tout le tintouin)", + "x": 1260, + "y": 280, + "wires": [] + }, + { + "id": "c60030d4.317418", + "type": "exec", + "z": "1371dec5.76e671", + "command": "sudo hwclock -w", + "addpay": false, + "append": "", + "useSpawn": "false", + "timer": "", + "oldrc": false, + "name": "", + "x": 920, + "y": 1480, + "wires": [ + [], + [], + [] + ] + }, + { + "id": "6d144078.508d9", + "type": "ui_text", + "z": "1371dec5.76e671", + "group": "cfe2288f.a8862", + "order": 2, + "width": 6, + "height": 1, + "name": "Local UTC datetime", + "label": "Local UTC datetime", + "format": "{{msg.payload}}", + "layout": "col-center", + "x": 650, + "y": 1440, + "wires": [] + }, + { + "id": "f2a62d0.f5e135", + "type": "function", + "z": "1371dec5.76e671", + "name": "", + "func": "msg.payload = new Date()\nreturn msg;", + "outputs": 1, + "noerr": 0, + "initialize": "", + "finalize": "", + "x": 380, + "y": 1440, + "wires": [ + [ + "6d144078.508d9" + ] + ] + }, + { + "id": "739ba213.584e3c", + "type": "debug", + "z": "cb95299c.2817c8", + "name": "", + "active": true, + "tosidebar": true, + "console": false, + "tostatus": false, + "complete": "true", + "targetType": "full", + "statusVal": "", + "statusType": "auto", + "x": 850, + "y": 200, + "wires": [] + }, + { + "id": "b6bc9b81.ff942", + "type": "function", "z": "9daf9e2b.019fc", "name": "get git revision", "links": [ diff --git a/scripts/planktoscope/segmenter.py b/scripts/planktoscope/segmenter.py index 1ab855e..fc539c8 100644 --- a/scripts/planktoscope/segmenter.py +++ b/scripts/planktoscope/segmenter.py @@ -19,7 +19,32 @@ import multiprocessing # Basic planktoscope libraries import planktoscope.mqtt -import planktoscope.light + + +################################################################################ +# Morphocut Libraries +################################################################################ +import morphocut +import morphocut.file +import morphocut.image +import morphocut.stat +import morphocut.stream +import morphocut.str +import morphocut.contrib.ecotaxa +import morphocut.contrib.zooprocess + +################################################################################ +# Other image processing Libraries +################################################################################ +import skimage.util +import skimage.transform +import skimage.measure +import cv2 +import scipy.stats +import numpy as np +import PIL.Image +import math + logger.info("planktoscope.segmenter is loaded") @@ -31,7 +56,7 
@@ class SegmenterProcess(multiprocessing.Process): """This class contains the main definitions for the segmenter of the PlanktoScope""" @logger.catch - def __init__(self, event): + def __init__(self, event, data_path): """Initialize the Segmenter class Args: @@ -44,281 +69,795 @@ class SegmenterProcess(multiprocessing.Process): self.stop_event = event self.__pipe = None self.segmenter_client = None - self.__img_path = "/home/pi/data/img/" - self.__export_path = "/home/pi/data/export/" - self.__objects_base_path = "/home/pi/data/objects/" + # Where captured images are saved + self.__img_path = os.path.join(data_path, "img/") + # To save export folders + self.__export_path = os.path.join(data_path, "export/") + # To save objects to export + self.__objects_root = os.path.join(data_path, "objects/") + # To save debug masks + self.__debug_objects_root = os.path.join(data_path, "clean/") self.__ecotaxa_path = os.path.join(self.__export_path, "ecotaxa") self.__global_metadata = None + # path for current folder being segmented self.__working_path = "" + # combination of self.__objects_root and actual sample folder name self.__working_obj_path = "" + # combination of self.__debug_objects_root and actual sample folder name + self.__working_debug_path = "" self.__archive_fn = "" + self.__flat = None + self.__mask_array = None + self.__mask_to_remove = None + self.__save_debug_img = False - if not os.path.exists(self.__ecotaxa_path): - # create the path! - os.makedirs(self.__ecotaxa_path) - - if not os.path.exists(self.__objects_base_path): - # create the path! - os.makedirs(self.__objects_base_path) + # create all base path + for path in [ + self.__ecotaxa_path, + self.__objects_root, + self.__debug_objects_root, + ]: + if not os.path.exists(path): + # create the path! + os.makedirs(path) logger.success("planktoscope.segmenter is initialised and ready to go!") - def __create_morphocut_pipeline(self): - """Creates the Morphocut Pipeline""" - logger.debug("Let's start creating the Morphocut Pipeline") + def _find_files(self, path, extension): + for _, _, filenames in os.walk(path, topdown=True): + if filenames: + filenames = sorted(filenames) + return [fn for fn in filenames if fn.endswith(extension)] - with morphocut.Pipeline() as self.__pipe: - # TODO wrap morphocut.Call(logger.debug()) in something that allows it not to be added to the pipeline - # if the logger.level is not debug. Might not be as easy as it sounds. - # Recursively find .jpg files in import_path. - # Sort to get consecutive frames. - abs_path = morphocut.file.Find( - self.__working_path, [".jpg"], sort=True, verbose=True + def _manual_median(self, array_of_5): + array_of_5.sort(axis=0) + return array_of_5[2] + + def _save_image(self, image, path): + PIL.Image.fromarray(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)).save(path) + + def _save_mask(self, mask, path): + PIL.Image.fromarray(mask).save(path) + + def _calculate_flat(self, images_list, images_number, images_root_path): + # TODO make this calculation optional if a flat already exists + # make sure image number is smaller than image list + if images_number > len(images_list): + logger.error( + "The image number can't be bigger than the lenght of the provided list!" 
+ ) + images_number = len(images_list) + + logger.debug("Opening images") + # start = time.monotonic() + # Read images and build array + images_array = np.array( + [ + cv2.imread( + os.path.join(images_root_path, images_list[i]), + ) + for i in range(images_number) + ] + ) + + # logger.debug(time.monotonic() - start) + logger.success("Opening images") + + logger.info("Manual median calc") + # start = time.monotonic() + + self.__flat = self._manual_median(images_array) + # self.__flat = _numpy_median(images_array) + + # logger.debug(time.monotonic() - start) + + logger.success("Manual median calc") + + # cv2.imshow("flat_color", self.__flat.astype("uint8")) + # cv2.waitKey(0) + + return self.__flat + + def _open_and_apply_flat(self, filepath, flat_ref): + logger.info("Opening images") + start = time.monotonic() + # logger.debug(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) + # Read images + image = cv2.imread(filepath) + # print(image) + + # logger.debug(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) + # logger.debug(time.monotonic() - start) + logger.success("Opening images") + + logger.info("Flat calc") + # start = time.monotonic() + # logger.debug(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) + + # Correct image + image = image / self.__flat + + # adding one black pixel top left + image[0][0] = [0, 0, 0] + + # logger.debug(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) + # logger.debug(time.monotonic() - start) + + image = skimage.exposure.rescale_intensity( + image, in_range=(0, 1.04), out_range="uint8" + ) + # logger.debug(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) + logger.debug(time.monotonic() - start) + logger.success("Flat calc") + + # cv2.imshow("img", img.astype("uint8")) + # cv2.waitKey(0) + if self.__save_debug_img: + self._save_image( + image, + os.path.join(self.__working_debug_path, "cleaned_image.jpg"), + ) + return image + + def _create_mask(self, img, saving_path): + logger.info("Starting the mask creation") + # start = time.monotonic() + # logger.debug(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) + + logger.debug("Threshold calc") + # img_hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV) + img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) + # ret, img_thres = cv2.threshold(img_gray, 127, 200, cv2.THRESH_OTSU) + img_thres = cv2.adaptiveThreshold( + img_gray, + maxValue=255, + adaptiveMethod=cv2.ADAPTIVE_THRESH_MEAN_C, + thresholdType=cv2.THRESH_BINARY_INV, + blockSize=19, # must be odd + C=4, + ) + # img_thres = 255 - img_thres + + # logger.debug(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) + # logger.debug(time.monotonic() - start) + # logger.success(f"Threshold used was {ret}") + logger.success(f"Threshold is done") + + # cv2.imshow("img_thres", img_thres) + # cv2.waitKey(0) + if self.__save_debug_img: + PIL.Image.fromarray(img_thres).save( + os.path.join(saving_path, "mask_1_thres.jpg") ) - # Extract name from abs_path - name = morphocut.Call( - lambda p: os.path.splitext(os.path.basename(p))[0], abs_path + logger.info("Erode calc") + # start = time.monotonic() + # logger.debug(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) + + kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (2, 2)) + img_erode = cv2.erode(img_thres, kernel) + + # logger.debug(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) + # logger.debug(time.monotonic() - start) + logger.success("Erode calc") + + # cv2.imshow("img_erode", img_erode.astype("uint8")) + # cv2.waitKey(0) + if self.__save_debug_img: + PIL.Image.fromarray(img_erode).save( + 
os.path.join(saving_path, "mask_2_erode.jpg") ) - morphocut.Call(planktoscope.light.segmenting()) + logger.info("Dilate calc") + # start = time.monotonic() + # logger.debug(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) - # Read image - img = morphocut.image.ImageReader(abs_path) + kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (4, 4)) + img_dilate = cv2.dilate(img_erode, kernel) - # Show progress bar for frames - morphocut.stream.TQDM(morphocut.str.Format("Frame {name}", name=name)) + # logger.debug(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) + # logger.debug(time.monotonic() - start) + logger.success("Dilate calc") - # Apply running median to approximate the background image - flat_field = morphocut.stat.RunningMedian(img, 5) - - # Correct image - img = img / flat_field - - # Rescale intensities and convert to uint8 to speed up calculations - img = morphocut.image.RescaleIntensity( - img, in_range=(0, 1.1), dtype="uint8" + # cv2.imshow("img_dilate", img_dilate.astype("uint8")) + # cv2.waitKey(0) + if self.__save_debug_img: + PIL.Image.fromarray(img_dilate).save( + os.path.join(saving_path, "mask_3_dilate.jpg") ) - # Filter variable to reduce memory load - morphocut.stream.FilterVariables(name, img) + logger.info("Close calc") + # start = time.monotonic() + # logger.debug(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) - # Save cleaned images - # frame_fn = morphocut.str.Format(os.path.join("/home/pi/PlanktonScope/tmp","CLEAN", "{name}.jpg"), name=name) - # morphocut.image.ImageWriter(frame_fn, img) + img_close = cv2.morphologyEx(img_dilate, cv2.MORPH_CLOSE, kernel) - # Convert image to uint8 gray - img_gray = morphocut.image.RGB2Gray(img) + # logger.debug(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) + # logger.debug(time.monotonic() - start) + logger.success("Close calc") - # ? 
- img_gray = morphocut.Call(skimage.util.img_as_ubyte, img_gray) - - # Canny edge detection using OpenCV - img_canny = morphocut.Call(cv2.Canny, img_gray, 50, 100) - - # Dilate using OpenCV - kernel = morphocut.Call( - cv2.getStructuringElement, cv2.MORPH_ELLIPSE, (15, 15) - ) - img_dilate = morphocut.Call(cv2.dilate, img_canny, kernel, iterations=2) - - # Close using OpenCV - kernel = morphocut.Call( - cv2.getStructuringElement, cv2.MORPH_ELLIPSE, (5, 5) - ) - img_close = morphocut.Call( - cv2.morphologyEx, img_dilate, cv2.MORPH_CLOSE, kernel, iterations=1 + # cv2.imshow("img_close", img_close.astype("uint8")) + # cv2.waitKey(0) + if self.__save_debug_img: + PIL.Image.fromarray(img_close).save( + os.path.join(saving_path, "mask_4_close.jpg") ) - # Erode using OpenCV - kernel = morphocut.Call( - cv2.getStructuringElement, cv2.MORPH_ELLIPSE, (15, 15) - ) - mask = morphocut.Call(cv2.erode, img_close, kernel, iterations=2) + logger.info("Erode calc 2") + # start = time.monotonic() + # logger.debug(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) - # Find objects - regionprops = morphocut.image.FindRegions( - mask, img_gray, min_area=1000, padding=10, warn_empty=name + img_erode_2 = cv2.erode(img_close, kernel) + + # logger.debug(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) + # logger.debug(time.monotonic() - start) + logger.success("Erode calc 2") + + # cv2.imshow("img_erode_2", img_erode_2.astype("uint8")) + # cv2.waitKey(0) + if self.__save_debug_img: + self._save_mask( + img_erode_2, + os.path.join(saving_path, "mask_5_erode_2.jpg"), ) - # For an object, extract a vignette/ROI from the image - roi_orig = morphocut.image.ExtractROI(img, regionprops, bg_color=255) - - # Generate an object identifier - i = morphocut.stream.Enumerate() - - # morphocut.Call(print,i) - - # Define the ID of each object - object_id = morphocut.str.Format("{name}_{i:d}", name=name, i=i) - - # morphocut.Call(print,object_id) - - # Define the name of each object - object_fn = morphocut.str.Format( - os.path.join(self.__working_obj_path, "{name}.jpg"), - name=object_id, + img_final = self._apply_previous_mask(img_erode_2) + if self.__save_debug_img: + self._save_mask( + img_final, + os.path.join(saving_path, "mask_6_final.jpg"), ) + logger.success("Mask created") + return img_final - # Save the image of the object with its name - morphocut.image.ImageWriter(object_fn, roi_orig) + def _apply_previous_mask(self, mask): + if self.__mask_to_remove is not None: + # start = time.monotonic() + # np.append(self.__mask_to_remove, img_erode_2) + # logger.debug(time.monotonic() - start) + mask_and = mask & self.__mask_to_remove + img_final = mask - mask_and + self.__mask_to_remove = mask + logger.success("Done mask") + return img_final + else: + logger.debug("First mask") + self.__mask_to_remove = mask + return mask - # Calculate features. The calculated features are added to the global_metadata. - # Returns a Variable representing a dict for every object in the stream. 
- meta = morphocut.contrib.zooprocess.CalculateZooProcessFeatures( - regionprops, prefix="object_", meta=self.__global_metadata - ) + def _get_color_info(self, bgr_img, mask): + # bgr_mean, bgr_stddev = cv2.meanStdDev(bgr_img, mask=mask) + # (b_channel, g_channel, r_channel) = cv2.split(bgr_img) + quartiles = [0, 0.05, 0.25, 0.50, 0.75, 0.95, 1] + # b_quartiles = np.quantile(b_channel, quartiles) + # g_quartiles = np.quantile(g_channel, quartiles) + # r_quartiles = np.quantile(r_channel, quartiles) + hsv_img = cv2.cvtColor(bgr_img, cv2.COLOR_BGR2HSV) + (h_channel, s_channel, v_channel) = cv2.split(hsv_img) + # hsv_mean, hsv_stddev = cv2.meanStdDev(hsv_img, mask=mask) + h_mean = np.mean(h_channel) # add once numpy is upgraded to 1.20 , where=mask + s_mean = np.mean(s_channel) + v_mean = np.mean(v_channel) + h_stddev = np.std(h_channel) + s_stddev = np.std(s_channel) + v_stddev = np.std(v_channel) + # TODO Add skewness and kurtosis calculation (with scipy) here + # using https://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.skew.html#scipy.stats.skew + # and https://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.kurtosis.html#scipy.stats.kurtosis + # h_quartiles = np.quantile(h_channel, quartiles) + # s_quartiles = np.quantile(s_channel, quartiles) + # v_quartiles = np.quantile(v_channel, quartiles) + return { + # "object_MeanRedLevel": bgr_mean[2][0], + # "object_MeanGreenLevel": bgr_mean[1][0], + # "object_MeanBlueLevel": bgr_mean[0][0], + # "object_StdRedLevel": bgr_stddev[2][0], + # "object_StdGreenLevel": bgr_stddev[1][0], + # "object_StdBlueLevel": bgr_stddev[0][0], + # "object_minRedLevel": r_quartiles[0], + # "object_Q05RedLevel": r_quartiles[1], + # "object_Q25RedLevel": r_quartiles[2], + # "object_Q50RedLevel": r_quartiles[3], + # "object_Q75RedLevel": r_quartiles[4], + # "object_Q95RedLevel": r_quartiles[5], + # "object_maxRedLevel": r_quartiles[6], + # "object_minGreenLevel": g_quartiles[0], + # "object_Q05GreenLevel": g_quartiles[1], + # "object_Q25GreenLevel": g_quartiles[2], + # "object_Q50GreenLevel": g_quartiles[3], + # "object_Q75GreenLevel": g_quartiles[4], + # "object_Q95GreenLevel": g_quartiles[5], + # "object_maxGreenLevel": g_quartiles[6], + # "object_minBlueLevel": b_quartiles[0], + # "object_Q05BlueLevel": b_quartiles[1], + # "object_Q25BlueLevel": b_quartiles[2], + # "object_Q50BlueLevel": b_quartiles[3], + # "object_Q75BlueLevel": b_quartiles[4], + # "object_Q95BlueLevel": b_quartiles[5], + # "object_maxBlueLevel": b_quartiles[6], + "object_MeanHue": h_mean, + "object_MeanSaturation": s_mean, + "object_MeanValue": v_mean, + "object_StdHue": h_stddev, + "object_StdSaturation": s_stddev, + "object_StdValue": v_stddev, + # "object_minHue": h_quartiles[0], + # "object_Q05Hue": h_quartiles[1], + # "object_Q25Hue": h_quartiles[2], + # "object_Q50Hue": h_quartiles[3], + # "object_Q75Hue": h_quartiles[4], + # "object_Q95Hue": h_quartiles[5], + # "object_maxHue": h_quartiles[6], + # "object_minSaturation": s_quartiles[0], + # "object_Q05Saturation": s_quartiles[1], + # "object_Q25Saturation": s_quartiles[2], + # "object_Q50Saturation": s_quartiles[3], + # "object_Q75Saturation": s_quartiles[4], + # "object_Q95Saturation": s_quartiles[5], + # "object_maxSaturation": s_quartiles[6], + # "object_minValue": v_quartiles[0], + # "object_Q05Value": v_quartiles[1], + # "object_Q25Value": v_quartiles[2], + # "object_Q50Value": v_quartiles[3], + # "object_Q75Value": v_quartiles[4], + # "object_Q95Value": v_quartiles[5], + # "object_maxValue": v_quartiles[6], + } 
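# The TODO above mentions adding skewness and kurtosis with scipy. A minimal,
# self-contained sketch of what that could look like is given below; the helper
# name (_get_extra_color_stats) and the masking of the HSV channels before
# computing the statistics are illustrative assumptions, not part of this patch.
import cv2
import scipy.stats


def _get_extra_color_stats(bgr_img, mask):
    """Return HSV skewness and kurtosis computed on the masked object pixels."""
    hsv_img = cv2.cvtColor(bgr_img, cv2.COLOR_BGR2HSV)
    stats = {}
    for name, channel in zip(("Hue", "Saturation", "Value"), cv2.split(hsv_img)):
        # keep only the pixels that belong to the object when a mask is given
        values = channel[mask > 0] if mask is not None else channel.ravel()
        stats[f"object_Skew{name}"] = float(scipy.stats.skew(values, axis=None))
        stats[f"object_Kurt{name}"] = float(scipy.stats.kurtosis(values, axis=None))
    return stats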
- # Get all the metadata - json_meta = morphocut.Call(json.dumps, meta, sort_keys=True, default=str) + def _extract_metadata_from_regionprop(self, prop): + return { + "label": prop.label, + # width of the smallest rectangle enclosing the object + "width": prop.bbox[3] - prop.bbox[1], + # height of the smallest rectangle enclosing the object + "height": prop.bbox[2] - prop.bbox[0], + # X coordinates of the top left point of the smallest rectangle enclosing the object + "bx": prop.bbox[1], + # Y coordinates of the top left point of the smallest rectangle enclosing the object + "by": prop.bbox[0], + # circularity : (4∗π ∗Area)/Perim^2 a value of 1 indicates a perfect circle, a value approaching 0 indicates an increasingly elongated polygon + "circ.": (4 * np.pi * prop.filled_area) / prop.perimeter ** 2, + # Surface area of the object excluding holes, in square pixels (=Area*(1-(%area/100)) + "area_exc": prop.area, + # Surface area of the object in square pixels + "area": prop.filled_area, + # Percentage of object’s surface area that is comprised of holes, defined as the background grey level + "%area": 1 - (prop.area / prop.filled_area), + # Primary axis of the best fitting ellipse for the object + "major": prop.major_axis_length, + # Secondary axis of the best fitting ellipse for the object + "minor": prop.minor_axis_length, + # Y position of the center of gravity of the object + "y": prop.centroid[0], + # X position of the center of gravity of the object + "x": prop.centroid[1], + # The area of the smallest polygon within which all points in the objet fit + "convex_area": prop.convex_area, + # # Minimum grey value within the object (0 = black) + # "min": prop.min_intensity, + # # Maximum grey value within the object (255 = white) + # "max": prop.max_intensity, + # # Average grey value within the object ; sum of the grey values of all pixels in the object divided by the number of pixels + # "mean": prop.mean_intensity, + # # Integrated density. The sum of the grey values of the pixels in the object (i.e. = Area*Mean) + # "intden": prop.filled_area * prop.mean_intensity, + # The length of the outside boundary of the object + "perim.": prop.perimeter, + # major/minor + "elongation": np.divide(prop.major_axis_length, prop.minor_axis_length), + # max-min + # "range": prop.max_intensity - prop.min_intensity, + # perim/area_exc + "perimareaexc": prop.perimeter / prop.area, + # perim/major + "perimmajor": prop.perimeter / prop.major_axis_length, + # (4 ∗ π ∗ Area_exc)/perim 2 + "circex": np.divide(4 * np.pi * prop.area, prop.perimeter ** 2), + # Angle between the primary axis and a line parallel to the x-axis of the image + "angle": prop.orientation / np.pi * 180 + 90, + # # X coordinate of the top left point of the image + # 'xstart': data_object['raw_img']['meta']['xstart'], + # # Y coordinate of the top left point of the image + # 'ystart': data_object['raw_img']['meta']['ystart'], + # Maximum feret diameter, i.e. 
the longest distance between any two points along the object boundary + # 'feret': data_object['raw_img']['meta']['feret'], + # feret/area_exc + # 'feretareaexc': data_object['raw_img']['meta']['feret'] / property.area, + # perim/feret + # 'perimferet': property.perimeter / data_object['raw_img']['meta']['feret'], + "bounding_box_area": prop.bbox_area, + "eccentricity": prop.eccentricity, + "equivalent_diameter": prop.equivalent_diameter, + "euler_number": prop.euler_number, + "extent": prop.extent, + "local_centroid_col": prop.local_centroid[1], + "local_centroid_row": prop.local_centroid[0], + "solidity": prop.solidity, + } - # Publish the json containing all the metadata to via MQTT to Node-RED - morphocut.Call( - self.segmenter_client.client.publish, - "status/segmenter/metric", - json_meta, - ) + def _slice_image(self, img, name, mask, start_count=0): + """Slice a given image using give mask - # Add object_id to the metadata dictionary - meta["object_id"] = object_id + Args: + img (img array): Image to slice + name (string): name of the original image + mask (mask binary array): mask to use slice with + start_count (int, optional): count start to number the objects, so each one is unique. Defaults to 0. - # Generate object filenames - orig_fn = morphocut.str.Format("{object_id}.jpg", object_id=object_id) + Returns: + tuple: (Number of saved objects, original number of objects before size filtering) + """ + # TODO retrieve here all those from the global metadata + minESD = 40 # microns + minArea = math.pi * (minESD / 2) * (minESD / 2) + pixel_size = 1.01 # to be retrieved from metadata + # minsizepix = minArea / pixel_size / pixel_size + minsizepix = (minESD / pixel_size) ** 2 - # Write objects to an EcoTaxa archive: - # roi image in original color, roi image in grayscale, metadata associated with each object - morphocut.contrib.ecotaxa.EcotaxaWriter( - self.__archive_fn, (orig_fn, roi_orig), meta - ) + labels, nlabels = skimage.measure.label(mask, return_num=True) + regionprops = skimage.measure.regionprops(labels) + regionprops_filtered = [ + region for region in regionprops if region.bbox_area >= minsizepix + ] + object_number = len(regionprops_filtered) + logger.debug(f"Found {nlabels} labels, or {object_number} after size filtering") - # Progress bar for objects - morphocut.stream.TQDM( - morphocut.str.Format("Object {object_id}", object_id=object_id) - ) - - id_json = morphocut.str.Format( - '{{"object_id":"{object_id}"}}', object_id=object_id - ) + # TODO Actually segment the images and save the ROI images (and their mask?) 
and the TSV for ecotaxa + for (i, region) in enumerate(regionprops_filtered): + region.label = i + start_count # Publish the object_id to via MQTT to Node-RED - morphocut.Call( - self.segmenter_client.client.publish, + self.segmenter_client.client.publish( "status/segmenter/object_id", - id_json, + f'{{"object_id":"{region.label}"}}', + ) + obj_image = img[region.slice] + object_id = f"{name}_{i}" + object_fn = os.path.join(self.__working_obj_path, f"{object_id}.jpg") + self._save_image(obj_image, object_fn) + + if self.__save_debug_img: + self._save_mask( + region.filled_image, + os.path.join(self.__working_debug_path, f"obj_{i}_mask.jpg"), + ) + + colors = self._get_color_info(obj_image, region.filled_image) + metadata = self._extract_metadata_from_regionprop(region) + self.__global_metadata["objects"].append( + { + "name": f"{object_id}", + "metadata": {**metadata, **colors}, + } ) - logger.info("Morphocut's Pipeline has been created") + if self.__save_debug_img: + if object_number: + for region in regionprops_filtered: + tagged_image = cv2.drawMarker( + img, + (int(region.centroid[1]), int(region.centroid[0])), + (0, 0, 255), + cv2.MARKER_CROSS, + ) + tagged_image = cv2.rectangle( + img, + pt1=region.bbox[-3:-5:-1], + pt2=region.bbox[-1:-3:-1], + color=(150, 0, 200), + thickness=1, + ) + + # contours = [region.bbox for region in regionprops_filtered] + # for contour in contours: + # tagged_image = cv2.rectangle( + # img, pt1=(contours[0][1],contours[0][0]), pt2=(contours[0][3],contours[0][2]), color=(0, 0, 255), thickness=2 + # ) + # contours = [region.coords for region in regionprops_filtered] + # for contour in contours: + # tagged_image = cv2.drawContours( + # img_erode_2, contour, -1, color=(0, 0, 255), thickness=2 + # ) + + # cv2.imshow("tagged_image", tagged_image.astype("uint8")) + # cv2.waitKey(0) + self._save_image( + tagged_image, + os.path.join(self.__debug_objects_root, name, "tagged.jpg"), + ) + else: + self._save_image( + img, + os.path.join(self.__debug_objects_root, name, "tagged.jpg"), + ) + return (object_number, len(regionprops)) + + def _pipe(self): + logger.info("Finding images") + images_list = self._find_files( + self.__working_path, ("JPG", "jpg", "JPEG", "jpeg") + ) + logger.debug(f"Images found are {images_list}") + images_count = len(images_list) + logger.debug(f"We found {images_count} images, good luck!") + + first_start = time.monotonic() + self.__mask_to_remove = None + average = 0 + total_objects = 0 + average_objects = 0 + recalculate_flat = True + # TODO check image list here to find if a flat exists + # we recalculate the flat every 10 pictures + if recalculate_flat: + self._calculate_flat(images_list[i : i + 10], 10, self.__working_path) + recalculate_flat = False + + if self.__save_debug_img: + self._save_image( + self.__flat, + os.path.join(self.__working_debug_path, "flat_color.jpg"), + ) + + average_time = 0 + + for (i, filename) in enumerate(images_list): + name = os.path.splitext(filename) + + # we recalculate the flat if the heuristics detected we should + if recalculate_flat: # not i % 10 and i < (images_count - 10) + if i > len(images_list) - 11: + flat = self._calculate_flat( + images_list[i - 10 : i], 10, self.__working_path + ) + else: + flat = self._calculate_flat( + images_list[i : i + 10], 10, self.__working_path + ) + recalculate_flat = False + if self.__save_debug_img: + self._save_image( + self.__flat, + os.path.join( + os.path.dirname(self.__working_debug_path), + f"flat_color_{i}.jpg", + ), + ) + + self.__working_debug_path = 
os.path.join( + self.__debug_objects_root, + self.__working_path.split(self.__img_path)[1].strip(), + name, + ) + + logger.debug(f"The debug objects path is {self.__working_debug_path}") + # Create the objects path + if not os.path.exists(self.__working_debug_path): + # create the path! + os.makedirs(self.__working_debug_path) + + start = time.monotonic() + logger.info(f"Starting work on {name}, image {i+1}/{images_count}") + + img = self._open_and_apply_flat( + os.path.join(self.__working_path, images_list[i]), self.__flat + ) + + # logger.debug(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) + # logger.debug(time.monotonic() - start) + + # start = time.monotonic() + # logger.debug(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) + + mask = self._create_mask(img, self.__working_debug_path) + + # logger.debug(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) + # logger.debug(time.monotonic() - start) + + # start = time.monotonic() + # logger.debug(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) + + objects_count, _ = self._slice_image(img, name, mask, total_objects) + total_objects += objects_count + # Simple heuristic to detect a movement of the flow cell and a change in the resulting flat + if objects_count > average_objects + 20: + logger.debug( + f"We need to recalculate a flat since we have {objects_count} new objects instead of the average of {average_objects}" + ) + recalculate_flat = True + average_objects = (average_objects * i + objects_count) / (i + 1) + + # logger.debug(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss) + # logger.debug(time.monotonic() - start) + delay = time.monotonic() - start + average_time = (average_time * i + delay) / (i + 1) + logger.success( + f"Work on {name} is OVER! Done in {delay}s, average time is {average_time}s, average number of objects is {average_objects}" + ) + logger.success( + f"We also found {objects_count} objects in this image, at a rate of {objects_count / delay} objects per second" + ) + logger.success(f"So far we found {total_objects} objects") + + total_duration = (time.monotonic() - first_start) / 60 + logger.success( + f"{images_count} images done in {total_duration} minutes, or an average of {average_time}s per image or {total_duration*60/images_count}s per image" + ) + logger.success( + f"We also found {total_objects} objects, or an average of {total_objects / (total_duration * 60)}objects per second" + ) + # cleanup + # we're done free some mem + self.__flat = None + + def segment_all(self, paths: list): + """Starts the segmentation in all the folders given recursively + + Args: + paths (list, optional): path list to recursively explore. Defaults to [self.__img_path]. + """ + img_paths = [] + for path in paths: + for x in os.walk(path): + if x[0] not in img_paths: + img_paths.append(x[0]) + self.segment_list(img_paths) + + def segment_list(self, path_list: list): + """Starts the segmentation in the folders given + + Args: + path_list (list): [description] + """ + # TODO check for recursive flag and force flag here + logger.info(f"The pipeline will be run in {len(path_list)} directories") + logger.debug(f"Those are {path_list}") + for path in path_list: + logger.debug(f"{path}: Checking for the presence of metadata.json") + if os.path.exists(os.path.join(path, "metadata.json")): + # The file exists, let's run the pipe! 
+ if not self.segment_path(path): + logger.error(f"There was en error while segmenting {path}") + else: + logger.debug(f"Moving to the next folder, {path} has no metadata.json") + # Publish the status "Done" to via MQTT to Node-RED + self.segmenter_client.client.publish("status/segmenter", '{"status":"Done"}') + + def segment_path(self, path): + """Starts the segmentation in the given path + + Args: + path (string): path of folder to do segmentation in + """ + logger.info(f"Loading the metadata file for {path}") + with open(os.path.join(path, "metadata.json"), "r") as config_file: + self.__global_metadata = json.load(config_file) + logger.debug(f"Configuration loaded is {self.__global_metadata}") + + # Remove all the key,value pairs that don't start with acq, sample, object or process (for Ecotaxa) + self.__global_metadata = dict( + filter( + lambda item: item[0].startswith(("acq", "sample", "object", "process")), + self.__global_metadata.items(), + ) + ) + + project = self.__global_metadata["sample_project"].replace(" ", "_") + date = datetime.datetime.utcnow().isoformat() + sample = self.__global_metadata["sample_id"] + + # TODO Add process informations to metadata here + + # Define the name of the .zip file that will contain the images and the .tsv table for EcoTaxa + self.__archive_fn = os.path.join( + self.__ecotaxa_path, + # filename includes project name, timestamp and sample id + f"export_{project}_{date}_{sample}.zip", + ) + + self.__working_path = path + + # recreate the subfolder img architecture of this folder inside objects + # when we split the working path with the base img path, we get the date/sample architecture back + # os.path.relpath("/home/pi/data/img/2020-10-17/5/5","/home/pi/data/img/") => '2020-10-17/5/5' + + sample_path = os.path.relpath(self.__working_path, self.__img_path) + + logger.debug(f"base obj path is {self.__objects_root}") + logger.debug(f"sample path is {sample_path}") + + self.__working_obj_path = os.path.join(self.__objects_root, sample_path) + + logger.debug(f"The working objects path is {self.__working_obj_path}") + + self.__working_debug_path = os.path.join(self.__debug_objects_root, sample_path) + + logger.debug(f"The debug objects path is {self.__working_debug_path}") + + # Create the paths + for path in [self.__working_obj_path, self.__working_debug_path]: + if not os.path.exists(path): + # create the path! 
+ os.makedirs(path) + + logger.debug(f"The archive folder is {self.__archive_fn}") + + logger.info(f"Starting the pipeline in {path}") + + try: + self._pipe() + except Exception as e: + logger.exception(f"There was an error in the pipeline {e}") + return False + + # Add file 'done' to path to mark the folder as already segmented + with open(os.path.join(self.__working_path, "done"), "w") as done_file: + done_file.writelines(datetime.datetime.utcnow().isoformat()) + logger.info(f"Pipeline has been run for {path}") + return True @logger.catch def treat_message(self): - action = "" + last_message = {} if self.segmenter_client.new_message_received(): logger.info("We received a new message") last_message = self.segmenter_client.msg["payload"] logger.debug(last_message) - action = self.segmenter_client.msg["payload"]["action"] self.segmenter_client.read_message() - # If the command is "segment" - if action == "segment": - # {"action":"segment"} - # Publish the status "Started" to via MQTT to Node-RED - self.segmenter_client.client.publish( - "status/segmenter", '{"status":"Started"}' - ) - img_paths = [x[0] for x in os.walk(self.__img_path)] - logger.info(f"The pipeline will be run in {len(img_paths)} directories") - logger.debug(f"Those are {img_paths}") - for path in img_paths: - logger.info(f"{path}: Checking for the presence of metadata.json") - if os.path.exists(os.path.join(path, "metadata.json")): - # The file exists, let's run the pipe! - logger.info(f"Loading the metadata file for {path}") - with open(os.path.join(path, "metadata.json"), "r") as config_file: - self.__global_metadata = json.load(config_file) - logger.debug( - f"Configuration loaded is {self.__global_metadata}" - ) + if "action" in last_message: + # If the command is "segment" + if last_message["action"] == "segment": + path = None + recursive = True + force = False + # {"action":"segment"} + if "settings" in last_message: + if "force" in last_message["settings"]: + # force rework of already done folder + force = last_message["settings"]["force"] + if "recursive" in last_message["settings"]: + # parse folders recursively starting from the given parameter + recursive = last_message["settings"]["recursive"] + # TODO eventually add customisation to segmenter parameters here - # TODO Remove all the keys that don't start with acq, sample, object or process - # Otherwise Ecotaxata is annoying as fuck - self.__global_metadata.pop("description") + if "path" in last_message: + path = last_message["path"] - project = self.__global_metadata["sample_project"].replace(" ", "_") - date = self.__global_metadata["process_datetime"] - sample = self.__global_metadata["sample_id"] - # Define the name of the .zip file that will contain the images and the .tsv table for EcoTaxa - self.__archive_fn = os.path.join( - self.__ecotaxa_path, - # filename includes project name, timestamp and sample id - f"export_{project}_{date}_{sample}.zip", - ) - - self.__working_path = path - - # recreate the subfolder img architecture of this folder inside objects - # when we split the working path with the base img path, we get the date/sample architecture back - # "/home/pi/data/img/2020-10-17/5/5".split("/home/pi/data/img/")[1] => '2020-10-17/5/5' - sample_path = self.__working_path.split(self.__img_path)[1].strip() - logger.debug(f"base obj path is {self.__objects_base_path}") - logger.debug(f"sample path is {sample_path}") - self.__working_obj_path = os.path.join( - self.__objects_base_path, sample_path - ) - - logger.debug( - f"The working objects path is 
{self.__working_obj_path}" - ) - # Create the objects path - if not os.path.exists(self.__working_obj_path): - # create the path! - os.makedirs(self.__working_obj_path) - - logger.debug(f"The archive folder is {self.__archive_fn}") - - self.__create_morphocut_pipeline() - - logger.info(f"Starting the pipeline in {path}") - - # Start the MorphoCut Pipeline on the found path - try: - self.__pipe.run() - except Exception as e: - planktoscope.light.error() - logger.exception(f"There was an error in the pipeline {e}") - logger.info(f"Pipeline has been run for {path}") + # Publish the status "Started" to via MQTT to Node-RED + self.segmenter_client.client.publish( + "status/segmenter", '{"status":"Started"}' + ) + if path: + if recursive: + self.segment_all(path) + else: + self.segment_list(path) else: - logger.info(f"Moving to the next folder, {path} is empty") + self.segment_all(self.__img_path) - # remove directory - # shutil.rmtree(import_path) + elif last_message["action"] == "stop": + logger.info("The segmentation has been interrupted.") - # Publish the status "Done" to via MQTT to Node-RED - self.segmenter_client.client.publish( - "status/segmenter", '{"status":"Done"}' - ) - planktoscope.light.ready() + # Publish the status "Interrupted" to via MQTT to Node-RED + self.segmenter_client.client.publish( + "status/segmenter", '{"status":"Interrupted"}' + ) - elif action == "stop": - logger.info("The segmentation has been interrupted.") + elif last_message["action"] == "update_config": + logger.error( + "We can't update the configuration while we are segmenting." + ) - # Publish the status "Interrupted" to via MQTT to Node-RED - self.segmenter_client.client.publish( - "status/segmenter", '{"status":"Interrupted"}' - ) - planktoscope.light.interrupted() + # Publish the status "Interrupted" to via MQTT to Node-RED + self.segmenter_client.client.publish( + "status/segmenter", '{"status":"Busy"}' + ) - elif action == "update_config": - logger.error("We can't update the configuration while we are segmenting.") - # Publish the status "Interrupted" to via MQTT to Node-RED - self.segmenter_client.client.publish( - "status/segmenter", '{"status":"Busy"}' - ) - - elif action != "": - logger.warning( - f"We did not understand the received request {action} - {last_message}" - ) + elif last_message["action"] != "": + logger.warning( + f"We did not understand the received request {action} - {last_message}" + ) ################################################################################ # While loop for capturing commands from Node-RED @@ -375,4 +914,9 @@ class SegmenterProcess(multiprocessing.Process): # This is called if this script is launched directly if __name__ == "__main__": # TODO This should be a test suite for this library - pass \ No newline at end of file + segmenter_thread = SegmenterProcess( + None, "/home/rbazile/Documents/pro/PlanktonPlanet/Planktonscope/Segmenter/data/" + ) + segmenter_thread.segment_path( + "/home/rbazile/Documents/pro/PlanktonPlanet/Planktonscope/Segmenter/data/test" + )
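
For reference, the reworked treat_message handler expects a JSON payload on the
"segmenter/segment" topic with an "action" field, an optional "settings" object
("force", "recursive") and an optional "path" list; the new "prepare segmentation"
function node attaches the folders checked in the dashboard list as that "path"
field. A minimal sketch of sending such a command outside Node-RED is shown below;
the broker hostname and the example acquisition folder are assumptions, not values
taken from this patch.

import json

import paho.mqtt.publish as publish

# Ask the segmenter to process a single acquisition folder, without recursing
# into subfolders and without forcing folders that were already segmented.
# The folder below is only an example; it has to exist under the data/img tree
# and contain a metadata.json file for the pipeline to run.
command = {
    "action": "segment",
    "settings": {"force": False, "recursive": False},
    "path": ["/home/pi/data/img/2020-10-17/5/5"],
}
publish.single("segmenter/segment", json.dumps(command), hostname="localhost")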