mirror of
https://github.com/deepmodeling/Uni-Lab-OS
synced 2026-03-26 22:50:58 +00:00
Compare commits
458 Commits
06b6f0d804
...
feat/sampl
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a5b5497dd0 | ||
|
|
b9d6f71970 | ||
|
|
5dda5c61ce | ||
|
|
957fb41a6f | ||
|
|
1d181743ea | ||
|
|
337789e270 | ||
|
|
26271bcab8 | ||
|
|
84a8223173 | ||
|
|
e8d1263488 | ||
|
|
380b39100d | ||
|
|
56eb7e2ab4 | ||
|
|
23ce145f74 | ||
|
|
b0da149252 | ||
|
|
07c9e6f0fe | ||
|
|
ccec6b9d77 | ||
|
|
dadfdf3d8d | ||
|
|
400bb073d4 | ||
|
|
3f63c36505 | ||
|
|
0ae94f7f3c | ||
|
|
7eacae6442 | ||
|
|
f7d2cb4b9e | ||
|
|
bf980d7248 | ||
|
|
27c0544bfc | ||
|
|
d48e77c9ae | ||
|
|
e70a5bea66 | ||
|
|
467d75dc03 | ||
|
|
9feeb0c430 | ||
|
|
b2f26ffb28 | ||
|
|
4b0d1553e9 | ||
|
|
67ddee2ab2 | ||
|
|
1bcdad9448 | ||
|
|
039c96fe01 | ||
|
|
e1555d10a0 | ||
|
|
f2a96b2041 | ||
|
|
329349639e | ||
|
|
e4cc111523 | ||
|
|
d245ceef1b | ||
|
|
6db7fbd721 | ||
|
|
ab05b858e1 | ||
|
|
43e4c71a8e | ||
|
|
2cf58ca452 | ||
|
|
fd73bb7dcb | ||
|
|
a02cecfd18 | ||
|
|
d6accc3f1c | ||
|
|
39dc443399 | ||
|
|
37b1fca962 | ||
|
|
216f19fb62 | ||
|
|
ec7ca6a1fe | ||
|
|
4c8022ee95 | ||
|
|
ad21644db0 | ||
|
|
9dfd58e9af | ||
|
|
31c9f9a172 | ||
|
|
02cd8de4c5 | ||
|
|
a66603ec1c | ||
|
|
ec015e16cd | ||
|
|
965bf36e8d | ||
|
|
aacf3497e0 | ||
|
|
657f952e7a | ||
|
|
0165590290 | ||
|
|
daea1ab54d | ||
|
|
93cb307396 | ||
|
|
1c312772ae | ||
|
|
bad1db5094 | ||
|
|
f26eb69eca | ||
|
|
12c0770c92 | ||
|
|
3d2d428a96 | ||
|
|
78bf57f590 | ||
|
|
e227cddab3 | ||
|
|
f2b993643f | ||
|
|
2e14bf197c | ||
|
|
66c18c080a | ||
|
|
a1c34f138e | ||
|
|
75bb5ec553 | ||
|
|
bb95c89829 | ||
|
|
394c140830 | ||
|
|
e6d8d41183 | ||
|
|
847a300af3 | ||
|
|
a201d7c307 | ||
|
|
3433766bc5 | ||
|
|
7e9e93b29c | ||
|
|
9e1e6da505 | ||
|
|
8a0f000bab | ||
|
|
2ffeb49acb | ||
|
|
5fec753fb9 | ||
|
|
acbaff7bb7 | ||
|
|
706323dc3e | ||
|
|
b0804d939c | ||
|
|
97788b4e07 | ||
|
|
39cc280c91 | ||
|
|
d0ac452405 | ||
|
|
152d3a7563 | ||
|
|
ef14737839 | ||
|
|
5d5569121c | ||
|
|
d23e85ade4 | ||
|
|
02afafd423 | ||
|
|
6ac510dcd2 | ||
|
|
ed56c1eba2 | ||
|
|
16ee3de086 | ||
|
|
ced961050d | ||
|
|
11b2c99836 | ||
|
|
04024bc8a3 | ||
|
|
154048107d | ||
|
|
0b896870ba | ||
|
|
ee609e4aa2 | ||
|
|
5551fbf360 | ||
|
|
e13b250632 | ||
|
|
b8278c5026 | ||
|
|
53e767a054 | ||
|
|
cf7032fa81 | ||
|
|
97681ba433 | ||
|
|
3fa81ab4f6 | ||
|
|
9f4a69ddf5 | ||
|
|
05ae4e72df | ||
|
|
2870c04086 | ||
|
|
343e87df0d | ||
|
|
5d0807cba6 | ||
|
|
4875977d5f | ||
|
|
956b1c905b | ||
|
|
944911c52a | ||
|
|
a13b790926 | ||
|
|
9feadd68c6 | ||
|
|
c68d5246d0 | ||
|
|
49073f2c77 | ||
|
|
b2afc29f15 | ||
|
|
4061280f6b | ||
|
|
6a681e1d73 | ||
|
|
653e6e1ac3 | ||
|
|
2c774bcd1d | ||
|
|
2ba395b681 | ||
|
|
b6b3d59083 | ||
|
|
f40e3f521c | ||
|
|
7cc2fe036f | ||
|
|
f81d20bb1d | ||
|
|
db1b5a869f | ||
|
|
0136630700 | ||
|
|
3c31811f9e | ||
|
|
64f02ff129 | ||
|
|
7d097b8222 | ||
|
|
d266d21104 | ||
|
|
b6d0bbcb17 | ||
|
|
31ebff8e37 | ||
|
|
2132895ba2 | ||
|
|
850eeae55a | ||
|
|
d869c14233 | ||
|
|
24101b3cec | ||
|
|
3bf8aad4d5 | ||
|
|
a599eb70e5 | ||
|
|
0bf6994f95 | ||
|
|
c36f53791c | ||
|
|
eb4d2d96c5 | ||
|
|
8233c41b1d | ||
|
|
0dfd4ce8a8 | ||
|
|
7953b3820e | ||
|
|
eed233fa76 | ||
|
|
0c55147ee4 | ||
|
|
ce6267b8e0 | ||
|
|
975e51cd96 | ||
|
|
c5056b381c | ||
|
|
c35da65b15 | ||
|
|
659cf05be6 | ||
|
|
3b8deb4d1d | ||
|
|
c796615f9f | ||
|
|
a5bad6074f | ||
|
|
1d3a07a736 | ||
|
|
cc2cd57cdf | ||
|
|
39bb7dc627 | ||
|
|
0fda155f55 | ||
|
|
6e3eacd2f0 | ||
|
|
062f1a2153 | ||
|
|
61e8d67800 | ||
|
|
d0884cdbd8 | ||
|
|
545ea45024 | ||
|
|
b9ddee8f2c | ||
|
|
a0c5095304 | ||
|
|
e504505137 | ||
|
|
4d9d5701e9 | ||
|
|
6016c4b588 | ||
|
|
be02bef9c4 | ||
|
|
e62f0c2585 | ||
|
|
b6de0623e2 | ||
|
|
9d081e9fcd | ||
|
|
85a58e3464 | ||
|
|
85590672d8 | ||
|
|
1d4018196d | ||
|
|
5d34f742af | ||
|
|
5bef19e6d6 | ||
|
|
f816799753 | ||
|
|
a45d841769 | ||
|
|
7f0b33b3e3 | ||
|
|
2006406a24 | ||
|
|
f94985632b | ||
|
|
12ba110569 | ||
|
|
97212be8b7 | ||
|
|
9bdd42f12f | ||
|
|
627140da03 | ||
|
|
5ceedb0565 | ||
|
|
8c77a20c43 | ||
|
|
3ff894feee | ||
|
|
fa5896ffdb | ||
|
|
eb504803ac | ||
|
|
8b0c845661 | ||
|
|
693873bfa9 | ||
|
|
57da2d8da2 | ||
|
|
8d1fd01259 | ||
|
|
388259e64b | ||
|
|
2c130e7f37 | ||
|
|
9f7c3f02f9 | ||
|
|
19dd80dcdb | ||
|
|
9d5ed627a2 | ||
|
|
2d0ff87bc8 | ||
|
|
d78475de9a | ||
|
|
88ae56806c | ||
|
|
95dd8beb81 | ||
|
|
4ab3fadbec | ||
|
|
229888f834 | ||
|
|
b443b39ebf | ||
|
|
0434bbc15b | ||
|
|
5791b81954 | ||
|
|
bd51c74fab | ||
|
|
ba81cbddf8 | ||
|
|
4e92a26057 | ||
|
|
c2895bb197 | ||
|
|
0423f4f452 | ||
|
|
41390fbef9 | ||
|
|
98bdb4e7e4 | ||
|
|
30037a077a | ||
|
|
6972680099 | ||
|
|
9d2c93807d | ||
|
|
e728007bc5 | ||
|
|
9c5ecda7cc | ||
|
|
2d26c3fac6 | ||
|
|
f5753afb7c | ||
|
|
398b2dde3f | ||
|
|
62c4135938 | ||
|
|
027b4269c4 | ||
|
|
3757bd9c58 | ||
|
|
c75b7d5aae | ||
|
|
dfc635189c | ||
|
|
d8f3ebac15 | ||
|
|
4a1e703a3a | ||
|
|
55d22a7c29 | ||
|
|
03a4e4ecba | ||
|
|
2316c34cb5 | ||
|
|
a8887161d3 | ||
|
|
25834f5ba0 | ||
|
|
a1e9332b51 | ||
|
|
357fc038ef | ||
|
|
fd58ef07f3 | ||
|
|
93dee2c1dc | ||
|
|
70fbf19009 | ||
|
|
9149155232 | ||
|
|
1ca1792e3c | ||
|
|
485e7e8dd2 | ||
|
|
4ddabdcb65 | ||
|
|
a5b0325301 | ||
|
|
50b44938c7 | ||
|
|
df0d2235b0 | ||
|
|
4e434eeb97 | ||
|
|
ca027bf0eb | ||
|
|
635a332b4e | ||
|
|
edf7a117ca | ||
|
|
70b2715996 | ||
|
|
7e8dfc2dc5 | ||
|
|
9b626489a8 | ||
|
|
03fe208743 | ||
|
|
e913e540a3 | ||
|
|
aed39b648d | ||
|
|
8c8359fab3 | ||
|
|
5d20be0762 | ||
|
|
09f745d300 | ||
|
|
bbcbcde9a4 | ||
|
|
42b437cdea | ||
|
|
ffd0f2d26a | ||
|
|
32422c0b3d | ||
|
|
c44e597dc0 | ||
|
|
4eef012a8e | ||
|
|
ac69452f3c | ||
|
|
57b30f627b | ||
|
|
2d2a4ca067 | ||
|
|
a2613aad4c | ||
|
|
54f75183ff | ||
|
|
735be067dc | ||
|
|
0fe62d64f0 | ||
|
|
2d4ecec1e1 | ||
|
|
0f976a1874 | ||
|
|
b263a7e679 | ||
|
|
7c7f1b31c5 | ||
|
|
00e668e140 | ||
|
|
4989f65a0b | ||
|
|
9fa3688196 | ||
|
|
40fb1ea49c | ||
|
|
18b0bb397e | ||
|
|
65abc5dbf7 | ||
|
|
2455ca15ba | ||
|
|
05a3ff607a | ||
|
|
ec882df36d | ||
|
|
43b992e3eb | ||
|
|
6422fa5a9a | ||
|
|
434b9e98e0 | ||
|
|
040073f430 | ||
|
|
3d95c9896a | ||
|
|
9aa97ed01e | ||
|
|
0b8bdf5e0a | ||
|
|
299f010754 | ||
|
|
15ce0d6883 | ||
|
|
dec474e1a7 | ||
|
|
5f187899fc | ||
|
|
c8d16c7024 | ||
|
|
25d46dc9d5 | ||
|
|
88c4d1a9d1 | ||
|
|
81fd8291c5 | ||
|
|
3a11eb90d4 | ||
|
|
387866b9c9 | ||
|
|
7f40f141f6 | ||
|
|
6fc7ed1b88 | ||
|
|
93f0e08d75 | ||
|
|
4b43734b55 | ||
|
|
174b1914d4 | ||
|
|
704e13f030 | ||
|
|
0c42d60cf2 | ||
|
|
df33e1a214 | ||
|
|
1f49924966 | ||
|
|
609b6006e8 | ||
|
|
67c01271b7 | ||
|
|
a1783f489e | ||
|
|
a8f6527de9 | ||
|
|
54cfaf15f3 | ||
|
|
5610c28b67 | ||
|
|
cfc1ee6e79 | ||
|
|
1c9d2ee98a | ||
|
|
3fe8f4ca44 | ||
|
|
2476821dcc | ||
|
|
7b426ed5ae | ||
|
|
9bbae96447 | ||
|
|
10aabb7592 | ||
|
|
709eb0d91c | ||
|
|
14b7d52825 | ||
|
|
a5397ffe12 | ||
|
|
c6c2da69ba | ||
|
|
622e579063 | ||
|
|
196e0f7e2b | ||
|
|
a632fd495e | ||
|
|
a8cc02a126 | ||
|
|
ad2e1432c6 | ||
|
|
c3b9583eac | ||
|
|
5c47cd0c8a | ||
|
|
63ab1af45d | ||
|
|
a8419dc0c3 | ||
|
|
34f05f2e25 | ||
|
|
0dc2488f02 | ||
|
|
f13156e792 | ||
|
|
13fd1ac572 | ||
|
|
f8ef6e0686 | ||
|
|
94a7b8aaca | ||
|
|
301bea639e | ||
|
|
4b5a83efa4 | ||
|
|
2889e9be2c | ||
|
|
304aebbba7 | ||
|
|
091c9fa247 | ||
|
|
67ca45a240 | ||
|
|
7aab2ea493 | ||
|
|
62f3a6d696 | ||
|
|
eb70ad0e18 | ||
|
|
768f43880e | ||
|
|
762c3c737c | ||
|
|
ace98a4472 | ||
|
|
41eaa88c6f | ||
|
|
a1a55a2c0a | ||
|
|
2eaa0ca729 | ||
|
|
6f8f070f40 | ||
|
|
da4bd927e0 | ||
|
|
01f8816597 | ||
|
|
e5006285df | ||
|
|
573c724a5c | ||
|
|
09549d2839 | ||
|
|
50c7777cea | ||
|
|
4888f02c09 | ||
|
|
779c9693d9 | ||
|
|
ffa841a41a | ||
|
|
fc669f09f8 | ||
|
|
2ca0311de6 | ||
|
|
94cdcbf24e | ||
|
|
1cd07915e7 | ||
|
|
b600fc666d | ||
|
|
9e214c56c1 | ||
|
|
bdf27a7e82 | ||
|
|
2493fb9f94 | ||
|
|
c7a0ff67a9 | ||
|
|
711a7c65fa | ||
|
|
cde7956896 | ||
|
|
95b6fd0451 | ||
|
|
513e848d89 | ||
|
|
58d1cc4720 | ||
|
|
5676dd6589 | ||
|
|
1ae274a833 | ||
|
|
22b88c8441 | ||
|
|
81bcc1907d | ||
|
|
8cffd3dc21 | ||
|
|
a722636938 | ||
|
|
f68340d932 | ||
|
|
361eae2f6d | ||
|
|
c25283ae04 | ||
|
|
961752fb0d | ||
|
|
55165024dd | ||
|
|
6ddceb8393 | ||
|
|
4e52c7d2f4 | ||
|
|
0b56efc89d | ||
|
|
a27b93396a | ||
|
|
2a60a6c27e | ||
|
|
5dda94044d | ||
|
|
0cfc6f45e3 | ||
|
|
831f4549f9 | ||
|
|
f4d4eb06d3 | ||
|
|
e3b8164f6b | ||
|
|
78c04acc2e | ||
|
|
cd0428ea78 | ||
|
|
bdddbd57ba | ||
|
|
a312de08a5 | ||
|
|
68513b5745 | ||
|
|
19027350fb | ||
|
|
bbbdb06bbc | ||
|
|
cd84e26126 | ||
|
|
ce5bab3af1 | ||
|
|
82d9ef6bf7 | ||
|
|
332b33c6f4 | ||
|
|
1ec642ee3a | ||
|
|
7d8e6d029b | ||
|
|
5ec8a57a1f | ||
|
|
ae3c1100ae | ||
|
|
14bc2e6cda | ||
|
|
9f823a4198 | ||
|
|
02c79363c1 | ||
|
|
227ff1284a | ||
|
|
4b7bde6be5 | ||
|
|
8a669ac35a | ||
|
|
a1538da39e | ||
|
|
0063df4cf3 | ||
|
|
e570ba4976 | ||
|
|
e8c1f76dbb | ||
|
|
f791c1a342 | ||
|
|
ea60cbe891 | ||
|
|
eac9b8ab3d | ||
|
|
573bcf1a6c | ||
|
|
50e93cb1af | ||
|
|
fe1a029a9b | ||
|
|
662c063f50 | ||
|
|
01cbbba0b3 | ||
|
|
e6c556cf19 | ||
|
|
0605f305ed | ||
|
|
37d8108ec4 | ||
|
|
6081dac561 | ||
|
|
5b2d066127 | ||
|
|
06e66765e7 | ||
|
|
98ce360088 | ||
|
|
5cd0f72fbd | ||
|
|
343f394203 | ||
|
|
46aa7a7bd2 | ||
|
|
a66369e2c3 |
@@ -3,7 +3,7 @@
|
|||||||
|
|
||||||
package:
|
package:
|
||||||
name: unilabos
|
name: unilabos
|
||||||
version: 0.10.18
|
version: 0.10.17
|
||||||
|
|
||||||
source:
|
source:
|
||||||
path: ../../unilabos
|
path: ../../unilabos
|
||||||
@@ -46,15 +46,13 @@ requirements:
|
|||||||
- jinja2
|
- jinja2
|
||||||
- requests
|
- requests
|
||||||
- uvicorn
|
- uvicorn
|
||||||
- if: not osx
|
- opcua # [not osx]
|
||||||
then:
|
|
||||||
- opcua
|
|
||||||
- pyserial
|
- pyserial
|
||||||
- pandas
|
- pandas
|
||||||
- pymodbus
|
- pymodbus
|
||||||
- matplotlib
|
- matplotlib
|
||||||
- pylibftdi
|
- pylibftdi
|
||||||
- uni-lab::unilabos-env ==0.10.18
|
- uni-lab::unilabos-env ==0.10.17
|
||||||
|
|
||||||
about:
|
about:
|
||||||
repository: https://github.com/deepmodeling/Uni-Lab-OS
|
repository: https://github.com/deepmodeling/Uni-Lab-OS
|
||||||
|
|||||||
@@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
package:
|
package:
|
||||||
name: unilabos-env
|
name: unilabos-env
|
||||||
version: 0.10.18
|
version: 0.10.17
|
||||||
|
|
||||||
build:
|
build:
|
||||||
noarch: generic
|
noarch: generic
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
|
|
||||||
package:
|
package:
|
||||||
name: unilabos-full
|
name: unilabos-full
|
||||||
version: 0.10.18
|
version: 0.10.17
|
||||||
|
|
||||||
build:
|
build:
|
||||||
noarch: generic
|
noarch: generic
|
||||||
@@ -11,7 +11,7 @@ build:
|
|||||||
requirements:
|
requirements:
|
||||||
run:
|
run:
|
||||||
# Base unilabos package (includes unilabos-env)
|
# Base unilabos package (includes unilabos-env)
|
||||||
- uni-lab::unilabos ==0.10.18
|
- uni-lab::unilabos ==0.10.17
|
||||||
# Documentation tools
|
# Documentation tools
|
||||||
- sphinx
|
- sphinx
|
||||||
- sphinx_rtd_theme
|
- sphinx_rtd_theme
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
package:
|
package:
|
||||||
name: ros-humble-unilabos-msgs
|
name: ros-humble-unilabos-msgs
|
||||||
version: 0.10.18
|
version: 0.10.17
|
||||||
source:
|
source:
|
||||||
path: ../../unilabos_msgs
|
path: ../../unilabos_msgs
|
||||||
target_directory: src
|
target_directory: src
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
package:
|
package:
|
||||||
name: unilabos
|
name: unilabos
|
||||||
version: "0.10.18"
|
version: "0.10.17"
|
||||||
|
|
||||||
source:
|
source:
|
||||||
path: ../..
|
path: ../..
|
||||||
|
|||||||
@@ -2,7 +2,6 @@ import json
|
|||||||
import logging
|
import logging
|
||||||
import traceback
|
import traceback
|
||||||
import uuid
|
import uuid
|
||||||
import xml.etree.ElementTree as ET
|
|
||||||
from typing import Any, Dict, List
|
from typing import Any, Dict, List
|
||||||
|
|
||||||
import networkx as nx
|
import networkx as nx
|
||||||
@@ -25,7 +24,15 @@ class SimpleGraph:
|
|||||||
|
|
||||||
def add_edge(self, source, target, **attrs):
|
def add_edge(self, source, target, **attrs):
|
||||||
"""添加边"""
|
"""添加边"""
|
||||||
edge = {"source": source, "target": target, **attrs}
|
# edge = {"source": source, "target": target, **attrs}
|
||||||
|
edge = {
|
||||||
|
"source": source, "target": target,
|
||||||
|
"source_node_uuid": source,
|
||||||
|
"target_node_uuid": target,
|
||||||
|
"source_handle_io": "source",
|
||||||
|
"target_handle_io": "target",
|
||||||
|
**attrs
|
||||||
|
}
|
||||||
self.edges.append(edge)
|
self.edges.append(edge)
|
||||||
|
|
||||||
def to_dict(self):
|
def to_dict(self):
|
||||||
@@ -42,6 +49,7 @@ class SimpleGraph:
|
|||||||
"multigraph": False,
|
"multigraph": False,
|
||||||
"graph": {},
|
"graph": {},
|
||||||
"nodes": nodes_list,
|
"nodes": nodes_list,
|
||||||
|
"edges": self.edges,
|
||||||
"links": self.edges,
|
"links": self.edges,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -58,495 +66,8 @@ def extract_json_from_markdown(text: str) -> str:
|
|||||||
return text
|
return text
|
||||||
|
|
||||||
|
|
||||||
def convert_to_type(val: str) -> Any:
|
|
||||||
"""将字符串值转换为适当的数据类型"""
|
|
||||||
if val == "True":
|
|
||||||
return True
|
|
||||||
if val == "False":
|
|
||||||
return False
|
|
||||||
if val == "?":
|
|
||||||
return None
|
|
||||||
if val.endswith(" g"):
|
|
||||||
return float(val.split(" ")[0])
|
|
||||||
if val.endswith("mg"):
|
|
||||||
return float(val.split("mg")[0])
|
|
||||||
elif val.endswith("mmol"):
|
|
||||||
return float(val.split("mmol")[0]) / 1000
|
|
||||||
elif val.endswith("mol"):
|
|
||||||
return float(val.split("mol")[0])
|
|
||||||
elif val.endswith("ml"):
|
|
||||||
return float(val.split("ml")[0])
|
|
||||||
elif val.endswith("RPM"):
|
|
||||||
return float(val.split("RPM")[0])
|
|
||||||
elif val.endswith(" °C"):
|
|
||||||
return float(val.split(" ")[0])
|
|
||||||
elif val.endswith(" %"):
|
|
||||||
return float(val.split(" ")[0])
|
|
||||||
return val
|
|
||||||
|
|
||||||
|
|
||||||
def refactor_data(data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
|
|
||||||
"""统一的数据重构函数,根据操作类型自动选择模板"""
|
|
||||||
refactored_data = []
|
|
||||||
|
|
||||||
# 定义操作映射,包含生物实验和有机化学的所有操作
|
|
||||||
OPERATION_MAPPING = {
|
|
||||||
# 生物实验操作
|
|
||||||
"transfer_liquid": "SynBioFactory-liquid_handler.prcxi-transfer_liquid",
|
|
||||||
"transfer": "SynBioFactory-liquid_handler.biomek-transfer",
|
|
||||||
"incubation": "SynBioFactory-liquid_handler.biomek-incubation",
|
|
||||||
"move_labware": "SynBioFactory-liquid_handler.biomek-move_labware",
|
|
||||||
"oscillation": "SynBioFactory-liquid_handler.biomek-oscillation",
|
|
||||||
# 有机化学操作
|
|
||||||
"HeatChillToTemp": "SynBioFactory-workstation-HeatChillProtocol",
|
|
||||||
"StopHeatChill": "SynBioFactory-workstation-HeatChillStopProtocol",
|
|
||||||
"StartHeatChill": "SynBioFactory-workstation-HeatChillStartProtocol",
|
|
||||||
"HeatChill": "SynBioFactory-workstation-HeatChillProtocol",
|
|
||||||
"Dissolve": "SynBioFactory-workstation-DissolveProtocol",
|
|
||||||
"Transfer": "SynBioFactory-workstation-TransferProtocol",
|
|
||||||
"Evaporate": "SynBioFactory-workstation-EvaporateProtocol",
|
|
||||||
"Recrystallize": "SynBioFactory-workstation-RecrystallizeProtocol",
|
|
||||||
"Filter": "SynBioFactory-workstation-FilterProtocol",
|
|
||||||
"Dry": "SynBioFactory-workstation-DryProtocol",
|
|
||||||
"Add": "SynBioFactory-workstation-AddProtocol",
|
|
||||||
}
|
|
||||||
|
|
||||||
UNSUPPORTED_OPERATIONS = ["Purge", "Wait", "Stir", "ResetHandling"]
|
|
||||||
|
|
||||||
for step in data:
|
|
||||||
operation = step.get("action")
|
|
||||||
if not operation or operation in UNSUPPORTED_OPERATIONS:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# 处理重复操作
|
|
||||||
if operation == "Repeat":
|
|
||||||
times = step.get("times", step.get("parameters", {}).get("times", 1))
|
|
||||||
sub_steps = step.get("steps", step.get("parameters", {}).get("steps", []))
|
|
||||||
for i in range(int(times)):
|
|
||||||
sub_data = refactor_data(sub_steps)
|
|
||||||
refactored_data.extend(sub_data)
|
|
||||||
continue
|
|
||||||
|
|
||||||
# 获取模板名称
|
|
||||||
template = OPERATION_MAPPING.get(operation)
|
|
||||||
if not template:
|
|
||||||
# 自动推断模板类型
|
|
||||||
if operation.lower() in ["transfer", "incubation", "move_labware", "oscillation"]:
|
|
||||||
template = f"SynBioFactory-liquid_handler.biomek-{operation}"
|
|
||||||
else:
|
|
||||||
template = f"SynBioFactory-workstation-{operation}Protocol"
|
|
||||||
|
|
||||||
# 创建步骤数据
|
|
||||||
step_data = {
|
|
||||||
"template": template,
|
|
||||||
"description": step.get("description", step.get("purpose", f"{operation} operation")),
|
|
||||||
"lab_node_type": "Device",
|
|
||||||
"parameters": step.get("parameters", step.get("action_args", {})),
|
|
||||||
}
|
|
||||||
refactored_data.append(step_data)
|
|
||||||
|
|
||||||
return refactored_data
|
|
||||||
|
|
||||||
|
|
||||||
def build_protocol_graph(
|
|
||||||
labware_info: List[Dict[str, Any]], protocol_steps: List[Dict[str, Any]], workstation_name: str
|
|
||||||
) -> SimpleGraph:
|
|
||||||
"""统一的协议图构建函数,根据设备类型自动选择构建逻辑"""
|
|
||||||
G = SimpleGraph()
|
|
||||||
resource_last_writer = {}
|
|
||||||
LAB_NAME = "SynBioFactory"
|
|
||||||
|
|
||||||
protocol_steps = refactor_data(protocol_steps)
|
|
||||||
|
|
||||||
# 检查协议步骤中的模板来判断协议类型
|
|
||||||
has_biomek_template = any(
|
|
||||||
("biomek" in step.get("template", "")) or ("prcxi" in step.get("template", ""))
|
|
||||||
for step in protocol_steps
|
|
||||||
)
|
|
||||||
|
|
||||||
if has_biomek_template:
|
|
||||||
# 生物实验协议图构建
|
|
||||||
for labware_id, labware in labware_info.items():
|
|
||||||
node_id = str(uuid.uuid4())
|
|
||||||
|
|
||||||
labware_attrs = labware.copy()
|
|
||||||
labware_id = labware_attrs.pop("id", labware_attrs.get("name", f"labware_{uuid.uuid4()}"))
|
|
||||||
labware_attrs["description"] = labware_id
|
|
||||||
labware_attrs["lab_node_type"] = (
|
|
||||||
"Reagent" if "Plate" in str(labware_id) else "Labware" if "Rack" in str(labware_id) else "Sample"
|
|
||||||
)
|
|
||||||
labware_attrs["device_id"] = workstation_name
|
|
||||||
|
|
||||||
G.add_node(node_id, template=f"{LAB_NAME}-host_node-create_resource", **labware_attrs)
|
|
||||||
resource_last_writer[labware_id] = f"{node_id}:labware"
|
|
||||||
|
|
||||||
# 处理协议步骤
|
|
||||||
prev_node = None
|
|
||||||
for i, step in enumerate(protocol_steps):
|
|
||||||
node_id = str(uuid.uuid4())
|
|
||||||
G.add_node(node_id, **step)
|
|
||||||
|
|
||||||
# 添加控制流边
|
|
||||||
if prev_node is not None:
|
|
||||||
G.add_edge(prev_node, node_id, source_port="ready", target_port="ready")
|
|
||||||
prev_node = node_id
|
|
||||||
|
|
||||||
# 处理物料流
|
|
||||||
params = step.get("parameters", {})
|
|
||||||
if "sources" in params and params["sources"] in resource_last_writer:
|
|
||||||
source_node, source_port = resource_last_writer[params["sources"]].split(":")
|
|
||||||
G.add_edge(source_node, node_id, source_port=source_port, target_port="labware")
|
|
||||||
|
|
||||||
if "targets" in params:
|
|
||||||
resource_last_writer[params["targets"]] = f"{node_id}:labware"
|
|
||||||
|
|
||||||
# 添加协议结束节点
|
|
||||||
end_id = str(uuid.uuid4())
|
|
||||||
G.add_node(end_id, template=f"{LAB_NAME}-liquid_handler.biomek-run_protocol")
|
|
||||||
if prev_node is not None:
|
|
||||||
G.add_edge(prev_node, end_id, source_port="ready", target_port="ready")
|
|
||||||
|
|
||||||
else:
|
|
||||||
# 有机化学协议图构建
|
|
||||||
WORKSTATION_ID = workstation_name
|
|
||||||
|
|
||||||
# 为所有labware创建资源节点
|
|
||||||
for item_id, item in labware_info.items():
|
|
||||||
# item_id = item.get("id") or item.get("name", f"item_{uuid.uuid4()}")
|
|
||||||
node_id = str(uuid.uuid4())
|
|
||||||
|
|
||||||
# 判断节点类型
|
|
||||||
if item.get("type") == "hardware" or "reactor" in str(item_id).lower():
|
|
||||||
if "reactor" not in str(item_id).lower():
|
|
||||||
continue
|
|
||||||
lab_node_type = "Sample"
|
|
||||||
description = f"Prepare Reactor: {item_id}"
|
|
||||||
liquid_type = []
|
|
||||||
liquid_volume = []
|
|
||||||
else:
|
|
||||||
lab_node_type = "Reagent"
|
|
||||||
description = f"Add Reagent to Flask: {item_id}"
|
|
||||||
liquid_type = [item_id]
|
|
||||||
liquid_volume = [1e5]
|
|
||||||
|
|
||||||
G.add_node(
|
|
||||||
node_id,
|
|
||||||
template=f"{LAB_NAME}-host_node-create_resource",
|
|
||||||
description=description,
|
|
||||||
lab_node_type=lab_node_type,
|
|
||||||
res_id=item_id,
|
|
||||||
device_id=WORKSTATION_ID,
|
|
||||||
class_name="container",
|
|
||||||
parent=WORKSTATION_ID,
|
|
||||||
bind_locations={"x": 0.0, "y": 0.0, "z": 0.0},
|
|
||||||
liquid_input_slot=[-1],
|
|
||||||
liquid_type=liquid_type,
|
|
||||||
liquid_volume=liquid_volume,
|
|
||||||
slot_on_deck="",
|
|
||||||
role=item.get("role", ""),
|
|
||||||
)
|
|
||||||
resource_last_writer[item_id] = f"{node_id}:labware"
|
|
||||||
|
|
||||||
last_control_node_id = None
|
|
||||||
|
|
||||||
# 处理协议步骤
|
|
||||||
for step in protocol_steps:
|
|
||||||
node_id = str(uuid.uuid4())
|
|
||||||
G.add_node(node_id, **step)
|
|
||||||
|
|
||||||
# 控制流
|
|
||||||
if last_control_node_id is not None:
|
|
||||||
G.add_edge(last_control_node_id, node_id, source_port="ready", target_port="ready")
|
|
||||||
last_control_node_id = node_id
|
|
||||||
|
|
||||||
# 物料流
|
|
||||||
params = step.get("parameters", {})
|
|
||||||
input_resources = {
|
|
||||||
"Vessel": params.get("vessel"),
|
|
||||||
"ToVessel": params.get("to_vessel"),
|
|
||||||
"FromVessel": params.get("from_vessel"),
|
|
||||||
"reagent": params.get("reagent"),
|
|
||||||
"solvent": params.get("solvent"),
|
|
||||||
"compound": params.get("compound"),
|
|
||||||
"sources": params.get("sources"),
|
|
||||||
"targets": params.get("targets"),
|
|
||||||
}
|
|
||||||
|
|
||||||
for target_port, resource_name in input_resources.items():
|
|
||||||
if resource_name and resource_name in resource_last_writer:
|
|
||||||
source_node, source_port = resource_last_writer[resource_name].split(":")
|
|
||||||
G.add_edge(source_node, node_id, source_port=source_port, target_port=target_port)
|
|
||||||
|
|
||||||
output_resources = {
|
|
||||||
"VesselOut": params.get("vessel"),
|
|
||||||
"FromVesselOut": params.get("from_vessel"),
|
|
||||||
"ToVesselOut": params.get("to_vessel"),
|
|
||||||
"FiltrateOut": params.get("filtrate_vessel"),
|
|
||||||
"reagent": params.get("reagent"),
|
|
||||||
"solvent": params.get("solvent"),
|
|
||||||
"compound": params.get("compound"),
|
|
||||||
"sources_out": params.get("sources"),
|
|
||||||
"targets_out": params.get("targets"),
|
|
||||||
}
|
|
||||||
|
|
||||||
for source_port, resource_name in output_resources.items():
|
|
||||||
if resource_name:
|
|
||||||
resource_last_writer[resource_name] = f"{node_id}:{source_port}"
|
|
||||||
|
|
||||||
return G
|
|
||||||
|
|
||||||
|
|
||||||
def draw_protocol_graph(protocol_graph: SimpleGraph, output_path: str):
|
|
||||||
"""
|
|
||||||
(辅助功能) 使用 networkx 和 matplotlib 绘制协议工作流图,用于可视化。
|
|
||||||
"""
|
|
||||||
if not protocol_graph:
|
|
||||||
print("Cannot draw graph: Graph object is empty.")
|
|
||||||
return
|
|
||||||
|
|
||||||
G = nx.DiGraph()
|
|
||||||
|
|
||||||
for node_id, attrs in protocol_graph.nodes.items():
|
|
||||||
label = attrs.get("description", attrs.get("template", node_id[:8]))
|
|
||||||
G.add_node(node_id, label=label, **attrs)
|
|
||||||
|
|
||||||
for edge in protocol_graph.edges:
|
|
||||||
G.add_edge(edge["source"], edge["target"])
|
|
||||||
|
|
||||||
plt.figure(figsize=(20, 15))
|
|
||||||
try:
|
|
||||||
pos = nx.nx_agraph.graphviz_layout(G, prog="dot")
|
|
||||||
except Exception:
|
|
||||||
pos = nx.shell_layout(G) # Fallback layout
|
|
||||||
|
|
||||||
node_labels = {node: data["label"] for node, data in G.nodes(data=True)}
|
|
||||||
nx.draw(
|
|
||||||
G,
|
|
||||||
pos,
|
|
||||||
with_labels=False,
|
|
||||||
node_size=2500,
|
|
||||||
node_color="skyblue",
|
|
||||||
node_shape="o",
|
|
||||||
edge_color="gray",
|
|
||||||
width=1.5,
|
|
||||||
arrowsize=15,
|
|
||||||
)
|
|
||||||
nx.draw_networkx_labels(G, pos, labels=node_labels, font_size=8, font_weight="bold")
|
|
||||||
|
|
||||||
plt.title("Chemical Protocol Workflow Graph", size=15)
|
|
||||||
plt.savefig(output_path, dpi=300, bbox_inches="tight")
|
|
||||||
plt.close()
|
|
||||||
print(f" - Visualization saved to '{output_path}'")
|
|
||||||
|
|
||||||
|
|
||||||
from networkx.drawing.nx_agraph import to_agraph
|
|
||||||
import re
|
|
||||||
|
|
||||||
COMPASS = {"n","e","s","w","ne","nw","se","sw","c"}
|
|
||||||
|
|
||||||
def _is_compass(port: str) -> bool:
|
|
||||||
return isinstance(port, str) and port.lower() in COMPASS
|
|
||||||
|
|
||||||
def draw_protocol_graph_with_ports(protocol_graph, output_path: str, rankdir: str = "LR"):
|
|
||||||
"""
|
|
||||||
使用 Graphviz 端口语法绘制协议工作流图。
|
|
||||||
- 若边上的 source_port/target_port 是 compass(n/e/s/w/...),直接用 compass。
|
|
||||||
- 否则自动为节点创建 record 形状并定义命名端口 <portname>。
|
|
||||||
最终由 PyGraphviz 渲染并输出到 output_path(后缀决定格式,如 .png/.svg/.pdf)。
|
|
||||||
"""
|
|
||||||
if not protocol_graph:
|
|
||||||
print("Cannot draw graph: Graph object is empty.")
|
|
||||||
return
|
|
||||||
|
|
||||||
# 1) 先用 networkx 搭建有向图,保留端口属性
|
|
||||||
G = nx.DiGraph()
|
|
||||||
for node_id, attrs in protocol_graph.nodes.items():
|
|
||||||
label = attrs.get("description", attrs.get("template", node_id[:8]))
|
|
||||||
# 保留一个干净的“中心标签”,用于放在 record 的中间槽
|
|
||||||
G.add_node(node_id, _core_label=str(label), **{k:v for k,v in attrs.items() if k not in ("label",)})
|
|
||||||
|
|
||||||
edges_data = []
|
|
||||||
in_ports_by_node = {} # 收集命名输入端口
|
|
||||||
out_ports_by_node = {} # 收集命名输出端口
|
|
||||||
|
|
||||||
for edge in protocol_graph.edges:
|
|
||||||
u = edge["source"]
|
|
||||||
v = edge["target"]
|
|
||||||
sp = edge.get("source_port")
|
|
||||||
tp = edge.get("target_port")
|
|
||||||
|
|
||||||
# 记录到图里(保留原始端口信息)
|
|
||||||
G.add_edge(u, v, source_port=sp, target_port=tp)
|
|
||||||
edges_data.append((u, v, sp, tp))
|
|
||||||
|
|
||||||
# 如果不是 compass,就按“命名端口”先归类,等会儿给节点造 record
|
|
||||||
if sp and not _is_compass(sp):
|
|
||||||
out_ports_by_node.setdefault(u, set()).add(str(sp))
|
|
||||||
if tp and not _is_compass(tp):
|
|
||||||
in_ports_by_node.setdefault(v, set()).add(str(tp))
|
|
||||||
|
|
||||||
# 2) 转为 AGraph,使用 Graphviz 渲染
|
|
||||||
A = to_agraph(G)
|
|
||||||
A.graph_attr.update(rankdir=rankdir, splines="true", concentrate="false", fontsize="10")
|
|
||||||
A.node_attr.update(shape="box", style="rounded,filled", fillcolor="lightyellow", color="#999999", fontname="Helvetica")
|
|
||||||
A.edge_attr.update(arrowsize="0.8", color="#666666")
|
|
||||||
|
|
||||||
# 3) 为需要命名端口的节点设置 record 形状与 label
|
|
||||||
# 左列 = 输入端口;中间 = 核心标签;右列 = 输出端口
|
|
||||||
for n in A.nodes():
|
|
||||||
node = A.get_node(n)
|
|
||||||
core = G.nodes[n].get("_core_label", n)
|
|
||||||
|
|
||||||
in_ports = sorted(in_ports_by_node.get(n, []))
|
|
||||||
out_ports = sorted(out_ports_by_node.get(n, []))
|
|
||||||
|
|
||||||
# 如果该节点涉及命名端口,则用 record;否则保留原 box
|
|
||||||
if in_ports or out_ports:
|
|
||||||
def port_fields(ports):
|
|
||||||
if not ports:
|
|
||||||
return " " # 必须留一个空槽占位
|
|
||||||
# 每个端口一个小格子,<p> name
|
|
||||||
return "|".join(f"<{re.sub(r'[^A-Za-z0-9_:.|-]', '_', p)}> {p}" for p in ports)
|
|
||||||
|
|
||||||
left = port_fields(in_ports)
|
|
||||||
right = port_fields(out_ports)
|
|
||||||
|
|
||||||
# 三栏:左(入) | 中(节点名) | 右(出)
|
|
||||||
record_label = f"{{ {left} | {core} | {right} }}"
|
|
||||||
node.attr.update(shape="record", label=record_label)
|
|
||||||
else:
|
|
||||||
# 没有命名端口:普通盒子,显示核心标签
|
|
||||||
node.attr.update(label=str(core))
|
|
||||||
|
|
||||||
# 4) 给边设置 headport / tailport
|
|
||||||
# - 若端口为 compass:直接用 compass(e.g., headport="e")
|
|
||||||
# - 若端口为命名端口:使用在 record 中定义的 <port> 名(同名即可)
|
|
||||||
for (u, v, sp, tp) in edges_data:
|
|
||||||
e = A.get_edge(u, v)
|
|
||||||
|
|
||||||
# Graphviz 属性:tail 是源,head 是目标
|
|
||||||
if sp:
|
|
||||||
if _is_compass(sp):
|
|
||||||
e.attr["tailport"] = sp.lower()
|
|
||||||
else:
|
|
||||||
# 与 record label 中 <port> 名一致;特殊字符已在 label 中做了清洗
|
|
||||||
e.attr["tailport"] = re.sub(r'[^A-Za-z0-9_:.|-]', '_', str(sp))
|
|
||||||
|
|
||||||
if tp:
|
|
||||||
if _is_compass(tp):
|
|
||||||
e.attr["headport"] = tp.lower()
|
|
||||||
else:
|
|
||||||
e.attr["headport"] = re.sub(r'[^A-Za-z0-9_:.|-]', '_', str(tp))
|
|
||||||
|
|
||||||
# 可选:若想让边更贴边缘,可设置 constraint/spline 等
|
|
||||||
# e.attr["arrowhead"] = "vee"
|
|
||||||
|
|
||||||
# 5) 输出
|
|
||||||
A.draw(output_path, prog="dot")
|
|
||||||
print(f" - Port-aware workflow rendered to '{output_path}'")
|
|
||||||
|
|
||||||
|
|
||||||
def flatten_xdl_procedure(procedure_elem: ET.Element) -> List[ET.Element]:
|
|
||||||
"""展平嵌套的XDL程序结构"""
|
|
||||||
flattened_operations = []
|
|
||||||
TEMP_UNSUPPORTED_PROTOCOL = ["Purge", "Wait", "Stir", "ResetHandling"]
|
|
||||||
|
|
||||||
def extract_operations(element: ET.Element):
|
|
||||||
if element.tag not in ["Prep", "Reaction", "Workup", "Purification", "Procedure"]:
|
|
||||||
if element.tag not in TEMP_UNSUPPORTED_PROTOCOL:
|
|
||||||
flattened_operations.append(element)
|
|
||||||
|
|
||||||
for child in element:
|
|
||||||
extract_operations(child)
|
|
||||||
|
|
||||||
for child in procedure_elem:
|
|
||||||
extract_operations(child)
|
|
||||||
|
|
||||||
return flattened_operations
|
|
||||||
|
|
||||||
|
|
||||||
def parse_xdl_content(xdl_content: str) -> tuple:
|
|
||||||
"""解析XDL内容"""
|
|
||||||
try:
|
|
||||||
xdl_content_cleaned = "".join(c for c in xdl_content if c.isprintable())
|
|
||||||
root = ET.fromstring(xdl_content_cleaned)
|
|
||||||
|
|
||||||
synthesis_elem = root.find("Synthesis")
|
|
||||||
if synthesis_elem is None:
|
|
||||||
return None, None, None
|
|
||||||
|
|
||||||
# 解析硬件组件
|
|
||||||
hardware_elem = synthesis_elem.find("Hardware")
|
|
||||||
hardware = []
|
|
||||||
if hardware_elem is not None:
|
|
||||||
hardware = [{"id": c.get("id"), "type": c.get("type")} for c in hardware_elem.findall("Component")]
|
|
||||||
|
|
||||||
# 解析试剂
|
|
||||||
reagents_elem = synthesis_elem.find("Reagents")
|
|
||||||
reagents = []
|
|
||||||
if reagents_elem is not None:
|
|
||||||
reagents = [{"name": r.get("name"), "role": r.get("role", "")} for r in reagents_elem.findall("Reagent")]
|
|
||||||
|
|
||||||
# 解析程序
|
|
||||||
procedure_elem = synthesis_elem.find("Procedure")
|
|
||||||
if procedure_elem is None:
|
|
||||||
return None, None, None
|
|
||||||
|
|
||||||
flattened_operations = flatten_xdl_procedure(procedure_elem)
|
|
||||||
return hardware, reagents, flattened_operations
|
|
||||||
|
|
||||||
except ET.ParseError as e:
|
|
||||||
raise ValueError(f"Invalid XDL format: {e}")
|
|
||||||
|
|
||||||
|
|
||||||
def convert_xdl_to_dict(xdl_content: str) -> Dict[str, Any]:
|
|
||||||
"""
|
|
||||||
将XDL XML格式转换为标准的字典格式
|
|
||||||
|
|
||||||
Args:
|
|
||||||
xdl_content: XDL XML内容
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
转换结果,包含步骤和器材信息
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
hardware, reagents, flattened_operations = parse_xdl_content(xdl_content)
|
|
||||||
if hardware is None:
|
|
||||||
return {"error": "Failed to parse XDL content", "success": False}
|
|
||||||
|
|
||||||
# 将XDL元素转换为字典格式
|
|
||||||
steps_data = []
|
|
||||||
for elem in flattened_operations:
|
|
||||||
# 转换参数类型
|
|
||||||
parameters = {}
|
|
||||||
for key, val in elem.attrib.items():
|
|
||||||
converted_val = convert_to_type(val)
|
|
||||||
if converted_val is not None:
|
|
||||||
parameters[key] = converted_val
|
|
||||||
|
|
||||||
step_dict = {
|
|
||||||
"operation": elem.tag,
|
|
||||||
"parameters": parameters,
|
|
||||||
"description": elem.get("purpose", f"Operation: {elem.tag}"),
|
|
||||||
}
|
|
||||||
steps_data.append(step_dict)
|
|
||||||
|
|
||||||
# 合并硬件和试剂为统一的labware_info格式
|
|
||||||
labware_data = []
|
|
||||||
labware_data.extend({"id": hw["id"], "type": "hardware", **hw} for hw in hardware)
|
|
||||||
labware_data.extend({"name": reagent["name"], "type": "reagent", **reagent} for reagent in reagents)
|
|
||||||
|
|
||||||
return {
|
|
||||||
"success": True,
|
|
||||||
"steps": steps_data,
|
|
||||||
"labware": labware_data,
|
|
||||||
"message": f"Successfully converted XDL to dict format. Found {len(steps_data)} steps and {len(labware_data)} labware items.",
|
|
||||||
}
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
error_msg = f"XDL conversion failed: {str(e)}"
|
|
||||||
logger.error(error_msg)
|
|
||||||
return {"error": error_msg, "success": False}
|
|
||||||
|
|
||||||
|
|
||||||
def create_workflow(
|
def create_workflow(
|
||||||
|
|||||||
2
setup.py
2
setup.py
@@ -4,7 +4,7 @@ package_name = 'unilabos'
|
|||||||
|
|
||||||
setup(
|
setup(
|
||||||
name=package_name,
|
name=package_name,
|
||||||
version='0.10.18',
|
version='0.10.17',
|
||||||
packages=find_packages(),
|
packages=find_packages(),
|
||||||
include_package_data=True,
|
include_package_data=True,
|
||||||
install_requires=['setuptools'],
|
install_requires=['setuptools'],
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
__version__ = "0.10.18"
|
__version__ = "0.10.17"
|
||||||
|
|||||||
@@ -171,12 +171,6 @@ def parse_args():
|
|||||||
action="store_true",
|
action="store_true",
|
||||||
help="Disable sending update feedback to server",
|
help="Disable sending update feedback to server",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
|
||||||
"--test_mode",
|
|
||||||
action="store_true",
|
|
||||||
default=False,
|
|
||||||
help="Test mode: all actions simulate execution and return mock results without running real hardware",
|
|
||||||
)
|
|
||||||
# workflow upload subcommand
|
# workflow upload subcommand
|
||||||
workflow_parser = subparsers.add_parser(
|
workflow_parser = subparsers.add_parser(
|
||||||
"workflow_upload",
|
"workflow_upload",
|
||||||
@@ -210,12 +204,6 @@ def parse_args():
|
|||||||
default=False,
|
default=False,
|
||||||
help="Whether to publish the workflow (default: False)",
|
help="Whether to publish the workflow (default: False)",
|
||||||
)
|
)
|
||||||
workflow_parser.add_argument(
|
|
||||||
"--description",
|
|
||||||
type=str,
|
|
||||||
default="",
|
|
||||||
help="Workflow description, used when publishing the workflow",
|
|
||||||
)
|
|
||||||
return parser
|
return parser
|
||||||
|
|
||||||
|
|
||||||
@@ -243,60 +231,52 @@ def main():
|
|||||||
# 加载配置文件,优先加载config,然后从env读取
|
# 加载配置文件,优先加载config,然后从env读取
|
||||||
config_path = args_dict.get("config")
|
config_path = args_dict.get("config")
|
||||||
|
|
||||||
# === 解析 working_dir ===
|
if check_mode:
|
||||||
# 规则1: working_dir 传入 → 检测 unilabos_data 子目录,已是则不修改
|
args_dict["working_dir"] = os.path.abspath(os.getcwd())
|
||||||
# 规则2: 仅 config_path 传入 → 用其父目录作为 working_dir
|
# 当 skip_env_check 时,默认使用当前目录作为 working_dir
|
||||||
# 规则4: 两者都传入 → 各用各的,但 working_dir 仍做 unilabos_data 子目录检测
|
if skip_env_check and not args_dict.get("working_dir") and not config_path:
|
||||||
raw_working_dir = args_dict.get("working_dir")
|
|
||||||
if raw_working_dir:
|
|
||||||
working_dir = os.path.abspath(raw_working_dir)
|
|
||||||
elif config_path and os.path.exists(config_path):
|
|
||||||
working_dir = os.path.dirname(os.path.abspath(config_path))
|
|
||||||
else:
|
|
||||||
working_dir = os.path.abspath(os.getcwd())
|
working_dir = os.path.abspath(os.getcwd())
|
||||||
|
print_status(f"跳过环境检查模式:使用当前目录作为工作目录 {working_dir}", "info")
|
||||||
# unilabos_data 子目录自动检测
|
# 检查当前目录是否有 local_config.py
|
||||||
if os.path.basename(working_dir) != "unilabos_data":
|
local_config_in_cwd = os.path.join(working_dir, "local_config.py")
|
||||||
unilabos_data_sub = os.path.join(working_dir, "unilabos_data")
|
if os.path.exists(local_config_in_cwd):
|
||||||
if os.path.isdir(unilabos_data_sub):
|
config_path = local_config_in_cwd
|
||||||
working_dir = unilabos_data_sub
|
|
||||||
elif not raw_working_dir and not (config_path and os.path.exists(config_path)):
|
|
||||||
# 未显式指定路径,默认使用 cwd/unilabos_data
|
|
||||||
working_dir = os.path.abspath(os.path.join(os.getcwd(), "unilabos_data"))
|
|
||||||
|
|
||||||
# === 解析 config_path ===
|
|
||||||
if config_path and not os.path.exists(config_path):
|
|
||||||
# config_path 传入但不存在,尝试在 working_dir 中查找
|
|
||||||
candidate = os.path.join(working_dir, "local_config.py")
|
|
||||||
if os.path.exists(candidate):
|
|
||||||
config_path = candidate
|
|
||||||
print_status(f"在工作目录中发现配置文件: {config_path}", "info")
|
|
||||||
else:
|
|
||||||
print_status(
|
|
||||||
f"配置文件 {config_path} 不存在,工作目录 {working_dir} 中也未找到 local_config.py,"
|
|
||||||
f"请通过 --config 传入 local_config.py 文件路径",
|
|
||||||
"error",
|
|
||||||
)
|
|
||||||
os._exit(1)
|
|
||||||
elif not config_path:
|
|
||||||
# 规则3: 未传入 config_path,尝试 working_dir/local_config.py
|
|
||||||
candidate = os.path.join(working_dir, "local_config.py")
|
|
||||||
if os.path.exists(candidate):
|
|
||||||
config_path = candidate
|
|
||||||
print_status(f"发现本地配置文件: {config_path}", "info")
|
print_status(f"发现本地配置文件: {config_path}", "info")
|
||||||
else:
|
else:
|
||||||
print_status(f"未指定config路径,可通过 --config 传入 local_config.py 文件路径", "info")
|
print_status(f"未指定config路径,可通过 --config 传入 local_config.py 文件路径", "info")
|
||||||
print_status(f"您是否为第一次使用?并将当前路径 {working_dir} 作为工作目录? (Y/n)", "info")
|
elif os.getcwd().endswith("unilabos_data"):
|
||||||
if check_mode or input() != "n":
|
working_dir = os.path.abspath(os.getcwd())
|
||||||
os.makedirs(working_dir, exist_ok=True)
|
else:
|
||||||
config_path = os.path.join(working_dir, "local_config.py")
|
working_dir = os.path.abspath(os.path.join(os.getcwd(), "unilabos_data"))
|
||||||
shutil.copy(
|
|
||||||
os.path.join(os.path.dirname(os.path.dirname(__file__)), "config", "example_config.py"),
|
if args_dict.get("working_dir"):
|
||||||
config_path,
|
working_dir = args_dict.get("working_dir", "")
|
||||||
|
if config_path and not os.path.exists(config_path):
|
||||||
|
config_path = os.path.join(working_dir, "local_config.py")
|
||||||
|
if not os.path.exists(config_path):
|
||||||
|
print_status(
|
||||||
|
f"当前工作目录 {working_dir} 未找到local_config.py,请通过 --config 传入 local_config.py 文件路径",
|
||||||
|
"error",
|
||||||
)
|
)
|
||||||
print_status(f"已创建 local_config.py 路径: {config_path}", "info")
|
|
||||||
else:
|
|
||||||
os._exit(1)
|
os._exit(1)
|
||||||
|
elif config_path and os.path.exists(config_path):
|
||||||
|
working_dir = os.path.dirname(config_path)
|
||||||
|
elif os.path.exists(working_dir) and os.path.exists(os.path.join(working_dir, "local_config.py")):
|
||||||
|
config_path = os.path.join(working_dir, "local_config.py")
|
||||||
|
elif not skip_env_check and not config_path and (
|
||||||
|
not os.path.exists(working_dir) or not os.path.exists(os.path.join(working_dir, "local_config.py"))
|
||||||
|
):
|
||||||
|
print_status(f"未指定config路径,可通过 --config 传入 local_config.py 文件路径", "info")
|
||||||
|
print_status(f"您是否为第一次使用?并将当前路径 {working_dir} 作为工作目录? (Y/n)", "info")
|
||||||
|
if input() != "n":
|
||||||
|
os.makedirs(working_dir, exist_ok=True)
|
||||||
|
config_path = os.path.join(working_dir, "local_config.py")
|
||||||
|
shutil.copy(
|
||||||
|
os.path.join(os.path.dirname(os.path.dirname(__file__)), "config", "example_config.py"), config_path
|
||||||
|
)
|
||||||
|
print_status(f"已创建 local_config.py 路径: {config_path}", "info")
|
||||||
|
else:
|
||||||
|
os._exit(1)
|
||||||
|
|
||||||
# 加载配置文件 (check_mode 跳过)
|
# 加载配置文件 (check_mode 跳过)
|
||||||
print_status(f"当前工作目录为 {working_dir}", "info")
|
print_status(f"当前工作目录为 {working_dir}", "info")
|
||||||
@@ -308,9 +288,7 @@ def main():
|
|||||||
|
|
||||||
if hasattr(BasicConfig, "log_level"):
|
if hasattr(BasicConfig, "log_level"):
|
||||||
logger.info(f"Log level set to '{BasicConfig.log_level}' from config file.")
|
logger.info(f"Log level set to '{BasicConfig.log_level}' from config file.")
|
||||||
file_path = configure_logger(loglevel=BasicConfig.log_level, working_dir=working_dir)
|
configure_logger(loglevel=BasicConfig.log_level, working_dir=working_dir)
|
||||||
if file_path is not None:
|
|
||||||
logger.info(f"[LOG_FILE] {file_path}")
|
|
||||||
|
|
||||||
if args.addr != parser.get_default("addr"):
|
if args.addr != parser.get_default("addr"):
|
||||||
if args.addr == "test":
|
if args.addr == "test":
|
||||||
@@ -354,9 +332,6 @@ def main():
|
|||||||
BasicConfig.slave_no_host = args_dict.get("slave_no_host", False)
|
BasicConfig.slave_no_host = args_dict.get("slave_no_host", False)
|
||||||
BasicConfig.upload_registry = args_dict.get("upload_registry", False)
|
BasicConfig.upload_registry = args_dict.get("upload_registry", False)
|
||||||
BasicConfig.no_update_feedback = args_dict.get("no_update_feedback", False)
|
BasicConfig.no_update_feedback = args_dict.get("no_update_feedback", False)
|
||||||
BasicConfig.test_mode = args_dict.get("test_mode", False)
|
|
||||||
if BasicConfig.test_mode:
|
|
||||||
print_status("启用测试模式:所有动作将模拟执行,不调用真实硬件", "warning")
|
|
||||||
BasicConfig.communication_protocol = "websocket"
|
BasicConfig.communication_protocol = "websocket"
|
||||||
machine_name = os.popen("hostname").read().strip()
|
machine_name = os.popen("hostname").read().strip()
|
||||||
machine_name = "".join([c if c.isalnum() or c == "_" else "_" for c in machine_name])
|
machine_name = "".join([c if c.isalnum() or c == "_" else "_" for c in machine_name])
|
||||||
|
|||||||
@@ -38,9 +38,9 @@ def register_devices_and_resources(lab_registry, gather_only=False) -> Optional[
|
|||||||
response = http_client.resource_registry({"resources": list(devices_to_register.values())})
|
response = http_client.resource_registry({"resources": list(devices_to_register.values())})
|
||||||
cost_time = time.time() - start_time
|
cost_time = time.time() - start_time
|
||||||
if response.status_code in [200, 201]:
|
if response.status_code in [200, 201]:
|
||||||
logger.info(f"[UniLab Register] 成功注册 {len(devices_to_register)} 个设备 {cost_time}s")
|
logger.info(f"[UniLab Register] 成功注册 {len(devices_to_register)} 个设备 {cost_time}ms")
|
||||||
else:
|
else:
|
||||||
logger.error(f"[UniLab Register] 设备注册失败: {response.status_code}, {response.text} {cost_time}s")
|
logger.error(f"[UniLab Register] 设备注册失败: {response.status_code}, {response.text} {cost_time}ms")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"[UniLab Register] 设备注册异常: {e}")
|
logger.error(f"[UniLab Register] 设备注册异常: {e}")
|
||||||
|
|
||||||
@@ -51,9 +51,9 @@ def register_devices_and_resources(lab_registry, gather_only=False) -> Optional[
|
|||||||
response = http_client.resource_registry({"resources": list(resources_to_register.values())})
|
response = http_client.resource_registry({"resources": list(resources_to_register.values())})
|
||||||
cost_time = time.time() - start_time
|
cost_time = time.time() - start_time
|
||||||
if response.status_code in [200, 201]:
|
if response.status_code in [200, 201]:
|
||||||
logger.info(f"[UniLab Register] 成功注册 {len(resources_to_register)} 个资源 {cost_time}s")
|
logger.info(f"[UniLab Register] 成功注册 {len(resources_to_register)} 个资源 {cost_time}ms")
|
||||||
else:
|
else:
|
||||||
logger.error(f"[UniLab Register] 资源注册失败: {response.status_code}, {response.text} {cost_time}s")
|
logger.error(f"[UniLab Register] 资源注册失败: {response.status_code}, {response.text} {cost_time}ms")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"[UniLab Register] 资源注册异常: {e}")
|
logger.error(f"[UniLab Register] 资源注册异常: {e}")
|
||||||
|
|
||||||
|
|||||||
@@ -343,10 +343,9 @@ class HTTPClient:
|
|||||||
edges: List[Dict[str, Any]],
|
edges: List[Dict[str, Any]],
|
||||||
tags: Optional[List[str]] = None,
|
tags: Optional[List[str]] = None,
|
||||||
published: bool = False,
|
published: bool = False,
|
||||||
description: str = "",
|
|
||||||
) -> Dict[str, Any]:
|
) -> Dict[str, Any]:
|
||||||
"""
|
"""
|
||||||
导入工作流到服务器,如果 published 为 True,则额外发起发布请求
|
导入工作流到服务器
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
name: 工作流名称(顶层)
|
name: 工作流名称(顶层)
|
||||||
@@ -356,7 +355,6 @@ class HTTPClient:
|
|||||||
edges: 工作流边列表
|
edges: 工作流边列表
|
||||||
tags: 工作流标签列表,默认为空列表
|
tags: 工作流标签列表,默认为空列表
|
||||||
published: 是否发布工作流,默认为False
|
published: 是否发布工作流,默认为False
|
||||||
description: 工作流描述,发布时使用
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Dict: API响应数据,包含 code 和 data (uuid, name)
|
Dict: API响应数据,包含 code 和 data (uuid, name)
|
||||||
@@ -369,6 +367,7 @@ class HTTPClient:
|
|||||||
"nodes": nodes,
|
"nodes": nodes,
|
||||||
"edges": edges,
|
"edges": edges,
|
||||||
"tags": tags if tags is not None else [],
|
"tags": tags if tags is not None else [],
|
||||||
|
"published": published,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
# 保存请求到文件
|
# 保存请求到文件
|
||||||
@@ -389,51 +388,11 @@ class HTTPClient:
|
|||||||
res = response.json()
|
res = response.json()
|
||||||
if "code" in res and res["code"] != 0:
|
if "code" in res and res["code"] != 0:
|
||||||
logger.error(f"导入工作流失败: {response.text}")
|
logger.error(f"导入工作流失败: {response.text}")
|
||||||
return res
|
|
||||||
# 导入成功后,如果需要发布则额外发起发布请求
|
|
||||||
if published:
|
|
||||||
imported_uuid = res.get("data", {}).get("uuid", workflow_uuid)
|
|
||||||
publish_res = self.workflow_publish(imported_uuid, description)
|
|
||||||
res["publish_result"] = publish_res
|
|
||||||
return res
|
return res
|
||||||
else:
|
else:
|
||||||
logger.error(f"导入工作流失败: {response.status_code}, {response.text}")
|
logger.error(f"导入工作流失败: {response.status_code}, {response.text}")
|
||||||
return {"code": response.status_code, "message": response.text}
|
return {"code": response.status_code, "message": response.text}
|
||||||
|
|
||||||
def workflow_publish(self, workflow_uuid: str, description: str = "") -> Dict[str, Any]:
|
|
||||||
"""
|
|
||||||
发布工作流
|
|
||||||
|
|
||||||
Args:
|
|
||||||
workflow_uuid: 工作流UUID
|
|
||||||
description: 工作流描述
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Dict: API响应数据
|
|
||||||
"""
|
|
||||||
payload = {
|
|
||||||
"uuid": workflow_uuid,
|
|
||||||
"description": description,
|
|
||||||
"published": True,
|
|
||||||
}
|
|
||||||
logger.info(f"正在发布工作流: {workflow_uuid}")
|
|
||||||
response = requests.patch(
|
|
||||||
f"{self.remote_addr}/lab/workflow/owner",
|
|
||||||
json=payload,
|
|
||||||
headers={"Authorization": f"Lab {self.auth}"},
|
|
||||||
timeout=60,
|
|
||||||
)
|
|
||||||
if response.status_code == 200:
|
|
||||||
res = response.json()
|
|
||||||
if "code" in res and res["code"] != 0:
|
|
||||||
logger.error(f"发布工作流失败: {response.text}")
|
|
||||||
else:
|
|
||||||
logger.info(f"工作流发布成功: {workflow_uuid}")
|
|
||||||
return res
|
|
||||||
else:
|
|
||||||
logger.error(f"发布工作流失败: {response.status_code}, {response.text}")
|
|
||||||
return {"code": response.status_code, "message": response.text}
|
|
||||||
|
|
||||||
|
|
||||||
# 创建默认客户端实例
|
# 创建默认客户端实例
|
||||||
http_client = HTTPClient()
|
http_client = HTTPClient()
|
||||||
|
|||||||
@@ -76,7 +76,6 @@ class JobInfo:
|
|||||||
start_time: float
|
start_time: float
|
||||||
last_update_time: float = field(default_factory=time.time)
|
last_update_time: float = field(default_factory=time.time)
|
||||||
ready_timeout: Optional[float] = None # READY状态的超时时间
|
ready_timeout: Optional[float] = None # READY状态的超时时间
|
||||||
always_free: bool = False # 是否为永久闲置动作(不受排队限制)
|
|
||||||
|
|
||||||
def update_timestamp(self):
|
def update_timestamp(self):
|
||||||
"""更新最后更新时间"""
|
"""更新最后更新时间"""
|
||||||
@@ -128,15 +127,6 @@ class DeviceActionManager:
|
|||||||
# 总是将job添加到all_jobs中
|
# 总是将job添加到all_jobs中
|
||||||
self.all_jobs[job_info.job_id] = job_info
|
self.all_jobs[job_info.job_id] = job_info
|
||||||
|
|
||||||
# always_free的动作不受排队限制,直接设为READY
|
|
||||||
if job_info.always_free:
|
|
||||||
job_info.status = JobStatus.READY
|
|
||||||
job_info.update_timestamp()
|
|
||||||
job_info.set_ready_timeout(10)
|
|
||||||
job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
|
|
||||||
logger.trace(f"[DeviceActionManager] Job {job_log} always_free, start immediately")
|
|
||||||
return True
|
|
||||||
|
|
||||||
# 检查是否有正在执行或准备执行的任务
|
# 检查是否有正在执行或准备执行的任务
|
||||||
if device_key in self.active_jobs:
|
if device_key in self.active_jobs:
|
||||||
# 有正在执行或准备执行的任务,加入队列
|
# 有正在执行或准备执行的任务,加入队列
|
||||||
@@ -186,15 +176,11 @@ class DeviceActionManager:
|
|||||||
logger.error(f"[DeviceActionManager] Job {job_log} is not in READY status, current: {job_info.status}")
|
logger.error(f"[DeviceActionManager] Job {job_log} is not in READY status, current: {job_info.status}")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# always_free的job不需要检查active_jobs
|
# 检查设备上是否是这个job
|
||||||
if not job_info.always_free:
|
if device_key not in self.active_jobs or self.active_jobs[device_key].job_id != job_id:
|
||||||
# 检查设备上是否是这个job
|
job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
|
||||||
if device_key not in self.active_jobs or self.active_jobs[device_key].job_id != job_id:
|
logger.error(f"[DeviceActionManager] Job {job_log} is not the active job for {device_key}")
|
||||||
job_log = format_job_log(
|
return False
|
||||||
job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name
|
|
||||||
)
|
|
||||||
logger.error(f"[DeviceActionManager] Job {job_log} is not the active job for {device_key}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
# 开始执行任务,将状态从READY转换为STARTED
|
# 开始执行任务,将状态从READY转换为STARTED
|
||||||
job_info.status = JobStatus.STARTED
|
job_info.status = JobStatus.STARTED
|
||||||
@@ -217,13 +203,6 @@ class DeviceActionManager:
|
|||||||
job_info = self.all_jobs[job_id]
|
job_info = self.all_jobs[job_id]
|
||||||
device_key = job_info.device_action_key
|
device_key = job_info.device_action_key
|
||||||
|
|
||||||
# always_free的job直接清理,不影响队列
|
|
||||||
if job_info.always_free:
|
|
||||||
job_info.status = JobStatus.ENDED
|
|
||||||
job_info.update_timestamp()
|
|
||||||
del self.all_jobs[job_id]
|
|
||||||
return None
|
|
||||||
|
|
||||||
# 移除活跃任务
|
# 移除活跃任务
|
||||||
if device_key in self.active_jobs and self.active_jobs[device_key].job_id == job_id:
|
if device_key in self.active_jobs and self.active_jobs[device_key].job_id == job_id:
|
||||||
del self.active_jobs[device_key]
|
del self.active_jobs[device_key]
|
||||||
@@ -255,14 +234,9 @@ class DeviceActionManager:
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
def get_active_jobs(self) -> List[JobInfo]:
|
def get_active_jobs(self) -> List[JobInfo]:
|
||||||
"""获取所有正在执行的任务(含active_jobs和always_free的STARTED job)"""
|
"""获取所有正在执行的任务"""
|
||||||
with self.lock:
|
with self.lock:
|
||||||
jobs = list(self.active_jobs.values())
|
return list(self.active_jobs.values())
|
||||||
# 补充 always_free 的 STARTED job(它们不在 active_jobs 中)
|
|
||||||
for job in self.all_jobs.values():
|
|
||||||
if job.always_free and job.status == JobStatus.STARTED and job not in jobs:
|
|
||||||
jobs.append(job)
|
|
||||||
return jobs
|
|
||||||
|
|
||||||
def get_queued_jobs(self) -> List[JobInfo]:
|
def get_queued_jobs(self) -> List[JobInfo]:
|
||||||
"""获取所有排队中的任务"""
|
"""获取所有排队中的任务"""
|
||||||
@@ -287,14 +261,6 @@ class DeviceActionManager:
|
|||||||
job_info = self.all_jobs[job_id]
|
job_info = self.all_jobs[job_id]
|
||||||
device_key = job_info.device_action_key
|
device_key = job_info.device_action_key
|
||||||
|
|
||||||
# always_free的job直接清理
|
|
||||||
if job_info.always_free:
|
|
||||||
job_info.status = JobStatus.ENDED
|
|
||||||
del self.all_jobs[job_id]
|
|
||||||
job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
|
|
||||||
logger.trace(f"[DeviceActionManager] Always-free job {job_log} cancelled")
|
|
||||||
return True
|
|
||||||
|
|
||||||
# 如果是正在执行的任务
|
# 如果是正在执行的任务
|
||||||
if device_key in self.active_jobs and self.active_jobs[device_key].job_id == job_id:
|
if device_key in self.active_jobs and self.active_jobs[device_key].job_id == job_id:
|
||||||
# 清理active job状态
|
# 清理active job状态
|
||||||
@@ -368,18 +334,13 @@ class DeviceActionManager:
|
|||||||
timeout_jobs = []
|
timeout_jobs = []
|
||||||
|
|
||||||
with self.lock:
|
with self.lock:
|
||||||
# 收集所有需要检查的 READY 任务(active_jobs + always_free READY jobs)
|
# 统计READY状态的任务数量
|
||||||
ready_candidates = list(self.active_jobs.values())
|
ready_jobs_count = sum(1 for job in self.active_jobs.values() if job.status == JobStatus.READY)
|
||||||
for job in self.all_jobs.values():
|
|
||||||
if job.always_free and job.status == JobStatus.READY and job not in ready_candidates:
|
|
||||||
ready_candidates.append(job)
|
|
||||||
|
|
||||||
ready_jobs_count = sum(1 for job in ready_candidates if job.status == JobStatus.READY)
|
|
||||||
if ready_jobs_count > 0:
|
if ready_jobs_count > 0:
|
||||||
logger.trace(f"[DeviceActionManager] Checking {ready_jobs_count} READY jobs for timeout") # type: ignore # noqa: E501
|
logger.trace(f"[DeviceActionManager] Checking {ready_jobs_count} READY jobs for timeout") # type: ignore # noqa: E501
|
||||||
|
|
||||||
# 找到所有超时的READY任务(只检测,不处理)
|
# 找到所有超时的READY任务(只检测,不处理)
|
||||||
for job_info in ready_candidates:
|
for job_info in self.active_jobs.values():
|
||||||
if job_info.is_ready_timeout():
|
if job_info.is_ready_timeout():
|
||||||
timeout_jobs.append(job_info)
|
timeout_jobs.append(job_info)
|
||||||
job_log = format_job_log(
|
job_log = format_job_log(
|
||||||
@@ -478,7 +439,7 @@ class MessageProcessor:
|
|||||||
self.connected = True
|
self.connected = True
|
||||||
self.reconnect_count = 0
|
self.reconnect_count = 0
|
||||||
|
|
||||||
logger.info(f"[MessageProcessor] Connected to {self.websocket_url}")
|
logger.trace(f"[MessageProcessor] Connected to {self.websocket_url}")
|
||||||
|
|
||||||
# 启动发送协程
|
# 启动发送协程
|
||||||
send_task = asyncio.create_task(self._send_handler())
|
send_task = asyncio.create_task(self._send_handler())
|
||||||
@@ -556,7 +517,7 @@ class MessageProcessor:
|
|||||||
|
|
||||||
async def _send_handler(self):
|
async def _send_handler(self):
|
||||||
"""处理发送队列中的消息"""
|
"""处理发送队列中的消息"""
|
||||||
logger.debug("[MessageProcessor] Send handler started")
|
logger.trace("[MessageProcessor] Send handler started")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
while self.connected and self.websocket:
|
while self.connected and self.websocket:
|
||||||
@@ -647,24 +608,6 @@ class MessageProcessor:
|
|||||||
if host_node:
|
if host_node:
|
||||||
host_node.handle_pong_response(pong_data)
|
host_node.handle_pong_response(pong_data)
|
||||||
|
|
||||||
def _check_action_always_free(self, device_id: str, action_name: str) -> bool:
|
|
||||||
"""检查该action是否标记为always_free,通过HostNode统一的_action_value_mappings查找"""
|
|
||||||
try:
|
|
||||||
host_node = HostNode.get_instance(0)
|
|
||||||
if not host_node:
|
|
||||||
return False
|
|
||||||
# noinspection PyProtectedMember
|
|
||||||
action_mappings = host_node._action_value_mappings.get(device_id)
|
|
||||||
if not action_mappings:
|
|
||||||
return False
|
|
||||||
# 尝试直接匹配或 auto- 前缀匹配
|
|
||||||
for key in [action_name, f"auto-{action_name}"]:
|
|
||||||
if key in action_mappings:
|
|
||||||
return action_mappings[key].get("always_free", False)
|
|
||||||
return False
|
|
||||||
except Exception:
|
|
||||||
return False
|
|
||||||
|
|
||||||
async def _handle_query_action_state(self, data: Dict[str, Any]):
|
async def _handle_query_action_state(self, data: Dict[str, Any]):
|
||||||
"""处理query_action_state消息"""
|
"""处理query_action_state消息"""
|
||||||
device_id = data.get("device_id", "")
|
device_id = data.get("device_id", "")
|
||||||
@@ -679,9 +622,6 @@ class MessageProcessor:
|
|||||||
|
|
||||||
device_action_key = f"/devices/{device_id}/{action_name}"
|
device_action_key = f"/devices/{device_id}/{action_name}"
|
||||||
|
|
||||||
# 检查action是否为always_free
|
|
||||||
action_always_free = self._check_action_always_free(device_id, action_name)
|
|
||||||
|
|
||||||
# 创建任务信息
|
# 创建任务信息
|
||||||
job_info = JobInfo(
|
job_info = JobInfo(
|
||||||
job_id=job_id,
|
job_id=job_id,
|
||||||
@@ -691,7 +631,6 @@ class MessageProcessor:
|
|||||||
device_action_key=device_action_key,
|
device_action_key=device_action_key,
|
||||||
status=JobStatus.QUEUE,
|
status=JobStatus.QUEUE,
|
||||||
start_time=time.time(),
|
start_time=time.time(),
|
||||||
always_free=action_always_free,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# 添加到设备管理器
|
# 添加到设备管理器
|
||||||
@@ -1090,7 +1029,7 @@ class QueueProcessor:
|
|||||||
|
|
||||||
def _run(self):
|
def _run(self):
|
||||||
"""运行队列处理主循环"""
|
"""运行队列处理主循环"""
|
||||||
logger.debug("[QueueProcessor] Queue processor started")
|
logger.trace("[QueueProcessor] Queue processor started")
|
||||||
|
|
||||||
while self.is_running:
|
while self.is_running:
|
||||||
try:
|
try:
|
||||||
@@ -1184,11 +1123,6 @@ class QueueProcessor:
|
|||||||
logger.debug(f"[QueueProcessor] Sending busy status for {len(queued_jobs)} queued jobs")
|
logger.debug(f"[QueueProcessor] Sending busy status for {len(queued_jobs)} queued jobs")
|
||||||
|
|
||||||
for job_info in queued_jobs:
|
for job_info in queued_jobs:
|
||||||
# 快照可能已过期:在遍历过程中 end_job() 可能已将此 job 移至 READY,
|
|
||||||
# 此时不应再发送 busy/need_more,否则会覆盖已发出的 free=True 通知
|
|
||||||
if job_info.status != JobStatus.QUEUE:
|
|
||||||
continue
|
|
||||||
|
|
||||||
message = {
|
message = {
|
||||||
"action": "report_action_state",
|
"action": "report_action_state",
|
||||||
"data": {
|
"data": {
|
||||||
@@ -1305,7 +1239,6 @@ class WebSocketClient(BaseCommunicationClient):
|
|||||||
else:
|
else:
|
||||||
url = f"{scheme}://{parsed.netloc}/api/v1/ws/schedule"
|
url = f"{scheme}://{parsed.netloc}/api/v1/ws/schedule"
|
||||||
|
|
||||||
logger.debug(f"[WebSocketClient] URL: {url}")
|
|
||||||
return url
|
return url
|
||||||
|
|
||||||
def start(self) -> None:
|
def start(self) -> None:
|
||||||
@@ -1318,13 +1251,11 @@ class WebSocketClient(BaseCommunicationClient):
|
|||||||
logger.error("[WebSocketClient] WebSocket URL not configured")
|
logger.error("[WebSocketClient] WebSocket URL not configured")
|
||||||
return
|
return
|
||||||
|
|
||||||
logger.info(f"[WebSocketClient] Starting connection to {self.websocket_url}")
|
|
||||||
|
|
||||||
# 启动两个核心线程
|
# 启动两个核心线程
|
||||||
self.message_processor.start()
|
self.message_processor.start()
|
||||||
self.queue_processor.start()
|
self.queue_processor.start()
|
||||||
|
|
||||||
logger.info("[WebSocketClient] All threads started")
|
logger.trace("[WebSocketClient] All threads started")
|
||||||
|
|
||||||
def stop(self) -> None:
|
def stop(self) -> None:
|
||||||
"""停止WebSocket客户端"""
|
"""停止WebSocket客户端"""
|
||||||
|
|||||||
@@ -23,7 +23,6 @@ class BasicConfig:
|
|||||||
disable_browser = False # 禁止浏览器自动打开
|
disable_browser = False # 禁止浏览器自动打开
|
||||||
port = 8002 # 本地HTTP服务
|
port = 8002 # 本地HTTP服务
|
||||||
check_mode = False # CI 检查模式,用于验证 registry 导入和文件一致性
|
check_mode = False # CI 检查模式,用于验证 registry 导入和文件一致性
|
||||||
test_mode = False # 测试模式,所有动作不实际执行,返回模拟结果
|
|
||||||
# 'TRACE', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'
|
# 'TRACE', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'
|
||||||
log_level: Literal["TRACE", "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] = "DEBUG"
|
log_level: Literal["TRACE", "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] = "DEBUG"
|
||||||
|
|
||||||
@@ -146,5 +145,5 @@ def load_config(config_path=None):
|
|||||||
traceback.print_exc()
|
traceback.print_exc()
|
||||||
exit(1)
|
exit(1)
|
||||||
else:
|
else:
|
||||||
config_path = os.path.join(os.path.dirname(__file__), "example_config.py")
|
config_path = os.path.join(os.path.dirname(__file__), "local_config.py")
|
||||||
load_config(config_path)
|
load_config(config_path)
|
||||||
|
|||||||
@@ -21,7 +21,7 @@ from pylabrobot.resources import (
|
|||||||
ResourceHolder,
|
ResourceHolder,
|
||||||
Lid,
|
Lid,
|
||||||
Trash,
|
Trash,
|
||||||
Tip, TubeRack,
|
Tip,
|
||||||
)
|
)
|
||||||
from typing_extensions import TypedDict
|
from typing_extensions import TypedDict
|
||||||
|
|
||||||
@@ -696,13 +696,10 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
|
|||||||
|
|
||||||
如果 liquid_names 和 volumes 为空,但 plate 和 well_names 不为空,直接返回 plate 和 wells。
|
如果 liquid_names 和 volumes 为空,但 plate 和 well_names 不为空,直接返回 plate 和 wells。
|
||||||
"""
|
"""
|
||||||
assert issubclass(plate.__class__, Plate) or issubclass(plate.__class__, TubeRack) , f"plate must be a Plate, now: {type(plate)}"
|
assert issubclass(plate.__class__, Plate), "plate must be a Plate"
|
||||||
plate: Union[Plate, TubeRack]
|
plate: Plate = cast(Plate, cast(Resource, plate))
|
||||||
# 根据 well_names 获取对应的 Well 对象
|
# 根据 well_names 获取对应的 Well 对象
|
||||||
if issubclass(plate.__class__, Plate):
|
wells = [plate.get_well(name) for name in well_names]
|
||||||
wells = [plate.get_well(name) for name in well_names]
|
|
||||||
elif issubclass(plate.__class__, TubeRack):
|
|
||||||
wells = [plate.get_tube(name) for name in well_names]
|
|
||||||
res_volumes = []
|
res_volumes = []
|
||||||
|
|
||||||
# 如果 liquid_names 和 volumes 都为空,直接返回
|
# 如果 liquid_names 和 volumes 都为空,直接返回
|
||||||
|
|||||||
@@ -91,7 +91,7 @@ class PRCXI9300Deck(Deck):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, name: str, size_x: float, size_y: float, size_z: float, **kwargs):
|
def __init__(self, name: str, size_x: float, size_y: float, size_z: float, **kwargs):
|
||||||
super().__init__(size_x, size_y, size_z, name)
|
super().__init__(name, size_x, size_y, size_z)
|
||||||
self.slots = [None] * 16 # PRCXI 9300/9320 最大有 16 个槽位
|
self.slots = [None] * 16 # PRCXI 9300/9320 最大有 16 个槽位
|
||||||
self.slot_locations = [Coordinate(0, 0, 0)] * 16
|
self.slot_locations = [Coordinate(0, 0, 0)] * 16
|
||||||
|
|
||||||
@@ -248,15 +248,14 @@ class PRCXI9300TipRack(TipRack):
|
|||||||
if ordered_items is not None:
|
if ordered_items is not None:
|
||||||
items = ordered_items
|
items = ordered_items
|
||||||
elif ordering is not None:
|
elif ordering is not None:
|
||||||
# 检查 ordering 中的值类型来决定如何处理:
|
# 检查 ordering 中的值是否是字符串(从 JSON 反序列化时的情况)
|
||||||
# - 字符串值(从 JSON 反序列化): 只用键创建 ordering_param
|
# 如果是字符串,说明这是位置名称,需要让 TipRack 自己创建 Tip 对象
|
||||||
# - None 值(从第二次往返序列化): 同样只用键创建 ordering_param
|
# 我们只传递位置信息(键),不传递值,使用 ordering 参数
|
||||||
# - 对象值(已经是实际的 Resource 对象): 直接作为 ordered_items 使用
|
if ordering and isinstance(next(iter(ordering.values()), None), str):
|
||||||
first_val = next(iter(ordering.values()), None) if ordering else None
|
# ordering 的值是字符串,只使用键(位置信息)创建新的 OrderedDict
|
||||||
if not ordering or first_val is None or isinstance(first_val, str):
|
|
||||||
# ordering 的值是字符串或 None,只使用键(位置信息)创建新的 OrderedDict
|
|
||||||
# 传递 ordering 参数而不是 ordered_items,让 TipRack 自己创建 Tip 对象
|
# 传递 ordering 参数而不是 ordered_items,让 TipRack 自己创建 Tip 对象
|
||||||
items = None
|
items = None
|
||||||
|
# 使用 ordering 参数,只包含位置信息(键)
|
||||||
ordering_param = collections.OrderedDict((k, None) for k in ordering.keys())
|
ordering_param = collections.OrderedDict((k, None) for k in ordering.keys())
|
||||||
else:
|
else:
|
||||||
# ordering 的值已经是对象,可以直接使用
|
# ordering 的值已经是对象,可以直接使用
|
||||||
@@ -398,15 +397,14 @@ class PRCXI9300TubeRack(TubeRack):
|
|||||||
items_to_pass = ordered_items
|
items_to_pass = ordered_items
|
||||||
ordering_param = None
|
ordering_param = None
|
||||||
elif ordering is not None:
|
elif ordering is not None:
|
||||||
# 检查 ordering 中的值类型来决定如何处理:
|
# 检查 ordering 中的值是否是字符串(从 JSON 反序列化时的情况)
|
||||||
# - 字符串值(从 JSON 反序列化): 只用键创建 ordering_param
|
# 如果是字符串,说明这是位置名称,需要让 TubeRack 自己创建 Tube 对象
|
||||||
# - None 值(从第二次往返序列化): 同样只用键创建 ordering_param
|
# 我们只传递位置信息(键),不传递值,使用 ordering 参数
|
||||||
# - 对象值(已经是实际的 Resource 对象): 直接作为 ordered_items 使用
|
if ordering and isinstance(next(iter(ordering.values()), None), str):
|
||||||
first_val = next(iter(ordering.values()), None) if ordering else None
|
# ordering 的值是字符串,只使用键(位置信息)创建新的 OrderedDict
|
||||||
if not ordering or first_val is None or isinstance(first_val, str):
|
|
||||||
# ordering 的值是字符串或 None,只使用键(位置信息)创建新的 OrderedDict
|
|
||||||
# 传递 ordering 参数而不是 ordered_items,让 TubeRack 自己创建 Tube 对象
|
# 传递 ordering 参数而不是 ordered_items,让 TubeRack 自己创建 Tube 对象
|
||||||
items_to_pass = None
|
items_to_pass = None
|
||||||
|
# 使用 ordering 参数,只包含位置信息(键)
|
||||||
ordering_param = collections.OrderedDict((k, None) for k in ordering.keys())
|
ordering_param = collections.OrderedDict((k, None) for k in ordering.keys())
|
||||||
else:
|
else:
|
||||||
# ordering 的值已经是对象,可以直接使用
|
# ordering 的值已经是对象,可以直接使用
|
||||||
|
|||||||
@@ -19,11 +19,10 @@ from rclpy.node import Node
|
|||||||
import re
|
import re
|
||||||
|
|
||||||
class LiquidHandlerJointPublisher(BaseROS2DeviceNode):
|
class LiquidHandlerJointPublisher(BaseROS2DeviceNode):
|
||||||
def __init__(self,resources_config:list, resource_tracker, rate=50, device_id:str = "lh_joint_publisher", registry_name: str = "lh_joint_publisher", **kwargs):
|
def __init__(self,resources_config:list, resource_tracker, rate=50, device_id:str = "lh_joint_publisher", **kwargs):
|
||||||
super().__init__(
|
super().__init__(
|
||||||
driver_instance=self,
|
driver_instance=self,
|
||||||
device_id=device_id,
|
device_id=device_id,
|
||||||
registry_name=registry_name,
|
|
||||||
status_types={},
|
status_types={},
|
||||||
action_value_mappings={},
|
action_value_mappings={},
|
||||||
hardware_interface={},
|
hardware_interface={},
|
||||||
|
|||||||
@@ -22,7 +22,7 @@ from threading import Lock, RLock
|
|||||||
from typing_extensions import TypedDict
|
from typing_extensions import TypedDict
|
||||||
|
|
||||||
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode
|
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode
|
||||||
from unilabos.utils.decorator import not_action, always_free
|
from unilabos.utils.decorator import not_action
|
||||||
from unilabos.resources.resource_tracker import SampleUUIDsType, LabSample, RETURN_UNILABOS_SAMPLES
|
from unilabos.resources.resource_tracker import SampleUUIDsType, LabSample, RETURN_UNILABOS_SAMPLES
|
||||||
|
|
||||||
|
|
||||||
@@ -123,8 +123,8 @@ class VirtualWorkbench:
|
|||||||
_ros_node: BaseROS2DeviceNode
|
_ros_node: BaseROS2DeviceNode
|
||||||
|
|
||||||
# 配置常量
|
# 配置常量
|
||||||
ARM_OPERATION_TIME: float = 2 # 机械臂操作时间(秒)
|
ARM_OPERATION_TIME: float = 3.0 # 机械臂操作时间(秒)
|
||||||
HEATING_TIME: float = 60.0 # 加热时间(秒)
|
HEATING_TIME: float = 10.0 # 加热时间(秒)
|
||||||
NUM_HEATING_STATIONS: int = 3 # 加热台数量
|
NUM_HEATING_STATIONS: int = 3 # 加热台数量
|
||||||
|
|
||||||
def __init__(self, device_id: Optional[str] = None, config: Optional[Dict[str, Any]] = None, **kwargs):
|
def __init__(self, device_id: Optional[str] = None, config: Optional[Dict[str, Any]] = None, **kwargs):
|
||||||
@@ -141,9 +141,9 @@ class VirtualWorkbench:
|
|||||||
self.data: Dict[str, Any] = {}
|
self.data: Dict[str, Any] = {}
|
||||||
|
|
||||||
# 从config中获取可配置参数
|
# 从config中获取可配置参数
|
||||||
self.ARM_OPERATION_TIME = float(self.config.get("arm_operation_time", self.ARM_OPERATION_TIME))
|
self.ARM_OPERATION_TIME = float(self.config.get("arm_operation_time", 3.0))
|
||||||
self.HEATING_TIME = float(self.config.get("heating_time", self.HEATING_TIME))
|
self.HEATING_TIME = float(self.config.get("heating_time", 10.0))
|
||||||
self.NUM_HEATING_STATIONS = int(self.config.get("num_heating_stations", self.NUM_HEATING_STATIONS))
|
self.NUM_HEATING_STATIONS = int(self.config.get("num_heating_stations", 3))
|
||||||
|
|
||||||
# 机械臂状态和锁 (使用threading.Lock)
|
# 机械臂状态和锁 (使用threading.Lock)
|
||||||
self._arm_lock = Lock()
|
self._arm_lock = Lock()
|
||||||
@@ -431,7 +431,6 @@ class VirtualWorkbench:
|
|||||||
sample_uuid, content in sample_uuids.items()]
|
sample_uuid, content in sample_uuids.items()]
|
||||||
}
|
}
|
||||||
|
|
||||||
@always_free
|
|
||||||
def start_heating(
|
def start_heating(
|
||||||
self,
|
self,
|
||||||
sample_uuids: SampleUUIDsType,
|
sample_uuids: SampleUUIDsType,
|
||||||
@@ -502,21 +501,10 @@ class VirtualWorkbench:
|
|||||||
|
|
||||||
self._update_data_status(f"加热台{station_id}开始加热{material_id}")
|
self._update_data_status(f"加热台{station_id}开始加热{material_id}")
|
||||||
|
|
||||||
# 打印当前所有正在加热的台位
|
# 模拟加热过程 (10秒)
|
||||||
with self._stations_lock:
|
|
||||||
heating_list = [
|
|
||||||
f"加热台{sid}:{s.current_material}"
|
|
||||||
for sid, s in self._heating_stations.items()
|
|
||||||
if s.state == HeatingStationState.HEATING and s.current_material
|
|
||||||
]
|
|
||||||
self.logger.info(f"[并行加热] 当前同时加热中: {', '.join(heating_list)}")
|
|
||||||
|
|
||||||
# 模拟加热过程
|
|
||||||
start_time = time.time()
|
start_time = time.time()
|
||||||
last_countdown_log = start_time
|
|
||||||
while True:
|
while True:
|
||||||
elapsed = time.time() - start_time
|
elapsed = time.time() - start_time
|
||||||
remaining = max(0.0, self.HEATING_TIME - elapsed)
|
|
||||||
progress = min(100.0, (elapsed / self.HEATING_TIME) * 100)
|
progress = min(100.0, (elapsed / self.HEATING_TIME) * 100)
|
||||||
|
|
||||||
with self._stations_lock:
|
with self._stations_lock:
|
||||||
@@ -524,11 +512,6 @@ class VirtualWorkbench:
|
|||||||
|
|
||||||
self._update_data_status(f"加热台{station_id}加热中: {progress:.1f}%")
|
self._update_data_status(f"加热台{station_id}加热中: {progress:.1f}%")
|
||||||
|
|
||||||
# 每5秒打印一次倒计时
|
|
||||||
if time.time() - last_countdown_log >= 5.0:
|
|
||||||
self.logger.info(f"[加热台{station_id}] {material_id} 剩余 {remaining:.1f}s")
|
|
||||||
last_countdown_log = time.time()
|
|
||||||
|
|
||||||
if elapsed >= self.HEATING_TIME:
|
if elapsed >= self.HEATING_TIME:
|
||||||
break
|
break
|
||||||
|
|
||||||
|
|||||||
@@ -96,13 +96,10 @@ serial:
|
|||||||
type: string
|
type: string
|
||||||
port:
|
port:
|
||||||
type: string
|
type: string
|
||||||
registry_name:
|
|
||||||
type: string
|
|
||||||
resource_tracker:
|
resource_tracker:
|
||||||
type: object
|
type: object
|
||||||
required:
|
required:
|
||||||
- device_id
|
- device_id
|
||||||
- registry_name
|
|
||||||
- port
|
- port
|
||||||
type: object
|
type: object
|
||||||
data:
|
data:
|
||||||
|
|||||||
@@ -67,9 +67,6 @@ camera:
|
|||||||
period:
|
period:
|
||||||
default: 0.1
|
default: 0.1
|
||||||
type: number
|
type: number
|
||||||
registry_name:
|
|
||||||
default: ''
|
|
||||||
type: string
|
|
||||||
resource_tracker:
|
resource_tracker:
|
||||||
type: object
|
type: object
|
||||||
required: []
|
required: []
|
||||||
|
|||||||
@@ -6090,7 +6090,6 @@ virtual_workbench:
|
|||||||
type: object
|
type: object
|
||||||
type: UniLabJsonCommand
|
type: UniLabJsonCommand
|
||||||
auto-start_heating:
|
auto-start_heating:
|
||||||
always_free: true
|
|
||||||
feedback: {}
|
feedback: {}
|
||||||
goal: {}
|
goal: {}
|
||||||
goal_default:
|
goal_default:
|
||||||
|
|||||||
@@ -5,7 +5,6 @@ import sys
|
|||||||
import inspect
|
import inspect
|
||||||
import importlib
|
import importlib
|
||||||
import threading
|
import threading
|
||||||
import traceback
|
|
||||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Any, Dict, List, Union, Tuple
|
from typing import Any, Dict, List, Union, Tuple
|
||||||
@@ -89,14 +88,6 @@ class Registry:
|
|||||||
)
|
)
|
||||||
test_latency_schema["description"] = "用于测试延迟的动作,返回延迟时间和时间差。"
|
test_latency_schema["description"] = "用于测试延迟的动作,返回延迟时间和时间差。"
|
||||||
|
|
||||||
test_resource_method_info = host_node_enhanced_info.get("action_methods", {}).get("test_resource", {})
|
|
||||||
test_resource_schema = self._generate_unilab_json_command_schema(
|
|
||||||
test_resource_method_info.get("args", []),
|
|
||||||
"test_resource",
|
|
||||||
test_resource_method_info.get("return_annotation"),
|
|
||||||
)
|
|
||||||
test_resource_schema["description"] = "用于测试物料、设备和样本。"
|
|
||||||
|
|
||||||
self.device_type_registry.update(
|
self.device_type_registry.update(
|
||||||
{
|
{
|
||||||
"host_node": {
|
"host_node": {
|
||||||
@@ -198,7 +189,32 @@ class Registry:
|
|||||||
"goal": {},
|
"goal": {},
|
||||||
"feedback": {},
|
"feedback": {},
|
||||||
"result": {},
|
"result": {},
|
||||||
"schema": test_resource_schema,
|
"schema": {
|
||||||
|
"description": "",
|
||||||
|
"properties": {
|
||||||
|
"feedback": {},
|
||||||
|
"goal": {
|
||||||
|
"properties": {
|
||||||
|
"resource": ros_message_to_json_schema(Resource, "resource"),
|
||||||
|
"resources": {
|
||||||
|
"items": {
|
||||||
|
"properties": ros_message_to_json_schema(
|
||||||
|
Resource, "resources"
|
||||||
|
),
|
||||||
|
"type": "object",
|
||||||
|
},
|
||||||
|
"type": "array",
|
||||||
|
},
|
||||||
|
"device": {"type": "string"},
|
||||||
|
"devices": {"items": {"type": "string"}, "type": "array"},
|
||||||
|
},
|
||||||
|
"type": "object",
|
||||||
|
},
|
||||||
|
"result": {},
|
||||||
|
},
|
||||||
|
"title": "test_resource",
|
||||||
|
"type": "object",
|
||||||
|
},
|
||||||
"placeholder_keys": {
|
"placeholder_keys": {
|
||||||
"device": "unilabos_devices",
|
"device": "unilabos_devices",
|
||||||
"devices": "unilabos_devices",
|
"devices": "unilabos_devices",
|
||||||
@@ -822,7 +838,6 @@ class Registry:
|
|||||||
("list", "unilabos.registry.placeholder_type:DeviceSlot"),
|
("list", "unilabos.registry.placeholder_type:DeviceSlot"),
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
**({"always_free": True} if v.get("always_free") else {}),
|
|
||||||
}
|
}
|
||||||
for k, v in enhanced_info["action_methods"].items()
|
for k, v in enhanced_info["action_methods"].items()
|
||||||
if k not in device_config["class"]["action_value_mappings"]
|
if k not in device_config["class"]["action_value_mappings"]
|
||||||
@@ -928,7 +943,6 @@ class Registry:
|
|||||||
if is_valid:
|
if is_valid:
|
||||||
results.append((file, data, device_ids))
|
results.append((file, data, device_ids))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
traceback.print_exc()
|
|
||||||
logger.warning(f"[UniLab Registry] 处理设备文件异常: {file}, 错误: {e}")
|
logger.warning(f"[UniLab Registry] 处理设备文件异常: {file}, 错误: {e}")
|
||||||
|
|
||||||
# 线程安全地更新注册表
|
# 线程安全地更新注册表
|
||||||
|
|||||||
@@ -42,7 +42,7 @@ def canonicalize_nodes_data(
|
|||||||
Returns:
|
Returns:
|
||||||
ResourceTreeSet: 标准化后的资源树集合
|
ResourceTreeSet: 标准化后的资源树集合
|
||||||
"""
|
"""
|
||||||
print_status(f"{len(nodes)} Resources loaded:", "info")
|
print_status(f"{len(nodes)} Resources loaded", "info")
|
||||||
|
|
||||||
# 第一步:基本预处理(处理graphml的label字段)
|
# 第一步:基本预处理(处理graphml的label字段)
|
||||||
outer_host_node_id = None
|
outer_host_node_id = None
|
||||||
|
|||||||
@@ -38,52 +38,24 @@ class LabSample(TypedDict):
|
|||||||
extra: Dict[str, Any]
|
extra: Dict[str, Any]
|
||||||
|
|
||||||
|
|
||||||
class ResourceDictPositionSizeType(TypedDict):
|
|
||||||
depth: float
|
|
||||||
width: float
|
|
||||||
height: float
|
|
||||||
|
|
||||||
|
|
||||||
class ResourceDictPositionSize(BaseModel):
|
class ResourceDictPositionSize(BaseModel):
|
||||||
depth: float = Field(description="Depth", default=0.0) # z
|
depth: float = Field(description="Depth", default=0.0) # z
|
||||||
width: float = Field(description="Width", default=0.0) # x
|
width: float = Field(description="Width", default=0.0) # x
|
||||||
height: float = Field(description="Height", default=0.0) # y
|
height: float = Field(description="Height", default=0.0) # y
|
||||||
|
|
||||||
|
|
||||||
class ResourceDictPositionScaleType(TypedDict):
|
|
||||||
x: float
|
|
||||||
y: float
|
|
||||||
z: float
|
|
||||||
|
|
||||||
|
|
||||||
class ResourceDictPositionScale(BaseModel):
|
class ResourceDictPositionScale(BaseModel):
|
||||||
x: float = Field(description="x scale", default=0.0)
|
x: float = Field(description="x scale", default=0.0)
|
||||||
y: float = Field(description="y scale", default=0.0)
|
y: float = Field(description="y scale", default=0.0)
|
||||||
z: float = Field(description="z scale", default=0.0)
|
z: float = Field(description="z scale", default=0.0)
|
||||||
|
|
||||||
|
|
||||||
class ResourceDictPositionObjectType(TypedDict):
|
|
||||||
x: float
|
|
||||||
y: float
|
|
||||||
z: float
|
|
||||||
|
|
||||||
|
|
||||||
class ResourceDictPositionObject(BaseModel):
|
class ResourceDictPositionObject(BaseModel):
|
||||||
x: float = Field(description="X coordinate", default=0.0)
|
x: float = Field(description="X coordinate", default=0.0)
|
||||||
y: float = Field(description="Y coordinate", default=0.0)
|
y: float = Field(description="Y coordinate", default=0.0)
|
||||||
z: float = Field(description="Z coordinate", default=0.0)
|
z: float = Field(description="Z coordinate", default=0.0)
|
||||||
|
|
||||||
|
|
||||||
class ResourceDictPositionType(TypedDict):
|
|
||||||
size: ResourceDictPositionSizeType
|
|
||||||
scale: ResourceDictPositionScaleType
|
|
||||||
layout: Literal["2d", "x-y", "z-y", "x-z"]
|
|
||||||
position: ResourceDictPositionObjectType
|
|
||||||
position3d: ResourceDictPositionObjectType
|
|
||||||
rotation: ResourceDictPositionObjectType
|
|
||||||
cross_section_type: Literal["rectangle", "circle", "rounded_rectangle"]
|
|
||||||
|
|
||||||
|
|
||||||
class ResourceDictPosition(BaseModel):
|
class ResourceDictPosition(BaseModel):
|
||||||
size: ResourceDictPositionSize = Field(description="Resource size", default_factory=ResourceDictPositionSize)
|
size: ResourceDictPositionSize = Field(description="Resource size", default_factory=ResourceDictPositionSize)
|
||||||
scale: ResourceDictPositionScale = Field(description="Resource scale", default_factory=ResourceDictPositionScale)
|
scale: ResourceDictPositionScale = Field(description="Resource scale", default_factory=ResourceDictPositionScale)
|
||||||
@@ -102,24 +74,6 @@ class ResourceDictPosition(BaseModel):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class ResourceDictType(TypedDict):
|
|
||||||
id: str
|
|
||||||
uuid: str
|
|
||||||
name: str
|
|
||||||
description: str
|
|
||||||
resource_schema: Dict[str, Any]
|
|
||||||
model: Dict[str, Any]
|
|
||||||
icon: str
|
|
||||||
parent_uuid: Optional[str]
|
|
||||||
parent: Optional["ResourceDictType"]
|
|
||||||
type: Union[Literal["device"], str]
|
|
||||||
klass: str
|
|
||||||
pose: ResourceDictPositionType
|
|
||||||
config: Dict[str, Any]
|
|
||||||
data: Dict[str, Any]
|
|
||||||
extra: Dict[str, Any]
|
|
||||||
|
|
||||||
|
|
||||||
# 统一的资源字典模型,parent 自动序列化为 parent_uuid,children 不序列化
|
# 统一的资源字典模型,parent 自动序列化为 parent_uuid,children 不序列化
|
||||||
class ResourceDict(BaseModel):
|
class ResourceDict(BaseModel):
|
||||||
id: str = Field(description="Resource ID")
|
id: str = Field(description="Resource ID")
|
||||||
@@ -137,8 +91,8 @@ class ResourceDict(BaseModel):
|
|||||||
klass: str = Field(alias="class", description="Resource class name")
|
klass: str = Field(alias="class", description="Resource class name")
|
||||||
pose: ResourceDictPosition = Field(description="Resource position", default_factory=ResourceDictPosition)
|
pose: ResourceDictPosition = Field(description="Resource position", default_factory=ResourceDictPosition)
|
||||||
config: Dict[str, Any] = Field(description="Resource configuration")
|
config: Dict[str, Any] = Field(description="Resource configuration")
|
||||||
data: Dict[str, Any] = Field(description="Resource data")
|
data: Dict[str, Any] = Field(description="Resource data, eg: container liquid data")
|
||||||
extra: Dict[str, Any] = Field(description="Extra data")
|
extra: Dict[str, Any] = Field(description="Extra data, eg: slot index")
|
||||||
|
|
||||||
@field_serializer("parent_uuid")
|
@field_serializer("parent_uuid")
|
||||||
def _serialize_parent(self, parent_uuid: Optional["ResourceDict"]):
|
def _serialize_parent(self, parent_uuid: Optional["ResourceDict"]):
|
||||||
|
|||||||
@@ -44,7 +44,8 @@ def ros2_device_node(
|
|||||||
# 从属性中自动发现可发布状态
|
# 从属性中自动发现可发布状态
|
||||||
if status_types is None:
|
if status_types is None:
|
||||||
status_types = {}
|
status_types = {}
|
||||||
assert device_config is not None, "device_config cannot be None"
|
if device_config is None:
|
||||||
|
raise ValueError("device_config cannot be None")
|
||||||
if action_value_mappings is None:
|
if action_value_mappings is None:
|
||||||
action_value_mappings = {}
|
action_value_mappings = {}
|
||||||
if hardware_interface is None:
|
if hardware_interface is None:
|
||||||
|
|||||||
@@ -146,7 +146,7 @@ def init_wrapper(
|
|||||||
device_id: str,
|
device_id: str,
|
||||||
device_uuid: str,
|
device_uuid: str,
|
||||||
driver_class: type[T],
|
driver_class: type[T],
|
||||||
device_config: ResourceDictInstance,
|
device_config: ResourceTreeInstance,
|
||||||
status_types: Dict[str, Any],
|
status_types: Dict[str, Any],
|
||||||
action_value_mappings: Dict[str, Any],
|
action_value_mappings: Dict[str, Any],
|
||||||
hardware_interface: Dict[str, Any],
|
hardware_interface: Dict[str, Any],
|
||||||
@@ -279,7 +279,6 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
self,
|
self,
|
||||||
driver_instance: T,
|
driver_instance: T,
|
||||||
device_id: str,
|
device_id: str,
|
||||||
registry_name: str,
|
|
||||||
device_uuid: str,
|
device_uuid: str,
|
||||||
status_types: Dict[str, Any],
|
status_types: Dict[str, Any],
|
||||||
action_value_mappings: Dict[str, Any],
|
action_value_mappings: Dict[str, Any],
|
||||||
@@ -301,7 +300,6 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
"""
|
"""
|
||||||
self.driver_instance = driver_instance
|
self.driver_instance = driver_instance
|
||||||
self.device_id = device_id
|
self.device_id = device_id
|
||||||
self.registry_name = registry_name
|
|
||||||
self.uuid = device_uuid
|
self.uuid = device_uuid
|
||||||
self.publish_high_frequency = False
|
self.publish_high_frequency = False
|
||||||
self.callback_group = ReentrantCallbackGroup()
|
self.callback_group = ReentrantCallbackGroup()
|
||||||
@@ -418,9 +416,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
if len(rts.root_nodes) == 1 and isinstance(rts_plr_instances[0], RegularContainer):
|
if len(rts.root_nodes) == 1 and isinstance(rts_plr_instances[0], RegularContainer):
|
||||||
# noinspection PyTypeChecker
|
# noinspection PyTypeChecker
|
||||||
container_instance: RegularContainer = rts_plr_instances[0]
|
container_instance: RegularContainer = rts_plr_instances[0]
|
||||||
found_resources = self.resource_tracker.figure_resource(
|
found_resources = self.resource_tracker.figure_resource({"name": container_instance.name}, try_mode=True)
|
||||||
{"name": container_instance.name}, try_mode=True
|
|
||||||
)
|
|
||||||
if not len(found_resources):
|
if not len(found_resources):
|
||||||
self.resource_tracker.add_resource(container_instance)
|
self.resource_tracker.add_resource(container_instance)
|
||||||
logger.info(f"添加物料{container_instance.name}到资源跟踪器")
|
logger.info(f"添加物料{container_instance.name}到资源跟踪器")
|
||||||
@@ -460,7 +456,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
}
|
}
|
||||||
res.response = json.dumps(final_response)
|
res.response = json.dumps(final_response)
|
||||||
# 如果driver自己就有assign的方法,那就使用driver自己的assign方法
|
# 如果driver自己就有assign的方法,那就使用driver自己的assign方法
|
||||||
if hasattr(self.driver_instance, "create_resource") and self.node_name != "host_node":
|
if hasattr(self.driver_instance, "create_resource"):
|
||||||
create_resource_func = getattr(self.driver_instance, "create_resource")
|
create_resource_func = getattr(self.driver_instance, "create_resource")
|
||||||
try:
|
try:
|
||||||
ret = create_resource_func(
|
ret = create_resource_func(
|
||||||
@@ -1156,7 +1152,6 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
"machine_name": BasicConfig.machine_name,
|
"machine_name": BasicConfig.machine_name,
|
||||||
"type": "slave",
|
"type": "slave",
|
||||||
"edge_device_id": self.device_id,
|
"edge_device_id": self.device_id,
|
||||||
"registry_name": self.registry_name,
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
ensure_ascii=False,
|
ensure_ascii=False,
|
||||||
@@ -1631,7 +1626,9 @@ class BaseROS2DeviceNode(Node, Generic[T]):
|
|||||||
else:
|
else:
|
||||||
resolved_sample_uuids[sample_uuid] = material_uuid
|
resolved_sample_uuids[sample_uuid] = material_uuid
|
||||||
function_args[PARAM_SAMPLE_UUIDS] = resolved_sample_uuids
|
function_args[PARAM_SAMPLE_UUIDS] = resolved_sample_uuids
|
||||||
self.lab_logger().debug(f"[JsonCommand] 注入 {PARAM_SAMPLE_UUIDS}: {resolved_sample_uuids}")
|
self.lab_logger().debug(
|
||||||
|
f"[JsonCommand] 注入 {PARAM_SAMPLE_UUIDS}: {resolved_sample_uuids}"
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# 处理单个 ResourceSlot
|
# 处理单个 ResourceSlot
|
||||||
@@ -2008,7 +2005,6 @@ class ROS2DeviceNode:
|
|||||||
|
|
||||||
if driver_is_ros:
|
if driver_is_ros:
|
||||||
driver_params["device_id"] = device_id
|
driver_params["device_id"] = device_id
|
||||||
driver_params["registry_name"] = device_config.res_content.klass
|
|
||||||
driver_params["resource_tracker"] = self.resource_tracker
|
driver_params["resource_tracker"] = self.resource_tracker
|
||||||
self._driver_instance = self._driver_creator.create_instance(driver_params)
|
self._driver_instance = self._driver_creator.create_instance(driver_params)
|
||||||
if self._driver_instance is None:
|
if self._driver_instance is None:
|
||||||
@@ -2026,7 +2022,6 @@ class ROS2DeviceNode:
|
|||||||
children=children,
|
children=children,
|
||||||
driver_instance=self._driver_instance, # type: ignore
|
driver_instance=self._driver_instance, # type: ignore
|
||||||
device_id=device_id,
|
device_id=device_id,
|
||||||
registry_name=device_config.res_content.klass,
|
|
||||||
device_uuid=device_uuid,
|
device_uuid=device_uuid,
|
||||||
status_types=status_types,
|
status_types=status_types,
|
||||||
action_value_mappings=action_value_mappings,
|
action_value_mappings=action_value_mappings,
|
||||||
@@ -2038,7 +2033,6 @@ class ROS2DeviceNode:
|
|||||||
self._ros_node = BaseROS2DeviceNode(
|
self._ros_node = BaseROS2DeviceNode(
|
||||||
driver_instance=self._driver_instance,
|
driver_instance=self._driver_instance,
|
||||||
device_id=device_id,
|
device_id=device_id,
|
||||||
registry_name=device_config.res_content.klass,
|
|
||||||
device_uuid=device_uuid,
|
device_uuid=device_uuid,
|
||||||
status_types=status_types,
|
status_types=status_types,
|
||||||
action_value_mappings=action_value_mappings,
|
action_value_mappings=action_value_mappings,
|
||||||
@@ -2047,7 +2041,6 @@ class ROS2DeviceNode:
|
|||||||
resource_tracker=self.resource_tracker,
|
resource_tracker=self.resource_tracker,
|
||||||
)
|
)
|
||||||
self._ros_node: BaseROS2DeviceNode
|
self._ros_node: BaseROS2DeviceNode
|
||||||
# 将注册表类型名传递给BaseROS2DeviceNode,用于slave上报
|
|
||||||
self._ros_node.lab_logger().info(f"初始化完成 {self._ros_node.uuid} {self.driver_is_ros}")
|
self._ros_node.lab_logger().info(f"初始化完成 {self._ros_node.uuid} {self.driver_is_ros}")
|
||||||
self.driver_instance._ros_node = self._ros_node # type: ignore
|
self.driver_instance._ros_node = self._ros_node # type: ignore
|
||||||
self.driver_instance._execute_driver_command = self._ros_node._execute_driver_command # type: ignore
|
self.driver_instance._execute_driver_command = self._ros_node._execute_driver_command # type: ignore
|
||||||
|
|||||||
@@ -6,13 +6,12 @@ from cv_bridge import CvBridge
|
|||||||
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode, DeviceNodeResourceTracker
|
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode, DeviceNodeResourceTracker
|
||||||
|
|
||||||
class VideoPublisher(BaseROS2DeviceNode):
|
class VideoPublisher(BaseROS2DeviceNode):
|
||||||
def __init__(self, device_id='video_publisher', registry_name="", device_uuid='', camera_index=0, period: float = 0.1, resource_tracker: DeviceNodeResourceTracker = None):
|
def __init__(self, device_id='video_publisher', device_uuid='', camera_index=0, period: float = 0.1, resource_tracker: DeviceNodeResourceTracker = None):
|
||||||
# 初始化BaseROS2DeviceNode,使用自身作为driver_instance
|
# 初始化BaseROS2DeviceNode,使用自身作为driver_instance
|
||||||
BaseROS2DeviceNode.__init__(
|
BaseROS2DeviceNode.__init__(
|
||||||
self,
|
self,
|
||||||
driver_instance=self,
|
driver_instance=self,
|
||||||
device_id=device_id,
|
device_id=device_id,
|
||||||
registry_name=registry_name,
|
|
||||||
device_uuid=device_uuid,
|
device_uuid=device_uuid,
|
||||||
status_types={},
|
status_types={},
|
||||||
action_value_mappings={},
|
action_value_mappings={},
|
||||||
|
|||||||
@@ -10,7 +10,6 @@ class ControllerNode(BaseROS2DeviceNode):
|
|||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
device_id: str,
|
device_id: str,
|
||||||
registry_name: str,
|
|
||||||
controller_func: Callable,
|
controller_func: Callable,
|
||||||
update_rate: float,
|
update_rate: float,
|
||||||
inputs: Dict[str, Dict[str, type | str]],
|
inputs: Dict[str, Dict[str, type | str]],
|
||||||
@@ -52,7 +51,6 @@ class ControllerNode(BaseROS2DeviceNode):
|
|||||||
self,
|
self,
|
||||||
driver_instance=self,
|
driver_instance=self,
|
||||||
device_id=device_id,
|
device_id=device_id,
|
||||||
registry_name=registry_name,
|
|
||||||
status_types=status_types,
|
status_types=status_types,
|
||||||
action_value_mappings=action_value_mappings,
|
action_value_mappings=action_value_mappings,
|
||||||
hardware_interface=hardware_interface,
|
hardware_interface=hardware_interface,
|
||||||
|
|||||||
@@ -35,7 +35,7 @@ from unilabos.resources.resource_tracker import (
|
|||||||
ResourceTreeInstance,
|
ResourceTreeInstance,
|
||||||
RETURN_UNILABOS_SAMPLES,
|
RETURN_UNILABOS_SAMPLES,
|
||||||
JSON_UNILABOS_PARAM,
|
JSON_UNILABOS_PARAM,
|
||||||
PARAM_SAMPLE_UUIDS, SampleUUIDsType, LabSample,
|
PARAM_SAMPLE_UUIDS,
|
||||||
)
|
)
|
||||||
from unilabos.ros.initialize_device import initialize_device_from_dict
|
from unilabos.ros.initialize_device import initialize_device_from_dict
|
||||||
from unilabos.ros.msgs.message_converter import (
|
from unilabos.ros.msgs.message_converter import (
|
||||||
@@ -51,7 +51,6 @@ from unilabos.utils import logger
|
|||||||
from unilabos.utils.exception import DeviceClassInvalid
|
from unilabos.utils.exception import DeviceClassInvalid
|
||||||
from unilabos.utils.log import warning
|
from unilabos.utils.log import warning
|
||||||
from unilabos.utils.type_check import serialize_result_info
|
from unilabos.utils.type_check import serialize_result_info
|
||||||
from unilabos.config.config import BasicConfig
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from unilabos.app.ws_client import QueueItem
|
from unilabos.app.ws_client import QueueItem
|
||||||
@@ -64,8 +63,7 @@ class DeviceActionStatus:
|
|||||||
|
|
||||||
class TestResourceReturn(TypedDict):
|
class TestResourceReturn(TypedDict):
|
||||||
resources: List[List[ResourceDict]]
|
resources: List[List[ResourceDict]]
|
||||||
devices: List[Dict[str, Any]]
|
devices: List[DeviceSlot]
|
||||||
unilabos_samples: List[LabSample]
|
|
||||||
|
|
||||||
|
|
||||||
class TestLatencyReturn(TypedDict):
|
class TestLatencyReturn(TypedDict):
|
||||||
@@ -250,7 +248,6 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
self,
|
self,
|
||||||
driver_instance=self,
|
driver_instance=self,
|
||||||
device_id=device_id,
|
device_id=device_id,
|
||||||
registry_name="host_node",
|
|
||||||
device_uuid=host_node_dict["uuid"],
|
device_uuid=host_node_dict["uuid"],
|
||||||
status_types={},
|
status_types={},
|
||||||
action_value_mappings=lab_registry.device_type_registry["host_node"]["class"]["action_value_mappings"],
|
action_value_mappings=lab_registry.device_type_registry["host_node"]["class"]["action_value_mappings"],
|
||||||
@@ -305,8 +302,7 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
} # 用来存储多个ActionClient实例
|
} # 用来存储多个ActionClient实例
|
||||||
self._action_value_mappings: Dict[str, Dict] = (
|
self._action_value_mappings: Dict[str, Dict] = (
|
||||||
{}
|
{}
|
||||||
) # device_id -> action_value_mappings(本地+远程设备统一存储)
|
) # 用来存储多个ActionClient的type, goal, feedback, result的变量名映射关系
|
||||||
self._slave_registry_configs: Dict[str, Dict] = {} # registry_name -> registry_config(含action_value_mappings)
|
|
||||||
self._goals: Dict[str, Any] = {} # 用来存储多个目标的状态
|
self._goals: Dict[str, Any] = {} # 用来存储多个目标的状态
|
||||||
self._online_devices: Set[str] = {f"{self.namespace}/{device_id}"} # 用于跟踪在线设备
|
self._online_devices: Set[str] = {f"{self.namespace}/{device_id}"} # 用于跟踪在线设备
|
||||||
self._last_discovery_time = 0.0 # 上次设备发现的时间
|
self._last_discovery_time = 0.0 # 上次设备发现的时间
|
||||||
@@ -640,8 +636,6 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
self.device_machine_names[device_id] = "本地"
|
self.device_machine_names[device_id] = "本地"
|
||||||
self.devices_instances[device_id] = d
|
self.devices_instances[device_id] = d
|
||||||
# noinspection PyProtectedMember
|
# noinspection PyProtectedMember
|
||||||
self._action_value_mappings[device_id] = d._ros_node._action_value_mappings
|
|
||||||
# noinspection PyProtectedMember
|
|
||||||
for action_name, action_value_mapping in d._ros_node._action_value_mappings.items():
|
for action_name, action_value_mapping in d._ros_node._action_value_mappings.items():
|
||||||
if action_name.startswith("auto-") or str(action_value_mapping.get("type", "")).startswith(
|
if action_name.startswith("auto-") or str(action_value_mapping.get("type", "")).startswith(
|
||||||
"UniLabJsonCommand"
|
"UniLabJsonCommand"
|
||||||
@@ -778,17 +772,6 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
u = uuid.UUID(item.job_id)
|
u = uuid.UUID(item.job_id)
|
||||||
device_id = item.device_id
|
device_id = item.device_id
|
||||||
action_name = item.action_name
|
action_name = item.action_name
|
||||||
|
|
||||||
if BasicConfig.test_mode:
|
|
||||||
action_id = f"/devices/{device_id}/{action_name}"
|
|
||||||
self.lab_logger().info(
|
|
||||||
f"[TEST MODE] 模拟执行: {action_id} (job={item.job_id[:8]}), 参数: {str(action_kwargs)[:500]}"
|
|
||||||
)
|
|
||||||
# 根据注册表 handles 构建模拟返回值
|
|
||||||
mock_return = self._build_test_mode_return(device_id, action_name, action_kwargs)
|
|
||||||
self._handle_test_mode_result(item, action_id, mock_return)
|
|
||||||
return
|
|
||||||
|
|
||||||
if action_type.startswith("UniLabJsonCommand"):
|
if action_type.startswith("UniLabJsonCommand"):
|
||||||
if action_name.startswith("auto-"):
|
if action_name.startswith("auto-"):
|
||||||
action_name = action_name[5:]
|
action_name = action_name[5:]
|
||||||
@@ -826,51 +809,6 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
)
|
)
|
||||||
future.add_done_callback(lambda f: self.goal_response_callback(item, action_id, f))
|
future.add_done_callback(lambda f: self.goal_response_callback(item, action_id, f))
|
||||||
|
|
||||||
def _build_test_mode_return(
|
|
||||||
self, device_id: str, action_name: str, action_kwargs: Dict[str, Any]
|
|
||||||
) -> Dict[str, Any]:
|
|
||||||
"""
|
|
||||||
根据注册表 handles 的 output 定义构建测试模式的模拟返回值
|
|
||||||
|
|
||||||
根据 data_key 中 @flatten 的层数决定嵌套数组层数,叶子值为空字典。
|
|
||||||
例如: "vessel" → {}, "plate.@flatten" → [{}], "a.@flatten.@flatten" → [[{}]]
|
|
||||||
"""
|
|
||||||
mock_return: Dict[str, Any] = {"test_mode": True, "action_name": action_name}
|
|
||||||
action_mappings = self._action_value_mappings.get(device_id, {})
|
|
||||||
action_mapping = action_mappings.get(action_name, {})
|
|
||||||
handles = action_mapping.get("handles", {})
|
|
||||||
if isinstance(handles, dict):
|
|
||||||
for output_handle in handles.get("output", []):
|
|
||||||
data_key = output_handle.get("data_key", "")
|
|
||||||
handler_key = output_handle.get("handler_key", "")
|
|
||||||
# 根据 @flatten 层数构建嵌套数组,叶子为空字典
|
|
||||||
flatten_count = data_key.count("@flatten")
|
|
||||||
value: Any = {}
|
|
||||||
for _ in range(flatten_count):
|
|
||||||
value = [value]
|
|
||||||
mock_return[handler_key] = value
|
|
||||||
return mock_return
|
|
||||||
|
|
||||||
def _handle_test_mode_result(
|
|
||||||
self, item: "QueueItem", action_id: str, mock_return: Dict[str, Any]
|
|
||||||
) -> None:
|
|
||||||
"""
|
|
||||||
测试模式下直接构建结果并走正常的结果回调流程(跳过 ROS)
|
|
||||||
"""
|
|
||||||
job_id = item.job_id
|
|
||||||
status = "success"
|
|
||||||
return_info = serialize_result_info("", True, mock_return)
|
|
||||||
|
|
||||||
self.lab_logger().info(f"[TEST MODE] Result for {action_id} ({job_id[:8]}): {status}")
|
|
||||||
|
|
||||||
from unilabos.app.web.controller import store_job_result
|
|
||||||
store_job_result(job_id, status, return_info, mock_return)
|
|
||||||
|
|
||||||
# 发布状态到桥接器
|
|
||||||
for bridge in self.bridges:
|
|
||||||
if hasattr(bridge, "publish_job_status"):
|
|
||||||
bridge.publish_job_status(mock_return, item, status, return_info)
|
|
||||||
|
|
||||||
def goal_response_callback(self, item: "QueueItem", action_id: str, future) -> None:
|
def goal_response_callback(self, item: "QueueItem", action_id: str, future) -> None:
|
||||||
"""目标响应回调"""
|
"""目标响应回调"""
|
||||||
goal_handle = future.result()
|
goal_handle = future.result()
|
||||||
@@ -1230,10 +1168,6 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
def _node_info_update_callback(self, request, response):
|
def _node_info_update_callback(self, request, response):
|
||||||
"""
|
"""
|
||||||
更新节点信息回调
|
更新节点信息回调
|
||||||
|
|
||||||
处理两种消息:
|
|
||||||
1. 首次上报(main_slave_run): 带 devices_config + registry_config,存储 action_value_mappings
|
|
||||||
2. 设备重注册(SYNC_SLAVE_NODE_INFO): 带 edge_device_id + registry_name,用 registry_name 索引已存储的 mappings
|
|
||||||
"""
|
"""
|
||||||
self.lab_logger().trace(f"[Host Node] Node info update request received: {request}")
|
self.lab_logger().trace(f"[Host Node] Node info update request received: {request}")
|
||||||
try:
|
try:
|
||||||
@@ -1245,48 +1179,12 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
info = info["SYNC_SLAVE_NODE_INFO"]
|
info = info["SYNC_SLAVE_NODE_INFO"]
|
||||||
machine_name = info["machine_name"]
|
machine_name = info["machine_name"]
|
||||||
edge_device_id = info["edge_device_id"]
|
edge_device_id = info["edge_device_id"]
|
||||||
registry_name = info.get("registry_name", "")
|
|
||||||
self.device_machine_names[edge_device_id] = machine_name
|
self.device_machine_names[edge_device_id] = machine_name
|
||||||
|
|
||||||
# 用 registry_name 索引已存储的 registry_config,获取 action_value_mappings
|
|
||||||
if registry_name and registry_name in self._slave_registry_configs:
|
|
||||||
action_mappings = self._slave_registry_configs[registry_name].get(
|
|
||||||
"class", {}
|
|
||||||
).get("action_value_mappings", {})
|
|
||||||
if action_mappings:
|
|
||||||
self._action_value_mappings[edge_device_id] = action_mappings
|
|
||||||
self.lab_logger().info(
|
|
||||||
f"[Host Node] Loaded {len(action_mappings)} action mappings "
|
|
||||||
f"for remote device {edge_device_id} (registry: {registry_name})"
|
|
||||||
)
|
|
||||||
else:
|
else:
|
||||||
devices_config = info.pop("devices_config")
|
devices_config = info.pop("devices_config")
|
||||||
registry_config = info.pop("registry_config")
|
registry_config = info.pop("registry_config")
|
||||||
if registry_config:
|
if registry_config:
|
||||||
http_client.resource_registry({"resources": registry_config})
|
http_client.resource_registry({"resources": registry_config})
|
||||||
|
|
||||||
# 存储 slave 的 registry_config,用于后续 SYNC_SLAVE_NODE_INFO 索引
|
|
||||||
for reg_name, reg_data in registry_config.items():
|
|
||||||
if isinstance(reg_data, dict) and "class" in reg_data:
|
|
||||||
self._slave_registry_configs[reg_name] = reg_data
|
|
||||||
|
|
||||||
# 解析 devices_config,建立 device_id -> action_value_mappings 映射
|
|
||||||
if devices_config:
|
|
||||||
for device_tree in devices_config:
|
|
||||||
for device_dict in device_tree:
|
|
||||||
device_id = device_dict.get("id", "")
|
|
||||||
class_name = device_dict.get("class", "")
|
|
||||||
if device_id and class_name and class_name in self._slave_registry_configs:
|
|
||||||
action_mappings = self._slave_registry_configs[class_name].get(
|
|
||||||
"class", {}
|
|
||||||
).get("action_value_mappings", {})
|
|
||||||
if action_mappings:
|
|
||||||
self._action_value_mappings[device_id] = action_mappings
|
|
||||||
self.lab_logger().info(
|
|
||||||
f"[Host Node] Stored {len(action_mappings)} action mappings "
|
|
||||||
f"for remote device {device_id} (class: {class_name})"
|
|
||||||
)
|
|
||||||
|
|
||||||
self.lab_logger().debug(f"[Host Node] Node info update: {info}")
|
self.lab_logger().debug(f"[Host Node] Node info update: {info}")
|
||||||
response.response = "OK"
|
response.response = "OK"
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@@ -1583,7 +1481,6 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
|
|
||||||
def test_resource(
|
def test_resource(
|
||||||
self,
|
self,
|
||||||
sample_uuids: SampleUUIDsType,
|
|
||||||
resource: ResourceSlot = None,
|
resource: ResourceSlot = None,
|
||||||
resources: List[ResourceSlot] = None,
|
resources: List[ResourceSlot] = None,
|
||||||
device: DeviceSlot = None,
|
device: DeviceSlot = None,
|
||||||
@@ -1598,7 +1495,6 @@ class HostNode(BaseROS2DeviceNode):
|
|||||||
return {
|
return {
|
||||||
"resources": ResourceTreeSet.from_plr_resources([resource, *resources], known_newly_created=True).dump(),
|
"resources": ResourceTreeSet.from_plr_resources([resource, *resources], known_newly_created=True).dump(),
|
||||||
"devices": [device, *devices],
|
"devices": [device, *devices],
|
||||||
"unilabos_samples": [LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for sample_uuid, content in sample_uuids.items()]
|
|
||||||
}
|
}
|
||||||
|
|
||||||
def handle_pong_response(self, pong_data: dict):
|
def handle_pong_response(self, pong_data: dict):
|
||||||
|
|||||||
@@ -7,11 +7,10 @@ from rclpy.callback_groups import ReentrantCallbackGroup
|
|||||||
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode
|
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode
|
||||||
|
|
||||||
class JointRepublisher(BaseROS2DeviceNode):
|
class JointRepublisher(BaseROS2DeviceNode):
|
||||||
def __init__(self,device_id, registry_name, resource_tracker, **kwargs):
|
def __init__(self,device_id,resource_tracker, **kwargs):
|
||||||
super().__init__(
|
super().__init__(
|
||||||
driver_instance=self,
|
driver_instance=self,
|
||||||
device_id=device_id,
|
device_id=device_id,
|
||||||
registry_name=registry_name,
|
|
||||||
status_types={},
|
status_types={},
|
||||||
action_value_mappings={},
|
action_value_mappings={},
|
||||||
hardware_interface={},
|
hardware_interface={},
|
||||||
|
|||||||
@@ -26,7 +26,7 @@ from unilabos.resources.graphio import initialize_resources
|
|||||||
from unilabos.registry.registry import lab_registry
|
from unilabos.registry.registry import lab_registry
|
||||||
|
|
||||||
class ResourceMeshManager(BaseROS2DeviceNode):
|
class ResourceMeshManager(BaseROS2DeviceNode):
|
||||||
def __init__(self, resource_model: dict, resource_config: list,resource_tracker, device_id: str = "resource_mesh_manager", registry_name: str = "", rate=50, **kwargs):
|
def __init__(self, resource_model: dict, resource_config: list,resource_tracker, device_id: str = "resource_mesh_manager", rate=50, **kwargs):
|
||||||
"""初始化资源网格管理器节点
|
"""初始化资源网格管理器节点
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -37,7 +37,6 @@ class ResourceMeshManager(BaseROS2DeviceNode):
|
|||||||
super().__init__(
|
super().__init__(
|
||||||
driver_instance=self,
|
driver_instance=self,
|
||||||
device_id=device_id,
|
device_id=device_id,
|
||||||
registry_name=registry_name,
|
|
||||||
status_types={},
|
status_types={},
|
||||||
action_value_mappings={},
|
action_value_mappings={},
|
||||||
hardware_interface={},
|
hardware_interface={},
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode, DeviceNodeRe
|
|||||||
|
|
||||||
|
|
||||||
class ROS2SerialNode(BaseROS2DeviceNode):
|
class ROS2SerialNode(BaseROS2DeviceNode):
|
||||||
def __init__(self, device_id, registry_name, port: str, baudrate: int = 9600, resource_tracker: DeviceNodeResourceTracker=None):
|
def __init__(self, device_id, port: str, baudrate: int = 9600, resource_tracker: DeviceNodeResourceTracker=None):
|
||||||
# 保存属性,以便在调用父类初始化前使用
|
# 保存属性,以便在调用父类初始化前使用
|
||||||
self.port = port
|
self.port = port
|
||||||
self.baudrate = baudrate
|
self.baudrate = baudrate
|
||||||
@@ -28,7 +28,6 @@ class ROS2SerialNode(BaseROS2DeviceNode):
|
|||||||
BaseROS2DeviceNode.__init__(
|
BaseROS2DeviceNode.__init__(
|
||||||
self,
|
self,
|
||||||
driver_instance=self,
|
driver_instance=self,
|
||||||
registry_name=registry_name,
|
|
||||||
device_id=device_id,
|
device_id=device_id,
|
||||||
status_types={},
|
status_types={},
|
||||||
action_value_mappings={},
|
action_value_mappings={},
|
||||||
|
|||||||
@@ -47,7 +47,6 @@ class ROS2WorkstationNode(BaseROS2DeviceNode):
|
|||||||
*,
|
*,
|
||||||
driver_instance: "WorkstationBase",
|
driver_instance: "WorkstationBase",
|
||||||
device_id: str,
|
device_id: str,
|
||||||
registry_name: str,
|
|
||||||
device_uuid: str,
|
device_uuid: str,
|
||||||
status_types: Dict[str, Any],
|
status_types: Dict[str, Any],
|
||||||
action_value_mappings: Dict[str, Any],
|
action_value_mappings: Dict[str, Any],
|
||||||
@@ -63,7 +62,6 @@ class ROS2WorkstationNode(BaseROS2DeviceNode):
|
|||||||
super().__init__(
|
super().__init__(
|
||||||
driver_instance=driver_instance,
|
driver_instance=driver_instance,
|
||||||
device_id=device_id,
|
device_id=device_id,
|
||||||
registry_name=registry_name,
|
|
||||||
device_uuid=device_uuid,
|
device_uuid=device_uuid,
|
||||||
status_types=status_types,
|
status_types=status_types,
|
||||||
action_value_mappings={**action_value_mappings, **self.protocol_action_mappings},
|
action_value_mappings={**action_value_mappings, **self.protocol_action_mappings},
|
||||||
|
|||||||
@@ -339,8 +339,13 @@
|
|||||||
"z": 0
|
"z": 0
|
||||||
},
|
},
|
||||||
"config": {
|
"config": {
|
||||||
|
"max_volume": 500.0,
|
||||||
"type": "RegularContainer",
|
"type": "RegularContainer",
|
||||||
"category": "container"
|
"category": "container",
|
||||||
|
"max_temp": 200.0,
|
||||||
|
"min_temp": -20.0,
|
||||||
|
"has_stirrer": true,
|
||||||
|
"has_heater": true
|
||||||
},
|
},
|
||||||
"data": {
|
"data": {
|
||||||
"liquids": [],
|
"liquids": [],
|
||||||
@@ -764,7 +769,9 @@
|
|||||||
"size_y": 250,
|
"size_y": 250,
|
||||||
"size_z": 0,
|
"size_z": 0,
|
||||||
"type": "RegularContainer",
|
"type": "RegularContainer",
|
||||||
"category": "container"
|
"category": "container",
|
||||||
|
"reagent": "sodium_chloride",
|
||||||
|
"physical_state": "solid"
|
||||||
},
|
},
|
||||||
"data": {
|
"data": {
|
||||||
"current_mass": 500.0,
|
"current_mass": 500.0,
|
||||||
@@ -785,11 +792,14 @@
|
|||||||
"z": 0
|
"z": 0
|
||||||
},
|
},
|
||||||
"config": {
|
"config": {
|
||||||
|
"volume": 500.0,
|
||||||
"size_x": 600,
|
"size_x": 600,
|
||||||
"size_y": 250,
|
"size_y": 250,
|
||||||
"size_z": 0,
|
"size_z": 0,
|
||||||
"type": "RegularContainer",
|
"type": "RegularContainer",
|
||||||
"category": "container"
|
"category": "container",
|
||||||
|
"reagent": "sodium_carbonate",
|
||||||
|
"physical_state": "solid"
|
||||||
},
|
},
|
||||||
"data": {
|
"data": {
|
||||||
"current_mass": 500.0,
|
"current_mass": 500.0,
|
||||||
@@ -810,11 +820,14 @@
|
|||||||
"z": 0
|
"z": 0
|
||||||
},
|
},
|
||||||
"config": {
|
"config": {
|
||||||
|
"volume": 500.0,
|
||||||
"size_x": 650,
|
"size_x": 650,
|
||||||
"size_y": 250,
|
"size_y": 250,
|
||||||
"size_z": 0,
|
"size_z": 0,
|
||||||
"type": "RegularContainer",
|
"type": "RegularContainer",
|
||||||
"category": "container"
|
"category": "container",
|
||||||
|
"reagent": "magnesium_chloride",
|
||||||
|
"physical_state": "solid"
|
||||||
},
|
},
|
||||||
"data": {
|
"data": {
|
||||||
"current_mass": 500.0,
|
"current_mass": 500.0,
|
||||||
|
|||||||
@@ -184,51 +184,6 @@ def get_all_subscriptions(instance) -> list:
|
|||||||
return subscriptions
|
return subscriptions
|
||||||
|
|
||||||
|
|
||||||
def always_free(func: F) -> F:
|
|
||||||
"""
|
|
||||||
标记动作为永久闲置(不受busy队列限制)的装饰器
|
|
||||||
|
|
||||||
被此装饰器标记的 action 方法,在执行时不会受到设备级别的排队限制,
|
|
||||||
任何时候请求都可以立即执行。适用于查询类、状态读取类等轻量级操作。
|
|
||||||
|
|
||||||
Example:
|
|
||||||
class MyDriver:
|
|
||||||
@always_free
|
|
||||||
def query_status(self, param: str):
|
|
||||||
# 这个动作可以随时执行,不需要排队
|
|
||||||
return self._status
|
|
||||||
|
|
||||||
def transfer(self, volume: float):
|
|
||||||
# 这个动作会按正常排队逻辑执行
|
|
||||||
pass
|
|
||||||
|
|
||||||
Note:
|
|
||||||
- 可以与其他装饰器组合使用,@always_free 应放在最外层
|
|
||||||
- 仅影响 WebSocket 调度层的 busy/free 判断,不影响 ROS2 层
|
|
||||||
"""
|
|
||||||
|
|
||||||
@wraps(func)
|
|
||||||
def wrapper(*args, **kwargs):
|
|
||||||
return func(*args, **kwargs)
|
|
||||||
|
|
||||||
wrapper._is_always_free = True # type: ignore[attr-defined]
|
|
||||||
|
|
||||||
return wrapper # type: ignore[return-value]
|
|
||||||
|
|
||||||
|
|
||||||
def is_always_free(func) -> bool:
|
|
||||||
"""
|
|
||||||
检查函数是否被标记为永久闲置
|
|
||||||
|
|
||||||
Args:
|
|
||||||
func: 被检查的函数
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
如果函数被 @always_free 装饰则返回 True,否则返回 False
|
|
||||||
"""
|
|
||||||
return getattr(func, "_is_always_free", False)
|
|
||||||
|
|
||||||
|
|
||||||
def not_action(func: F) -> F:
|
def not_action(func: F) -> F:
|
||||||
"""
|
"""
|
||||||
标记方法为非动作的装饰器
|
标记方法为非动作的装饰器
|
||||||
|
|||||||
@@ -29,7 +29,7 @@ from ast import Constant
|
|||||||
|
|
||||||
from unilabos.resources.resource_tracker import PARAM_SAMPLE_UUIDS
|
from unilabos.resources.resource_tracker import PARAM_SAMPLE_UUIDS
|
||||||
from unilabos.utils import logger
|
from unilabos.utils import logger
|
||||||
from unilabos.utils.decorator import is_not_action, is_always_free
|
from unilabos.utils.decorator import is_not_action
|
||||||
|
|
||||||
|
|
||||||
class ImportManager:
|
class ImportManager:
|
||||||
@@ -282,9 +282,6 @@ class ImportManager:
|
|||||||
continue
|
continue
|
||||||
# 其他非_开头的方法归类为action
|
# 其他非_开头的方法归类为action
|
||||||
method_info = self._analyze_method_signature(method)
|
method_info = self._analyze_method_signature(method)
|
||||||
# 检查是否被 @always_free 装饰器标记
|
|
||||||
if is_always_free(method):
|
|
||||||
method_info["always_free"] = True
|
|
||||||
result["action_methods"][name] = method_info
|
result["action_methods"][name] = method_info
|
||||||
|
|
||||||
return result
|
return result
|
||||||
@@ -342,9 +339,6 @@ class ImportManager:
|
|||||||
if self._is_not_action_method(node):
|
if self._is_not_action_method(node):
|
||||||
continue
|
continue
|
||||||
# 其他非_开头的方法归类为action
|
# 其他非_开头的方法归类为action
|
||||||
# 检查是否被 @always_free 装饰器标记
|
|
||||||
if self._is_always_free_method(node):
|
|
||||||
method_info["always_free"] = True
|
|
||||||
result["action_methods"][method_name] = method_info
|
result["action_methods"][method_name] = method_info
|
||||||
return result
|
return result
|
||||||
|
|
||||||
@@ -480,13 +474,6 @@ class ImportManager:
|
|||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def _is_always_free_method(self, node: ast.FunctionDef) -> bool:
|
|
||||||
"""检查是否是@always_free装饰的方法"""
|
|
||||||
for decorator in node.decorator_list:
|
|
||||||
if isinstance(decorator, ast.Name) and decorator.id == "always_free":
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
def _get_property_name_from_setter(self, node: ast.FunctionDef) -> str:
|
def _get_property_name_from_setter(self, node: ast.FunctionDef) -> str:
|
||||||
"""从setter装饰器中获取属性名"""
|
"""从setter装饰器中获取属性名"""
|
||||||
for decorator in node.decorator_list:
|
for decorator in node.decorator_list:
|
||||||
|
|||||||
@@ -193,7 +193,6 @@ def configure_logger(loglevel=None, working_dir=None):
|
|||||||
root_logger.addHandler(console_handler)
|
root_logger.addHandler(console_handler)
|
||||||
|
|
||||||
# 如果指定了工作目录,添加文件处理器
|
# 如果指定了工作目录,添加文件处理器
|
||||||
log_filepath = None
|
|
||||||
if working_dir is not None:
|
if working_dir is not None:
|
||||||
logs_dir = os.path.join(working_dir, "logs")
|
logs_dir = os.path.join(working_dir, "logs")
|
||||||
os.makedirs(logs_dir, exist_ok=True)
|
os.makedirs(logs_dir, exist_ok=True)
|
||||||
@@ -214,7 +213,6 @@ def configure_logger(loglevel=None, working_dir=None):
|
|||||||
|
|
||||||
logging.getLogger("asyncio").setLevel(logging.INFO)
|
logging.getLogger("asyncio").setLevel(logging.INFO)
|
||||||
logging.getLogger("urllib3").setLevel(logging.INFO)
|
logging.getLogger("urllib3").setLevel(logging.INFO)
|
||||||
return log_filepath
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
0
unilabos/workflow/__init__.py
Normal file
0
unilabos/workflow/__init__.py
Normal file
@@ -362,16 +362,14 @@ def build_protocol_graph(
|
|||||||
protocol_steps: List[Dict[str, Any]],
|
protocol_steps: List[Dict[str, Any]],
|
||||||
workstation_name: str,
|
workstation_name: str,
|
||||||
action_resource_mapping: Optional[Dict[str, str]] = None,
|
action_resource_mapping: Optional[Dict[str, str]] = None,
|
||||||
labware_defs: Optional[List[Dict[str, Any]]] = None,
|
|
||||||
) -> WorkflowGraph:
|
) -> WorkflowGraph:
|
||||||
"""统一的协议图构建函数,根据设备类型自动选择构建逻辑
|
"""统一的协议图构建函数,根据设备类型自动选择构建逻辑
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
labware_info: reagent 信息字典,格式为 {name: {slot, well}, ...},用于 set_liquid 和 well 查找
|
labware_info: labware 信息字典,格式为 {name: {slot, well, labware, ...}, ...}
|
||||||
protocol_steps: 协议步骤列表
|
protocol_steps: 协议步骤列表
|
||||||
workstation_name: 工作站名称
|
workstation_name: 工作站名称
|
||||||
action_resource_mapping: action 到 resource_name 的映射字典,可选
|
action_resource_mapping: action 到 resource_name 的映射字典,可选
|
||||||
labware_defs: labware 定义列表,格式为 [{"name": "...", "slot": "1", "type": "lab_xxx"}, ...]
|
|
||||||
"""
|
"""
|
||||||
G = WorkflowGraph()
|
G = WorkflowGraph()
|
||||||
resource_last_writer = {} # reagent_name -> "node_id:port"
|
resource_last_writer = {} # reagent_name -> "node_id:port"
|
||||||
@@ -379,7 +377,18 @@ def build_protocol_graph(
|
|||||||
|
|
||||||
protocol_steps = refactor_data(protocol_steps, action_resource_mapping)
|
protocol_steps = refactor_data(protocol_steps, action_resource_mapping)
|
||||||
|
|
||||||
# ==================== 第一步:按 slot 创建 create_resource 节点 ====================
|
# ==================== 第一步:按 slot 去重创建 create_resource 节点 ====================
|
||||||
|
# 收集所有唯一的 slot
|
||||||
|
slots_info = {} # slot -> {labware, res_id}
|
||||||
|
for labware_id, item in labware_info.items():
|
||||||
|
slot = str(item.get("slot", ""))
|
||||||
|
if slot and slot not in slots_info:
|
||||||
|
res_id = f"plate_slot_{slot}"
|
||||||
|
slots_info[slot] = {
|
||||||
|
"labware": item.get("labware", ""),
|
||||||
|
"res_id": res_id,
|
||||||
|
}
|
||||||
|
|
||||||
# 创建 Group 节点,包含所有 create_resource 节点
|
# 创建 Group 节点,包含所有 create_resource 节点
|
||||||
group_node_id = str(uuid.uuid4())
|
group_node_id = str(uuid.uuid4())
|
||||||
G.add_node(
|
G.add_node(
|
||||||
@@ -395,35 +404,29 @@ def build_protocol_graph(
|
|||||||
param=None,
|
param=None,
|
||||||
)
|
)
|
||||||
|
|
||||||
# 直接使用 JSON 中的 labware 定义,每个 slot 一条记录,type 即 class_name
|
# 为每个唯一的 slot 创建 create_resource 节点
|
||||||
res_index = 0
|
res_index = 0
|
||||||
for lw in (labware_defs or []):
|
for slot, info in slots_info.items():
|
||||||
slot = str(lw.get("slot", ""))
|
node_id = str(uuid.uuid4())
|
||||||
if not slot or slot in slot_to_create_resource:
|
res_id = info["res_id"]
|
||||||
continue # 跳过空 slot 或已处理的 slot
|
|
||||||
|
|
||||||
lw_name = lw.get("name", f"slot {slot}")
|
|
||||||
lw_type = lw.get("type", CREATE_RESOURCE_DEFAULTS["class_name"])
|
|
||||||
res_id = f"plate_slot_{slot}"
|
|
||||||
|
|
||||||
res_index += 1
|
res_index += 1
|
||||||
node_id = str(uuid.uuid4())
|
|
||||||
G.add_node(
|
G.add_node(
|
||||||
node_id,
|
node_id,
|
||||||
template_name="create_resource",
|
template_name="create_resource",
|
||||||
resource_name="host_node",
|
resource_name="host_node",
|
||||||
name=lw_name,
|
name=f"Plate {res_index}",
|
||||||
description=f"Create {lw_name}",
|
description=f"Create plate on slot {slot}",
|
||||||
lab_node_type="Labware",
|
lab_node_type="Labware",
|
||||||
footer="create_resource-host_node",
|
footer="create_resource-host_node",
|
||||||
device_name=DEVICE_NAME_HOST,
|
device_name=DEVICE_NAME_HOST,
|
||||||
type=NODE_TYPE_DEFAULT,
|
type=NODE_TYPE_DEFAULT,
|
||||||
parent_uuid=group_node_id,
|
parent_uuid=group_node_id, # 指向 Group 节点
|
||||||
minimized=True,
|
minimized=True, # 折叠显示
|
||||||
param={
|
param={
|
||||||
"res_id": res_id,
|
"res_id": res_id,
|
||||||
"device_id": CREATE_RESOURCE_DEFAULTS["device_id"],
|
"device_id": CREATE_RESOURCE_DEFAULTS["device_id"],
|
||||||
"class_name": lw_type,
|
"class_name": CREATE_RESOURCE_DEFAULTS["class_name"],
|
||||||
"parent": CREATE_RESOURCE_DEFAULTS["parent_template"].format(slot=slot),
|
"parent": CREATE_RESOURCE_DEFAULTS["parent_template"].format(slot=slot),
|
||||||
"bind_locations": {"x": 0.0, "y": 0.0, "z": 0.0},
|
"bind_locations": {"x": 0.0, "y": 0.0, "z": 0.0},
|
||||||
"slot_on_deck": slot,
|
"slot_on_deck": slot,
|
||||||
@@ -431,6 +434,8 @@ def build_protocol_graph(
|
|||||||
)
|
)
|
||||||
slot_to_create_resource[slot] = node_id
|
slot_to_create_resource[slot] = node_id
|
||||||
|
|
||||||
|
# create_resource 之间不需要 ready 连接
|
||||||
|
|
||||||
# ==================== 第二步:为每个 reagent 创建 set_liquid_from_plate 节点 ====================
|
# ==================== 第二步:为每个 reagent 创建 set_liquid_from_plate 节点 ====================
|
||||||
# 创建 Group 节点,包含所有 set_liquid_from_plate 节点
|
# 创建 Group 节点,包含所有 set_liquid_from_plate 节点
|
||||||
set_liquid_group_id = str(uuid.uuid4())
|
set_liquid_group_id = str(uuid.uuid4())
|
||||||
|
|||||||
@@ -1,20 +1,16 @@
|
|||||||
"""
|
"""
|
||||||
JSON 工作流转换模块
|
JSON 工作流转换模块
|
||||||
|
|
||||||
将 workflow/reagent/labware 格式的 JSON 转换为统一工作流格式。
|
将 workflow/reagent 格式的 JSON 转换为统一工作流格式。
|
||||||
|
|
||||||
输入格式:
|
输入格式:
|
||||||
{
|
{
|
||||||
"labware": [
|
|
||||||
{"name": "...", "slot": "1", "type": "lab_xxx"},
|
|
||||||
...
|
|
||||||
],
|
|
||||||
"workflow": [
|
"workflow": [
|
||||||
{"action": "...", "action_args": {...}},
|
{"action": "...", "action_args": {...}},
|
||||||
...
|
...
|
||||||
],
|
],
|
||||||
"reagent": {
|
"reagent": {
|
||||||
"reagent_name": {"slot": int, "well": [...]},
|
"reagent_name": {"slot": int, "well": [...], "labware": "..."},
|
||||||
...
|
...
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -249,18 +245,18 @@ def convert_from_json(
|
|||||||
if "workflow" not in json_data or "reagent" not in json_data:
|
if "workflow" not in json_data or "reagent" not in json_data:
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
"不支持的 JSON 格式。请使用标准格式:\n"
|
"不支持的 JSON 格式。请使用标准格式:\n"
|
||||||
'{"labware": [...], "workflow": [...], "reagent": {...}}'
|
'{"workflow": [{"action": "...", "action_args": {...}}, ...], '
|
||||||
|
'"reagent": {"name": {"slot": int, "well": [...], "labware": "..."}, ...}}'
|
||||||
)
|
)
|
||||||
|
|
||||||
# 提取数据
|
# 提取数据
|
||||||
workflow = json_data["workflow"]
|
workflow = json_data["workflow"]
|
||||||
reagent = json_data["reagent"]
|
reagent = json_data["reagent"]
|
||||||
labware_defs = json_data.get("labware", []) # 新的 labware 定义列表
|
|
||||||
|
|
||||||
# 规范化步骤数据
|
# 规范化步骤数据
|
||||||
protocol_steps = normalize_workflow_steps(workflow)
|
protocol_steps = normalize_workflow_steps(workflow)
|
||||||
|
|
||||||
# reagent 已经是字典格式,用于 set_liquid 和 well 数量查找
|
# reagent 已经是字典格式,直接使用
|
||||||
labware_info = reagent
|
labware_info = reagent
|
||||||
|
|
||||||
# 构建工作流图
|
# 构建工作流图
|
||||||
@@ -269,7 +265,6 @@ def convert_from_json(
|
|||||||
protocol_steps=protocol_steps,
|
protocol_steps=protocol_steps,
|
||||||
workstation_name=workstation_name,
|
workstation_name=workstation_name,
|
||||||
action_resource_mapping=ACTION_RESOURCE_MAPPING,
|
action_resource_mapping=ACTION_RESOURCE_MAPPING,
|
||||||
labware_defs=labware_defs,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# 校验句柄配置
|
# 校验句柄配置
|
||||||
|
|||||||
241
unilabos/workflow/from_python_script.py
Normal file
241
unilabos/workflow/from_python_script.py
Normal file
@@ -0,0 +1,241 @@
|
|||||||
|
import ast
|
||||||
|
import json
|
||||||
|
from typing import Dict, List, Any, Tuple, Optional
|
||||||
|
|
||||||
|
from .common import WorkflowGraph, RegistryAdapter
|
||||||
|
|
||||||
|
Json = Dict[str, Any]
|
||||||
|
|
||||||
|
# ---------------- Converter ----------------
|
||||||
|
|
||||||
|
class DeviceMethodConverter:
|
||||||
|
"""
|
||||||
|
- 字段统一:resource_name(原 device_class)、template_name(原 action_key)
|
||||||
|
- params 单层;inputs 使用 'params.' 前缀
|
||||||
|
- SimpleGraph.add_workflow_node 负责变量连线与边
|
||||||
|
"""
|
||||||
|
def __init__(self, device_registry: Optional[Dict[str, Any]] = None):
|
||||||
|
self.graph = WorkflowGraph()
|
||||||
|
self.variable_sources: Dict[str, Dict[str, Any]] = {} # var -> {node_id, output_name}
|
||||||
|
self.instance_to_resource: Dict[str, Optional[str]] = {} # 实例名 -> resource_name
|
||||||
|
self.node_id_counter: int = 0
|
||||||
|
self.registry = RegistryAdapter(device_registry or {})
|
||||||
|
|
||||||
|
# ---- helpers ----
|
||||||
|
def _new_node_id(self) -> int:
|
||||||
|
nid = self.node_id_counter
|
||||||
|
self.node_id_counter += 1
|
||||||
|
return nid
|
||||||
|
|
||||||
|
def _assign_targets(self, targets) -> List[str]:
|
||||||
|
names: List[str] = []
|
||||||
|
import ast
|
||||||
|
if isinstance(targets, ast.Tuple):
|
||||||
|
for elt in targets.elts:
|
||||||
|
if isinstance(elt, ast.Name):
|
||||||
|
names.append(elt.id)
|
||||||
|
elif isinstance(targets, ast.Name):
|
||||||
|
names.append(targets.id)
|
||||||
|
return names
|
||||||
|
|
||||||
|
def _extract_device_instantiation(self, node) -> Optional[Tuple[str, str]]:
|
||||||
|
import ast
|
||||||
|
if not isinstance(node.value, ast.Call):
|
||||||
|
return None
|
||||||
|
callee = node.value.func
|
||||||
|
if isinstance(callee, ast.Name):
|
||||||
|
class_name = callee.id
|
||||||
|
elif isinstance(callee, ast.Attribute) and isinstance(callee.value, ast.Name):
|
||||||
|
class_name = callee.attr
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
if isinstance(node.targets[0], ast.Name):
|
||||||
|
instance = node.targets[0].id
|
||||||
|
return instance, class_name
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _extract_call(self, call) -> Tuple[str, str, Dict[str, Any], str]:
|
||||||
|
import ast
|
||||||
|
owner_name, method_name, call_kind = "", "", "func"
|
||||||
|
if isinstance(call.func, ast.Attribute):
|
||||||
|
method_name = call.func.attr
|
||||||
|
if isinstance(call.func.value, ast.Name):
|
||||||
|
owner_name = call.func.value.id
|
||||||
|
call_kind = "instance" if owner_name in self.instance_to_resource else "class_or_module"
|
||||||
|
elif isinstance(call.func.value, ast.Attribute) and isinstance(call.func.value.value, ast.Name):
|
||||||
|
owner_name = call.func.value.attr
|
||||||
|
call_kind = "class_or_module"
|
||||||
|
elif isinstance(call.func, ast.Name):
|
||||||
|
method_name = call.func.id
|
||||||
|
call_kind = "func"
|
||||||
|
|
||||||
|
def pack(node):
|
||||||
|
if isinstance(node, ast.Name):
|
||||||
|
return {"type": "variable", "value": node.id}
|
||||||
|
if isinstance(node, ast.Constant):
|
||||||
|
return {"type": "constant", "value": node.value}
|
||||||
|
if isinstance(node, ast.Dict):
|
||||||
|
return {"type": "dict", "value": self._parse_dict(node)}
|
||||||
|
if isinstance(node, ast.List):
|
||||||
|
return {"type": "list", "value": self._parse_list(node)}
|
||||||
|
return {"type": "raw", "value": ast.unparse(node) if hasattr(ast, "unparse") else str(node)}
|
||||||
|
|
||||||
|
args: Dict[str, Any] = {}
|
||||||
|
pos: List[Any] = []
|
||||||
|
for a in call.args:
|
||||||
|
pos.append(pack(a))
|
||||||
|
for kw in call.keywords:
|
||||||
|
args[kw.arg] = pack(kw.value)
|
||||||
|
if pos:
|
||||||
|
args["_positional"] = pos
|
||||||
|
return owner_name, method_name, args, call_kind
|
||||||
|
|
||||||
|
def _parse_dict(self, node) -> Dict[str, Any]:
|
||||||
|
import ast
|
||||||
|
out: Dict[str, Any] = {}
|
||||||
|
for k, v in zip(node.keys, node.values):
|
||||||
|
if isinstance(k, ast.Constant):
|
||||||
|
key = str(k.value)
|
||||||
|
if isinstance(v, ast.Name):
|
||||||
|
out[key] = f"var:{v.id}"
|
||||||
|
elif isinstance(v, ast.Constant):
|
||||||
|
out[key] = v.value
|
||||||
|
elif isinstance(v, ast.Dict):
|
||||||
|
out[key] = self._parse_dict(v)
|
||||||
|
elif isinstance(v, ast.List):
|
||||||
|
out[key] = self._parse_list(v)
|
||||||
|
return out
|
||||||
|
|
||||||
|
def _parse_list(self, node) -> List[Any]:
|
||||||
|
import ast
|
||||||
|
out: List[Any] = []
|
||||||
|
for elt in node.elts:
|
||||||
|
if isinstance(elt, ast.Name):
|
||||||
|
out.append(f"var:{elt.id}")
|
||||||
|
elif isinstance(elt, ast.Constant):
|
||||||
|
out.append(elt.value)
|
||||||
|
elif isinstance(elt, ast.Dict):
|
||||||
|
out.append(self._parse_dict(elt))
|
||||||
|
elif isinstance(elt, ast.List):
|
||||||
|
out.append(self._parse_list(elt))
|
||||||
|
return out
|
||||||
|
|
||||||
|
def _normalize_var_tokens(self, x: Any) -> Any:
|
||||||
|
if isinstance(x, str) and x.startswith("var:"):
|
||||||
|
return {"__var__": x[4:]}
|
||||||
|
if isinstance(x, list):
|
||||||
|
return [self._normalize_var_tokens(i) for i in x]
|
||||||
|
if isinstance(x, dict):
|
||||||
|
return {k: self._normalize_var_tokens(v) for k, v in x.items()}
|
||||||
|
return x
|
||||||
|
|
||||||
|
def _make_params_payload(self, resource_name: Optional[str], template_name: str, call_args: Dict[str, Any]) -> Dict[str, Any]:
|
||||||
|
input_keys = self.registry.get_action_input_keys(resource_name, template_name) if resource_name else []
|
||||||
|
defaults = self.registry.get_action_goal_default(resource_name, template_name) if resource_name else {}
|
||||||
|
params: Dict[str, Any] = dict(defaults)
|
||||||
|
|
||||||
|
def unpack(p):
|
||||||
|
t, v = p.get("type"), p.get("value")
|
||||||
|
if t == "variable":
|
||||||
|
return {"__var__": v}
|
||||||
|
if t == "dict":
|
||||||
|
return self._normalize_var_tokens(v)
|
||||||
|
if t == "list":
|
||||||
|
return self._normalize_var_tokens(v)
|
||||||
|
return v
|
||||||
|
|
||||||
|
for k, p in call_args.items():
|
||||||
|
if k == "_positional":
|
||||||
|
continue
|
||||||
|
params[k] = unpack(p)
|
||||||
|
|
||||||
|
pos = call_args.get("_positional", [])
|
||||||
|
if pos:
|
||||||
|
if input_keys:
|
||||||
|
for i, p in enumerate(pos):
|
||||||
|
if i >= len(input_keys):
|
||||||
|
break
|
||||||
|
name = input_keys[i]
|
||||||
|
if name in params:
|
||||||
|
continue
|
||||||
|
params[name] = unpack(p)
|
||||||
|
else:
|
||||||
|
for i, p in enumerate(pos):
|
||||||
|
params[f"arg_{i}"] = unpack(p)
|
||||||
|
return params
|
||||||
|
|
||||||
|
# ---- handlers ----
|
||||||
|
def _on_assign(self, stmt):
|
||||||
|
import ast
|
||||||
|
inst = self._extract_device_instantiation(stmt)
|
||||||
|
if inst:
|
||||||
|
instance, code_class = inst
|
||||||
|
resource_name = self.registry.resolve_resource_by_classname(code_class)
|
||||||
|
self.instance_to_resource[instance] = resource_name
|
||||||
|
return
|
||||||
|
|
||||||
|
if isinstance(stmt.value, ast.Call):
|
||||||
|
owner, method, call_args, kind = self._extract_call(stmt.value)
|
||||||
|
if kind == "instance":
|
||||||
|
device_key = owner
|
||||||
|
resource_name = self.instance_to_resource.get(owner)
|
||||||
|
else:
|
||||||
|
device_key = owner
|
||||||
|
resource_name = self.registry.resolve_resource_by_classname(owner)
|
||||||
|
|
||||||
|
module = self.registry.get_device_module(resource_name)
|
||||||
|
params = self._make_params_payload(resource_name, method, call_args)
|
||||||
|
|
||||||
|
nid = self._new_node_id()
|
||||||
|
self.graph.add_workflow_node(
|
||||||
|
nid,
|
||||||
|
device_key=device_key,
|
||||||
|
resource_name=resource_name, # ✅
|
||||||
|
module=module,
|
||||||
|
template_name=method, # ✅
|
||||||
|
params=params,
|
||||||
|
variable_sources=self.variable_sources,
|
||||||
|
add_ready_if_no_vars=True,
|
||||||
|
prev_node_id=(nid - 1) if nid > 0 else None,
|
||||||
|
)
|
||||||
|
|
||||||
|
out_vars = self._assign_targets(stmt.targets[0])
|
||||||
|
for var in out_vars:
|
||||||
|
self.variable_sources[var] = {"node_id": nid, "output_name": "result"}
|
||||||
|
|
||||||
|
def _on_expr(self, stmt):
    """Translate a bare call-expression statement into a workflow node.

    Expression statements that are not calls (e.g. stray constants or
    docstrings) are ignored.
    """
    import ast

    if not isinstance(stmt.value, ast.Call):
        return

    owner, method, call_args, kind = self._extract_call(stmt.value)
    device_key = owner
    # Resolve the backing resource either from a tracked device instance
    # or directly from the class name.
    if kind == "instance":
        resource_name = self.instance_to_resource.get(owner)
    else:
        resource_name = self.registry.resolve_resource_by_classname(owner)

    module = self.registry.get_device_module(resource_name)
    params = self._make_params_payload(resource_name, method, call_args)

    nid = self._new_node_id()
    self.graph.add_workflow_node(
        nid,
        device_key=device_key,
        resource_name=resource_name,
        module=module,
        template_name=method,
        params=params,
        variable_sources=self.variable_sources,
        add_ready_if_no_vars=True,
        prev_node_id=(nid - 1) if nid > 0 else None,
    )
||||||
|
def convert(self, python_code: str):
    """Parse *python_code* and convert its top-level statements into
    workflow nodes.

    Only top-level ``Assign`` and ``Expr`` statements are dispatched; any
    other statement kind (imports, function defs, loops) is ignored.

    Args:
        python_code: Source text of the protocol script.

    Returns:
        ``self``, to allow call chaining.
    """
    # Import locally for consistency with _on_assign/_on_expr, which both
    # import ast inside the method; avoids depending on a module-level
    # ``import ast`` that may not exist.
    import ast

    tree = ast.parse(python_code)
    for stmt in tree.body:
        if isinstance(stmt, ast.Assign):
            self._on_assign(stmt)
        elif isinstance(stmt, ast.Expr):
            self._on_expr(stmt)
    return self
|
||||||
131
unilabos/workflow/from_xdl.py
Normal file
131
unilabos/workflow/from_xdl.py
Normal file
@@ -0,0 +1,131 @@
|
|||||||
|
from typing import List, Any, Dict
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
|
||||||
|
|
||||||
|
def convert_to_type(val: str) -> Any:
    """Convert an XDL attribute string into an appropriately typed value.

    ``True``/``False`` become booleans and the unknown marker ``?`` becomes
    None. Values carrying a recognised unit suffix are parsed to floats
    (millimoles are normalised to moles). Anything else is returned
    unchanged as a string.
    """
    if val == "True":
        return True
    if val == "False":
        return False
    if val == "?":
        # "?" marks an unknown/unspecified value in XDL.
        return None

    # Unit-suffixed quantities. Order matters: "mmol" must be checked
    # before "mol", and " g" before "mg" would not collide but is kept
    # first to match the original precedence.
    if val.endswith(" g"):
        return float(val.split(" ")[0])
    if val.endswith("mg"):
        return float(val.split("mg")[0])
    if val.endswith("mmol"):
        # Normalise millimoles to moles.
        return float(val.split("mmol")[0]) / 1000
    if val.endswith("mol"):
        return float(val.split("mol")[0])
    if val.endswith("ml"):
        return float(val.split("ml")[0])
    if val.endswith("RPM"):
        return float(val.split("RPM")[0])
    if val.endswith(" °C"):
        return float(val.split(" ")[0])
    if val.endswith(" %"):
        return float(val.split(" ")[0])

    return val
||||||
|
|
||||||
|
|
||||||
|
def flatten_xdl_procedure(procedure_elem: ET.Element) -> List[ET.Element]:
    """Flatten a nested XDL procedure tree into a flat list of operations.

    Section wrappers (Prep/Reaction/Workup/Purification/Procedure) are
    descended into but never emitted. Operations in the temporary
    unsupported set are skipped, though their children are still visited.
    """
    section_tags = {"Prep", "Reaction", "Workup", "Purification", "Procedure"}
    TEMP_UNSUPPORTED_PROTOCOL = {"Purge", "Wait", "Stir", "ResetHandling"}
    operations: List[ET.Element] = []

    def walk(node: ET.Element) -> None:
        # Emit only real, currently-supported operations.
        if node.tag not in section_tags and node.tag not in TEMP_UNSUPPORTED_PROTOCOL:
            operations.append(node)
        # Always recurse so nested operations are found regardless of parent.
        for child in node:
            walk(child)

    for child in procedure_elem:
        walk(child)

    return operations
|
||||||
|
|
||||||
|
|
||||||
|
def parse_xdl_content(xdl_content: str) -> tuple:
    """Parse XDL XML into ``(hardware, reagents, flattened_operations)``.

    Returns ``(None, None, None)`` when the document has no Synthesis or
    Procedure section.

    Raises:
        ValueError: if the content is not well-formed XML.
    """
    try:
        # Strip non-printable characters that occasionally leak into
        # exported XDL files and break the XML parser.
        cleaned = "".join(ch for ch in xdl_content if ch.isprintable())
        root = ET.fromstring(cleaned)

        synthesis = root.find("Synthesis")
        if synthesis is None:
            return None, None, None

        # Hardware components: id and type attributes.
        hardware = []
        hardware_elem = synthesis.find("Hardware")
        if hardware_elem is not None:
            hardware = [
                {"id": comp.get("id"), "type": comp.get("type")}
                for comp in hardware_elem.findall("Component")
            ]

        # Reagents: name plus optional role (defaults to empty string).
        reagents = []
        reagents_elem = synthesis.find("Reagents")
        if reagents_elem is not None:
            reagents = [
                {"name": reag.get("name"), "role": reag.get("role", "")}
                for reag in reagents_elem.findall("Reagent")
            ]

        procedure = synthesis.find("Procedure")
        if procedure is None:
            return None, None, None

        return hardware, reagents, flatten_xdl_procedure(procedure)

    except ET.ParseError as e:
        raise ValueError(f"Invalid XDL format: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
def convert_xdl_to_dict(xdl_content: str) -> Dict[str, Any]:
    """Convert XDL XML into the standard dict workflow format.

    Args:
        xdl_content: Raw XDL XML text.

    Returns:
        On success: ``{"success": True, "steps": [...], "labware": [...],
        "message": ...}``. On any failure: ``{"success": False, "error": ...}``.
    """
    try:
        hardware, reagents, operations = parse_xdl_content(xdl_content)
        if hardware is None:
            return {"error": "Failed to parse XDL content", "success": False}

        # Convert each XDL element into a step dict, coercing attribute
        # strings to typed values and dropping unknown ("?") entries.
        steps_data = []
        for elem in operations:
            parameters = {}
            for attr_name, raw_value in elem.attrib.items():
                typed_value = convert_to_type(raw_value)
                if typed_value is not None:
                    parameters[attr_name] = typed_value

            steps_data.append(
                {
                    "operation": elem.tag,
                    "parameters": parameters,
                    "description": elem.get("purpose", f"Operation: {elem.tag}"),
                }
            )

        # Merge hardware and reagents into a single labware list.
        # NOTE(review): ``**hw`` is spread after ``"type": "hardware"``, so a
        # Component's own ``type`` attribute overwrites the category marker;
        # confirm this is intentional.
        labware_data = []
        labware_data.extend({"id": hw["id"], "type": "hardware", **hw} for hw in hardware)
        labware_data.extend(
            {"name": reagent["name"], "type": "reagent", **reagent} for reagent in reagents
        )

        return {
            "success": True,
            "steps": steps_data,
            "labware": labware_data,
            "message": f"Successfully converted XDL to dict format. Found {len(steps_data)} steps and {len(labware_data)} labware items.",
        }

    except Exception as e:
        # Broad catch is deliberate: this is a conversion boundary that
        # must always return a structured error instead of raising.
        error_msg = f"XDL conversion failed: {str(e)}"
        return {"error": error_msg, "success": False}
|
||||||
@@ -41,7 +41,6 @@ def upload_workflow(
|
|||||||
workflow_name: Optional[str] = None,
|
workflow_name: Optional[str] = None,
|
||||||
tags: Optional[List[str]] = None,
|
tags: Optional[List[str]] = None,
|
||||||
published: bool = False,
|
published: bool = False,
|
||||||
description: str = "",
|
|
||||||
) -> Dict[str, Any]:
|
) -> Dict[str, Any]:
|
||||||
"""
|
"""
|
||||||
上传工作流到服务器
|
上传工作流到服务器
|
||||||
@@ -57,7 +56,6 @@ def upload_workflow(
|
|||||||
workflow_name: 工作流名称,如果不提供则从文件中读取或使用文件名
|
workflow_name: 工作流名称,如果不提供则从文件中读取或使用文件名
|
||||||
tags: 工作流标签列表,默认为空列表
|
tags: 工作流标签列表,默认为空列表
|
||||||
published: 是否发布工作流,默认为False
|
published: 是否发布工作流,默认为False
|
||||||
description: 工作流描述,发布时使用
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Dict: API响应数据
|
Dict: API响应数据
|
||||||
@@ -77,14 +75,6 @@ def upload_workflow(
|
|||||||
print_status(f"工作流文件JSON解析失败: {e}", "error")
|
print_status(f"工作流文件JSON解析失败: {e}", "error")
|
||||||
return {"code": -1, "message": f"JSON解析失败: {e}"}
|
return {"code": -1, "message": f"JSON解析失败: {e}"}
|
||||||
|
|
||||||
# 从 JSON 文件中提取 description 和 tags(作为 fallback)
|
|
||||||
if not description and "description" in workflow_data:
|
|
||||||
description = workflow_data["description"]
|
|
||||||
print_status(f"从文件中读取 description", "info")
|
|
||||||
if not tags and "tags" in workflow_data:
|
|
||||||
tags = workflow_data["tags"]
|
|
||||||
print_status(f"从文件中读取 tags: {tags}", "info")
|
|
||||||
|
|
||||||
# 自动检测并转换格式
|
# 自动检测并转换格式
|
||||||
if not _is_node_link_format(workflow_data):
|
if not _is_node_link_format(workflow_data):
|
||||||
try:
|
try:
|
||||||
@@ -106,7 +96,6 @@ def upload_workflow(
|
|||||||
print_status(f" - 节点数量: {len(nodes)}", "info")
|
print_status(f" - 节点数量: {len(nodes)}", "info")
|
||||||
print_status(f" - 边数量: {len(edges)}", "info")
|
print_status(f" - 边数量: {len(edges)}", "info")
|
||||||
print_status(f" - 标签: {tags or []}", "info")
|
print_status(f" - 标签: {tags or []}", "info")
|
||||||
print_status(f" - 描述: {description[:50]}{'...' if len(description) > 50 else ''}", "info")
|
|
||||||
print_status(f" - 发布状态: {published}", "info")
|
print_status(f" - 发布状态: {published}", "info")
|
||||||
|
|
||||||
# 调用 http_client 上传
|
# 调用 http_client 上传
|
||||||
@@ -118,7 +107,6 @@ def upload_workflow(
|
|||||||
edges=edges,
|
edges=edges,
|
||||||
tags=tags,
|
tags=tags,
|
||||||
published=published,
|
published=published,
|
||||||
description=description,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if result.get("code") == 0:
|
if result.get("code") == 0:
|
||||||
@@ -143,9 +131,8 @@ def handle_workflow_upload_command(args_dict: Dict[str, Any]) -> None:
|
|||||||
workflow_name = args_dict.get("workflow_name")
|
workflow_name = args_dict.get("workflow_name")
|
||||||
tags = args_dict.get("tags", [])
|
tags = args_dict.get("tags", [])
|
||||||
published = args_dict.get("published", False)
|
published = args_dict.get("published", False)
|
||||||
description = args_dict.get("description", "")
|
|
||||||
|
|
||||||
if workflow_file:
|
if workflow_file:
|
||||||
upload_workflow(workflow_file, workflow_name, tags, published, description)
|
upload_workflow(workflow_file, workflow_name, tags, published)
|
||||||
else:
|
else:
|
||||||
print_status("未指定工作流文件路径,请使用 -f/--workflow_file 参数", "error")
|
print_status("未指定工作流文件路径,请使用 -f/--workflow_file 参数", "error")
|
||||||
|
|||||||
@@ -2,7 +2,7 @@
|
|||||||
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
|
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
|
||||||
<package format="3">
|
<package format="3">
|
||||||
<name>unilabos_msgs</name>
|
<name>unilabos_msgs</name>
|
||||||
<version>0.10.18</version>
|
<version>0.10.17</version>
|
||||||
<description>ROS2 Messages package for unilabos devices</description>
|
<description>ROS2 Messages package for unilabos devices</description>
|
||||||
<maintainer email="changjh@pku.edu.cn">Junhan Chang</maintainer>
|
<maintainer email="changjh@pku.edu.cn">Junhan Chang</maintainer>
|
||||||
<maintainer email="18435084+Xuwznln@users.noreply.github.com">Xuwznln</maintainer>
|
<maintainer email="18435084+Xuwznln@users.noreply.github.com">Xuwznln</maintainer>
|
||||||
|
|||||||
Reference in New Issue
Block a user