Compare commits
137 Commits
219a480c08
...
refactor/B
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
edc1fe853b | ||
|
|
80272d691d | ||
|
|
0ab4027de7 | ||
|
|
5f36b6c04b | ||
|
|
d75c7f123b | ||
|
|
ed80d786c1 | ||
|
|
9de473374f | ||
|
|
dbf5df6e4d | ||
|
|
f10c0343ce | ||
|
|
8b6553bdd9 | ||
|
|
e7a4afd6b5 | ||
|
|
f18f6d82fc | ||
|
|
b7c726635c | ||
|
|
c809912fd3 | ||
|
|
d956b27e9f | ||
|
|
ff1e21fcd8 | ||
|
|
b9d9666003 | ||
|
|
d776550a4b | ||
|
|
3d8123849a | ||
|
|
d2f204c5b0 | ||
|
|
d8922884b1 | ||
|
|
427afe83d4 | ||
|
|
23c2e3b2f7 | ||
|
|
59c26265e9 | ||
|
|
4c2adea55a | ||
|
|
0f6264503a | ||
|
|
2c554182d3 | ||
|
|
6d319d91ff | ||
|
|
3155b2f97e | ||
|
|
e5e30a1c7d | ||
|
|
4e82f62327 | ||
|
|
95d3456214 | ||
|
|
38bf95b13c | ||
|
|
f2c0bec02c | ||
|
|
e0394bf414 | ||
|
|
975a56415a | ||
|
|
cadbe87e3f | ||
|
|
b993c1f590 | ||
|
|
e0fae94c10 | ||
|
|
b5cd181ac1 | ||
|
|
5c047beb83 | ||
|
|
b40c087143 | ||
|
|
7f1cc3b2a5 | ||
|
|
3f160c2049 | ||
|
|
a54e7c0f23 | ||
|
|
e5015cd5e0 | ||
|
|
514373c164 | ||
|
|
fcea02585a | ||
|
|
07cf690897 | ||
|
|
cfea27460a | ||
|
|
b7d3e980a9 | ||
|
|
f9ed6cb3fb | ||
|
|
699a0b3ce7 | ||
|
|
cf3a20ae79 | ||
|
|
cdf0652020 | ||
|
|
60073ff139 | ||
|
|
a9053b822f | ||
|
|
d238c2ab8b | ||
|
|
9a7d5c7c82 | ||
|
|
4f7d431c0b | ||
|
|
341a1b537c | ||
|
|
957fb41a6f | ||
|
|
26271bcab8 | ||
|
|
84a8223173 | ||
|
|
e8d1263488 | ||
|
|
380b39100d | ||
|
|
56eb7e2ab4 | ||
|
|
23ce145f74 | ||
|
|
b0da149252 | ||
|
|
07c9e6f0fe | ||
|
|
ccec6b9d77 | ||
|
|
dadfdf3d8d | ||
|
|
400bb073d4 | ||
|
|
3f63c36505 | ||
|
|
0ae94f7f3c | ||
|
|
7eacae6442 | ||
|
|
f7d2cb4b9e | ||
|
|
bf980d7248 | ||
|
|
27c0544bfc | ||
|
|
d48e77c9ae | ||
|
|
e70a5bea66 | ||
|
|
467d75dc03 | ||
|
|
9feeb0c430 | ||
|
|
b2f26ffb28 | ||
|
|
4b0d1553e9 | ||
|
|
67ddee2ab2 | ||
|
|
1bcdad9448 | ||
|
|
039c96fe01 | ||
|
|
e1555d10a0 | ||
|
|
f2a96b2041 | ||
|
|
329349639e | ||
|
|
e4cc111523 | ||
|
|
d245ceef1b | ||
|
|
6db7fbd721 | ||
|
|
ab05b858e1 | ||
|
|
43e4c71a8e | ||
|
|
2cf58ca452 | ||
|
|
fd73bb7dcb | ||
|
|
a02cecfd18 | ||
|
|
d6accc3f1c | ||
|
|
39dc443399 | ||
|
|
37b1fca962 | ||
|
|
216f19fb62 | ||
|
|
ec7ca6a1fe | ||
|
|
4c8022ee95 | ||
|
|
ad21644db0 | ||
|
|
9dfd58e9af | ||
|
|
31c9f9a172 | ||
|
|
02cd8de4c5 | ||
|
|
a66603ec1c | ||
|
|
ec015e16cd | ||
|
|
965bf36e8d | ||
|
|
aacf3497e0 | ||
|
|
657f952e7a | ||
|
|
0165590290 | ||
|
|
daea1ab54d | ||
|
|
93cb307396 | ||
|
|
1c312772ae | ||
|
|
bad1db5094 | ||
|
|
f26eb69eca | ||
|
|
12c0770c92 | ||
|
|
3d2d428a96 | ||
|
|
78bf57f590 | ||
|
|
e227cddab3 | ||
|
|
f2b993643f | ||
|
|
2e14bf197c | ||
|
|
66c18c080a | ||
|
|
a1c34f138e | ||
|
|
75bb5ec553 | ||
|
|
bb95c89829 | ||
|
|
394c140830 | ||
|
|
e6d8d41183 | ||
|
|
847a300af3 | ||
|
|
a201d7c307 | ||
|
|
3433766bc5 | ||
|
|
7e9e93b29c | ||
|
|
9e1e6da505 |
62
.conda/base/recipe.yaml
Normal file
@@ -0,0 +1,62 @@
|
||||
# unilabos: Production package (depends on unilabos-env + pip unilabos)
|
||||
# For production deployment
|
||||
|
||||
package:
|
||||
name: unilabos
|
||||
version: 0.10.19
|
||||
|
||||
source:
|
||||
path: ../../unilabos
|
||||
target_directory: unilabos
|
||||
|
||||
build:
|
||||
python:
|
||||
entry_points:
|
||||
- unilab = unilabos.app.main:main
|
||||
script:
|
||||
- set PIP_NO_INDEX=
|
||||
- if: win
|
||||
then:
|
||||
- copy %RECIPE_DIR%\..\..\MANIFEST.in %SRC_DIR%
|
||||
- copy %RECIPE_DIR%\..\..\setup.cfg %SRC_DIR%
|
||||
- copy %RECIPE_DIR%\..\..\setup.py %SRC_DIR%
|
||||
- pip install %SRC_DIR%
|
||||
- if: unix
|
||||
then:
|
||||
- cp $RECIPE_DIR/../../MANIFEST.in $SRC_DIR
|
||||
- cp $RECIPE_DIR/../../setup.cfg $SRC_DIR
|
||||
- cp $RECIPE_DIR/../../setup.py $SRC_DIR
|
||||
- pip install $SRC_DIR
|
||||
|
||||
requirements:
|
||||
host:
|
||||
- python ==3.11.14
|
||||
- pip
|
||||
- setuptools
|
||||
- zstd
|
||||
- zstandard
|
||||
run:
|
||||
- zstd
|
||||
- zstandard
|
||||
- networkx
|
||||
- typing_extensions
|
||||
- websockets
|
||||
- pint
|
||||
- fastapi
|
||||
- jinja2
|
||||
- requests
|
||||
- uvicorn
|
||||
- if: not osx
|
||||
then:
|
||||
- opcua
|
||||
- pyserial
|
||||
- pandas
|
||||
- pymodbus
|
||||
- matplotlib
|
||||
- pylibftdi
|
||||
- uni-lab::unilabos-env ==0.10.19
|
||||
|
||||
about:
|
||||
repository: https://github.com/deepmodeling/Uni-Lab-OS
|
||||
license: GPL-3.0-only
|
||||
description: "UniLabOS - Production package with minimal ROS2 dependencies"
|
||||
39
.conda/environment/recipe.yaml
Normal file
@@ -0,0 +1,39 @@
|
||||
# unilabos-env: conda environment dependencies (ROS2 + conda packages)
|
||||
|
||||
package:
|
||||
name: unilabos-env
|
||||
version: 0.10.19
|
||||
|
||||
build:
|
||||
noarch: generic
|
||||
|
||||
requirements:
|
||||
run:
|
||||
# Python
|
||||
- zstd
|
||||
- zstandard
|
||||
- conda-forge::python ==3.11.14
|
||||
- conda-forge::opencv
|
||||
# ROS2 dependencies (from ci-check.yml)
|
||||
- robostack-staging::ros-humble-ros-core
|
||||
- robostack-staging::ros-humble-action-msgs
|
||||
- robostack-staging::ros-humble-std-msgs
|
||||
- robostack-staging::ros-humble-geometry-msgs
|
||||
- robostack-staging::ros-humble-control-msgs
|
||||
- robostack-staging::ros-humble-nav2-msgs
|
||||
- robostack-staging::ros-humble-cv-bridge
|
||||
- robostack-staging::ros-humble-vision-opencv
|
||||
- robostack-staging::ros-humble-tf-transformations
|
||||
- robostack-staging::ros-humble-moveit-msgs
|
||||
- robostack-staging::ros-humble-tf2-ros
|
||||
- robostack-staging::ros-humble-tf2-ros-py
|
||||
- conda-forge::transforms3d
|
||||
- conda-forge::uv
|
||||
|
||||
# UniLabOS custom messages
|
||||
- uni-lab::ros-humble-unilabos-msgs
|
||||
|
||||
about:
|
||||
repository: https://github.com/deepmodeling/Uni-Lab-OS
|
||||
license: GPL-3.0-only
|
||||
description: "UniLabOS Environment - ROS2 and conda dependencies"
|
||||
42
.conda/full/recipe.yaml
Normal file
@@ -0,0 +1,42 @@
|
||||
# unilabos-full: Full package with all features
|
||||
# Depends on unilabos + complete ROS2 desktop + dev tools
|
||||
|
||||
package:
|
||||
name: unilabos-full
|
||||
version: 0.10.19
|
||||
|
||||
build:
|
||||
noarch: generic
|
||||
|
||||
requirements:
|
||||
run:
|
||||
# Base unilabos package (includes unilabos-env)
|
||||
- uni-lab::unilabos ==0.10.19
|
||||
# Documentation tools
|
||||
- sphinx
|
||||
- sphinx_rtd_theme
|
||||
# Web UI
|
||||
- gradio
|
||||
- flask
|
||||
# Interactive development
|
||||
- ipython
|
||||
- jupyter
|
||||
- jupyros
|
||||
- colcon-common-extensions
|
||||
# ROS2 full desktop (includes rviz2, gazebo, etc.)
|
||||
- robostack-staging::ros-humble-desktop-full
|
||||
# Navigation and motion control
|
||||
- ros-humble-navigation2
|
||||
- ros-humble-ros2-control
|
||||
- ros-humble-robot-state-publisher
|
||||
- ros-humble-joint-state-publisher
|
||||
# MoveIt motion planning
|
||||
- ros-humble-moveit
|
||||
- ros-humble-moveit-servo
|
||||
# Simulation
|
||||
- ros-humble-simulation
|
||||
|
||||
about:
|
||||
repository: https://github.com/deepmodeling/Uni-Lab-OS
|
||||
license: GPL-3.0-only
|
||||
description: "UniLabOS Full - Complete package with ROS2 Desktop, MoveIt, Navigation2, Gazebo, Jupyter"
|
||||
@@ -1,92 +0,0 @@
|
||||
package:
|
||||
name: unilabos
|
||||
version: 0.10.12
|
||||
|
||||
source:
|
||||
path: ../unilabos
|
||||
target_directory: unilabos
|
||||
|
||||
build:
|
||||
python:
|
||||
entry_points:
|
||||
- unilab = unilabos.app.main:main
|
||||
script:
|
||||
- set PIP_NO_INDEX=
|
||||
- if: win
|
||||
then:
|
||||
- copy %RECIPE_DIR%\..\MANIFEST.in %SRC_DIR%
|
||||
- copy %RECIPE_DIR%\..\setup.cfg %SRC_DIR%
|
||||
- copy %RECIPE_DIR%\..\setup.py %SRC_DIR%
|
||||
- call %PYTHON% -m pip install %SRC_DIR%
|
||||
- if: unix
|
||||
then:
|
||||
- cp $RECIPE_DIR/../MANIFEST.in $SRC_DIR
|
||||
- cp $RECIPE_DIR/../setup.cfg $SRC_DIR
|
||||
- cp $RECIPE_DIR/../setup.py $SRC_DIR
|
||||
- $PYTHON -m pip install $SRC_DIR
|
||||
|
||||
|
||||
requirements:
|
||||
host:
|
||||
- python ==3.11.11
|
||||
- pip
|
||||
- setuptools
|
||||
- zstd
|
||||
- zstandard
|
||||
run:
|
||||
- conda-forge::python ==3.11.11
|
||||
- compilers
|
||||
- cmake
|
||||
- zstd
|
||||
- zstandard
|
||||
- ninja
|
||||
- if: unix
|
||||
then:
|
||||
- make
|
||||
- sphinx
|
||||
- sphinx_rtd_theme
|
||||
- numpy
|
||||
- scipy
|
||||
- pandas
|
||||
- networkx
|
||||
- matplotlib
|
||||
- pint
|
||||
- pyserial
|
||||
- pyusb
|
||||
- pylibftdi
|
||||
- pymodbus
|
||||
- python-can
|
||||
- pyvisa
|
||||
- opencv
|
||||
- pydantic
|
||||
- fastapi
|
||||
- uvicorn
|
||||
- gradio
|
||||
- flask
|
||||
- websockets
|
||||
- ipython
|
||||
- jupyter
|
||||
- jupyros
|
||||
- colcon-common-extensions
|
||||
- robostack-staging::ros-humble-desktop-full
|
||||
- robostack-staging::ros-humble-control-msgs
|
||||
- robostack-staging::ros-humble-sensor-msgs
|
||||
- robostack-staging::ros-humble-trajectory-msgs
|
||||
- ros-humble-navigation2
|
||||
- ros-humble-ros2-control
|
||||
- ros-humble-robot-state-publisher
|
||||
- ros-humble-joint-state-publisher
|
||||
- ros-humble-rosbridge-server
|
||||
- ros-humble-cv-bridge
|
||||
- ros-humble-tf2
|
||||
- ros-humble-moveit
|
||||
- ros-humble-moveit-servo
|
||||
- ros-humble-simulation
|
||||
- ros-humble-tf-transformations
|
||||
- transforms3d
|
||||
- uni-lab::ros-humble-unilabos-msgs
|
||||
|
||||
about:
|
||||
repository: https://github.com/dptech-corp/Uni-Lab-OS
|
||||
license: GPL-3.0-only
|
||||
description: "Uni-Lab-OS"
|
||||
@@ -1,9 +0,0 @@
|
||||
@echo off
|
||||
setlocal enabledelayedexpansion
|
||||
|
||||
REM upgrade pip
|
||||
"%PREFIX%\python.exe" -m pip install --upgrade pip
|
||||
|
||||
REM install extra deps
|
||||
"%PREFIX%\python.exe" -m pip install paho-mqtt opentrons_shared_data
|
||||
"%PREFIX%\python.exe" -m pip install git+https://github.com/Xuwznln/pylabrobot.git
|
||||
@@ -1,9 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euxo pipefail
|
||||
|
||||
# make sure pip is available
|
||||
"$PREFIX/bin/python" -m pip install --upgrade pip
|
||||
|
||||
# install extra deps
|
||||
"$PREFIX/bin/python" -m pip install paho-mqtt opentrons_shared_data
|
||||
"$PREFIX/bin/python" -m pip install git+https://github.com/Xuwznln/pylabrobot.git
|
||||
160
.cursor/skills/add-device/SKILL.md
Normal file
@@ -0,0 +1,160 @@
|
||||
---
|
||||
name: add-device
|
||||
description: Guide for adding new devices to Uni-Lab-OS (接入新设备). Uses @device decorator + AST auto-scanning instead of manual YAML. Walks through device category, communication protocol, driver creation with decorators, and graph file setup. Use when the user wants to add/integrate a new device, create a device driver, write a device class, or mentions 接入设备/添加设备/设备驱动/物模型.
|
||||
---
|
||||
|
||||
# 添加新设备到 Uni-Lab-OS
|
||||
|
||||
**第一步:** 使用 Read 工具读取 `docs/ai_guides/add_device.md`,获取完整的设备接入指南。
|
||||
|
||||
该指南包含设备类别(物模型)列表、通信协议模板、常见错误检查清单等。搜索 `unilabos/devices/` 获取已有设备的实现参考。
|
||||
|
||||
---
|
||||
|
||||
## 装饰器参考
|
||||
|
||||
### @device — 设备类装饰器
|
||||
|
||||
```python
|
||||
from unilabos.registry.decorators import device
|
||||
|
||||
# 单设备
|
||||
@device(
|
||||
id="my_device.vendor", # 注册表唯一标识(必填)
|
||||
category=["temperature"], # 分类标签列表(必填)
|
||||
description="设备描述", # 设备描述
|
||||
display_name="显示名称", # UI 显示名称(默认用 id)
|
||||
icon="DeviceIcon.webp", # 图标文件名
|
||||
version="1.0.0", # 版本号
|
||||
device_type="python", # "python" 或 "ros2"
|
||||
handles=[...], # 端口列表(InputHandle / OutputHandle)
|
||||
model={...}, # 3D 模型配置
|
||||
hardware_interface=HardwareInterface(...), # 硬件通信接口
|
||||
)
|
||||
|
||||
# 多设备(同一个类注册多个设备 ID,各自有不同的 handles 等配置)
|
||||
@device(
|
||||
ids=["pump.vendor.model_A", "pump.vendor.model_B"],
|
||||
id_meta={
|
||||
"pump.vendor.model_A": {"handles": [...], "description": "型号 A"},
|
||||
"pump.vendor.model_B": {"handles": [...], "description": "型号 B"},
|
||||
},
|
||||
category=["pump_and_valve"],
|
||||
)
|
||||
```
|
||||
|
||||
### @action — 动作方法装饰器
|
||||
|
||||
```python
|
||||
from unilabos.registry.decorators import action
|
||||
|
||||
@action # 无参:注册为 UniLabJsonCommand 动作
|
||||
@action() # 同上
|
||||
@action(description="执行操作") # 带描述
|
||||
@action(
|
||||
action_type=HeatChill, # 指定 ROS Action 消息类型
|
||||
goal={"temperature": "temp"}, # Goal 字段映射
|
||||
feedback={}, # Feedback 字段映射
|
||||
result={}, # Result 字段映射
|
||||
handles=[...], # 动作级别端口
|
||||
goal_default={"temp": 25.0}, # Goal 默认值
|
||||
placeholder_keys={...}, # 参数占位符
|
||||
always_free=True, # 不受排队限制
|
||||
auto_prefix=True, # 强制使用 auto- 前缀
|
||||
parent=True, # 从父类 MRO 获取参数签名
|
||||
)
|
||||
```
|
||||
|
||||
**自动识别规则:**
|
||||
- 带 `@action` 的公开方法 → 注册为动作(方法名即动作名)
|
||||
- **不带 `@action` 的公开方法** → 自动注册为 `auto-{方法名}` 动作
|
||||
- `_` 开头的方法 → 不扫描
|
||||
- `@not_action` 标记的方法 → 排除
|
||||
|
||||
### @topic_config — 状态属性配置
|
||||
|
||||
```python
|
||||
from unilabos.registry.decorators import topic_config
|
||||
|
||||
@property
|
||||
@topic_config(
|
||||
period=5.0, # 发布周期(秒),默认 5.0
|
||||
print_publish=False, # 是否打印发布日志
|
||||
qos=10, # QoS 深度,默认 10
|
||||
name="custom_name", # 自定义发布名称(默认用属性名)
|
||||
)
|
||||
def temperature(self) -> float:
|
||||
return self.data.get("temperature", 0.0)
|
||||
```
|
||||
|
||||
### 辅助装饰器
|
||||
|
||||
```python
|
||||
from unilabos.registry.decorators import not_action, always_free
|
||||
|
||||
@not_action # 标记为非动作(post_init、辅助方法等)
|
||||
@always_free # 标记为不受排队限制(查询类操作)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 设备模板
|
||||
|
||||
```python
|
||||
import logging
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode
|
||||
from unilabos.registry.decorators import device, action, topic_config, not_action
|
||||
|
||||
@device(id="my_device", category=["my_category"], description="设备描述")
|
||||
class MyDevice:
|
||||
_ros_node: BaseROS2DeviceNode
|
||||
|
||||
def __init__(self, device_id: Optional[str] = None, config: Optional[Dict[str, Any]] = None, **kwargs):
|
||||
self.device_id = device_id or "my_device"
|
||||
self.config = config or {}
|
||||
self.logger = logging.getLogger(f"MyDevice.{self.device_id}")
|
||||
self.data: Dict[str, Any] = {"status": "Idle"}
|
||||
|
||||
@not_action
|
||||
def post_init(self, ros_node: BaseROS2DeviceNode) -> None:
|
||||
self._ros_node = ros_node
|
||||
|
||||
@action
|
||||
async def initialize(self) -> bool:
|
||||
self.data["status"] = "Ready"
|
||||
return True
|
||||
|
||||
@action
|
||||
async def cleanup(self) -> bool:
|
||||
self.data["status"] = "Offline"
|
||||
return True
|
||||
|
||||
@action(description="执行操作")
|
||||
def my_action(self, param: float = 0.0, name: str = "") -> Dict[str, Any]:
|
||||
"""带 @action 装饰器 → 注册为 'my_action' 动作"""
|
||||
return {"success": True}
|
||||
|
||||
def get_info(self) -> Dict[str, Any]:
|
||||
"""无 @action → 自动注册为 'auto-get_info' 动作"""
|
||||
return {"device_id": self.device_id}
|
||||
|
||||
@property
|
||||
@topic_config()
|
||||
def status(self) -> str:
|
||||
return self.data.get("status", "Idle")
|
||||
|
||||
@property
|
||||
@topic_config(period=2.0)
|
||||
def temperature(self) -> float:
|
||||
return self.data.get("temperature", 0.0)
|
||||
```
|
||||
|
||||
### 要点
|
||||
|
||||
- `_ros_node: BaseROS2DeviceNode` 类型标注放在类体顶部
|
||||
- `__init__` 签名固定为 `(self, device_id=None, config=None, **kwargs)`
|
||||
- `post_init` 用 `@not_action` 标记,参数类型标注为 `BaseROS2DeviceNode`
|
||||
- 运行时状态存储在 `self.data` 字典中
|
||||
- 设备文件放在 `unilabos/devices/<category>/` 目录下
|
||||
351
.cursor/skills/add-resource/SKILL.md
Normal file
@@ -0,0 +1,351 @@
|
||||
---
|
||||
name: add-resource
|
||||
description: Guide for adding new resources (materials, bottles, carriers, decks, warehouses) to Uni-Lab-OS (添加新物料/资源). Uses @resource decorator for AST auto-scanning. Covers Bottle, Carrier, Deck, WareHouse definitions. Use when the user wants to add resources, define materials, create a deck layout, add bottles/carriers/plates, or mentions 物料/资源/resource/bottle/carrier/deck/plate/warehouse.
|
||||
---
|
||||
|
||||
# 添加新物料资源
|
||||
|
||||
Uni-Lab-OS 的资源体系基于 PyLabRobot,通过扩展实现 Bottle、Carrier、WareHouse、Deck 等实验室物料管理。使用 `@resource` 装饰器注册,AST 自动扫描生成注册表条目。
|
||||
|
||||
---
|
||||
|
||||
## 资源类型
|
||||
|
||||
| 类型 | 基类 | 用途 | 示例 |
|
||||
|------|------|------|------|
|
||||
| **Bottle** | `Well` (PyLabRobot) | 单个容器(瓶、小瓶、烧杯、反应器) | 试剂瓶、粉末瓶 |
|
||||
| **BottleCarrier** | `ItemizedCarrier` | 多槽位载架(放多个 Bottle) | 6 位试剂架、枪头盒 |
|
||||
| **WareHouse** | `ItemizedCarrier` | 堆栈/仓库(放多个 Carrier) | 4x4 堆栈 |
|
||||
| **Deck** | `Deck` (PyLabRobot) | 工作站台面(放多个 WareHouse) | 反应站 Deck |
|
||||
|
||||
**层级关系:** `Deck` → `WareHouse` → `BottleCarrier` → `Bottle`
|
||||
|
||||
WareHouse 本质上和 Site 是同一概念 — 都是定义一组固定的放置位(slot),只不过 WareHouse 多嵌套了一层 Deck。两者都需要开发者根据实际物理尺寸自行计算各 slot 的偏移坐标。
|
||||
|
||||
---
|
||||
|
||||
## @resource 装饰器
|
||||
|
||||
```python
|
||||
from unilabos.registry.decorators import resource
|
||||
|
||||
@resource(
|
||||
id="my_resource_id", # 注册表唯一标识(必填)
|
||||
category=["bottles"], # 分类标签列表(必填)
|
||||
description="资源描述",
|
||||
icon="", # 图标
|
||||
version="1.0.0",
|
||||
handles=[...], # 端口列表(InputHandle / OutputHandle)
|
||||
model={...}, # 3D 模型配置
|
||||
class_type="pylabrobot", # "python" / "pylabrobot" / "unilabos"
|
||||
)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 创建规范
|
||||
|
||||
### 命名规则
|
||||
|
||||
1. **`name` 参数作为前缀**:所有工厂函数必须接受 `name: str` 参数,创建子物料时以 `name` 作为前缀,确保实例名在运行时全局唯一
|
||||
2. **Bottle 命名约定**:试剂瓶-Bottle,烧杯-Beaker,烧瓶-Flask,小瓶-Vial
|
||||
3. **函数名 = `@resource(id=...)`**:工厂函数名与注册表 id 保持一致
|
||||
|
||||
### 子物料命名示例
|
||||
|
||||
```python
|
||||
# Carrier 内部的 sites 用 name 前缀
|
||||
for k, v in sites.items():
|
||||
v.name = f"{name}_{v.name}" # "堆栈1左_A01", "堆栈1左_B02" ...
|
||||
|
||||
# Carrier 中放置 Bottle 时用 name 前缀
|
||||
carrier[0] = My_Reagent_Bottle(f"{name}_flask_1") # "堆栈1左_flask_1"
|
||||
carrier[i] = My_Solid_Vial(f"{name}_vial_{ordering[i]}") # "堆栈1左_vial_A1"
|
||||
|
||||
# create_homogeneous_resources 使用 name_prefix
|
||||
sites=create_homogeneous_resources(
|
||||
klass=ResourceHolder,
|
||||
locations=[...],
|
||||
name_prefix=name, # 自动生成 "{name}_0", "{name}_1" ...
|
||||
)
|
||||
|
||||
# Deck setup 中用仓库名称作为 name 传入
|
||||
self.warehouses = {
|
||||
"堆栈1左": my_warehouse_4x4("堆栈1左"), # WareHouse.name = "堆栈1左"
|
||||
"试剂堆栈": my_reagent_stack("试剂堆栈"), # WareHouse.name = "试剂堆栈"
|
||||
}
|
||||
```
|
||||
|
||||
### 其他规范
|
||||
|
||||
- **max_volume 单位为 μL**:500mL = 500000
|
||||
- **尺寸单位为 mm**:`diameter`, `height`, `size_x/y/z`, `dx/dy/dz`
|
||||
- **BottleCarrier 必须设置 `num_items_x/y/z`**:用于前端渲染布局
|
||||
- **Deck 的 `__init__` 必须接受 `setup=False`**:图文件中 `config.setup=true` 触发 `setup()`
|
||||
- **按项目分组文件**:同一工作站的资源放在 `unilabos/resources/<project>/` 下
|
||||
- **`__init__` 必须接受 `serialize()` 输出的所有字段**:`serialize()` 输出会作为 `config` 回传到 `__init__`,因此必须通过显式参数或 `**kwargs` 接受,否则反序列化会报错
|
||||
- **持久化运行时状态用 `serialize_state()`**:通过 `_unilabos_state` 字典存储可变信息(如物料内容、液体量),只存 JSON 可序列化的基本类型
|
||||
|
||||
---
|
||||
|
||||
## 资源模板
|
||||
|
||||
### Bottle
|
||||
|
||||
```python
|
||||
from unilabos.registry.decorators import resource
|
||||
from unilabos.resources.itemized_carrier import Bottle
|
||||
|
||||
|
||||
@resource(id="My_Reagent_Bottle", category=["bottles"], description="我的试剂瓶")
|
||||
def My_Reagent_Bottle(
|
||||
name: str,
|
||||
diameter: float = 70.0,
|
||||
height: float = 120.0,
|
||||
max_volume: float = 500000.0,
|
||||
barcode: str = None,
|
||||
) -> Bottle:
|
||||
return Bottle(
|
||||
name=name,
|
||||
diameter=diameter,
|
||||
height=height,
|
||||
max_volume=max_volume,
|
||||
barcode=barcode,
|
||||
model="My_Reagent_Bottle",
|
||||
)
|
||||
```
|
||||
|
||||
**Bottle 参数:**
|
||||
- `name`: 实例名称(运行时唯一,由上层 Carrier 以前缀方式传入)
|
||||
- `diameter`: 瓶体直径 (mm)
|
||||
- `height`: 瓶体高度 (mm)
|
||||
- `max_volume`: 最大容积(**μL**,500mL = 500000)
|
||||
- `barcode`: 条形码(可选)
|
||||
|
||||
### BottleCarrier
|
||||
|
||||
```python
|
||||
from pylabrobot.resources import ResourceHolder
|
||||
from pylabrobot.resources.carrier import create_ordered_items_2d
|
||||
from unilabos.resources.itemized_carrier import BottleCarrier
|
||||
from unilabos.registry.decorators import resource
|
||||
|
||||
|
||||
@resource(id="My_6SlotCarrier", category=["bottle_carriers"], description="六槽位载架")
|
||||
def My_6SlotCarrier(name: str) -> BottleCarrier:
|
||||
sites = create_ordered_items_2d(
|
||||
klass=ResourceHolder,
|
||||
num_items_x=3, num_items_y=2,
|
||||
dx=10.0, dy=10.0, dz=5.0,
|
||||
item_dx=42.0, item_dy=35.0,
|
||||
size_x=20.0, size_y=20.0, size_z=50.0,
|
||||
)
|
||||
# 子 site 用 name 作为前缀
|
||||
for k, v in sites.items():
|
||||
v.name = f"{name}_{v.name}"
|
||||
|
||||
carrier = BottleCarrier(
|
||||
name=name, size_x=146.0, size_y=80.0, size_z=55.0,
|
||||
sites=sites, model="My_6SlotCarrier",
|
||||
)
|
||||
carrier.num_items_x = 3
|
||||
carrier.num_items_y = 2
|
||||
carrier.num_items_z = 1
|
||||
|
||||
# 放置 Bottle 时用 name 作为前缀
|
||||
ordering = ["A1", "B1", "A2", "B2", "A3", "B3"]
|
||||
for i in range(6):
|
||||
carrier[i] = My_Reagent_Bottle(f"{name}_vial_{ordering[i]}")
|
||||
return carrier
|
||||
```
|
||||
|
||||
### WareHouse / Deck 放置位
|
||||
|
||||
WareHouse 和 Site 本质上是同一概念:都是定义一组固定放置位(slot),根据物理尺寸自行批量计算偏移坐标。WareHouse 只是多嵌套了一层 Deck 而已。推荐开发者直接根据实物测量数据计算各 slot 偏移量。
|
||||
|
||||
#### WareHouse(使用 warehouse_factory)
|
||||
|
||||
```python
|
||||
from unilabos.resources.warehouse import warehouse_factory
|
||||
from unilabos.registry.decorators import resource
|
||||
|
||||
|
||||
@resource(id="my_warehouse_4x4", category=["warehouse"], description="4x4 堆栈仓库")
|
||||
def my_warehouse_4x4(name: str) -> "WareHouse":
|
||||
return warehouse_factory(
|
||||
name=name,
|
||||
num_items_x=4, num_items_y=4, num_items_z=1,
|
||||
dx=10.0, dy=10.0, dz=10.0, # 第一个 slot 的起始偏移
|
||||
item_dx=147.0, item_dy=106.0, item_dz=130.0, # slot 间距
|
||||
resource_size_x=127.0, resource_size_y=85.0, resource_size_z=100.0, # slot 尺寸
|
||||
model="my_warehouse_4x4",
|
||||
col_offset=0, # 列标签起始偏移(0 → A01, 4 → A05)
|
||||
layout="row-major", # "row-major" 行优先 / "col-major" 列优先 / "vertical-col-major" 竖向
|
||||
)
|
||||
```
|
||||
|
||||
`warehouse_factory` 参数说明:
|
||||
- `dx/dy/dz`:第一个 slot 相对 WareHouse 原点的偏移(mm)
|
||||
- `item_dx/item_dy/item_dz`:相邻 slot 间距(mm),需根据实际物理间距测量
|
||||
- `resource_size_x/y/z`:每个 slot 的可放置区域尺寸
|
||||
- `layout`:影响 slot 标签和坐标映射
|
||||
- `"row-major"`:A01,A02,...,B01,B02,...(行优先,适合横向排列)
|
||||
- `"col-major"`:A01,B01,...,A02,B02,...(列优先)
|
||||
- `"vertical-col-major"`:竖向排列,y 坐标反向
|
||||
|
||||
#### Deck 组装 WareHouse
|
||||
|
||||
Deck 通过 `setup()` 将多个 WareHouse 放置到指定坐标:
|
||||
|
||||
```python
|
||||
from pylabrobot.resources import Deck, Coordinate
|
||||
from unilabos.registry.decorators import resource
|
||||
|
||||
|
||||
@resource(id="MyStation_Deck", category=["deck"], description="我的工作站 Deck")
|
||||
class MyStation_Deck(Deck):
|
||||
def __init__(self, name="MyStation_Deck", size_x=2700.0, size_y=1080.0, size_z=1500.0,
|
||||
category="deck", setup=False, **kwargs) -> None:
|
||||
super().__init__(name=name, size_x=size_x, size_y=size_y, size_z=size_z)
|
||||
if setup:
|
||||
self.setup()
|
||||
|
||||
def setup(self) -> None:
|
||||
self.warehouses = {
|
||||
"堆栈1左": my_warehouse_4x4("堆栈1左"),
|
||||
"堆栈1右": my_warehouse_4x4("堆栈1右"),
|
||||
}
|
||||
self.warehouse_locations = {
|
||||
"堆栈1左": Coordinate(-200.0, 400.0, 0.0), # 自行测量计算
|
||||
"堆栈1右": Coordinate(2350.0, 400.0, 0.0),
|
||||
}
|
||||
for wh_name, wh in self.warehouses.items():
|
||||
self.assign_child_resource(wh, location=self.warehouse_locations[wh_name])
|
||||
```
|
||||
|
||||
#### Site 模式(前端定向放置)
|
||||
|
||||
适用于有固定孔位/槽位的设备(如移液站 PRCXI 9300),Deck 通过 `sites` 列表定义前端展示的放置位,前端据此渲染可拖拽的孔位布局:
|
||||
|
||||
```python
|
||||
import collections
|
||||
from typing import Any, Dict, List, Optional
|
||||
from pylabrobot.resources import Deck, Resource, Coordinate
|
||||
from unilabos.registry.decorators import resource
|
||||
|
||||
|
||||
@resource(id="MyLabDeck", category=["deck"], description="带 Site 定向放置的 Deck")
|
||||
class MyLabDeck(Deck):
|
||||
# 根据设备台面实测批量计算各 slot 坐标偏移
|
||||
_DEFAULT_SITE_POSITIONS = [
|
||||
(0, 0, 0), (138, 0, 0), (276, 0, 0), (414, 0, 0), # T1-T4
|
||||
(0, 96, 0), (138, 96, 0), (276, 96, 0), (414, 96, 0), # T5-T8
|
||||
]
|
||||
_DEFAULT_SITE_SIZE = {"width": 128.0, "height": 86.0, "depth": 0}
|
||||
_DEFAULT_CONTENT_TYPE = ["plate", "tip_rack", "tube_rack", "adaptor"]
|
||||
|
||||
def __init__(self, name: str, size_x: float, size_y: float, size_z: float,
|
||||
sites: Optional[List[Dict[str, Any]]] = None, **kwargs):
|
||||
super().__init__(size_x, size_y, size_z, name)
|
||||
if sites is not None:
|
||||
self.sites = [dict(s) for s in sites]
|
||||
else:
|
||||
self.sites = []
|
||||
for i, (x, y, z) in enumerate(self._DEFAULT_SITE_POSITIONS):
|
||||
self.sites.append({
|
||||
"label": f"T{i + 1}", # 前端显示的槽位标签
|
||||
"visible": True, # 是否在前端可见
|
||||
"position": {"x": x, "y": y, "z": z}, # 槽位物理坐标
|
||||
"size": dict(self._DEFAULT_SITE_SIZE), # 槽位尺寸
|
||||
"content_type": list(self._DEFAULT_CONTENT_TYPE), # 允许放入的物料类型
|
||||
})
|
||||
self._ordering = collections.OrderedDict(
|
||||
(site["label"], None) for site in self.sites
|
||||
)
|
||||
|
||||
def assign_child_resource(self, resource: Resource,
|
||||
location: Optional[Coordinate] = None,
|
||||
reassign: bool = True,
|
||||
spot: Optional[int] = None):
|
||||
idx = spot
|
||||
if spot is None:
|
||||
for i, site in enumerate(self.sites):
|
||||
if site.get("label") == resource.name:
|
||||
idx = i
|
||||
break
|
||||
if idx is None:
|
||||
for i in range(len(self.sites)):
|
||||
if self._get_site_resource(i) is None:
|
||||
idx = i
|
||||
break
|
||||
if idx is None:
|
||||
raise ValueError(f"No available site for '{resource.name}'")
|
||||
loc = Coordinate(**self.sites[idx]["position"])
|
||||
super().assign_child_resource(resource, location=loc, reassign=reassign)
|
||||
|
||||
def serialize(self) -> dict:
|
||||
data = super().serialize()
|
||||
sites_out = []
|
||||
for i, site in enumerate(self.sites):
|
||||
occupied = self._get_site_resource(i)
|
||||
sites_out.append({
|
||||
"label": site["label"],
|
||||
"visible": site.get("visible", True),
|
||||
"occupied_by": occupied.name if occupied else None,
|
||||
"position": site["position"],
|
||||
"size": site["size"],
|
||||
"content_type": site["content_type"],
|
||||
})
|
||||
data["sites"] = sites_out
|
||||
return data
|
||||
```
|
||||
|
||||
**Site 字段说明:**
|
||||
|
||||
| 字段 | 类型 | 说明 |
|
||||
|------|------|------|
|
||||
| `label` | str | 槽位标签(如 `"T1"`),前端显示名称,也用于匹配 resource.name |
|
||||
| `visible` | bool | 是否在前端可见 |
|
||||
| `position` | dict | 物理坐标 `{x, y, z}`(mm),需自行测量计算偏移 |
|
||||
| `size` | dict | 槽位尺寸 `{width, height, depth}`(mm) |
|
||||
| `content_type` | list | 允许放入的物料类型,如 `["plate", "tip_rack", "tube_rack", "adaptor"]` |
|
||||
|
||||
**参考实现:** `unilabos/devices/liquid_handling/prcxi/prcxi.py` 中的 `PRCXI9300Deck`(4x4 共 16 个 site)。
|
||||
|
||||
---
|
||||
|
||||
## 文件位置
|
||||
|
||||
```
|
||||
unilabos/resources/
|
||||
├── <project>/ # 按项目分组
|
||||
│ ├── bottles.py # Bottle 工厂函数
|
||||
│ ├── bottle_carriers.py # Carrier 工厂函数
|
||||
│ ├── warehouses.py # WareHouse 工厂函数
|
||||
│ └── decks.py # Deck 类定义
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 验证
|
||||
|
||||
```bash
|
||||
# 资源可导入
|
||||
python -c "from unilabos.resources.my_project.bottles import My_Reagent_Bottle; print(My_Reagent_Bottle('test'))"
|
||||
|
||||
# 启动测试(AST 自动扫描)
|
||||
unilab -g <graph>.json
|
||||
```
|
||||
|
||||
仅在以下情况仍需 YAML:第三方库资源(如 pylabrobot 内置资源,无 `@resource` 装饰器)。
|
||||
|
||||
---
|
||||
|
||||
## 关键路径
|
||||
|
||||
| 内容 | 路径 |
|
||||
|------|------|
|
||||
| Bottle/Carrier 基类 | `unilabos/resources/itemized_carrier.py` |
|
||||
| WareHouse 基类 + 工厂 | `unilabos/resources/warehouse.py` |
|
||||
| PLR 注册 | `unilabos/resources/plr_additional_res_reg.py` |
|
||||
| 装饰器定义 | `unilabos/registry/decorators.py` |
|
||||
292
.cursor/skills/add-resource/reference.md
Normal file
@@ -0,0 +1,292 @@
|
||||
# 资源高级参考
|
||||
|
||||
本文件是 SKILL.md 的补充,包含类继承体系、序列化/反序列化、Bioyond 物料同步、非瓶类资源和仓库工厂模式。Agent 在需要实现这些功能时按需阅读。
|
||||
|
||||
---
|
||||
|
||||
## 1. 类继承体系
|
||||
|
||||
```
|
||||
PyLabRobot
|
||||
├── Resource (PLR 基类)
|
||||
│ ├── Well
|
||||
│ │ └── Bottle (unilabos) → 瓶/小瓶/烧杯/反应器
|
||||
│ ├── Deck
|
||||
│ │ └── 自定义 Deck 类 (unilabos) → 工作站台面
|
||||
│ ├── ResourceHolder → 槽位占位符
|
||||
│ └── Container
|
||||
│ └── Battery (unilabos) → 组装好的电池
|
||||
│
|
||||
├── ItemizedCarrier (unilabos, 继承 Resource)
|
||||
│ ├── BottleCarrier (unilabos) → 瓶载架
|
||||
│ └── WareHouse (unilabos) → 堆栈仓库
|
||||
│
|
||||
├── ItemizedResource (PLR)
|
||||
│ └── MagazineHolder (unilabos) → 子弹夹载架
|
||||
│
|
||||
└── ResourceStack (PLR)
|
||||
└── Magazine (unilabos) → 子弹夹洞位
|
||||
```
|
||||
|
||||
### Bottle 类细节
|
||||
|
||||
```python
|
||||
class Bottle(Well):
|
||||
def __init__(self, name, diameter, height, max_volume,
|
||||
size_x=0.0, size_y=0.0, size_z=0.0,
|
||||
barcode=None, category="container", model=None, **kwargs):
|
||||
super().__init__(
|
||||
name=name,
|
||||
size_x=diameter, # PLR 用 diameter 作为 size_x/size_y
|
||||
size_y=diameter,
|
||||
size_z=height, # PLR 用 height 作为 size_z
|
||||
max_volume=max_volume,
|
||||
category=category,
|
||||
model=model,
|
||||
bottom_type="flat",
|
||||
cross_section_type="circle"
|
||||
)
|
||||
```
|
||||
|
||||
注意 `size_x = size_y = diameter`,`size_z = height`。
|
||||
|
||||
### ItemizedCarrier 核心方法
|
||||
|
||||
| 方法 | 说明 |
|
||||
|------|------|
|
||||
| `__getitem__(identifier)` | 通过索引或 Excel 标识(如 `"A01"`)访问槽位 |
|
||||
| `__setitem__(identifier, resource)` | 向槽位放入资源 |
|
||||
| `get_child_identifier(child)` | 获取子资源的标识符 |
|
||||
| `capacity` | 总槽位数 |
|
||||
| `sites` | 所有槽位字典 |
|
||||
|
||||
---
|
||||
|
||||
## 2. 序列化与反序列化
|
||||
|
||||
### PLR ↔ UniLab 转换
|
||||
|
||||
| 函数 | 位置 | 方向 |
|
||||
|------|------|------|
|
||||
| `ResourceTreeSet.from_plr_resources(resources)` | `resource_tracker.py` | PLR → UniLab |
|
||||
| `ResourceTreeSet.to_plr_resources()` | `resource_tracker.py` | UniLab → PLR |
|
||||
|
||||
### `from_plr_resources` 流程
|
||||
|
||||
```
|
||||
PLR Resource
|
||||
↓ build_uuid_mapping (递归生成 UUID)
|
||||
↓ resource.serialize() → dict
|
||||
↓ resource.serialize_all_state() → states
|
||||
↓ resource_plr_inner (递归构建 ResourceDictInstance)
|
||||
ResourceTreeSet
|
||||
```
|
||||
|
||||
关键:每个 PLR 资源通过 `unilabos_uuid` 属性携带 UUID,`unilabos_extra` 携带扩展数据(如 `class` 名)。
|
||||
|
||||
### `to_plr_resources` 流程
|
||||
|
||||
```
|
||||
ResourceTreeSet
|
||||
↓ collect_node_data (收集 UUID、状态、扩展数据)
|
||||
↓ node_to_plr_dict (转为 PLR 字典格式)
|
||||
↓ find_subclass(type_name, PLRResource) (查找 PLR 子类)
|
||||
↓ sub_cls.deserialize(plr_dict) (反序列化)
|
||||
↓ loop_set_uuid, loop_set_extra (递归设置 UUID 和扩展)
|
||||
PLR Resource
|
||||
```
|
||||
|
||||
### Bottle 序列化
|
||||
|
||||
```python
|
||||
class Bottle(Well):
|
||||
def serialize(self) -> dict:
|
||||
data = super().serialize()
|
||||
return {**data, "diameter": self.diameter, "height": self.height}
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, data: dict, allow_marshal=False):
|
||||
barcode_data = data.pop("barcode", None)
|
||||
instance = super().deserialize(data, allow_marshal=allow_marshal)
|
||||
if barcode_data and isinstance(barcode_data, str):
|
||||
instance.barcode = barcode_data
|
||||
return instance
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 3. Bioyond 物料同步
|
||||
|
||||
### 双向转换函数
|
||||
|
||||
| 函数 | 位置 | 方向 |
|
||||
|------|------|------|
|
||||
| `resource_bioyond_to_plr(materials, type_mapping, deck)` | `graphio.py` | Bioyond → PLR |
|
||||
| `resource_plr_to_bioyond(resources, type_mapping, warehouse_mapping)` | `graphio.py` | PLR → Bioyond |
|
||||
|
||||
### `resource_bioyond_to_plr` 流程
|
||||
|
||||
```
|
||||
Bioyond 物料列表
|
||||
↓ reverse_type_mapping: {typeName → (model, UUID)}
|
||||
↓ 对每个物料:
|
||||
typeName → 查映射 → model (如 "BIOYOND_PolymerStation_Reactor")
|
||||
initialize_resource({"name": unique_name, "class": model})
|
||||
↓ 设置 unilabos_extra (material_bioyond_id, material_bioyond_name 等)
|
||||
↓ 处理 detail (子物料/坐标)
|
||||
↓ 按 locationName 放入 deck.warehouses 对应槽位
|
||||
PLR 资源列表
|
||||
```
|
||||
|
||||
### `resource_plr_to_bioyond` 流程
|
||||
|
||||
```
|
||||
PLR 资源列表
|
||||
↓ 遍历每个资源:
|
||||
载架(capacity > 1): 生成 details 子物料 + 坐标
|
||||
单瓶: 直接映射
|
||||
↓ type_mapping 查找 typeId
|
||||
↓ warehouse_mapping 查找位置 UUID
|
||||
↓ 组装 Bioyond 格式 (name, typeName, typeId, quantity, Parameters, locations)
|
||||
Bioyond 物料列表
|
||||
```
|
||||
|
||||
### BioyondResourceSynchronizer
|
||||
|
||||
工作站通过 `ResourceSynchronizer` 自动同步物料:
|
||||
|
||||
```python
|
||||
class BioyondResourceSynchronizer(ResourceSynchronizer):
|
||||
def sync_from_external(self) -> bool:
|
||||
all_data = []
|
||||
all_data.extend(api_client.stock_material('{"typeMode": 0}')) # 耗材
|
||||
all_data.extend(api_client.stock_material('{"typeMode": 1}')) # 样品
|
||||
all_data.extend(api_client.stock_material('{"typeMode": 2}')) # 试剂
|
||||
unilab_resources = resource_bioyond_to_plr(
|
||||
all_data,
|
||||
type_mapping=self.workstation.bioyond_config["material_type_mappings"],
|
||||
deck=self.workstation.deck
|
||||
)
|
||||
# 更新 deck 上的资源
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 4. 非瓶类资源
|
||||
|
||||
### ElectrodeSheet(极片)
|
||||
|
||||
路径:`unilabos/resources/battery/electrode_sheet.py`
|
||||
|
||||
```python
|
||||
class ElectrodeSheet(ResourcePLR):
|
||||
"""片状材料(极片、隔膜、弹片、垫片等)"""
|
||||
_unilabos_state = {
|
||||
"diameter": 0.0,
|
||||
"thickness": 0.0,
|
||||
"mass": 0.0,
|
||||
"material_type": "",
|
||||
"color": "",
|
||||
"info": "",
|
||||
}
|
||||
```
|
||||
|
||||
工厂函数:`PositiveCan`, `PositiveElectrode`, `NegativeCan`, `NegativeElectrode`, `SpringWasher`, `FlatWasher`, `AluminumFoil`
|
||||
|
||||
### Battery(电池)
|
||||
|
||||
```python
|
||||
class Battery(Container):
|
||||
"""组装好的电池"""
|
||||
_unilabos_state = {
|
||||
"color": "",
|
||||
"electrolyte_name": "",
|
||||
"open_circuit_voltage": 0.0,
|
||||
}
|
||||
```
|
||||
|
||||
### Magazine / MagazineHolder(子弹夹)
|
||||
|
||||
```python
|
||||
class Magazine(ResourceStack):
|
||||
"""子弹夹洞位,可堆叠 ElectrodeSheet"""
|
||||
# direction, max_sheets
|
||||
|
||||
class MagazineHolder(ItemizedResource):
|
||||
"""多洞位子弹夹"""
|
||||
# hole_diameter, hole_depth, max_sheets_per_hole
|
||||
```
|
||||
|
||||
工厂函数 `magazine_factory()` 用 `create_homogeneous_resources` 生成洞位,可选预填 `ElectrodeSheet` 或 `Battery`。
|
||||
|
||||
---
|
||||
|
||||
## 5. 仓库工厂模式参考
|
||||
|
||||
### 实际 warehouse 工厂函数示例
|
||||
|
||||
```python
|
||||
# 行优先 4x4 仓库
|
||||
def bioyond_warehouse_1x4x4(name: str) -> WareHouse:
|
||||
return warehouse_factory(
|
||||
name=name,
|
||||
num_items_x=4, num_items_y=4, num_items_z=1,
|
||||
dx=10.0, dy=10.0, dz=10.0,
|
||||
item_dx=147.0, item_dy=106.0, item_dz=130.0,
|
||||
layout="row-major", # A01,A02,A03,A04, B01,...
|
||||
)
|
||||
|
||||
# 右侧 4x4 仓库(列名偏移)
|
||||
def bioyond_warehouse_1x4x4_right(name: str) -> WareHouse:
|
||||
return warehouse_factory(
|
||||
name=name,
|
||||
num_items_x=4, num_items_y=4, num_items_z=1,
|
||||
dx=10.0, dy=10.0, dz=10.0,
|
||||
item_dx=147.0, item_dy=106.0, item_dz=130.0,
|
||||
col_offset=4, # A05,A06,A07,A08
|
||||
layout="row-major",
|
||||
)
|
||||
|
||||
# 竖向仓库(站内试剂存放)
|
||||
def bioyond_warehouse_reagent_storage(name: str) -> WareHouse:
|
||||
return warehouse_factory(
|
||||
name=name,
|
||||
num_items_x=1, num_items_y=2, num_items_z=1,
|
||||
dx=10.0, dy=10.0, dz=10.0,
|
||||
item_dx=147.0, item_dy=106.0, item_dz=130.0,
|
||||
layout="vertical-col-major",
|
||||
)
|
||||
|
||||
# 行偏移(F 行开始)
|
||||
def bioyond_warehouse_5x3x1(name: str, row_offset: int = 0) -> WareHouse:
|
||||
return warehouse_factory(
|
||||
name=name,
|
||||
num_items_x=3, num_items_y=5, num_items_z=1,
|
||||
dx=10.0, dy=10.0, dz=10.0,
|
||||
item_dx=159.0, item_dy=183.0, item_dz=130.0,
|
||||
row_offset=row_offset, # 0→A行起,5→F行起
|
||||
layout="row-major",
|
||||
)
|
||||
```
|
||||
|
||||
### layout 类型说明
|
||||
|
||||
| layout | 命名顺序 | 适用场景 |
|
||||
|--------|---------|---------|
|
||||
| `col-major` (默认) | A01,B01,C01,D01, A02,B02,... | 列优先,标准堆栈 |
|
||||
| `row-major` | A01,A02,A03,A04, B01,B02,... | 行优先,Bioyond 前端展示 |
|
||||
| `vertical-col-major` | 竖向排列,标签从底部开始 | 竖向仓库(试剂存放、测密度) |
|
||||
|
||||
---
|
||||
|
||||
## 6. 关键路径
|
||||
|
||||
| 内容 | 路径 |
|
||||
|------|------|
|
||||
| Bottle/Carrier 基类 | `unilabos/resources/itemized_carrier.py` |
|
||||
| WareHouse 类 + 工厂 | `unilabos/resources/warehouse.py` |
|
||||
| ResourceTreeSet 转换 | `unilabos/resources/resource_tracker.py` |
|
||||
| Bioyond 物料转换 | `unilabos/resources/graphio.py` |
|
||||
| Bioyond 仓库定义 | `unilabos/resources/bioyond/warehouses.py` |
|
||||
| 电池资源 | `unilabos/resources/battery/` |
|
||||
| PLR 注册 | `unilabos/resources/plr_additional_res_reg.py` |
|
||||
626
.cursor/skills/add-workstation/SKILL.md
Normal file
@@ -0,0 +1,626 @@
|
||||
---
|
||||
name: add-workstation
|
||||
description: Guide for adding new workstations to Uni-Lab-OS (接入新工作站). Uses @device decorator + AST auto-scanning. Walks through workstation type, sub-device composition, driver creation, deck setup, and graph file. Use when the user wants to add a workstation, create a workstation driver, configure a station with sub-devices, or mentions 工作站/工站/station/workstation.
|
||||
---
|
||||
|
||||
# Uni-Lab-OS 工作站接入指南
|
||||
|
||||
工作站(workstation)是组合多个子设备的大型设备,拥有独立的物料管理系统和工作流引擎。使用 `@device` 装饰器注册,AST 自动扫描生成注册表。
|
||||
|
||||
---
|
||||
|
||||
## 工作站类型
|
||||
|
||||
| 类型 | 基类 | 适用场景 |
|
||||
| ------------------- | ----------------- | ---------------------------------- |
|
||||
| **Protocol 工作站** | `ProtocolNode` | 标准化学操作协议(泵转移、过滤等) |
|
||||
| **外部系统工作站** | `WorkstationBase` | 与外部 LIMS/MES 对接 |
|
||||
| **硬件控制工作站** | `WorkstationBase` | 直接控制 PLC/硬件 |
|
||||
|
||||
---
|
||||
|
||||
## @device 装饰器(工作站)
|
||||
|
||||
工作站也使用 `@device` 装饰器注册,参数与普通设备一致:
|
||||
|
||||
```python
|
||||
@device(
|
||||
id="my_workstation", # 注册表唯一标识(必填)
|
||||
category=["workstation"], # 分类标签
|
||||
description="我的工作站",
|
||||
)
|
||||
```
|
||||
|
||||
如果一个工作站类支持多个具体变体,可使用 `ids` / `id_meta`,与设备的用法相同(参见 add-device SKILL)。
|
||||
|
||||
---
|
||||
|
||||
## 工作站驱动模板
|
||||
|
||||
### 模板 A:基于外部系统的工作站
|
||||
|
||||
```python
|
||||
import logging
|
||||
from typing import Dict, Any, Optional
|
||||
from pylabrobot.resources import Deck
|
||||
|
||||
from unilabos.registry.decorators import device, topic_config, not_action
|
||||
from unilabos.devices.workstation.workstation_base import WorkstationBase
|
||||
|
||||
try:
|
||||
from unilabos.ros.nodes.presets.workstation import ROS2WorkstationNode
|
||||
except ImportError:
|
||||
ROS2WorkstationNode = None
|
||||
|
||||
|
||||
@device(id="my_workstation", category=["workstation"], description="我的工作站")
|
||||
class MyWorkstation(WorkstationBase):
|
||||
_ros_node: "ROS2WorkstationNode"
|
||||
|
||||
def __init__(self, config=None, deck=None, protocol_type=None, **kwargs):
|
||||
super().__init__(deck=deck, **kwargs)
|
||||
self.config = config or {}
|
||||
self.logger = logging.getLogger("MyWorkstation")
|
||||
self.api_host = self.config.get("api_host", "")
|
||||
self._status = "Idle"
|
||||
|
||||
@not_action
|
||||
def post_init(self, ros_node: "ROS2WorkstationNode"):
|
||||
super().post_init(ros_node)
|
||||
self._ros_node = ros_node
|
||||
|
||||
async def scheduler_start(self, **kwargs) -> Dict[str, Any]:
|
||||
"""注册为工作站动作"""
|
||||
return {"success": True}
|
||||
|
||||
async def create_order(self, json_str: str, **kwargs) -> Dict[str, Any]:
|
||||
"""注册为工作站动作"""
|
||||
return {"success": True}
|
||||
|
||||
@property
|
||||
@topic_config()
|
||||
def workflow_sequence(self) -> str:
|
||||
return "[]"
|
||||
|
||||
@property
|
||||
@topic_config()
|
||||
def material_info(self) -> str:
|
||||
return "{}"
|
||||
```
|
||||
|
||||
### 模板 B:Protocol 工作站
|
||||
|
||||
直接使用 `ProtocolNode`,通常不需要自定义驱动类:
|
||||
|
||||
```python
|
||||
from unilabos.devices.workstation.workstation_base import ProtocolNode
|
||||
```
|
||||
|
||||
在图文件中配置 `protocol_type` 即可。
|
||||
|
||||
---
|
||||
|
||||
## 子设备访问(sub_devices)
|
||||
|
||||
工站初始化子设备后,所有子设备实例存储在 `self._ros_node.sub_devices` 字典中(key 为设备 id,value 为 `ROS2DeviceNode` 实例)。工站的驱动类可以直接获取子设备实例来调用其方法:
|
||||
|
||||
```python
|
||||
# 在工站驱动类的方法中访问子设备
|
||||
sub = self._ros_node.sub_devices["pump_1"]
|
||||
|
||||
# .driver_instance — 子设备的驱动实例(即设备 Python 类的实例)
|
||||
sub.driver_instance.some_method(arg1, arg2)
|
||||
|
||||
# .ros_node_instance — 子设备的 ROS2 节点实例
|
||||
sub.ros_node_instance._action_value_mappings # 查看子设备支持的 action
|
||||
```
|
||||
|
||||
**常见用法**:
|
||||
|
||||
```python
|
||||
class MyWorkstation(WorkstationBase):
|
||||
def my_protocol(self, **kwargs):
|
||||
# 获取子设备驱动实例
|
||||
pump = self._ros_node.sub_devices["pump_1"].driver_instance
|
||||
heater = self._ros_node.sub_devices["heater_1"].driver_instance
|
||||
|
||||
# 直接调用子设备方法
|
||||
pump.aspirate(volume=100)
|
||||
heater.set_temperature(80)
|
||||
```
|
||||
|
||||
> 参考实现:`unilabos/devices/workstation/bioyond_studio/reaction_station/reaction_station.py` 中通过 `self._ros_node.sub_devices.get(reactor_id)` 获取子反应器实例并更新数据。
|
||||
|
||||
---
|
||||
|
||||
## 硬件通信接口(hardware_interface)
|
||||
|
||||
硬件控制型工作站通常需要通过串口(Serial)、Modbus 等通信协议控制多个子设备。Uni-Lab-OS 通过 **通信设备代理** 机制实现端口共享:一个串口只创建一个 `serial` 节点,多个子设备共享这个通信实例。
|
||||
|
||||
### 工作原理
|
||||
|
||||
`ROS2WorkstationNode` 初始化时分两轮遍历子设备(`workstation.py`):
|
||||
|
||||
**第一轮 — 初始化所有子设备**:按 `children` 顺序调用 `initialize_device()`,通信设备(`serial_` / `io_` 开头的 id)优先完成初始化,创建 `serial.Serial()` 实例。其他子设备此时 `self.hardware_interface = "serial_pump"`(字符串)。
|
||||
|
||||
**第二轮 — 代理替换**:遍历所有已初始化的子设备,读取子设备的 `_hardware_interface` 配置:
|
||||
|
||||
```
|
||||
hardware_interface = d.ros_node_instance._hardware_interface
|
||||
# → {"name": "hardware_interface", "read": "send_command", "write": "send_command"}
|
||||
```
|
||||
|
||||
1. 取 `name` 字段对应的属性值:`name_value = getattr(driver, hardware_interface["name"])`
|
||||
- 如果 `name_value` 是字符串且该字符串是某个子设备的 id → 触发代理替换
|
||||
2. 从通信设备获取真正的 `read`/`write` 方法
|
||||
3. 用 `setattr(driver, read_method, _read)` 将通信设备的方法绑定到子设备上
|
||||
|
||||
因此:
|
||||
|
||||
- **通信设备 id 必须与子设备 config 中填的字符串完全一致**(如 `"serial_pump"`)
|
||||
- **通信设备 id 必须以 `serial_` 或 `io_` 开头**(否则第一轮不会被识别为通信设备)
|
||||
- **通信设备必须在 `children` 列表中排在最前面**,确保先初始化
|
||||
|
||||
### HardwareInterface 参数说明
|
||||
|
||||
```python
|
||||
from unilabos.registry.decorators import HardwareInterface
|
||||
|
||||
HardwareInterface(
|
||||
name="hardware_interface", # __init__ 中接收通信实例的属性名
|
||||
read="send_command", # 通信设备上暴露的读方法名
|
||||
write="send_command", # 通信设备上暴露的写方法名
|
||||
extra_info=["list_ports"], # 可选:额外暴露的方法
|
||||
)
|
||||
```
|
||||
|
||||
**`name` 字段的含义**:对应设备类 `__init__` 中,用于保存通信实例的**属性名**。系统据此知道要替换哪个属性。大部分设备直接用 `"hardware_interface"`,也可以自定义(如 `"io_device_port"`)。
|
||||
|
||||
### 示例 1:泵(name="hardware_interface")
|
||||
|
||||
```python
|
||||
from unilabos.registry.decorators import device, HardwareInterface
|
||||
|
||||
@device(
|
||||
id="my_pump",
|
||||
category=["pump_and_valve"],
|
||||
hardware_interface=HardwareInterface(
|
||||
name="hardware_interface",
|
||||
read="send_command",
|
||||
write="send_command",
|
||||
),
|
||||
)
|
||||
class MyPump:
|
||||
def __init__(self, port=None, address="1", **kwargs):
|
||||
# name="hardware_interface" → 系统替换 self.hardware_interface
|
||||
self.hardware_interface = port # 初始为字符串 "serial_pump",启动后被替换为 Serial 实例
|
||||
self.address = address
|
||||
|
||||
def send_command(self, command: str):
|
||||
full_command = f"/{self.address}{command}\r\n"
|
||||
self.hardware_interface.write(bytearray(full_command, "ascii"))
|
||||
return self.hardware_interface.read_until(b"\n")
|
||||
```
|
||||
|
||||
### 示例 2:电磁阀(name="io_device_port",自定义属性名)
|
||||
|
||||
```python
|
||||
@device(
|
||||
id="solenoid_valve",
|
||||
category=["pump_and_valve"],
|
||||
hardware_interface=HardwareInterface(
|
||||
name="io_device_port", # 自定义属性名 → 系统替换 self.io_device_port
|
||||
read="read_io_coil",
|
||||
write="write_io_coil",
|
||||
),
|
||||
)
|
||||
class SolenoidValve:
|
||||
def __init__(self, io_device_port: str = None, **kwargs):
|
||||
# name="io_device_port" → 图文件 config 中用 "io_device_port": "io_board_1"
|
||||
self.io_device_port = io_device_port # 初始为字符串,系统替换为 Modbus 实例
|
||||
```
|
||||
|
||||
### Serial 通信设备(class="serial")
|
||||
|
||||
`serial` 是 Uni-Lab-OS 内置的通信代理设备,代码位于 `unilabos/ros/nodes/presets/serial_node.py`:
|
||||
|
||||
```python
|
||||
from serial import Serial, SerialException
|
||||
from threading import Lock
|
||||
|
||||
class ROS2SerialNode(BaseROS2DeviceNode):
|
||||
def __init__(self, device_id, registry_name, port: str, baudrate: int = 9600, **kwargs):
|
||||
self.port = port
|
||||
self.baudrate = baudrate
|
||||
self._hardware_interface = {
|
||||
"name": "hardware_interface",
|
||||
"write": "send_command",
|
||||
"read": "read_data",
|
||||
}
|
||||
self._query_lock = Lock()
|
||||
|
||||
self.hardware_interface = Serial(baudrate=baudrate, port=port)
|
||||
|
||||
BaseROS2DeviceNode.__init__(
|
||||
self, driver_instance=self, registry_name=registry_name,
|
||||
device_id=device_id, status_types={}, action_value_mappings={},
|
||||
hardware_interface=self._hardware_interface, print_publish=False,
|
||||
)
|
||||
self.create_service(SerialCommand, "serialwrite", self.handle_serial_request)
|
||||
|
||||
def send_command(self, command: str):
|
||||
with self._query_lock:
|
||||
self.hardware_interface.write(bytearray(f"{command}\n", "ascii"))
|
||||
return self.hardware_interface.read_until(b"\n").decode()
|
||||
|
||||
def read_data(self):
|
||||
with self._query_lock:
|
||||
return self.hardware_interface.read_until(b"\n").decode()
|
||||
```
|
||||
|
||||
在图文件中使用 `"class": "serial"` 即可创建串口代理:
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "serial_pump",
|
||||
"class": "serial",
|
||||
"parent": "my_station",
|
||||
"config": { "port": "COM7", "baudrate": 9600 }
|
||||
}
|
||||
```
|
||||
|
||||
### 图文件配置
|
||||
|
||||
**通信设备必须在 `children` 列表中排在最前面**,确保先于其他子设备初始化:
|
||||
|
||||
```json
|
||||
{
|
||||
"nodes": [
|
||||
{
|
||||
"id": "my_station",
|
||||
"class": "workstation",
|
||||
"children": ["serial_pump", "pump_1", "pump_2"],
|
||||
"config": { "protocol_type": ["PumpTransferProtocol"] }
|
||||
},
|
||||
{
|
||||
"id": "serial_pump",
|
||||
"class": "serial",
|
||||
"parent": "my_station",
|
||||
"config": { "port": "COM7", "baudrate": 9600 }
|
||||
},
|
||||
{
|
||||
"id": "pump_1",
|
||||
"class": "syringe_pump_with_valve.runze.SY03B-T08",
|
||||
"parent": "my_station",
|
||||
"config": { "port": "serial_pump", "address": "1", "max_volume": 25.0 }
|
||||
},
|
||||
{
|
||||
"id": "pump_2",
|
||||
"class": "syringe_pump_with_valve.runze.SY03B-T08",
|
||||
"parent": "my_station",
|
||||
"config": { "port": "serial_pump", "address": "2", "max_volume": 25.0 }
|
||||
}
|
||||
],
|
||||
"links": [
|
||||
{
|
||||
"source": "pump_1",
|
||||
"target": "serial_pump",
|
||||
"type": "communication",
|
||||
"port": { "pump_1": "port", "serial_pump": "port" }
|
||||
},
|
||||
{
|
||||
"source": "pump_2",
|
||||
"target": "serial_pump",
|
||||
"type": "communication",
|
||||
"port": { "pump_2": "port", "serial_pump": "port" }
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### 通信协议速查
|
||||
|
||||
| 协议 | config 参数 | 依赖包 | 通信设备 class |
|
||||
| -------------------- | ------------------------------ | ---------- | -------------------------- |
|
||||
| Serial (RS232/RS485) | `port`, `baudrate` | `pyserial` | `serial` |
|
||||
| Modbus RTU | `port`, `baudrate`, `slave_id` | `pymodbus` | `device_comms/modbus_plc/` |
|
||||
| Modbus TCP | `host`, `port`, `slave_id` | `pymodbus` | `device_comms/modbus_plc/` |
|
||||
| TCP Socket | `host`, `port` | stdlib | 自定义 |
|
||||
| HTTP API | `url`, `token` | `requests` | `device_comms/rpc.py` |
|
||||
|
||||
参考实现:`unilabos/test/experiments/Grignard_flow_batchreact_single_pumpvalve.json`
|
||||
|
||||
---
|
||||
|
||||
## Deck 与物料生命周期
|
||||
|
||||
### 1. Deck 入参与两种初始化模式
|
||||
|
||||
系统根据设备节点 `config.deck` 的写法,自动反序列化 Deck 实例后传入 `__init__` 的 `deck` 参数。目前 `deck` 是固定字段名,只支持一个主 Deck。建议一个设备拥有一个台面,台面上抽象二级、三级子物料。
|
||||
|
||||
有两种初始化模式:
|
||||
|
||||
#### init 初始化(推荐)
|
||||
|
||||
`config.deck` 直接包含 `_resource_type` + `_resource_child_name`,系统先用 Deck 节点的 `config` 调用 Deck 类的 `__init__` 反序列化,再将实例传入设备的 `deck` 参数。子物料随 Deck 的 `children` 一起反序列化。
|
||||
|
||||
```json
|
||||
"config": {
|
||||
"deck": {
|
||||
"_resource_type": "unilabos.devices.liquid_handling.prcxi.prcxi:PRCXI9300Deck",
|
||||
"_resource_child_name": "PRCXI_Deck"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### deserialize 初始化
|
||||
|
||||
`config.deck` 用 `data` 包裹一层,系统走 `deserialize` 路径,可传入更多参数(如 `allow_marshal` 等):
|
||||
|
||||
```json
|
||||
"config": {
|
||||
"deck": {
|
||||
"data": {
|
||||
"_resource_child_name": "YB_Bioyond_Deck",
|
||||
"_resource_type": "unilabos.resources.bioyond.decks:BIOYOND_YB_Deck"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
没有特殊需求时推荐 init 初始化。
|
||||
|
||||
#### config.deck 字段说明
|
||||
|
||||
| 字段 | 说明 |
|
||||
|------|------|
|
||||
| `_resource_type` | Deck 类的完整模块路径(`module:ClassName`) |
|
||||
| `_resource_child_name` | 对应图文件中 Deck 节点的 `id`,建立父子关联 |
|
||||
|
||||
#### 设备 __init__ 接收
|
||||
|
||||
```python
|
||||
def __init__(self, config=None, deck=None, protocol_type=None, **kwargs):
|
||||
super().__init__(deck=deck, **kwargs)
|
||||
# deck 已经是反序列化后的 Deck 实例
|
||||
# → PRCXI9300Deck / BIOYOND_YB_Deck 等
|
||||
```
|
||||
|
||||
#### Deck 节点(图文件中)
|
||||
|
||||
Deck 节点作为设备的 `children` 之一,`parent` 指向设备 id:
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "PRCXI_Deck",
|
||||
"parent": "PRCXI",
|
||||
"type": "deck",
|
||||
"class": "",
|
||||
"children": [],
|
||||
"config": {
|
||||
"type": "PRCXI9300Deck",
|
||||
"size_x": 542, "size_y": 374, "size_z": 0,
|
||||
"category": "deck",
|
||||
"sites": [...]
|
||||
},
|
||||
"data": {}
|
||||
}
|
||||
```
|
||||
|
||||
- `config` 中的字段会传入 Deck 类的 `__init__`(因此 `__init__` 必须能接受所有 `serialize()` 输出的字段)
|
||||
- `children` 初始为空时,由同步器或手动初始化填充
|
||||
- `config.type` 填 Deck 类名
|
||||
|
||||
### 2. Deck 为空时自行初始化
|
||||
|
||||
如果 Deck 节点的 `children` 为空,工作站需在 `post_init` 或首次同步时自行初始化内容:
|
||||
|
||||
```python
|
||||
@not_action
|
||||
def post_init(self, ros_node):
|
||||
super().post_init(ros_node)
|
||||
if self.deck and not self.deck.children:
|
||||
self._initialize_default_deck()
|
||||
|
||||
def _initialize_default_deck(self):
|
||||
from my_labware import My_TipRack, My_Plate
|
||||
self.deck.assign_child_resource(My_TipRack("T1"), spot=0)
|
||||
self.deck.assign_child_resource(My_Plate("T2"), spot=1)
|
||||
```
|
||||
|
||||
### 3. 物料双向同步
|
||||
|
||||
当工作站对接外部系统(LIMS/MES)时,需要实现 `ResourceSynchronizer` 处理双向物料同步:
|
||||
|
||||
```python
|
||||
from unilabos.devices.workstation.workstation_base import ResourceSynchronizer
|
||||
|
||||
class MyResourceSynchronizer(ResourceSynchronizer):
|
||||
def sync_from_external(self) -> bool:
|
||||
"""从外部系统同步到 self.workstation.deck"""
|
||||
external_data = self._query_external_materials()
|
||||
# 以外部工站为准:根据外部数据反向创建 PLR 资源实例
|
||||
for item in external_data:
|
||||
cls = self._resolve_resource_class(item["type"])
|
||||
resource = cls(name=item["name"], **item["params"])
|
||||
self.workstation.deck.assign_child_resource(resource, spot=item["slot"])
|
||||
return True
|
||||
|
||||
def sync_to_external(self, resource) -> bool:
|
||||
"""将 UniLab 侧物料变更同步到外部系统"""
|
||||
# 以 UniLab 为准:将 PLR 资源转为外部格式并推送
|
||||
external_format = self._convert_to_external(resource)
|
||||
return self._push_to_external(external_format)
|
||||
|
||||
def handle_external_change(self, change_info) -> bool:
|
||||
"""处理外部系统主动推送的变更"""
|
||||
return True
|
||||
```
|
||||
|
||||
同步策略取决于业务场景:
|
||||
|
||||
- **以外部工站为准**:从外部 API 查询物料数据,反向创建对应的 PLR 资源实例放到 Deck 上
|
||||
- **以 UniLab 为准**:UniLab 侧的物料变更通过 `sync_to_external` 推送到外部系统
|
||||
|
||||
在工作站 `post_init` 中初始化同步器:
|
||||
|
||||
```python
|
||||
@not_action
|
||||
def post_init(self, ros_node):
|
||||
super().post_init(ros_node)
|
||||
self.resource_synchronizer = MyResourceSynchronizer(self)
|
||||
self.resource_synchronizer.sync_from_external()
|
||||
```
|
||||
|
||||
### 4. 序列化与持久化(serialize / serialize_state)
|
||||
|
||||
资源类需正确实现序列化,系统据此完成持久化和前端同步。
|
||||
|
||||
**`serialize()`** — 输出资源的结构信息(`config` 层),反序列化时作为 `__init__` 的入参回传。因此 **`__init__` 必须通过 `**kwargs` 接受 `serialize()` 输出的所有字段**,即使当前不使用:
|
||||
|
||||
```python
|
||||
class MyDeck(Deck):
|
||||
def __init__(self, name, size_x, size_y, size_z,
|
||||
sites=None, # serialize() 输出的字段
|
||||
rotation=None, # serialize() 输出的字段
|
||||
barcode=None, # serialize() 输出的字段
|
||||
**kwargs): # 兜底:接受所有未知的 serialize 字段
|
||||
super().__init__(size_x, size_y, size_z, name)
|
||||
# ...
|
||||
|
||||
def serialize(self) -> dict:
|
||||
data = super().serialize()
|
||||
data["sites"] = [...] # 自定义字段
|
||||
return data
|
||||
```
|
||||
|
||||
**`serialize_state()`** — 输出资源的运行时状态(`data` 层),用于持久化可变信息。`data` 中的内容会被正确保存和恢复:
|
||||
|
||||
```python
|
||||
class MyPlate(Plate):
|
||||
def __init__(self, name, size_x, size_y, size_z,
|
||||
material_info=None, **kwargs):
|
||||
super().__init__(name, size_x, size_y, size_z, **kwargs)
|
||||
self._unilabos_state = {}
|
||||
if material_info:
|
||||
self._unilabos_state["Material"] = material_info
|
||||
|
||||
def serialize_state(self) -> Dict[str, Any]:
|
||||
data = super().serialize_state()
|
||||
data.update(self._unilabos_state)
|
||||
return data
|
||||
```
|
||||
|
||||
关键要点:
|
||||
|
||||
- `serialize()` 输出的所有字段都会作为 `config` 回传到 `__init__`,所以 `__init__` 必须能接受它们(显式声明或 `**kwargs`)
|
||||
- `serialize_state()` 输出的 `data` 用于持久化运行时状态(如物料信息、液体量等)
|
||||
- `_unilabos_state` 中只存可 JSON 序列化的基本类型(str, int, float, bool, list, dict, None)
|
||||
|
||||
### 5. 子物料自动同步
|
||||
|
||||
子物料(Bottle、Plate、TipRack 等)放到 Deck 上后,系统会自动将其同步到前端的 Deck 视图。只需保证资源类正确实现了 `serialize()` / `serialize_state()` 和反序列化即可。
|
||||
|
||||
### 6. 图文件配置(参考 prcxi_9320_slim.json)
|
||||
|
||||
```json
|
||||
{
|
||||
"nodes": [
|
||||
{
|
||||
"id": "my_station",
|
||||
"type": "device",
|
||||
"class": "my_workstation",
|
||||
"config": {
|
||||
"deck": {
|
||||
"_resource_type": "unilabos.resources.my_module:MyDeck",
|
||||
"_resource_child_name": "my_deck"
|
||||
},
|
||||
"host": "10.20.30.1",
|
||||
"port": 9999
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "my_deck",
|
||||
"parent": "my_station",
|
||||
"type": "deck",
|
||||
"class": "",
|
||||
"children": [],
|
||||
"config": {
|
||||
"type": "MyLabDeck",
|
||||
"size_x": 542,
|
||||
"size_y": 374,
|
||||
"size_z": 0,
|
||||
"category": "deck",
|
||||
"sites": [
|
||||
{
|
||||
"label": "T1",
|
||||
"visible": true,
|
||||
"occupied_by": null,
|
||||
"position": { "x": 0, "y": 0, "z": 0 },
|
||||
"size": { "width": 128.0, "height": 86, "depth": 0 },
|
||||
"content_type": ["plate", "tip_rack", "tube_rack", "adaptor"]
|
||||
}
|
||||
]
|
||||
},
|
||||
"data": {}
|
||||
}
|
||||
],
|
||||
"edges": []
|
||||
}
|
||||
```
|
||||
|
||||
Deck 节点要点:
|
||||
|
||||
- `config.type` 填 Deck 类名(如 `"PRCXI9300Deck"`)
|
||||
- `config.sites` 完整列出所有 site(从 Deck 类的 `serialize()` 输出获取)
|
||||
- `children` 初始为空(由同步器或手动初始化填充)
|
||||
- 设备节点 `config.deck._resource_type` 指向 Deck 类的完整模块路径
|
||||
|
||||
---
|
||||
|
||||
## 子设备
|
||||
|
||||
子设备按标准设备接入流程创建(参见 add-device SKILL),使用 `@device` 装饰器。
|
||||
|
||||
子设备约束:
|
||||
|
||||
- 图文件中 `parent` 指向工作站 ID
|
||||
- 在工作站 `children` 数组中列出
|
||||
|
||||
---
|
||||
|
||||
## 关键规则
|
||||
|
||||
1. **`__init__` 必须接受 `deck` 和 `**kwargs`** — `WorkstationBase.__init__` 需要 `deck` 参数
|
||||
2. **Deck 通过 `config.deck._resource_type` 反序列化传入** — 不要在 `__init__` 中手动创建 Deck
|
||||
3. **Deck 为空时自行初始化内容** — 在 `post_init` 中检查并填充默认物料
|
||||
4. **外部同步实现 `ResourceSynchronizer`** — `sync_from_external` / `sync_to_external`
|
||||
5. **通过 `self._children` 访问子设备** — 不要自行维护子设备引用
|
||||
6. **`post_init` 中启动后台服务** — 不要在 `__init__` 中启动网络连接
|
||||
7. **异步方法使用 `await self._ros_node.sleep()`** — 禁止 `time.sleep()` 和 `asyncio.sleep()`
|
||||
8. **使用 `@not_action` 标记非动作方法** — `post_init`, `initialize`, `cleanup`
|
||||
9. **子物料保证正确 serialize/deserialize** — 系统自动同步到前端 Deck 视图
|
||||
|
||||
---
|
||||
|
||||
## 验证
|
||||
|
||||
```bash
|
||||
# 模块可导入
|
||||
python -c "from unilabos.devices.workstation.<name>.<name> import <ClassName>"
|
||||
|
||||
# 启动测试(AST 自动扫描)
|
||||
unilab -g <graph>.json
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 现有工作站参考
|
||||
|
||||
| 工作站 | 驱动类 | 类型 |
|
||||
| -------------- | ----------------------------- | -------- |
|
||||
| Protocol 通用 | `ProtocolNode` | Protocol |
|
||||
| Bioyond 反应站 | `BioyondReactionStation` | 外部系统 |
|
||||
| 纽扣电池组装 | `CoinCellAssemblyWorkstation` | 硬件控制 |
|
||||
|
||||
参考路径:`unilabos/devices/workstation/` 目录下各工作站实现。
|
||||
371
.cursor/skills/add-workstation/reference.md
Normal file
@@ -0,0 +1,371 @@
|
||||
# 工作站高级模式参考
|
||||
|
||||
本文件是 SKILL.md 的补充,包含外部系统集成、物料同步、配置结构等高级模式。
|
||||
Agent 在需要实现这些功能时按需阅读。
|
||||
|
||||
---
|
||||
|
||||
## 1. 外部系统集成模式
|
||||
|
||||
### 1.1 RPC 客户端
|
||||
|
||||
与外部 LIMS/MES 系统通信的标准模式。继承 `BaseRequest`,所有接口统一用 POST。
|
||||
|
||||
```python
|
||||
from unilabos.device_comms.rpc import BaseRequest
|
||||
|
||||
|
||||
class MySystemRPC(BaseRequest):
|
||||
"""外部系统 RPC 客户端"""
|
||||
|
||||
def __init__(self, host: str, api_key: str):
|
||||
super().__init__(host)
|
||||
self.api_key = api_key
|
||||
|
||||
def _request(self, endpoint: str, data: dict = None) -> dict:
|
||||
return self.post(
|
||||
url=f"{self.host}/api/{endpoint}",
|
||||
params={
|
||||
"apiKey": self.api_key,
|
||||
"requestTime": self.get_current_time_iso8601(),
|
||||
"data": data or {},
|
||||
},
|
||||
)
|
||||
|
||||
def query_status(self) -> dict:
|
||||
return self._request("status/query")
|
||||
|
||||
def create_order(self, order_data: dict) -> dict:
|
||||
return self._request("order/create", order_data)
|
||||
```
|
||||
|
||||
参考:`unilabos/devices/workstation/bioyond_studio/bioyond_rpc.py`(`BioyondV1RPC`)
|
||||
|
||||
### 1.2 HTTP 回调服务
|
||||
|
||||
接收外部系统报送的标准模式。使用 `WorkstationHTTPService`,在 `post_init` 中启动。
|
||||
|
||||
```python
|
||||
from unilabos.devices.workstation.workstation_http_service import WorkstationHTTPService
|
||||
|
||||
|
||||
class MyWorkstation(WorkstationBase):
|
||||
def __init__(self, config=None, deck=None, **kwargs):
|
||||
super().__init__(deck=deck, **kwargs)
|
||||
self.config = config or {}
|
||||
http_cfg = self.config.get("http_service_config", {})
|
||||
self._http_service_config = {
|
||||
"host": http_cfg.get("http_service_host", "127.0.0.1"),
|
||||
"port": http_cfg.get("http_service_port", 8080),
|
||||
}
|
||||
self.http_service = None
|
||||
|
||||
def post_init(self, ros_node):
|
||||
super().post_init(ros_node)
|
||||
self.http_service = WorkstationHTTPService(
|
||||
workstation_instance=self,
|
||||
host=self._http_service_config["host"],
|
||||
port=self._http_service_config["port"],
|
||||
)
|
||||
self.http_service.start()
|
||||
```
|
||||
|
||||
**HTTP 服务路由**(固定端点,由 `WorkstationHTTPHandler` 自动分发):
|
||||
|
||||
| 端点 | 调用的工作站方法 |
|
||||
|------|-----------------|
|
||||
| `/report/step_finish` | `process_step_finish_report(report_request)` |
|
||||
| `/report/sample_finish` | `process_sample_finish_report(report_request)` |
|
||||
| `/report/order_finish` | `process_order_finish_report(report_request, used_materials)` |
|
||||
| `/report/material_change` | `process_material_change_report(report_data)` |
|
||||
| `/report/error_handling` | `handle_external_error(error_data)` |
|
||||
|
||||
实现对应方法即可接收回调:
|
||||
|
||||
```python
|
||||
def process_step_finish_report(self, report_request) -> Dict[str, Any]:
|
||||
"""处理步骤完成报告"""
|
||||
step_name = report_request.data.get("stepName")
|
||||
return {"success": True, "message": f"步骤 {step_name} 已处理"}
|
||||
|
||||
def process_order_finish_report(self, report_request, used_materials) -> Dict[str, Any]:
|
||||
"""处理订单完成报告"""
|
||||
order_code = report_request.data.get("orderCode")
|
||||
return {"success": True}
|
||||
```
|
||||
|
||||
参考:`unilabos/devices/workstation/workstation_http_service.py`
|
||||
|
||||
### 1.3 连接监控
|
||||
|
||||
独立线程周期性检测外部系统连接状态,状态变化时发布 ROS 事件。
|
||||
|
||||
```python
|
||||
class ConnectionMonitor:
|
||||
def __init__(self, workstation, check_interval=30):
|
||||
self.workstation = workstation
|
||||
self.check_interval = check_interval
|
||||
self._running = False
|
||||
self._thread = None
|
||||
|
||||
def start(self):
|
||||
self._running = True
|
||||
self._thread = threading.Thread(target=self._monitor_loop, daemon=True)
|
||||
self._thread.start()
|
||||
|
||||
def _monitor_loop(self):
|
||||
while self._running:
|
||||
try:
|
||||
# 调用外部系统接口检测连接
|
||||
self.workstation.hardware_interface.ping()
|
||||
status = "online"
|
||||
except Exception:
|
||||
status = "offline"
|
||||
time.sleep(self.check_interval)
|
||||
```
|
||||
|
||||
参考:`unilabos/devices/workstation/bioyond_studio/station.py`(`ConnectionMonitor`)
|
||||
|
||||
---
|
||||
|
||||
## 2. Config 结构模式
|
||||
|
||||
工作站的 `config` 在图文件中定义,传入 `__init__`。以下是常见字段模式:
|
||||
|
||||
### 2.1 外部系统连接
|
||||
|
||||
```json
|
||||
{
|
||||
"api_host": "http://192.168.1.100:8080",
|
||||
"api_key": "YOUR_API_KEY"
|
||||
}
|
||||
```
|
||||
|
||||
### 2.2 HTTP 回调服务
|
||||
|
||||
```json
|
||||
{
|
||||
"http_service_config": {
|
||||
"http_service_host": "127.0.0.1",
|
||||
"http_service_port": 8080
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 2.3 物料类型映射
|
||||
|
||||
将 PLR 资源类名映射到外部系统的物料类型(名称 + UUID)。用于双向物料转换。
|
||||
|
||||
```json
|
||||
{
|
||||
"material_type_mappings": {
|
||||
"PLR_ResourceClassName": ["外部系统显示名", "external-type-uuid"],
|
||||
"BIOYOND_PolymerStation_Reactor": ["反应器", "3a14233b-902d-0d7b-..."]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 2.4 仓库映射
|
||||
|
||||
将仓库名映射到外部系统的仓库 UUID 和库位 UUID。用于入库/出库操作。
|
||||
|
||||
```json
|
||||
{
|
||||
"warehouse_mapping": {
|
||||
"仓库名": {
|
||||
"uuid": "warehouse-uuid",
|
||||
"site_uuids": {
|
||||
"A01": "site-uuid-A01",
|
||||
"A02": "site-uuid-A02"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 2.5 工作流映射
|
||||
|
||||
将内部工作流名映射到外部系统的工作流 ID。
|
||||
|
||||
```json
|
||||
{
|
||||
"workflow_mappings": {
|
||||
"internal_workflow_name": "external-workflow-uuid"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 2.6 物料默认参数
|
||||
|
||||
```json
|
||||
{
|
||||
"material_default_parameters": {
|
||||
"NMP": {
|
||||
"unit": "毫升",
|
||||
"density": "1.03",
|
||||
"densityUnit": "g/mL",
|
||||
"description": "N-甲基吡咯烷酮"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 3. 资源同步机制
|
||||
|
||||
### 3.1 ResourceSynchronizer
|
||||
|
||||
抽象基类,用于与外部物料系统双向同步。定义在 `workstation_base.py`。
|
||||
|
||||
```python
|
||||
from unilabos.devices.workstation.workstation_base import ResourceSynchronizer
|
||||
|
||||
|
||||
class MyResourceSynchronizer(ResourceSynchronizer):
|
||||
def __init__(self, workstation, api_client):
|
||||
super().__init__(workstation)
|
||||
self.api_client = api_client
|
||||
|
||||
def sync_from_external(self) -> bool:
|
||||
"""从外部系统拉取物料到 deck"""
|
||||
external_materials = self.api_client.list_materials()
|
||||
for material in external_materials:
|
||||
plr_resource = self._convert_to_plr(material)
|
||||
self.workstation.deck.assign_child_resource(plr_resource, coordinate)
|
||||
return True
|
||||
|
||||
def sync_to_external(self, plr_resource) -> bool:
|
||||
"""将 deck 中的物料变更推送到外部系统"""
|
||||
external_data = self._convert_from_plr(plr_resource)
|
||||
self.api_client.update_material(external_data)
|
||||
return True
|
||||
|
||||
def handle_external_change(self, change_info) -> bool:
|
||||
"""处理外部系统推送的物料变更"""
|
||||
return True
|
||||
```
|
||||
|
||||
### 3.2 update_resource — 上传资源树到云端
|
||||
|
||||
将 PLR Deck 序列化后通过 ROS 服务上传。典型使用场景:
|
||||
|
||||
```python
|
||||
# 在 post_init 中上传初始 deck
|
||||
from unilabos.ros.nodes.base_device_node import ROS2DeviceNode
|
||||
|
||||
ROS2DeviceNode.run_async_func(
|
||||
self._ros_node.update_resource, True,
|
||||
**{"resources": [self.deck]}
|
||||
)
|
||||
|
||||
# 在动作方法中更新特定资源
|
||||
ROS2DeviceNode.run_async_func(
|
||||
self._ros_node.update_resource, True,
|
||||
**{"resources": [updated_plate]}
|
||||
)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 4. 工作流序列管理
|
||||
|
||||
工作站通过 `workflow_sequence` 属性管理任务队列(JSON 字符串形式)。
|
||||
|
||||
```python
|
||||
class MyWorkstation(WorkstationBase):
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self._workflow_sequence = []
|
||||
|
||||
@property
|
||||
def workflow_sequence(self) -> str:
|
||||
"""返回 JSON 字符串,ROS 自动发布"""
|
||||
import json
|
||||
return json.dumps(self._workflow_sequence)
|
||||
|
||||
async def append_to_workflow_sequence(self, workflow_name: str) -> Dict[str, Any]:
|
||||
"""添加工作流到队列"""
|
||||
self._workflow_sequence.append({
|
||||
"name": workflow_name,
|
||||
"status": "pending",
|
||||
"created_at": time.time(),
|
||||
})
|
||||
return {"success": True}
|
||||
|
||||
async def clear_workflows(self) -> Dict[str, Any]:
|
||||
"""清空工作流队列"""
|
||||
self._workflow_sequence = []
|
||||
return {"success": True}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 5. 站间物料转移
|
||||
|
||||
工作站之间转移物料的模式。通过 ROS ActionClient 调用目标站的动作。
|
||||
|
||||
```python
|
||||
async def transfer_materials_to_another_station(
|
||||
self,
|
||||
target_device_id: str,
|
||||
transfer_groups: list,
|
||||
**kwargs,
|
||||
) -> Dict[str, Any]:
|
||||
"""将物料转移到另一个工作站"""
|
||||
target_node = self._children.get(target_device_id)
|
||||
if not target_node:
|
||||
# 通过 ROS 节点查找非子设备的目标站
|
||||
pass
|
||||
|
||||
for group in transfer_groups:
|
||||
resource = self.find_resource_by_name(group["resource_name"])
|
||||
# 从本站 deck 移除
|
||||
resource.unassign()
|
||||
# 调用目标站的接收方法
|
||||
# ...
|
||||
|
||||
return {"success": True, "transferred": len(transfer_groups)}
|
||||
```
|
||||
|
||||
参考:`BioyondDispensingStation.transfer_materials_to_reaction_station`
|
||||
|
||||
---
|
||||
|
||||
## 6. post_init 完整模式
|
||||
|
||||
`post_init` 是工作站初始化的关键阶段,此时 ROS 节点和子设备已就绪。
|
||||
|
||||
```python
|
||||
def post_init(self, ros_node):
|
||||
super().post_init(ros_node)
|
||||
|
||||
# 1. 初始化外部系统客户端(此时 config 已可用)
|
||||
self.rpc_client = MySystemRPC(
|
||||
host=self.config.get("api_host"),
|
||||
api_key=self.config.get("api_key"),
|
||||
)
|
||||
self.hardware_interface = self.rpc_client
|
||||
|
||||
# 2. 启动连接监控
|
||||
self.connection_monitor = ConnectionMonitor(self)
|
||||
self.connection_monitor.start()
|
||||
|
||||
# 3. 启动 HTTP 回调服务
|
||||
if hasattr(self, '_http_service_config'):
|
||||
self.http_service = WorkstationHTTPService(
|
||||
workstation_instance=self,
|
||||
host=self._http_service_config["host"],
|
||||
port=self._http_service_config["port"],
|
||||
)
|
||||
self.http_service.start()
|
||||
|
||||
# 4. 上传 deck 到云端
|
||||
ROS2DeviceNode.run_async_func(
|
||||
self._ros_node.update_resource, True,
|
||||
**{"resources": [self.deck]}
|
||||
)
|
||||
|
||||
# 5. 初始化资源同步器(可选)
|
||||
self.resource_synchronizer = MyResourceSynchronizer(self, self.rpc_client)
|
||||
```
|
||||
233
.cursor/skills/batch-insert-reagent/SKILL.md
Normal file
@@ -0,0 +1,233 @@
|
||||
---
|
||||
name: batch-insert-reagent
|
||||
description: Batch insert reagents into Uni-Lab platform — add chemicals with CAS, SMILES, supplier info. Use when the user wants to add reagents, insert chemicals, batch register reagents, or mentions 录入试剂/添加试剂/试剂入库/reagent.
|
||||
---
|
||||
|
||||
# 批量录入试剂 Skill
|
||||
|
||||
通过云端 API 批量录入试剂信息,支持逐条或批量操作。
|
||||
|
||||
## 前置条件(缺一不可)
|
||||
|
||||
使用本 skill 前,**必须**先确认以下信息。如果缺少任何一项,**立即向用户询问并终止**,等补齐后再继续。
|
||||
|
||||
### 1. ak / sk → AUTH
|
||||
|
||||
询问用户的启动参数,从 `--ak` `--sk` 或 config.py 中获取。
|
||||
|
||||
生成 AUTH token(任选一种方式):
|
||||
|
||||
```bash
|
||||
# 方式一:Python 一行生成
|
||||
python -c "import base64,sys; print('Authorization: Lab ' + base64.b64encode(f'{sys.argv[1]}:{sys.argv[2]}'.encode()).decode())" <ak> <sk>
|
||||
|
||||
# 方式二:手动计算
|
||||
# base64(ak:sk) → Authorization: Lab <token>
|
||||
```
|
||||
|
||||
### 2. --addr → BASE URL
|
||||
|
||||
| `--addr` 值 | BASE |
|
||||
|-------------|------|
|
||||
| `test` | `https://uni-lab.test.bohrium.com` |
|
||||
| `uat` | `https://uni-lab.uat.bohrium.com` |
|
||||
| `local` | `http://127.0.0.1:48197` |
|
||||
| 不传(默认) | `https://uni-lab.bohrium.com` |
|
||||
|
||||
确认后设置:
|
||||
```bash
|
||||
BASE="<根据 addr 确定的 URL>"
|
||||
AUTH="Authorization: Lab <上面命令输出的 token>"
|
||||
```
|
||||
|
||||
**两项全部就绪后才可发起 API 请求。**
|
||||
|
||||
## Session State
|
||||
|
||||
- `lab_uuid` — 实验室 UUID(首次通过 API #1 自动获取,**不需要问用户**)
|
||||
|
||||
## 请求约定
|
||||
|
||||
所有请求使用 `curl -s`,POST 需加 `Content-Type: application/json`。
|
||||
|
||||
> **Windows 平台**必须使用 `curl.exe`(而非 PowerShell 的 `curl` 别名),示例中的 `curl` 均指 `curl.exe`。
|
||||
|
||||
---
|
||||
|
||||
## API Endpoints
|
||||
|
||||
### 1. 获取实验室信息(自动获取 lab_uuid)
|
||||
|
||||
```bash
|
||||
curl -s -X GET "$BASE/api/v1/edge/lab/info" -H "$AUTH"
|
||||
```
|
||||
|
||||
返回:
|
||||
|
||||
```json
|
||||
{"code": 0, "data": {"uuid": "xxx", "name": "实验室名称"}}
|
||||
```
|
||||
|
||||
记住 `data.uuid` 为 `lab_uuid`。
|
||||
|
||||
### 2. 录入试剂
|
||||
|
||||
```bash
|
||||
curl -s -X POST "$BASE/api/v1/lab/reagent" \
|
||||
-H "$AUTH" -H "Content-Type: application/json" \
|
||||
-d '{
|
||||
"lab_uuid": "<lab_uuid>",
|
||||
"cas": "<CAS号>",
|
||||
"name": "<试剂名称>",
|
||||
"molecular_formula": "<分子式>",
|
||||
"smiles": "<SMILES>",
|
||||
"stock_in_quantity": <入库数量>,
|
||||
"unit": "<单位字符串>",
|
||||
"supplier": "<供应商>",
|
||||
"production_date": "<生产日期 ISO 8601>",
|
||||
"expiry_date": "<过期日期 ISO 8601>"
|
||||
}'
|
||||
```
|
||||
|
||||
返回成功时包含试剂 UUID:
|
||||
```json
|
||||
{"code": 0, "data": {"uuid": "xxx", ...}}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 试剂字段说明
|
||||
|
||||
| 字段 | 类型 | 必填 | 说明 | 示例 |
|
||||
|------|------|------|------|------|
|
||||
| `lab_uuid` | string | 是 | 实验室 UUID(从 API #1 获取) | `"8511c672-..."` |
|
||||
| `cas` | string | 是 | CAS 注册号 | `"7732-18-3"` |
|
||||
| `name` | string | 是 | 试剂中文/英文名称 | `"水"` |
|
||||
| `molecular_formula` | string | 是 | 分子式 | `"H2O"` |
|
||||
| `smiles` | string | 是 | SMILES 表示 | `"O"` |
|
||||
| `stock_in_quantity` | number | 是 | 入库数量 | `10` |
|
||||
| `unit` | string | 是 | 单位(字符串,见下表) | `"mL"` |
|
||||
| `supplier` | string | 否 | 供应商名称 | `"国药集团"` |
|
||||
| `production_date` | string | 否 | 生产日期(ISO 8601) | `"2025-11-18T00:00:00Z"` |
|
||||
| `expiry_date` | string | 否 | 过期日期(ISO 8601) | `"2026-11-18T00:00:00Z"` |
|
||||
|
||||
### unit 单位值
|
||||
|
||||
| 值 | 单位 |
|
||||
|------|------|
|
||||
| `"mL"` | 毫升 |
|
||||
| `"L"` | 升 |
|
||||
| `"g"` | 克 |
|
||||
| `"kg"` | 千克 |
|
||||
| `"瓶"` | 瓶 |
|
||||
|
||||
> 根据试剂状态选择:液体用 `"mL"` / `"L"`,固体用 `"g"` / `"kg"`。
|
||||
|
||||
---
|
||||
|
||||
## 批量录入策略
|
||||
|
||||
### 方式一:用户提供 JSON 数组
|
||||
|
||||
用户一次性给出多条试剂数据:
|
||||
|
||||
```json
|
||||
[
|
||||
{"cas": "7732-18-3", "name": "水", "molecular_formula": "H2O", "smiles": "O", "stock_in_quantity": 10, "unit": "mL"},
|
||||
{"cas": "64-17-5", "name": "乙醇", "molecular_formula": "C2H6O", "smiles": "CCO", "stock_in_quantity": 5, "unit": "L"}
|
||||
]
|
||||
```
|
||||
|
||||
Agent 自动为每条补充 `lab_uuid`、`production_date`、`expiry_date` 等字段后逐条提交。
|
||||
|
||||
Agent 循环调用 API #2 逐条录入,每条记录一次 API 调用。
|
||||
|
||||
### 方式二:用户逐个描述
|
||||
|
||||
用户口头描述试剂(如「帮我录入 500mL 的无水乙醇,Sigma 的」),agent 自行补全字段:
|
||||
|
||||
1. 根据名称查找 CAS 号、分子式、SMILES(参考下方速查表或自行推断)
|
||||
2. 构建完整的请求体
|
||||
3. 向用户确认后提交
|
||||
|
||||
### 方式三:从 CSV/表格批量导入
|
||||
|
||||
用户提供 CSV 或表格文件路径,agent 读取并解析:
|
||||
|
||||
```bash
|
||||
# 期望的 CSV 格式(首行为表头)
|
||||
cas,name,molecular_formula,smiles,stock_in_quantity,unit,supplier,production_date,expiry_date
|
||||
7732-18-3,水,H2O,O,10,mL,农夫山泉,2025-11-18T00:00:00Z,2026-11-18T00:00:00Z
|
||||
```
|
||||
|
||||
### 执行与汇报
|
||||
|
||||
每次 API 调用后:
|
||||
1. 检查返回 `code`(0 = 成功)
|
||||
2. 记录成功/失败数量
|
||||
3. 全部完成后汇总:「共录入 N 条试剂,成功 X 条,失败 Y 条」
|
||||
4. 如有失败,列出失败的试剂名称和错误信息
|
||||
|
||||
---
|
||||
|
||||
## 常见试剂速查表
|
||||
|
||||
| 名称 | CAS | 分子式 | SMILES |
|
||||
|------|-----|--------|--------|
|
||||
| 水 | 7732-18-3 | H2O | O |
|
||||
| 乙醇 | 64-17-5 | C2H6O | CCO |
|
||||
| 甲醇 | 67-56-1 | CH4O | CO |
|
||||
| 丙酮 | 67-64-1 | C3H6O | CC(C)=O |
|
||||
| 二甲基亚砜(DMSO) | 67-68-5 | C2H6OS | CS(C)=O |
|
||||
| 乙酸乙酯 | 141-78-6 | C4H8O2 | CCOC(C)=O |
|
||||
| 二氯甲烷 | 75-09-2 | CH2Cl2 | ClCCl |
|
||||
| 四氢呋喃(THF) | 109-99-9 | C4H8O | C1CCOC1 |
|
||||
| N,N-二甲基甲酰胺(DMF) | 68-12-2 | C3H7NO | CN(C)C=O |
|
||||
| 氯仿 | 67-66-3 | CHCl3 | ClC(Cl)Cl |
|
||||
| 乙腈 | 75-05-8 | C2H3N | CC#N |
|
||||
| 甲苯 | 108-88-3 | C7H8 | Cc1ccccc1 |
|
||||
| 正己烷 | 110-54-3 | C6H14 | CCCCCC |
|
||||
| 异丙醇 | 67-63-0 | C3H8O | CC(C)O |
|
||||
| 盐酸 | 7647-01-0 | HCl | Cl |
|
||||
| 硫酸 | 7664-93-9 | H2SO4 | OS(O)(=O)=O |
|
||||
| 氢氧化钠 | 1310-73-2 | NaOH | [Na]O |
|
||||
| 碳酸钠 | 497-19-8 | Na2CO3 | [Na]OC([O-])=O.[Na+] |
|
||||
| 氯化钠 | 7647-14-5 | NaCl | [Na]Cl |
|
||||
| 乙二胺四乙酸(EDTA) | 60-00-4 | C10H16N2O8 | OC(=O)CN(CCN(CC(O)=O)CC(O)=O)CC(O)=O |
|
||||
|
||||
> 此表仅供快速参考。对于不在表中的试剂,agent 应根据化学知识推断或提示用户补充。
|
||||
|
||||
---
|
||||
|
||||
## 完整工作流 Checklist
|
||||
|
||||
```
|
||||
Task Progress:
|
||||
- [ ] Step 1: 确认 ak/sk → 生成 AUTH token
|
||||
- [ ] Step 2: 确认 --addr → 设置 BASE URL
|
||||
- [ ] Step 3: GET /edge/lab/info → 获取 lab_uuid
|
||||
- [ ] Step 4: 收集试剂信息(用户提供列表/逐个描述/CSV文件)
|
||||
- [ ] Step 5: 补全缺失字段(CAS、分子式、SMILES 等)
|
||||
- [ ] Step 6: 向用户确认待录入的试剂列表
|
||||
- [ ] Step 7: 循环调用 POST /lab/reagent 逐条录入(每条需含 lab_uuid)
|
||||
- [ ] Step 8: 汇总结果(成功/失败数量及详情)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 完整示例
|
||||
|
||||
用户说:「帮我录入 3 种试剂:500mL 无水乙醇、1kg 氯化钠、2L 去离子水」
|
||||
|
||||
Agent 构建的请求序列:
|
||||
|
||||
```json
|
||||
// 第 1 条
|
||||
{"lab_uuid": "8511c672-...", "cas": "64-17-5", "name": "无水乙醇", "molecular_formula": "C2H6O", "smiles": "CCO", "stock_in_quantity": 500, "unit": "mL", "supplier": "国药集团", "production_date": "2025-01-01T00:00:00Z", "expiry_date": "2026-01-01T00:00:00Z"}
|
||||
|
||||
// 第 2 条
|
||||
{"lab_uuid": "8511c672-...", "cas": "7647-14-5", "name": "氯化钠", "molecular_formula": "NaCl", "smiles": "[Na]Cl", "stock_in_quantity": 1, "unit": "kg", "supplier": "", "production_date": "2025-01-01T00:00:00Z", "expiry_date": "2026-01-01T00:00:00Z"}
|
||||
|
||||
// 第 3 条
|
||||
{"lab_uuid": "8511c672-...", "cas": "7732-18-3", "name": "去离子水", "molecular_formula": "H2O", "smiles": "O", "stock_in_quantity": 2, "unit": "L", "supplier": "", "production_date": "2025-01-01T00:00:00Z", "expiry_date": "2026-01-01T00:00:00Z"}
|
||||
```
|
||||
301
.cursor/skills/batch-submit-experiment/SKILL.md
Normal file
@@ -0,0 +1,301 @@
|
||||
---
|
||||
name: batch-submit-experiment
|
||||
description: Batch submit experiments (notebooks) to Uni-Lab platform — list workflows, generate node_params from registry schemas, submit multiple rounds. Use when the user wants to submit experiments, create notebooks, batch run workflows, or mentions 提交实验/批量实验/notebook/实验轮次.
|
||||
---
|
||||
|
||||
# 批量提交实验指南
|
||||
|
||||
通过云端 API 批量提交实验(notebook),支持多轮实验参数配置。根据 workflow 模板详情和本地设备注册表自动生成 `node_params` 模板。
|
||||
|
||||
## 前置条件(缺一不可)
|
||||
|
||||
使用本指南前,**必须**先确认以下信息。如果缺少任何一项,**立即向用户询问并终止**,等补齐后再继续。
|
||||
|
||||
### 1. ak / sk → AUTH
|
||||
|
||||
询问用户的启动参数,从 `--ak` `--sk` 或 config.py 中获取。
|
||||
|
||||
生成 AUTH token(任选一种方式):
|
||||
|
||||
```bash
|
||||
# 方式一:Python 一行生成
|
||||
python -c "import base64,sys; print('Authorization: Lab ' + base64.b64encode(f'{sys.argv[1]}:{sys.argv[2]}'.encode()).decode())" <ak> <sk>
|
||||
|
||||
# 方式二:手动计算
|
||||
# base64(ak:sk) → Authorization: Lab <token>
|
||||
```
|
||||
|
||||
### 2. --addr → BASE URL
|
||||
|
||||
| `--addr` 值 | BASE |
|
||||
|-------------|------|
|
||||
| `test` | `https://uni-lab.test.bohrium.com` |
|
||||
| `uat` | `https://uni-lab.uat.bohrium.com` |
|
||||
| `local` | `http://127.0.0.1:48197` |
|
||||
| 不传(默认) | `https://uni-lab.bohrium.com` |
|
||||
|
||||
确认后设置:
|
||||
```bash
|
||||
BASE="<根据 addr 确定的 URL>"
|
||||
AUTH="Authorization: Lab <上面命令输出的 token>"
|
||||
```
|
||||
|
||||
### 3. req_device_registry_upload.json(设备注册表)
|
||||
|
||||
**批量提交实验时需要本地注册表来解析 workflow 节点的参数 schema。**
|
||||
|
||||
按优先级搜索:
|
||||
|
||||
```
|
||||
<workspace 根目录>/unilabos_data/req_device_registry_upload.json
|
||||
<workspace 根目录>/req_device_registry_upload.json
|
||||
```
|
||||
|
||||
也可直接 Glob 搜索:`**/req_device_registry_upload.json`
|
||||
|
||||
找到后**检查文件修改时间**并告知用户。超过 1 天提醒用户是否需要重新启动 `unilab`。
|
||||
|
||||
**如果文件不存在** → 告知用户先运行 `unilab` 启动命令,等注册表生成后再执行。可跳过此步,但将无法自动生成参数模板,需要用户手动填写 `param`。
|
||||
|
||||
### 4. workflow_uuid(目标工作流)
|
||||
|
||||
用户需要提供要提交的 workflow UUID。如果用户不确定,通过 API #2 列出可用 workflow 供选择。
|
||||
|
||||
**四项全部就绪后才可开始。**
|
||||
|
||||
## Session State
|
||||
|
||||
在整个对话过程中,agent 需要记住以下状态,避免重复询问用户:
|
||||
|
||||
- `lab_uuid` — 实验室 UUID(首次通过 API #1 自动获取,**不需要问用户**)
|
||||
- `workflow_uuid` — 工作流 UUID(用户提供或从列表选择)
|
||||
- `workflow_nodes` — workflow 中各 action 节点的 uuid、设备 ID、动作名(从 API #3 获取)
|
||||
|
||||
## 请求约定
|
||||
|
||||
所有请求使用 `curl -s`,POST 需加 `Content-Type: application/json`。
|
||||
|
||||
> **Windows 平台**必须使用 `curl.exe`(而非 PowerShell 的 `curl` 别名),示例中的 `curl` 均指 `curl.exe`。
|
||||
>
|
||||
> **PowerShell JSON 传参**:PowerShell 中 `-d '{"key":"value"}'` 会因引号转义失败。请将 JSON 写入临时文件,用 `-d '@tmp_body.json'`(单引号包裹 `@`,否则会被解析为 splatting 运算符)。
|
||||
|
||||
---
|
||||
|
||||
## API Endpoints
|
||||
|
||||
### 1. 获取实验室信息(自动获取 lab_uuid)
|
||||
|
||||
```bash
|
||||
curl -s -X GET "$BASE/api/v1/edge/lab/info" -H "$AUTH"
|
||||
```
|
||||
|
||||
返回:
|
||||
|
||||
```json
|
||||
{"code": 0, "data": {"uuid": "xxx", "name": "实验室名称"}}
|
||||
```
|
||||
|
||||
记住 `data.uuid` 为 `lab_uuid`。
|
||||
|
||||
### 2. 列出可用 workflow
|
||||
|
||||
```bash
|
||||
curl -s -X GET "$BASE/api/v1/lab/workflow/workflows?page=1&page_size=20&lab_uuid=$lab_uuid" -H "$AUTH"
|
||||
```
|
||||
|
||||
返回 workflow 列表,展示给用户选择。列出每个 workflow 的 `uuid` 和 `name`。
|
||||
|
||||
### 3. 获取 workflow 模板详情
|
||||
|
||||
```bash
|
||||
curl -s -X GET "$BASE/api/v1/lab/workflow/template/detail/$workflow_uuid" -H "$AUTH"
|
||||
```
|
||||
|
||||
返回 workflow 的完整结构,包含所有 action 节点信息。需要从响应中提取:
|
||||
- 每个 action 节点的 `node_uuid`
|
||||
- 每个节点对应的设备 ID(`resource_template_name`)
|
||||
- 每个节点的动作名(`node_template_name`)
|
||||
- 每个节点的现有参数(`param`)
|
||||
|
||||
> **注意**:此 API 返回格式可能因版本不同而有差异。首次调用时,先打印完整响应分析结构,再提取节点信息。常见的节点字段路径为 `data.nodes[]` 或 `data.workflow_nodes[]`。
|
||||
|
||||
### 4. 提交实验(创建 notebook)
|
||||
|
||||
```bash
|
||||
curl -s -X POST "$BASE/api/v1/lab/notebook" \
|
||||
-H "$AUTH" -H "Content-Type: application/json" \
|
||||
-d '<request_body>'
|
||||
```
|
||||
|
||||
请求体结构:
|
||||
|
||||
```json
|
||||
{
|
||||
"lab_uuid": "<lab_uuid>",
|
||||
"workflow_uuid": "<workflow_uuid>",
|
||||
"name": "<实验名称>",
|
||||
"node_params": [
|
||||
{
|
||||
"sample_uuids": ["<样品UUID1>", "<样品UUID2>"],
|
||||
"datas": [
|
||||
{
|
||||
"node_uuid": "<workflow中的节点UUID>",
|
||||
"param": {},
|
||||
"sample_params": [
|
||||
{
|
||||
"container_uuid": "<容器UUID>",
|
||||
"sample_value": {
|
||||
"liquid_names": "<液体名称>",
|
||||
"volumes": 1000
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
> **注意**:`sample_uuids` 必须是 **UUID 数组**(`[]uuid.UUID`),不是字符串。无样品时传空数组 `[]`。
|
||||
|
||||
---
|
||||
|
||||
## Notebook 请求体详解
|
||||
|
||||
### node_params 结构
|
||||
|
||||
`node_params` 是一个数组,**每个元素代表一轮实验**:
|
||||
|
||||
- 要跑 2 轮 → `node_params` 有 2 个元素
|
||||
- 要跑 N 轮 → `node_params` 有 N 个元素
|
||||
|
||||
### 每轮的字段
|
||||
|
||||
| 字段 | 类型 | 说明 |
|
||||
|------|------|------|
|
||||
| `sample_uuids` | array\<uuid\> | 该轮实验的样品 UUID 数组,无样品时传 `[]` |
|
||||
| `datas` | array | 该轮中每个 workflow 节点的参数配置 |
|
||||
|
||||
### datas 中每个节点
|
||||
|
||||
| 字段 | 类型 | 说明 |
|
||||
|------|------|------|
|
||||
| `node_uuid` | string | workflow 模板中的节点 UUID(从 API #3 获取) |
|
||||
| `param` | object | 动作参数(根据本地注册表 schema 填写) |
|
||||
| `sample_params` | array | 样品相关参数(液体名、体积等) |
|
||||
|
||||
### sample_params 中每条
|
||||
|
||||
| 字段 | 类型 | 说明 |
|
||||
|------|------|------|
|
||||
| `container_uuid` | string | 容器 UUID |
|
||||
| `sample_value` | object | 样品值,如 `{"liquid_names": "水", "volumes": 1000}` |
|
||||
|
||||
---
|
||||
|
||||
## 从本地注册表生成 param 模板
|
||||
|
||||
### 自动方式 — 运行脚本
|
||||
|
||||
```bash
|
||||
python scripts/gen_notebook_params.py \
|
||||
--auth <token> \
|
||||
--base <BASE_URL> \
|
||||
--workflow-uuid <workflow_uuid> \
|
||||
[--registry <path/to/req_device_registry_upload.json>] \
|
||||
[--rounds <轮次数>] \
|
||||
[--output <输出文件路径>]
|
||||
```
|
||||
|
||||
> 脚本位于本文档同级目录下的 `scripts/gen_notebook_params.py`。
|
||||
|
||||
脚本会:
|
||||
1. 调用 workflow detail API 获取所有 action 节点
|
||||
2. 读取本地注册表,为每个节点查找对应的 action schema
|
||||
3. 生成 `notebook_template.json`,包含:
|
||||
- 完整 `node_params` 骨架
|
||||
- 每个节点的 param 字段及类型说明
|
||||
- `_schema_info` 辅助信息(不提交,仅供参考)
|
||||
|
||||
### 手动方式
|
||||
|
||||
如果脚本不可用或注册表不存在:
|
||||
|
||||
1. 调用 API #3 获取 workflow 详情
|
||||
2. 找到每个 action 节点的 `node_uuid`
|
||||
3. 在本地注册表中查找对应设备的 `action_value_mappings`:
|
||||
```
|
||||
resources[].id == <device_id>
|
||||
→ resources[].class.action_value_mappings.<action_name>.schema.properties.goal.properties
|
||||
```
|
||||
4. 将 schema 中的 properties 作为 `param` 的字段模板
|
||||
5. 按轮次复制 `node_params` 元素,让用户填写每轮的具体值
|
||||
|
||||
### 注册表结构参考
|
||||
|
||||
```json
|
||||
{
|
||||
"resources": [
|
||||
{
|
||||
"id": "liquid_handler.prcxi",
|
||||
"class": {
|
||||
"module": "unilabos.devices.xxx:ClassName",
|
||||
"action_value_mappings": {
|
||||
"transfer_liquid": {
|
||||
"type": "LiquidHandlerTransfer",
|
||||
"schema": {
|
||||
"properties": {
|
||||
"goal": {
|
||||
"properties": {
|
||||
"asp_vols": {"type": "array", "items": {"type": "number"}},
|
||||
"sources": {"type": "array"}
|
||||
},
|
||||
"required": ["asp_vols", "sources"]
|
||||
}
|
||||
}
|
||||
},
|
||||
"goal_default": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
`param` 填写时,使用 `goal.properties` 中的字段名和类型。
|
||||
|
||||
---
|
||||
|
||||
## 完整工作流 Checklist
|
||||
|
||||
```
|
||||
Task Progress:
|
||||
- [ ] Step 1: 确认 ak/sk → 生成 AUTH token
|
||||
- [ ] Step 2: 确认 --addr → 设置 BASE URL
|
||||
- [ ] Step 3: GET /edge/lab/info → 获取 lab_uuid
|
||||
- [ ] Step 4: 确认 workflow_uuid(用户提供或从 GET #2 列表选择)
|
||||
- [ ] Step 5: GET workflow detail (#3) → 提取各节点 uuid、设备ID、动作名
|
||||
- [ ] Step 6: 定位本地注册表 req_device_registry_upload.json
|
||||
- [ ] Step 7: 运行 gen_notebook_params.py 或手动匹配 → 生成 node_params 模板
|
||||
- [ ] Step 8: 引导用户填写每轮的参数(sample_uuids、param、sample_params)
|
||||
- [ ] Step 9: 构建完整请求体 → POST /lab/notebook 提交
|
||||
- [ ] Step 10: 检查返回结果,确认提交成功
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 常见问题
|
||||
|
||||
### Q: workflow 中有多个节点,每轮都要填所有节点的参数吗?
|
||||
|
||||
是的。`datas` 数组中需要包含该轮实验涉及的每个 workflow 节点的参数。通常每个 action 节点都需要一条 `datas` 记录。
|
||||
|
||||
### Q: 多轮实验的参数完全不同吗?
|
||||
|
||||
通常每轮的 `param`(设备动作参数)可能相同或相似,但 `sample_uuids` 和 `sample_params`(样品信息)每轮不同。脚本生成模板时会按轮次复制骨架,用户只需修改差异部分。
|
||||
|
||||
### Q: 如何获取 sample_uuids 和 container_uuid?
|
||||
|
||||
这些 UUID 通常来自实验室的样品管理系统。向用户询问,或从资源树(API `GET /lab/material/download/$lab_uuid`)中查找。
|
||||
@@ -0,0 +1,394 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
从 workflow 模板详情 + 本地设备注册表生成 notebook 提交用的 node_params 模板。
|
||||
|
||||
用法:
|
||||
python gen_notebook_params.py --auth <token> --base <url> --workflow-uuid <uuid> [选项]
|
||||
|
||||
选项:
|
||||
--auth <token> Lab token(base64(ak:sk) 的结果,不含 "Lab " 前缀)
|
||||
--base <url> API 基础 URL(如 https://uni-lab.test.bohrium.com)
|
||||
--workflow-uuid <uuid> 目标 workflow 的 UUID
|
||||
--registry <path> 本地注册表文件路径(默认自动搜索)
|
||||
--rounds <n> 实验轮次数(默认 1)
|
||||
--output <path> 输出模板文件路径(默认 notebook_template.json)
|
||||
--dump-response 打印 workflow detail API 的原始响应(调试用)
|
||||
|
||||
示例:
|
||||
python gen_notebook_params.py \\
|
||||
--auth YTFmZDlkNGUtxxxx \\
|
||||
--base https://uni-lab.test.bohrium.com \\
|
||||
--workflow-uuid abc-123-def \\
|
||||
--rounds 2
|
||||
"""
|
||||
import copy
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from urllib.request import Request, urlopen
|
||||
from urllib.error import HTTPError, URLError
|
||||
|
||||
REGISTRY_FILENAME = "req_device_registry_upload.json"
|
||||
|
||||
|
||||
def find_registry(explicit_path=None):
    """Locate the local registry file (same search logic as extract_device_actions.py).

    Search order: explicit path (file or containing directory) -> candidates
    relative to the CWD -> the workspace root inferred from this script's
    location -> up to five parent directories of the CWD.

    Returns the path string, or None when nothing is found.
    """
    if explicit_path:
        # An explicit path may point at the registry file itself or at its directory.
        if os.path.isfile(explicit_path):
            return explicit_path
        if os.path.isdir(explicit_path):
            candidate = os.path.join(explicit_path, REGISTRY_FILENAME)
            if os.path.isfile(candidate):
                return candidate
        print(f"警告: 指定的注册表路径不存在: {explicit_path}")
        return None

    relative_candidates = (
        os.path.join("unilabos_data", REGISTRY_FILENAME),
        REGISTRY_FILENAME,
    )

    def first_hit(base=None):
        # Return the first existing candidate, optionally rooted at *base*.
        for rel in relative_candidates:
            full = os.path.join(base, rel) if base else rel
            if os.path.isfile(full):
                return full
        return None

    found = first_hit()
    if found:
        return found

    # Fall back to the workspace root three levels above this script.
    here = os.path.dirname(os.path.abspath(__file__))
    workspace_root = os.path.normpath(os.path.join(here, "..", "..", ".."))
    found = first_hit(workspace_root)
    if found:
        return found

    # Finally walk up to five parents of the current working directory.
    directory = os.getcwd()
    for _ in range(5):
        parent = os.path.dirname(directory)
        if parent == directory:
            break
        directory = parent
        found = first_hit(directory)
        if found:
            return found
    return None
|
||||
|
||||
|
||||
def load_registry(path):
    """Read the registry JSON file at *path* and return the parsed data."""
    with open(path, encoding="utf-8") as fh:
        content = fh.read()
    return json.loads(content)
|
||||
|
||||
|
||||
def build_registry_index(registry_data):
    """Index each registered device's action schemas by its device id.

    Returns ``{device_id: action_value_mappings}``; resources missing either
    an ``id`` or any action mappings are dropped.
    """
    pairs = (
        (
            res.get("id", ""),
            res.get("class", {}).get("action_value_mappings", {}),
        )
        for res in registry_data.get("resources", [])
    )
    return {device_id: mappings for device_id, mappings in pairs if device_id and mappings}
|
||||
|
||||
|
||||
def flatten_goal_schema(action_data):
    """Extract the goal-level schema from an action_value_mappings entry.

    Falls back to the full schema when no ``properties.goal`` layer exists.
    """
    full_schema = action_data.get("schema", {})
    goal_layer = full_schema.get("properties", {}).get("goal", {})
    return goal_layer or full_schema
|
||||
|
||||
|
||||
def build_param_template(goal_schema):
    """Build a ``param`` skeleton from a goal schema, with typed placeholders.

    Fields keep their schema ``default`` when one is present; otherwise a
    type-appropriate placeholder is substituted. The framework-injected
    ``unilabos_device_id`` field is skipped.
    """
    required_fields = set(goal_schema.get("required", []))
    template = {}
    for name, definition in goal_schema.get("properties", {}).items():
        if name == "unilabos_device_id":
            continue
        declared_type = definition.get("type", "any")
        default_value = definition.get("default")
        if default_value is not None:
            template[name] = default_value
        elif declared_type == "string":
            requiredness = "required" if name in required_fields else "optional"
            template[name] = f"$TODO ({declared_type}, {requiredness})"
        elif declared_type in ("number", "integer"):
            template[name] = 0
        elif declared_type == "boolean":
            template[name] = False
        elif declared_type == "array":
            template[name] = []
        elif declared_type == "object":
            template[name] = {}
        else:
            # Unknown or missing type: leave an explicit placeholder.
            template[name] = f"$TODO ({declared_type})"
    return template
|
||||
|
||||
|
||||
def fetch_workflow_detail(base_url, auth_token, workflow_uuid):
    """GET the workflow template detail; return the parsed JSON, or None on error.

    HTTP and network failures are printed (not raised) and map to None.
    """
    endpoint = f"{base_url}/api/v1/lab/workflow/template/detail/{workflow_uuid}"
    request = Request(endpoint, method="GET")
    request.add_header("Authorization", f"Lab {auth_token}")
    try:
        with urlopen(request, timeout=30) as response:
            payload = response.read().decode("utf-8")
    except HTTPError as err:
        # Server responded with an error status; surface the body for debugging.
        body = err.read().decode("utf-8", errors="replace")
        print(f"API 错误 {err.code}: {body}")
        return None
    except URLError as err:
        print(f"网络错误: {err.reason}")
        return None
    return json.loads(payload)
|
||||
|
||||
|
||||
def extract_nodes_from_response(response):
    """Extract action nodes from a workflow-detail API response.

    Tolerates several possible response layouts (known list keys, a bare
    list, or the first list-of-dicts value found in the payload).

    Returns: [(node_uuid, resource_template_name, node_template_name,
    existing_param), ...]; nodes without a uuid are dropped.
    """
    data = response.get("data", response)

    # Preferred layout: one of the known keys holds the node list.
    nodes_raw = next(
        (data[key] for key in ("nodes", "workflow_nodes", "node_list", "steps")
         if key in data and isinstance(data[key], list)),
        None,
    )

    if nodes_raw is None:
        if isinstance(data, list):
            nodes_raw = data
        else:
            # Last resort: the first list-of-dicts value anywhere in data.
            for value in data.values():
                if isinstance(value, list) and len(value) > 0 and isinstance(value[0], dict):
                    nodes_raw = value
                    break

    if not nodes_raw:
        print("警告: 未能从响应中提取节点列表")
        print("响应顶层 keys:", list(data.keys()) if isinstance(data, dict) else type(data).__name__)
        return []

    def first_truthy(node, keys):
        # First truthy value among the alias keys, else "".
        for key in keys:
            value = node.get(key)
            if value:
                return value
        return ""

    extracted = []
    for node in nodes_raw:
        if not isinstance(node, dict):
            continue

        node_uuid = first_truthy(node, ("uuid", "node_uuid", "id"))
        resource_name = first_truthy(
            node, ("resource_template_name", "device_id", "resource_name", "device_name"))
        template_name = first_truthy(
            node, ("node_template_name", "action_name", "template_name", "action", "name"))
        existing_param = node.get("param", {}) or {}

        if node_uuid:
            extracted.append((node_uuid, resource_name, template_name, existing_param))

    return extracted
|
||||
|
||||
|
||||
def generate_template(nodes, registry_index, rounds):
    """Generate the notebook submission template.

    Args:
        nodes: list of (node_uuid, resource_template_name, node_template_name,
            existing_param) tuples from extract_nodes_from_response().
        registry_index: device_id -> action_value_mappings mapping from
            build_registry_index().
        rounds: number of experiment rounds; ``node_params`` gets one entry
            per round (each a deep copy, so rounds can be edited independently).

    Returns:
        A dict ready to be serialized as notebook_template.json, including a
        "_schema_info" helper section that must be removed before submission.
    """
    node_params = []
    schema_info = {}

    datas_template = []
    for node_uuid, resource_name, template_name, existing_param in nodes:
        param_template = {}
        matched = False

        # Prefer the schema from the local registry when device + action match.
        if resource_name and template_name and resource_name in registry_index:
            avm = registry_index[resource_name]
            if template_name in avm:
                goal_schema = flatten_goal_schema(avm[template_name])
                param_template = build_param_template(goal_schema)
                # Overlay non-None goal defaults onto the generated skeleton.
                goal_default = avm[template_name].get("goal_default", {})
                if goal_default:
                    for k, v in goal_default.items():
                        if k in param_template and v is not None:
                            param_template[k] = v
                matched = True

                schema_info[node_uuid] = {
                    "device_id": resource_name,
                    "action_name": template_name,
                    "action_type": avm[template_name].get("type", ""),
                    "schema_properties": list(goal_schema.get("properties", {}).keys()),
                    "required": goal_schema.get("required", []),
                }

        # Fall back to the params already present on the workflow node.
        if not matched and existing_param:
            param_template = existing_param

        if not matched and not existing_param:
            schema_info[node_uuid] = {
                "device_id": resource_name,
                "action_name": template_name,
                "warning": "未在本地注册表中找到匹配的 action schema",
            }

        datas_template.append({
            "node_uuid": node_uuid,
            "param": param_template,
            "sample_params": [
                {
                    "container_uuid": "$TODO_CONTAINER_UUID",
                    "sample_value": {
                        "liquid_names": "$TODO_LIQUID_NAME",
                        "volumes": 0,
                    },
                }
            ],
        })

    for i in range(rounds):
        node_params.append({
            # The API requires sample_uuids to be an ARRAY of UUIDs (empty
            # array when a round has no samples), so the placeholder is
            # emitted inside a list rather than as a bare string.
            "sample_uuids": [f"$TODO_SAMPLE_UUID_ROUND_{i + 1}"],
            "datas": copy.deepcopy(datas_template),
        })

    return {
        "lab_uuid": "$TODO_LAB_UUID",
        "workflow_uuid": "$TODO_WORKFLOW_UUID",
        "name": "$TODO_EXPERIMENT_NAME",
        "node_params": node_params,
        "_schema_info(仅参考,提交时删除)": schema_info,
    }
|
||||
|
||||
|
||||
def parse_args(argv):
    """Minimal CLI argument parser; returns an options dict.

    Unknown flags (and value-taking flags missing their value) are reported
    and skipped rather than raising.
    """
    opts = {
        "auth": None,
        "base": None,
        "workflow_uuid": None,
        "registry": None,
        "rounds": 1,
        "output": "notebook_template.json",
        "dump_response": False,
    }
    # Flags that consume the following token, with a transform for the value.
    valued_flags = {
        "--auth": ("auth", str),
        "--base": ("base", lambda s: s.rstrip("/")),
        "--workflow-uuid": ("workflow_uuid", str),
        "--registry": ("registry", str),
        "--rounds": ("rounds", int),
        "--output": ("output", str),
    }
    pos = 0
    while pos < len(argv):
        flag = argv[pos]
        if flag in valued_flags and pos + 1 < len(argv):
            key, convert = valued_flags[flag]
            opts[key] = convert(argv[pos + 1])
            pos += 2
        elif flag == "--dump-response":
            opts["dump_response"] = True
            pos += 1
        else:
            print(f"未知参数: {flag}")
            pos += 1
    return opts
|
||||
|
||||
|
||||
def main():
    """CLI entry point: fetch a workflow's detail, match each action node
    against the local device registry, and write a notebook_template.json
    skeleton for batch experiment submission.

    Exits with status 1 when required arguments are missing, the
    workflow-detail request fails, or no action nodes can be extracted.
    """
    opts = parse_args(sys.argv[1:])

    # --auth, --base and --workflow-uuid are all mandatory; print usage and bail.
    if not opts["auth"] or not opts["base"] or not opts["workflow_uuid"]:
        print("用法:")
        print(" python gen_notebook_params.py --auth <token> --base <url> --workflow-uuid <uuid> [选项]")
        print()
        print("必需参数:")
        print(" --auth <token> Lab token(base64(ak:sk))")
        print(" --base <url> API 基础 URL")
        print(" --workflow-uuid <uuid> 目标 workflow UUID")
        print()
        print("可选参数:")
        print(" --registry <path> 注册表文件路径(默认自动搜索)")
        print(" --rounds <n> 实验轮次数(默认 1)")
        print(" --output <path> 输出文件路径(默认 notebook_template.json)")
        print(" --dump-response 打印 API 原始响应")
        sys.exit(1)

    # 1. Locate and load the local device registry (optional — the script
    #    still runs without it, but param templates cannot be generated).
    registry_path = find_registry(opts["registry"])
    registry_index = {}
    if registry_path:
        # Report the registry's mtime so the user can judge its freshness.
        mtime = os.path.getmtime(registry_path)
        gen_time = datetime.fromtimestamp(mtime).strftime("%Y-%m-%d %H:%M:%S")
        print(f"注册表: {registry_path} (生成时间: {gen_time})")
        registry_data = load_registry(registry_path)
        registry_index = build_registry_index(registry_data)
        print(f"已索引 {len(registry_index)} 个设备的 action schemas")
    else:
        print("警告: 未找到本地注册表,将跳过 param 模板生成")
        print(" 提交时需要手动填写各节点的 param 字段")

    # 2. Fetch the workflow detail from the cloud API.
    print(f"\n正在获取 workflow 详情: {opts['workflow_uuid']}")
    response = fetch_workflow_detail(opts["base"], opts["auth"], opts["workflow_uuid"])
    if not response:
        print("错误: 无法获取 workflow 详情")
        sys.exit(1)

    # Optional debugging aid: dump (up to 5000 chars of) the raw response.
    if opts["dump_response"]:
        print("\n=== API 原始响应 ===")
        print(json.dumps(response, indent=2, ensure_ascii=False)[:5000])
        print("=== 响应结束(截断至 5000 字符) ===\n")

    # 3. Extract the action nodes from the response.
    nodes = extract_nodes_from_response(response)
    if not nodes:
        print("错误: 未能从 workflow 中提取任何 action 节点")
        print("请使用 --dump-response 查看原始响应结构")
        sys.exit(1)

    # Summarize the nodes; "Schema" marks whether a local schema was matched.
    print(f"\n找到 {len(nodes)} 个 action 节点:")
    print(f" {'节点 UUID':<40} {'设备 ID':<30} {'动作名':<25} {'Schema'}")
    print(" " + "-" * 110)
    for node_uuid, resource_name, template_name, _ in nodes:
        matched = "✓" if (resource_name in registry_index and
                          template_name in registry_index.get(resource_name, {})) else "✗"
        print(f" {node_uuid:<40} {resource_name:<30} {template_name:<25} {matched}")

    # 4. Generate the template and write it to disk.
    template = generate_template(nodes, registry_index, opts["rounds"])
    template["workflow_uuid"] = opts["workflow_uuid"]

    output_path = opts["output"]
    with open(output_path, "w", encoding="utf-8") as f:
        json.dump(template, f, indent=2, ensure_ascii=False)
    print(f"\n模板已写入: {output_path}")
    print(f" 轮次数: {opts['rounds']}")
    print(f" 节点数/轮: {len(nodes)}")
    print()
    print("下一步:")
    print(" 1. 打开模板文件,将 $TODO 占位符替换为实际值")
    print(" 2. 删除 _schema_info 字段(仅供参考)")
    print(" 3. 使用 POST /api/v1/lab/notebook 提交")
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
328
.cursor/skills/create-device-skill/SKILL.md
Normal file
@@ -0,0 +1,328 @@
|
||||
---
|
||||
name: create-device-skill
|
||||
description: Create a skill for any Uni-Lab device by extracting action schemas from the device registry. Use when the user wants to create a new device skill, add device API documentation, or set up action schemas for a device.
|
||||
---
|
||||
|
||||
# 创建设备 Skill 指南
|
||||
|
||||
本 meta-skill 教你如何为任意 Uni-Lab-OS 设备创建完整的 API 操作技能(参考 `unilab-device-api` 的成功案例)。
|
||||
|
||||
## 数据源
|
||||
|
||||
- **设备注册表**: `unilabos_data/req_device_registry_upload.json`
|
||||
- **结构**: `{ "resources": [{ "id": "<device_id>", "class": { "module": "<python_module:ClassName>", "action_value_mappings": { ... } } }] }`
|
||||
- **生成时机**: `unilab` 启动并完成注册表上传后自动生成
|
||||
- **module 字段**: 格式 `unilabos.devices.xxx.yyy:ClassName`,可转为源码路径 `unilabos/devices/xxx/yyy.py`,阅读源码可了解参数含义和设备行为
|
||||
|
||||
## 创建流程
|
||||
|
||||
### Step 0 — 收集必备信息(缺一不可,否则询问后终止)
|
||||
|
||||
开始前**必须**确认以下 4 项信息全部就绪。如果用户未提供任何一项,**立即询问并终止当前流程**,等用户补齐后再继续。
|
||||
|
||||
向用户提问:「请提供你的 unilab 启动参数,我需要以下信息:」
|
||||
|
||||
#### 必备项 ①:ak / sk(认证凭据)
|
||||
|
||||
来源:启动命令的 `--ak` `--sk` 参数,或 config.py 中的 `ak = "..."` `sk = "..."`。
|
||||
|
||||
获取后立即生成 AUTH token:
|
||||
|
||||
```bash
|
||||
python ./scripts/gen_auth.py <ak> <sk>
|
||||
# 或从 config.py 提取
|
||||
python ./scripts/gen_auth.py --config <config.py>
|
||||
```
|
||||
|
||||
认证算法:`base64(ak:sk)` → `Authorization: Lab <token>`
|
||||
|
||||
#### 必备项 ②:--addr(目标环境)
|
||||
|
||||
决定 API 请求发往哪个服务器。从启动命令的 `--addr` 参数获取:
|
||||
|
||||
| `--addr` 值 | BASE URL |
|
||||
|-------------|----------|
|
||||
| `test` | `https://uni-lab.test.bohrium.com` |
|
||||
| `uat` | `https://uni-lab.uat.bohrium.com` |
|
||||
| `local` | `http://127.0.0.1:48197` |
|
||||
| 不传(默认) | `https://uni-lab.bohrium.com` |
|
||||
| 其他自定义 URL | 直接使用该 URL |
|
||||
|
||||
#### 必备项 ③:req_device_registry_upload.json(设备注册表)
|
||||
|
||||
数据文件由 `unilab` 启动时自动生成,需要定位它:
|
||||
|
||||
**推断 working_dir**(即 `unilabos_data` 所在目录):
|
||||
|
||||
| 条件 | working_dir 取值 |
|
||||
|------|------------------|
|
||||
| 传了 `--working_dir` | `<working_dir>/unilabos_data/`(若子目录已存在则直接用) |
|
||||
| 仅传了 `--config` | `<config 文件所在目录>/unilabos_data/` |
|
||||
| 都没传 | `<当前工作目录>/unilabos_data/` |
|
||||
|
||||
**按优先级搜索文件**:
|
||||
|
||||
```
|
||||
<推断的 working_dir>/unilabos_data/req_device_registry_upload.json
|
||||
<推断的 working_dir>/req_device_registry_upload.json
|
||||
<workspace 根目录>/unilabos_data/req_device_registry_upload.json
|
||||
```
|
||||
|
||||
也可以直接 Glob 搜索:`**/req_device_registry_upload.json`
|
||||
|
||||
找到后**必须检查文件修改时间**并告知用户:「找到注册表文件 `<路径>`,生成于 `<时间>`。请确认这是最近一次启动生成的。」超过 1 天提醒用户是否需要重新启动 `unilab`。
|
||||
|
||||
**如果文件不存在** → 告知用户先运行 `unilab` 启动命令,等日志出现 `注册表响应数据已保存` 后再执行本流程。**终止。**
|
||||
|
||||
#### 必备项 ④:目标设备
|
||||
|
||||
用户需要明确要为哪个设备创建 skill。可以是设备名称(如「PRCXI 移液站」)或 device_id(如 `liquid_handler.prcxi`)。
|
||||
|
||||
如果用户不确定,运行提取脚本列出所有设备供选择:
|
||||
|
||||
```bash
|
||||
python ./scripts/extract_device_actions.py --registry <找到的文件路径>
|
||||
```
|
||||
|
||||
#### 完整示例
|
||||
|
||||
用户提供:
|
||||
|
||||
```
|
||||
--ak a1fd9d4e-xxxx-xxxx-xxxx-d9a69c09f0fd
|
||||
--sk 136ff5c6-xxxx-xxxx-xxxx-a03e301f827b
|
||||
--addr test
|
||||
--port 8003
|
||||
--disable_browser
|
||||
```
|
||||
|
||||
从中提取:
|
||||
- ✅ ak/sk → 运行 `gen_auth.py` 得到 `AUTH="Authorization: Lab YTFmZDlk..."`
|
||||
- ✅ addr=test → `BASE=https://uni-lab.test.bohrium.com`
|
||||
- ✅ 搜索 `unilabos_data/req_device_registry_upload.json` → 找到并确认时间
|
||||
- ✅ 用户指明目标设备 → 如 `liquid_handler.prcxi`
|
||||
|
||||
**四项全部就绪后才进入 Step 1。**
|
||||
|
||||
### Step 1 — 列出可用设备
|
||||
|
||||
运行提取脚本,列出所有设备及 action 数量和 Python 源码路径,让用户选择:
|
||||
|
||||
```bash
|
||||
# 自动搜索(默认在 unilabos_data/ 和当前目录查找)
|
||||
python ./scripts/extract_device_actions.py
|
||||
|
||||
# 指定注册表文件路径
|
||||
python ./scripts/extract_device_actions.py --registry <path/to/req_device_registry_upload.json>
|
||||
```
|
||||
|
||||
脚本输出包含每个设备的 **Python 源码路径**(从 `class.module` 转换),可用于后续阅读源码理解参数含义。
|
||||
|
||||
### Step 2 — 提取 Action Schema
|
||||
|
||||
用户选择设备后,运行提取脚本:
|
||||
|
||||
```bash
|
||||
python ./scripts/extract_device_actions.py [--registry <path>] <device_id> ./skills/<skill-name>/actions/
|
||||
```
|
||||
|
||||
脚本会显示设备的 Python 源码路径和类名,方便阅读源码了解参数含义。
|
||||
|
||||
每个 action 生成一个 JSON 文件,包含:
|
||||
- `type` — 作为 API 调用的 `action_type`
|
||||
- `schema` — 完整 JSON Schema(含 `properties.goal.properties` 参数定义)
|
||||
- `goal` — goal 字段映射(含占位符 `$placeholder`)
|
||||
- `goal_default` — 默认值
|
||||
|
||||
### Step 3 — 写 action-index.md
|
||||
|
||||
按模板为每个 action 写条目:
|
||||
|
||||
```markdown
|
||||
### `<action_name>`
|
||||
|
||||
<用途描述(一句话)>
|
||||
|
||||
- **Schema**: [`actions/<filename>.json`](actions/<filename>.json)
|
||||
- **核心参数**: `param1`, `param2`(从 schema.required 获取)
|
||||
- **可选参数**: `param3`, `param4`
|
||||
- **占位符字段**: `field`(需填入物料信息,值以 `$` 开头)
|
||||
```
|
||||
|
||||
描述规则:
|
||||
- 从 `schema.properties` 读参数列表(schema 已提升为 goal 内容)
|
||||
- 从 `schema.required` 区分核心/可选参数
|
||||
- 按功能分类(移液、枪头、外设等)
|
||||
- 标注 `placeholder_keys` 中的字段类型:
|
||||
- `unilabos_resources` → **ResourceSlot**,填入 `{id, name, uuid}`(id 是路径格式,从资源树取物料节点)
|
||||
- `unilabos_devices` → **DeviceSlot**,填入路径字符串如 `"/host_node"`(从资源树筛选 type=device)
|
||||
- `unilabos_nodes` → **NodeSlot**,填入路径字符串如 `"/PRCXI/PRCXI_Deck"`(资源树中任意节点)
|
||||
- `unilabos_class` → **ClassSlot**,填入类名字符串如 `"container"`(从注册表查找)
|
||||
- array 类型字段 → `[{id, name, uuid}, ...]`
|
||||
- 特殊:`create_resource` 的 `res_id`(ResourceSlot)可填不存在的路径
|
||||
|
||||
### Step 4 — 写 SKILL.md
|
||||
|
||||
直接复用 `unilab-device-api` 的 API 模板(10 个 endpoint),修改:
|
||||
- 设备名称
|
||||
- Action 数量
|
||||
- 目录列表
|
||||
- Session state 中的 `device_name`
|
||||
- **AUTH 头** — 使用 Step 0 中 `gen_auth.py` 生成的 `Authorization: Lab <token>`(不要硬编码 `Api` 类型的 key)
|
||||
- **Python 源码路径** — 在 SKILL.md 开头注明设备对应的源码文件,方便参考参数含义
|
||||
- **Slot 字段表** — 列出本设备哪些 action 的哪些字段需要填入 Slot(物料/设备/节点/类名)
|
||||
|
||||
API 模板结构:
|
||||
|
||||
```markdown
|
||||
## 设备信息
|
||||
- device_id, Python 源码路径, 设备类名
|
||||
|
||||
## 前置条件(缺一不可)
|
||||
- ak/sk → AUTH, --addr → BASE URL
|
||||
|
||||
## Session State
|
||||
- lab_uuid(通过 API #1 自动匹配,不要问用户), device_name
|
||||
|
||||
## API Endpoints (10 个)
|
||||
# 注意:
|
||||
# - #1 获取 lab 列表 + 自动匹配 lab_uuid(遍历 is_admin 的 lab,
|
||||
# 调用 /lab/info/{uuid} 比对 access_key == ak)
|
||||
# - #2 创建工作流用 POST /lab/workflow
|
||||
# - #10 获取资源树路径含 lab_uuid: /lab/material/download/{lab_uuid}
|
||||
|
||||
## Placeholder Slot 填写规则
|
||||
- unilabos_resources → ResourceSlot → {"id":"/path/name","name":"name","uuid":"xxx"}
|
||||
- unilabos_devices → DeviceSlot → "/parent/device" 路径字符串
|
||||
- unilabos_nodes → NodeSlot → "/parent/node" 路径字符串
|
||||
- unilabos_class → ClassSlot → "class_name" 字符串
|
||||
- 特例:create_resource 的 res_id 允许填不存在的路径
|
||||
- 列出本设备所有 Slot 字段、类型及含义
|
||||
|
||||
## 渐进加载策略
|
||||
## 完整工作流 Checklist
|
||||
```
|
||||
|
||||
### Step 5 — 验证
|
||||
|
||||
检查文件完整性:
|
||||
- [ ] `SKILL.md` 包含 10 个 API endpoint
|
||||
- [ ] `SKILL.md` 包含 Placeholder Slot 填写规则(ResourceSlot / DeviceSlot / NodeSlot / ClassSlot + create_resource 特例)和本设备的 Slot 字段表
|
||||
- [ ] `action-index.md` 列出所有 action 并有描述
|
||||
- [ ] `actions/` 目录中每个 action 有对应 JSON 文件
|
||||
- [ ] JSON 文件包含 `type`, `schema`(已提升为 goal 内容), `goal`, `goal_default`, `placeholder_keys` 字段
|
||||
- [ ] 描述能让 agent 判断该用哪个 action
|
||||
|
||||
## Action JSON 文件结构
|
||||
|
||||
```json
|
||||
{
|
||||
"type": "LiquidHandlerTransfer", // → API 的 action_type
|
||||
"goal": { // goal 字段映射
|
||||
"sources": "sources",
|
||||
"targets": "targets",
|
||||
"tip_racks": "tip_racks",
|
||||
"asp_vols": "asp_vols"
|
||||
},
|
||||
"schema": { // ← 直接是 goal 的 schema(已提升)
|
||||
"type": "object",
|
||||
"properties": { // 参数定义(即请求中 goal 的字段)
|
||||
"sources": { "type": "array", "items": { "type": "object" } },
|
||||
"targets": { "type": "array", "items": { "type": "object" } },
|
||||
"asp_vols": { "type": "array", "items": { "type": "number" } }
|
||||
},
|
||||
"required": [...],
|
||||
"_unilabos_placeholder_info": { // ← Slot 类型标记
|
||||
"sources": "unilabos_resources",
|
||||
"targets": "unilabos_resources",
|
||||
"tip_racks": "unilabos_resources"
|
||||
}
|
||||
},
|
||||
"goal_default": { ... }, // 默认值
|
||||
"placeholder_keys": { // ← 汇总所有 Slot 字段
|
||||
"sources": "unilabos_resources", // ResourceSlot
|
||||
"targets": "unilabos_resources",
|
||||
"tip_racks": "unilabos_resources",
|
||||
"target_device_id": "unilabos_devices" // DeviceSlot
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
> **注意**:`schema` 已由脚本从原始 `schema.properties.goal` 提升为顶层,直接包含参数定义。
|
||||
> `schema.properties` 中的字段即为 API 请求 `param.goal` 中的字段。
|
||||
|
||||
## Placeholder Slot 类型体系
|
||||
|
||||
`placeholder_keys` / `_unilabos_placeholder_info` 中有 4 种值,对应不同的填写方式:
|
||||
|
||||
| placeholder 值 | Slot 类型 | 填写格式 | 选取范围 |
|
||||
|---------------|-----------|---------|---------|
|
||||
| `unilabos_resources` | ResourceSlot | `{"id": "/path/name", "name": "name", "uuid": "xxx"}` | 仅**物料**节点(不含设备) |
|
||||
| `unilabos_devices` | DeviceSlot | `"/parent/device_name"` | 仅**设备**节点(type=device),路径字符串 |
|
||||
| `unilabos_nodes` | NodeSlot | `"/parent/node_name"` | **设备 + 物料**,即所有节点,路径字符串 |
|
||||
| `unilabos_class` | ClassSlot | `"class_name"` | 注册表中已上报的资源类 name |
|
||||
|
||||
### ResourceSlot(`unilabos_resources`)
|
||||
|
||||
最常见的类型。从资源树中选取**物料**节点(孔板、枪头盒、试剂槽等):
|
||||
|
||||
```json
|
||||
{"id": "/workstation/container1", "name": "container1", "uuid": "ff149a9a-2cb8-419d-8db5-d3ba056fb3c2"}
|
||||
```
|
||||
|
||||
- 单个(schema type=object):`{"id": "/path/name", "name": "name", "uuid": "xxx"}`
|
||||
- 数组(schema type=array):`[{"id": "/path/a", "name": "a", "uuid": "xxx"}, ...]`
|
||||
- `id` 本身是从 parent 计算的路径格式
|
||||
- 根据 action 语义选择正确的物料(如 `sources` = 液体来源,`targets` = 目标位置)
|
||||
|
||||
> **特例**:`create_resource` 的 `res_id` 字段,目标物料可能**尚不存在**,此时直接填写期望的路径(如 `"/workstation/container1"`),不需要 uuid。
|
||||
|
||||
### DeviceSlot(`unilabos_devices`)
|
||||
|
||||
填写**设备路径字符串**。从资源树中筛选 type=device 的节点,从 parent 计算路径:
|
||||
|
||||
```
|
||||
"/host_node"
|
||||
"/bioyond_cell/reaction_station"
|
||||
```
|
||||
|
||||
- 只填路径字符串,不需要 `{id, uuid}` 对象
|
||||
- 根据 action 语义选择正确的设备(如 `target_device_id` = 目标设备)
|
||||
|
||||
### NodeSlot(`unilabos_nodes`)
|
||||
|
||||
范围 = 设备 + 物料。即资源树中**所有节点**都可以选,填写**路径字符串**:
|
||||
|
||||
```
|
||||
"/PRCXI/PRCXI_Deck"
|
||||
```
|
||||
|
||||
- 使用场景:当参数既可能指向物料也可能指向设备时(如 `PumpTransferProtocol` 的 `from_vessel`/`to_vessel`,`create_resource` 的 `parent`)
|
||||
|
||||
### ClassSlot(`unilabos_class`)
|
||||
|
||||
填写注册表中已上报的**资源类 name**。从本地 `req_resource_registry_upload.json` 中查找:
|
||||
|
||||
```
|
||||
"container"
|
||||
```
|
||||
|
||||
### 通过 API #10 获取资源树
|
||||
|
||||
```bash
|
||||
curl -s -X GET "$BASE/api/v1/lab/material/download/$lab_uuid" -H "$AUTH"
|
||||
```
|
||||
|
||||
注意 `lab_uuid` 在路径中(不是查询参数)。资源树返回所有节点,每个节点包含 `id`(路径格式)、`name`、`uuid`、`type`、`parent` 等字段。填写 Slot 时需根据 placeholder 类型筛选正确的节点。
|
||||
|
||||
## 最终目录结构
|
||||
|
||||
```
|
||||
./<skill-name>/
|
||||
├── SKILL.md # API 端点 + 渐进加载指引
|
||||
├── action-index.md # 动作索引:描述/用途/核心参数
|
||||
└── actions/ # 每个 action 的完整 JSON Schema
|
||||
├── action1.json
|
||||
├── action2.json
|
||||
└── ...
|
||||
```
|
||||
@@ -0,0 +1,200 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
从 req_device_registry_upload.json 中提取指定设备的 action schema。
|
||||
|
||||
用法:
|
||||
# 列出所有设备及 action 数量(自动搜索注册表文件)
|
||||
python extract_device_actions.py
|
||||
|
||||
# 指定注册表文件路径
|
||||
python extract_device_actions.py --registry <path/to/req_device_registry_upload.json>
|
||||
|
||||
# 提取指定设备的 action 到目录
|
||||
python extract_device_actions.py <device_id> <output_dir>
|
||||
python extract_device_actions.py --registry <path> <device_id> <output_dir>
|
||||
|
||||
示例:
|
||||
python extract_device_actions.py --registry unilabos_data/req_device_registry_upload.json
|
||||
python extract_device_actions.py liquid_handler.prcxi .cursor/skills/unilab-device-api/actions/
|
||||
"""
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
|
||||
REGISTRY_FILENAME = "req_device_registry_upload.json"
|
||||
|
||||
def find_registry(explicit_path=None):
    """Locate req_device_registry_upload.json.

    Search order:
      1. the path given via --registry (a file, or a directory containing it)
      2. <cwd>/unilabos_data/ and <cwd>/ (relative candidates)
      3. the workspace root, three levels above this script
      4. up to 5 parent directories of the current working directory

    Returns the first path found, or None.
    """
    if explicit_path:
        if os.path.isfile(explicit_path):
            return explicit_path
        if os.path.isdir(explicit_path):
            direct = os.path.join(explicit_path, REGISTRY_FILENAME)
            if os.path.isfile(direct):
                return direct
        print(f"警告: 指定的路径不存在: {explicit_path}")
        return None

    relative_candidates = [
        os.path.join("unilabos_data", REGISTRY_FILENAME),
        REGISTRY_FILENAME,
    ]

    def _probe(base):
        # First existing candidate under *base*, else None.
        for rel in relative_candidates:
            candidate = os.path.join(base, rel)
            if os.path.isfile(candidate):
                return candidate
        return None

    # Bare relative paths resolve against the current working directory.
    for rel in relative_candidates:
        if os.path.isfile(rel):
            return rel

    here = os.path.dirname(os.path.abspath(__file__))
    workspace_root = os.path.normpath(os.path.join(here, "..", "..", ".."))
    hit = _probe(workspace_root)
    if hit:
        return hit

    # Walk up to 5 parent directories, stopping at the filesystem root.
    level = os.getcwd()
    for _ in range(5):
        parent = os.path.dirname(level)
        if parent == level:
            break
        level = parent
        hit = _probe(level)
        if hit:
            return hit

    return None
|
||||
|
||||
def load_registry(path):
    """Read and parse the registry JSON file at *path*."""
    with open(path, encoding="utf-8") as handle:
        return json.load(handle)
|
||||
|
||||
def list_devices(data):
    """Return (device_id, action_count, module) for every resource whose
    class declares a non-empty action_value_mappings."""
    found = []
    for entry in data.get('resources', []):
        klass = entry.get('class', {})
        mappings = klass.get('action_value_mappings', {})
        if not mappings:
            continue
        found.append((entry.get('id', ''), len(mappings), klass.get('module', '')))
    return found
|
||||
|
||||
def flatten_schema_to_goal(action_data):
    """Promote the nested schema.properties.goal to be the top-level schema,
    dropping the feedback/result wrapper.

    Returns a shallow copy with the promoted schema when a goal sub-schema is
    present; otherwise returns *action_data* unchanged.
    """
    goal = (
        action_data.get('schema', {})
        .get('properties', {})
        .get('goal', {})
    )
    if not goal:
        return action_data
    promoted = dict(action_data)
    promoted['schema'] = goal
    return promoted
|
||||
|
||||
|
||||
def extract_actions(data, device_id, output_dir):
    """Write one JSON file per action of *device_id* into *output_dir*.

    Each file holds the flattened action schema (goal promoted to top level).
    Returns the list of filenames written; an empty list when the device is
    missing or exposes no action_value_mappings.
    """
    for res in data.get('resources', []):
        if res.get('id') != device_id:
            continue

        cls = res.get('class', {})
        module = cls.get('module', '')
        avm = cls.get('action_value_mappings', {})
        if not avm:
            print(f"设备 {device_id} 没有 action_value_mappings")
            return []

        if module:
            # module is "pkg.mod:ClassName" — surface the source file so the
            # caller can read it for parameter semantics.
            py_path = module.split(":")[0].replace(".", "/") + ".py"
            class_name = module.split(":")[-1] if ":" in module else ""
            print(f"Python 源码: {py_path}")
            if class_name:
                print(f"设备类: {class_name}")

        os.makedirs(output_dir, exist_ok=True)
        written = []
        for action_name in sorted(avm):
            payload = flatten_schema_to_goal(avm[action_name])
            fname = action_name.replace('-', '_') + '.json'
            fpath = os.path.join(output_dir, fname)
            with open(fpath, 'w', encoding='utf-8') as fh:
                json.dump(payload, fh, indent=2, ensure_ascii=False)
            written.append(fname)
            print(f"  {fpath}")
        return written

    print(f"设备 {device_id} 未找到")
    return []
|
||||
|
||||
def main():
    """CLI entry point: list all devices, or dump one device's action schemas.

    With no positional args, prints every device with an action count; with
    two positional args (<device_id> <dir>), extracts that device's actions.
    """
    argv = sys.argv[1:]
    explicit_registry = None

    # Peel the optional "--registry <path>" pair off the argument list first.
    if "--registry" in argv:
        pos = argv.index("--registry")
        if pos + 1 >= len(argv):
            print("错误: --registry 需要指定路径")
            sys.exit(1)
        explicit_registry = argv[pos + 1]
        argv = argv[:pos] + argv[pos + 2:]

    registry_path = find_registry(explicit_registry)
    if not registry_path:
        print(f"错误: 找不到 {REGISTRY_FILENAME}")
        print()
        print("解决方法:")
        print("  1. 先运行 unilab 启动命令,等待注册表生成")
        print("  2. 用 --registry 指定文件路径:")
        print(f"     python {sys.argv[0]} --registry <path/to/{REGISTRY_FILENAME}>")
        print()
        print("搜索过的路径:")
        searched = (
            os.path.join("unilabos_data", REGISTRY_FILENAME),
            REGISTRY_FILENAME,
            os.path.join("<workspace_root>", "unilabos_data", REGISTRY_FILENAME),
        )
        for p in searched:
            print(f"  - {p}")
        sys.exit(1)

    # Surface file metadata so the user can confirm the registry is fresh.
    print(f"注册表: {registry_path}")
    mtime = os.path.getmtime(registry_path)
    gen_time = datetime.fromtimestamp(mtime).strftime("%Y-%m-%d %H:%M:%S")
    size_mb = os.path.getsize(registry_path) / (1024 * 1024)
    print(f"生成时间: {gen_time} (文件大小: {size_mb:.1f} MB)")
    data = load_registry(registry_path)

    if not argv:
        # No positional args: tabulate every device with actions.
        devices = list_devices(data)
        print(f"\n找到 {len(devices)} 个设备:")
        print(f"{'设备 ID':<50} {'Actions':>7} {'Python 模块'}")
        print("-" * 120)
        for did, count, module in sorted(devices, key=lambda item: item[0]):
            py_path = module.split(":")[0].replace(".", "/") + ".py" if module else ""
            print(f"{did:<50} {count:>7} {py_path}")

    elif len(argv) == 2:
        device_id, output_dir = argv
        print(f"\n提取 {device_id} 的 actions 到 {output_dir}/")
        written = extract_actions(data, device_id, output_dir)
        if written:
            print(f"\n共写入 {len(written)} 个 action 文件")

    else:
        print("用法:")
        print("  python extract_device_actions.py [--registry <path>]                 # 列出设备")
        print("  python extract_device_actions.py [--registry <path>] <device_id> <dir>  # 提取 actions")
        sys.exit(1)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
69
.cursor/skills/create-device-skill/scripts/gen_auth.py
Normal file
@@ -0,0 +1,69 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
从 ak/sk 生成 UniLab API Authorization header。
|
||||
|
||||
算法: base64(ak:sk) → "Authorization: Lab <token>"
|
||||
|
||||
用法:
|
||||
python gen_auth.py <ak> <sk>
|
||||
python gen_auth.py --config <config.py>
|
||||
|
||||
示例:
|
||||
python gen_auth.py myak mysk
|
||||
python gen_auth.py --config experiments/config.py
|
||||
"""
|
||||
import base64
|
||||
import re
|
||||
import sys
|
||||
|
||||
|
||||
def gen_auth(ak: str, sk: str) -> str:
    """Return base64("ak:sk") — the token part of the Lab Authorization header."""
    credentials = f"{ak}:{sk}".encode("utf-8")
    return base64.b64encode(credentials).decode("utf-8")
|
||||
|
||||
|
||||
def extract_from_config(config_path: str) -> tuple:
    """Pull the ak and sk assignments out of a config.py.

    Returns (ak, sk) on success, or (None, None) if either is missing.
    """
    with open(config_path, "r", encoding="utf-8") as fh:
        text = fh.read()
    values = {}
    for key in ("ak", "sk"):
        # Matches e.g.: ak = "xxx"  or  ak = 'xxx'
        hit = re.search(key + r'''\s*=\s*["']([^"']+)["']''', text)
        if hit is None:
            return None, None
        values[key] = hit.group(1)
    return values["ak"], values["sk"]
|
||||
|
||||
|
||||
def main():
    """CLI entry point: print the Lab Authorization header derived from ak/sk.

    Accepts either two positional args (<ak> <sk>) or --config <config.py>.
    """
    argv = sys.argv[1:]

    if len(argv) == 2 and argv[0] == "--config":
        # Pull credentials out of a config.py instead of the command line.
        ak, sk = extract_from_config(argv[1])
        if not ak or not sk:
            print(f"错误: 在 {argv[1]} 中未找到 ak/sk 配置")
            print("期望格式: ak = \"xxx\" sk = \"xxx\"")
            sys.exit(1)
        print(f"配置文件: {argv[1]}")
    elif len(argv) == 2:
        ak, sk = argv
    else:
        print("用法:")
        print("  python gen_auth.py <ak> <sk>")
        print("  python gen_auth.py --config <config.py>")
        sys.exit(1)

    token = gen_auth(ak, sk)
    print(f"ak: {ak}")
    print(f"sk: {sk}")
    print()
    print("Authorization header:")
    print(f"  Authorization: Lab {token}")
    print()
    print("curl 用法:")
    print(f'  curl -H "Authorization: Lab {token}" ...')
    print()
    print("Shell 变量:")
    print(f'  AUTH="Authorization: Lab {token}"')
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
19
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
version: 2
|
||||
updates:
|
||||
# GitHub Actions
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
target-branch: "dev"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
day: "monday"
|
||||
time: "06:00"
|
||||
open-pull-requests-limit: 5
|
||||
reviewers:
|
||||
- "msgcenterpy-team"
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "github-actions"
|
||||
commit-message:
|
||||
prefix: "ci"
|
||||
include: "scope"
|
||||
67
.github/workflows/ci-check.yml
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
name: CI Check
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, dev]
|
||||
pull_request:
|
||||
branches: [main, dev]
|
||||
|
||||
jobs:
|
||||
registry-check:
|
||||
runs-on: windows-latest
|
||||
|
||||
env:
|
||||
# Fix Unicode encoding issue on Windows runner (cp1252 -> utf-8)
|
||||
PYTHONIOENCODING: utf-8
|
||||
PYTHONUTF8: 1
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: cmd
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Miniforge
|
||||
uses: conda-incubator/setup-miniconda@v3
|
||||
with:
|
||||
miniforge-version: latest
|
||||
use-mamba: true
|
||||
channels: robostack-staging,conda-forge,uni-lab
|
||||
channel-priority: flexible
|
||||
activate-environment: check-env
|
||||
auto-update-conda: false
|
||||
show-channel-urls: true
|
||||
|
||||
- name: Install ROS dependencies, uv and unilabos-msgs
|
||||
run: |
|
||||
echo Installing ROS dependencies...
|
||||
mamba install -n check-env conda-forge::uv conda-forge::opencv robostack-staging::ros-humble-ros-core robostack-staging::ros-humble-action-msgs robostack-staging::ros-humble-std-msgs robostack-staging::ros-humble-geometry-msgs robostack-staging::ros-humble-control-msgs robostack-staging::ros-humble-nav2-msgs uni-lab::ros-humble-unilabos-msgs robostack-staging::ros-humble-cv-bridge robostack-staging::ros-humble-vision-opencv robostack-staging::ros-humble-tf-transformations robostack-staging::ros-humble-moveit-msgs robostack-staging::ros-humble-tf2-ros robostack-staging::ros-humble-tf2-ros-py conda-forge::transforms3d -c robostack-staging -c conda-forge -c uni-lab -y
|
||||
|
||||
- name: Install pip dependencies and unilabos
|
||||
run: |
|
||||
call conda activate check-env
|
||||
echo Installing pip dependencies...
|
||||
uv pip install -r unilabos/utils/requirements.txt
|
||||
uv pip install pywinauto git+https://github.com/Xuwznln/pylabrobot.git
|
||||
uv pip uninstall enum34 || echo enum34 not installed, skipping
|
||||
uv pip install .
|
||||
|
||||
- name: Run check mode (AST registry validation)
|
||||
run: |
|
||||
call conda activate check-env
|
||||
echo Running check mode...
|
||||
python -m unilabos --check_mode --skip_env_check
|
||||
|
||||
- name: Check for uncommitted changes
|
||||
shell: bash
|
||||
run: |
|
||||
if ! git diff --exit-code; then
|
||||
echo "::error::检测到文件变化!请先在本地运行 'python -m unilabos --complete_registry' 并提交变更"
|
||||
echo "变化的文件:"
|
||||
git diff --name-only
|
||||
exit 1
|
||||
fi
|
||||
echo "检查通过:无文件变化"
|
||||
45
.github/workflows/conda-pack-build.yml
vendored
@@ -13,6 +13,11 @@ on:
|
||||
required: false
|
||||
default: 'win-64'
|
||||
type: string
|
||||
build_full:
|
||||
description: '是否构建完整版 unilabos-full (默认构建轻量版 unilabos)'
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
|
||||
jobs:
|
||||
build-conda-pack:
|
||||
@@ -24,7 +29,7 @@ jobs:
|
||||
platform: linux-64
|
||||
env_file: unilabos-linux-64.yaml
|
||||
script_ext: sh
|
||||
- os: macos-13 # Intel
|
||||
- os: macos-15 # Intel (via Rosetta)
|
||||
platform: osx-64
|
||||
env_file: unilabos-osx-64.yaml
|
||||
script_ext: sh
|
||||
@@ -57,7 +62,7 @@ jobs:
|
||||
echo "should_build=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
if: steps.should_build.outputs.should_build == 'true'
|
||||
with:
|
||||
ref: ${{ github.event.inputs.branch }}
|
||||
@@ -69,7 +74,7 @@ jobs:
|
||||
with:
|
||||
miniforge-version: latest
|
||||
use-mamba: true
|
||||
python-version: '3.11.11'
|
||||
python-version: '3.11.14'
|
||||
channels: conda-forge,robostack-staging,uni-lab,defaults
|
||||
channel-priority: flexible
|
||||
activate-environment: unilab
|
||||
@@ -81,7 +86,14 @@ jobs:
|
||||
run: |
|
||||
echo Installing unilabos and dependencies to unilab environment...
|
||||
echo Using mamba for faster and more reliable dependency resolution...
|
||||
mamba install -n unilab uni-lab::unilabos conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
|
||||
echo Build full: ${{ github.event.inputs.build_full }}
|
||||
if "${{ github.event.inputs.build_full }}"=="true" (
|
||||
echo Installing unilabos-full ^(complete package^)...
|
||||
mamba install -n unilab uni-lab::unilabos-full conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
|
||||
) else (
|
||||
echo Installing unilabos ^(minimal package^)...
|
||||
mamba install -n unilab uni-lab::unilabos conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
|
||||
)
|
||||
|
||||
- name: Install conda-pack, unilabos and dependencies (Unix)
|
||||
if: steps.should_build.outputs.should_build == 'true' && matrix.platform != 'win-64'
|
||||
@@ -89,7 +101,14 @@ jobs:
|
||||
run: |
|
||||
echo "Installing unilabos and dependencies to unilab environment..."
|
||||
echo "Using mamba for faster and more reliable dependency resolution..."
|
||||
mamba install -n unilab uni-lab::unilabos conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
|
||||
echo "Build full: ${{ github.event.inputs.build_full }}"
|
||||
if [[ "${{ github.event.inputs.build_full }}" == "true" ]]; then
|
||||
echo "Installing unilabos-full (complete package)..."
|
||||
mamba install -n unilab uni-lab::unilabos-full conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
|
||||
else
|
||||
echo "Installing unilabos (minimal package)..."
|
||||
mamba install -n unilab uni-lab::unilabos conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
|
||||
fi
|
||||
|
||||
- name: Get latest ros-humble-unilabos-msgs version (Windows)
|
||||
if: steps.should_build.outputs.should_build == 'true' && matrix.platform == 'win-64'
|
||||
@@ -293,7 +312,7 @@ jobs:
|
||||
|
||||
- name: Upload distribution package
|
||||
if: steps.should_build.outputs.should_build == 'true'
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: unilab-pack-${{ matrix.platform }}-${{ github.event.inputs.branch }}
|
||||
path: dist-package/
|
||||
@@ -308,7 +327,12 @@ jobs:
|
||||
echo ==========================================
|
||||
echo Platform: ${{ matrix.platform }}
|
||||
echo Branch: ${{ github.event.inputs.branch }}
|
||||
echo Python version: 3.11.11
|
||||
echo Python version: 3.11.14
|
||||
if "${{ github.event.inputs.build_full }}"=="true" (
|
||||
echo Package: unilabos-full ^(complete^)
|
||||
) else (
|
||||
echo Package: unilabos ^(minimal^)
|
||||
)
|
||||
echo.
|
||||
echo Distribution package contents:
|
||||
dir dist-package
|
||||
@@ -328,7 +352,12 @@ jobs:
|
||||
echo "=========================================="
|
||||
echo "Platform: ${{ matrix.platform }}"
|
||||
echo "Branch: ${{ github.event.inputs.branch }}"
|
||||
echo "Python version: 3.11.11"
|
||||
echo "Python version: 3.11.14"
|
||||
if [[ "${{ github.event.inputs.build_full }}" == "true" ]]; then
|
||||
echo "Package: unilabos-full (complete)"
|
||||
else
|
||||
echo "Package: unilabos (minimal)"
|
||||
fi
|
||||
echo ""
|
||||
echo "Distribution package contents:"
|
||||
ls -lh dist-package/
|
||||
|
||||
37
.github/workflows/deploy-docs.yml
vendored
@@ -1,10 +1,12 @@
|
||||
name: Deploy Docs
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
pull_request:
|
||||
# 在 CI Check 成功后自动触发(仅 main 分支)
|
||||
workflow_run:
|
||||
workflows: ["CI Check"]
|
||||
types: [completed]
|
||||
branches: [main]
|
||||
# 手动触发
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
branch:
|
||||
@@ -33,12 +35,19 @@ concurrency:
|
||||
jobs:
|
||||
# Build documentation
|
||||
build:
|
||||
# 只在以下情况运行:
|
||||
# 1. workflow_run 触发且 CI Check 成功
|
||||
# 2. 手动触发
|
||||
if: |
|
||||
github.event_name == 'workflow_dispatch' ||
|
||||
(github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success')
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
ref: ${{ github.event.inputs.branch || github.ref }}
|
||||
# workflow_run 时使用触发工作流的分支,手动触发时使用输入的分支
|
||||
ref: ${{ github.event.workflow_run.head_branch || github.event.inputs.branch || github.ref }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Miniforge (with mamba)
|
||||
@@ -46,7 +55,7 @@ jobs:
|
||||
with:
|
||||
miniforge-version: latest
|
||||
use-mamba: true
|
||||
python-version: '3.11.11'
|
||||
python-version: '3.11.14'
|
||||
channels: conda-forge,robostack-staging,uni-lab,defaults
|
||||
channel-priority: flexible
|
||||
activate-environment: unilab
|
||||
@@ -75,8 +84,10 @@ jobs:
|
||||
|
||||
- name: Setup Pages
|
||||
id: pages
|
||||
uses: actions/configure-pages@v4
|
||||
if: github.ref == 'refs/heads/main' || (github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
|
||||
uses: actions/configure-pages@v5
|
||||
if: |
|
||||
github.event.workflow_run.head_branch == 'main' ||
|
||||
(github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
|
||||
|
||||
- name: Build Sphinx documentation
|
||||
run: |
|
||||
@@ -94,14 +105,18 @@ jobs:
|
||||
test -f docs/_build/html/index.html && echo "✓ index.html exists" || echo "✗ index.html missing"
|
||||
|
||||
- name: Upload build artifacts
|
||||
uses: actions/upload-pages-artifact@v3
|
||||
if: github.ref == 'refs/heads/main' || (github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
|
||||
uses: actions/upload-pages-artifact@v4
|
||||
if: |
|
||||
github.event.workflow_run.head_branch == 'main' ||
|
||||
(github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
|
||||
with:
|
||||
path: docs/_build/html
|
||||
|
||||
# Deploy to GitHub Pages
|
||||
deploy:
|
||||
if: github.ref == 'refs/heads/main' || (github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
|
||||
if: |
|
||||
github.event.workflow_run.head_branch == 'main' ||
|
||||
(github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
|
||||
environment:
|
||||
name: github-pages
|
||||
url: ${{ steps.deployment.outputs.page_url }}
|
||||
|
||||
48
.github/workflows/multi-platform-build.yml
vendored
@@ -1,11 +1,16 @@
|
||||
name: Multi-Platform Conda Build
|
||||
|
||||
on:
|
||||
# 在 CI Check 工作流完成后触发(仅限 main/dev 分支)
|
||||
workflow_run:
|
||||
workflows: ["CI Check"]
|
||||
types:
|
||||
- completed
|
||||
branches: [main, dev]
|
||||
# 支持 tag 推送(不依赖 CI Check)
|
||||
push:
|
||||
branches: [main, dev]
|
||||
tags: ['v*']
|
||||
pull_request:
|
||||
branches: [main, dev]
|
||||
# 手动触发
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
platforms:
|
||||
@@ -17,9 +22,37 @@ on:
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
skip_ci_check:
|
||||
description: '跳过等待 CI Check (手动触发时可选)'
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
|
||||
jobs:
|
||||
# 等待 CI Check 完成的 job (仅用于 workflow_run 触发)
|
||||
wait-for-ci:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event_name == 'workflow_run'
|
||||
outputs:
|
||||
should_continue: ${{ steps.check.outputs.should_continue }}
|
||||
steps:
|
||||
- name: Check CI status
|
||||
id: check
|
||||
run: |
|
||||
if [[ "${{ github.event.workflow_run.conclusion }}" == "success" ]]; then
|
||||
echo "should_continue=true" >> $GITHUB_OUTPUT
|
||||
echo "CI Check passed, proceeding with build"
|
||||
else
|
||||
echo "should_continue=false" >> $GITHUB_OUTPUT
|
||||
echo "CI Check did not succeed (status: ${{ github.event.workflow_run.conclusion }}), skipping build"
|
||||
fi
|
||||
|
||||
build:
|
||||
needs: [wait-for-ci]
|
||||
# 运行条件:workflow_run 触发且 CI 成功,或者其他触发方式
|
||||
if: |
|
||||
always() &&
|
||||
(needs.wait-for-ci.result == 'skipped' || needs.wait-for-ci.outputs.should_continue == 'true')
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
@@ -27,7 +60,7 @@ jobs:
|
||||
- os: ubuntu-latest
|
||||
platform: linux-64
|
||||
env_file: unilabos-linux-64.yaml
|
||||
- os: macos-13 # Intel
|
||||
- os: macos-15 # Intel (via Rosetta)
|
||||
platform: osx-64
|
||||
env_file: unilabos-osx-64.yaml
|
||||
- os: macos-latest # ARM64
|
||||
@@ -44,8 +77,10 @@ jobs:
|
||||
shell: bash -l {0}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
# 如果是 workflow_run 触发,使用触发 CI Check 的 commit
|
||||
ref: ${{ github.event.workflow_run.head_sha || github.ref }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Check if platform should be built
|
||||
@@ -69,7 +104,6 @@ jobs:
|
||||
channels: conda-forge,robostack-staging,defaults
|
||||
channel-priority: strict
|
||||
activate-environment: build-env
|
||||
auto-activate-base: false
|
||||
auto-update-conda: false
|
||||
show-channel-urls: true
|
||||
|
||||
@@ -115,7 +149,7 @@ jobs:
|
||||
|
||||
- name: Upload conda package artifacts
|
||||
if: steps.should_build.outputs.should_build == 'true'
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: conda-package-${{ matrix.platform }}
|
||||
path: conda-packages-temp
|
||||
|
||||
115
.github/workflows/unilabos-conda-build.yml
vendored
@@ -1,32 +1,69 @@
|
||||
name: UniLabOS Conda Build
|
||||
|
||||
on:
|
||||
# 在 CI Check 成功后自动触发
|
||||
workflow_run:
|
||||
workflows: ["CI Check"]
|
||||
types: [completed]
|
||||
branches: [main, dev]
|
||||
# 标签推送时直接触发(发布版本)
|
||||
push:
|
||||
branches: [main, dev]
|
||||
tags: ['v*']
|
||||
pull_request:
|
||||
branches: [main, dev]
|
||||
# 手动触发
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
platforms:
|
||||
description: '选择构建平台 (逗号分隔): linux-64, osx-64, osx-arm64, win-64'
|
||||
required: false
|
||||
default: 'linux-64'
|
||||
build_full:
|
||||
description: '是否构建 unilabos-full 完整包 (默认只构建 unilabos 基础包)'
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
upload_to_anaconda:
|
||||
description: '是否上传到Anaconda.org'
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
skip_ci_check:
|
||||
description: '跳过等待 CI Check (手动触发时可选)'
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
|
||||
jobs:
|
||||
# 等待 CI Check 完成的 job (仅用于 workflow_run 触发)
|
||||
wait-for-ci:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event_name == 'workflow_run'
|
||||
outputs:
|
||||
should_continue: ${{ steps.check.outputs.should_continue }}
|
||||
steps:
|
||||
- name: Check CI status
|
||||
id: check
|
||||
run: |
|
||||
if [[ "${{ github.event.workflow_run.conclusion }}" == "success" ]]; then
|
||||
echo "should_continue=true" >> $GITHUB_OUTPUT
|
||||
echo "CI Check passed, proceeding with build"
|
||||
else
|
||||
echo "should_continue=false" >> $GITHUB_OUTPUT
|
||||
echo "CI Check did not succeed (status: ${{ github.event.workflow_run.conclusion }}), skipping build"
|
||||
fi
|
||||
|
||||
build:
|
||||
needs: [wait-for-ci]
|
||||
# 运行条件:workflow_run 触发且 CI 成功,或者其他触发方式
|
||||
if: |
|
||||
always() &&
|
||||
(needs.wait-for-ci.result == 'skipped' || needs.wait-for-ci.outputs.should_continue == 'true')
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
platform: linux-64
|
||||
- os: macos-13 # Intel
|
||||
- os: macos-15 # Intel (via Rosetta)
|
||||
platform: osx-64
|
||||
- os: macos-latest # ARM64
|
||||
platform: osx-arm64
|
||||
@@ -40,8 +77,10 @@ jobs:
|
||||
shell: bash -l {0}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
# 如果是 workflow_run 触发,使用触发 CI Check 的 commit
|
||||
ref: ${{ github.event.workflow_run.head_sha || github.ref }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Check if platform should be built
|
||||
@@ -65,7 +104,6 @@ jobs:
|
||||
channels: conda-forge,robostack-staging,uni-lab,defaults
|
||||
channel-priority: strict
|
||||
activate-environment: build-env
|
||||
auto-activate-base: false
|
||||
auto-update-conda: false
|
||||
show-channel-urls: true
|
||||
|
||||
@@ -81,12 +119,61 @@ jobs:
|
||||
conda list | grep -E "(rattler-build|anaconda-client)"
|
||||
echo "Platform: ${{ matrix.platform }}"
|
||||
echo "OS: ${{ matrix.os }}"
|
||||
echo "Building UniLabOS package"
|
||||
echo "Build full package: ${{ github.event.inputs.build_full || 'false' }}"
|
||||
echo "Building packages:"
|
||||
echo " - unilabos-env (environment dependencies)"
|
||||
echo " - unilabos (with pip package)"
|
||||
if [[ "${{ github.event.inputs.build_full }}" == "true" ]]; then
|
||||
echo " - unilabos-full (complete package)"
|
||||
fi
|
||||
|
||||
- name: Build conda package
|
||||
- name: Build unilabos-env (conda environment only, noarch)
|
||||
if: steps.should_build.outputs.should_build == 'true'
|
||||
run: |
|
||||
rattler-build build -r .conda/recipe.yaml -c uni-lab -c robostack-staging -c conda-forge
|
||||
echo "Building unilabos-env (conda environment dependencies)..."
|
||||
rattler-build build -r .conda/environment/recipe.yaml -c uni-lab -c robostack-staging -c conda-forge
|
||||
|
||||
- name: Upload unilabos-env to Anaconda.org (if enabled)
|
||||
if: steps.should_build.outputs.should_build == 'true' && github.event.inputs.upload_to_anaconda == 'true'
|
||||
run: |
|
||||
echo "Uploading unilabos-env to uni-lab organization..."
|
||||
for package in $(find ./output -name "unilabos-env*.conda"); do
|
||||
anaconda -t ${{ secrets.ANACONDA_API_TOKEN }} upload --user uni-lab --force "$package"
|
||||
done
|
||||
|
||||
- name: Build unilabos (with pip package)
|
||||
if: steps.should_build.outputs.should_build == 'true'
|
||||
run: |
|
||||
echo "Building unilabos package..."
|
||||
# 如果已上传到 Anaconda,从 uni-lab channel 获取 unilabos-env;否则从本地 output 获取
|
||||
rattler-build build -r .conda/base/recipe.yaml -c uni-lab -c robostack-staging -c conda-forge --channel ./output
|
||||
|
||||
- name: Upload unilabos to Anaconda.org (if enabled)
|
||||
if: steps.should_build.outputs.should_build == 'true' && github.event.inputs.upload_to_anaconda == 'true'
|
||||
run: |
|
||||
echo "Uploading unilabos to uni-lab organization..."
|
||||
for package in $(find ./output -name "unilabos-0*.conda" -o -name "unilabos-[0-9]*.conda"); do
|
||||
anaconda -t ${{ secrets.ANACONDA_API_TOKEN }} upload --user uni-lab --force "$package"
|
||||
done
|
||||
|
||||
- name: Build unilabos-full - Only when explicitly requested
|
||||
if: |
|
||||
steps.should_build.outputs.should_build == 'true' &&
|
||||
github.event.inputs.build_full == 'true'
|
||||
run: |
|
||||
echo "Building unilabos-full package on ${{ matrix.platform }}..."
|
||||
rattler-build build -r .conda/full/recipe.yaml -c uni-lab -c robostack-staging -c conda-forge --channel ./output
|
||||
|
||||
- name: Upload unilabos-full to Anaconda.org (if enabled)
|
||||
if: |
|
||||
steps.should_build.outputs.should_build == 'true' &&
|
||||
github.event.inputs.build_full == 'true' &&
|
||||
github.event.inputs.upload_to_anaconda == 'true'
|
||||
run: |
|
||||
echo "Uploading unilabos-full to uni-lab organization..."
|
||||
for package in $(find ./output -name "unilabos-full*.conda"); do
|
||||
anaconda -t ${{ secrets.ANACONDA_API_TOKEN }} upload --user uni-lab --force "$package"
|
||||
done
|
||||
|
||||
- name: List built packages
|
||||
if: steps.should_build.outputs.should_build == 'true'
|
||||
@@ -108,17 +195,9 @@ jobs:
|
||||
|
||||
- name: Upload conda package artifacts
|
||||
if: steps.should_build.outputs.should_build == 'true'
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: conda-package-unilabos-${{ matrix.platform }}
|
||||
path: conda-packages-temp
|
||||
if-no-files-found: warn
|
||||
retention-days: 30
|
||||
|
||||
- name: Upload to Anaconda.org (uni-lab organization)
|
||||
if: github.event.inputs.upload_to_anaconda == 'true'
|
||||
run: |
|
||||
for package in $(find ./output -name "*.conda"); do
|
||||
echo "Uploading $package to uni-lab organization..."
|
||||
anaconda -t ${{ secrets.ANACONDA_API_TOKEN }} upload --user uni-lab --force "$package"
|
||||
done
|
||||
|
||||
3
.gitignore
vendored
@@ -1,8 +1,11 @@
|
||||
cursor_docs/
|
||||
configs/
|
||||
temp/
|
||||
output/
|
||||
unilabos_data/
|
||||
pyrightconfig.json
|
||||
.cursorignore
|
||||
device_package*/
|
||||
## Python
|
||||
|
||||
# Byte-compiled / optimized / DLL files
|
||||
|
||||
87
AGENTS.md
Normal file
@@ -0,0 +1,87 @@
|
||||
# AGENTS.md
|
||||
|
||||
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
|
||||
|
||||
Also follow the monorepo-level rules in `../AGENTS.md`.
|
||||
|
||||
## Build & Development
|
||||
|
||||
```bash
|
||||
# Install in editable mode (requires mamba env with python 3.11)
|
||||
pip install -e .
|
||||
uv pip install -r unilabos/utils/requirements.txt
|
||||
|
||||
# Run with a device graph
|
||||
unilab --graph <graph.json> --config <config.py> --backend ros
|
||||
unilab --graph <graph.json> --config <config.py> --backend simple # no ROS2 needed
|
||||
|
||||
# Common CLI flags
|
||||
unilab --app_bridges websocket fastapi # communication bridges
|
||||
unilab --test_mode # simulate hardware, no real execution
|
||||
unilab --check_mode # CI validation of registry imports
|
||||
unilab --skip_env_check # skip auto-install of dependencies
|
||||
unilab --visual rviz|web|disable # visualization mode
|
||||
unilab --is_slave # run as slave node
|
||||
|
||||
# Workflow upload subcommand
|
||||
unilab workflow_upload -f <workflow.json> -n <name> --tags tag1 tag2
|
||||
|
||||
# Tests
|
||||
pytest tests/ # all tests
|
||||
pytest tests/resources/test_resourcetreeset.py # single test file
|
||||
pytest tests/resources/test_resourcetreeset.py::TestClassName::test_method # single test
|
||||
```
|
||||
|
||||
## Architecture
|
||||
|
||||
### Startup Flow
|
||||
|
||||
`unilab` CLI → `unilabos/app/main.py:main()` → loads config → builds registry → reads device graph (JSON/GraphML) → starts backend thread (ROS2/simple) → starts FastAPI web server + WebSocket client.
|
||||
|
||||
### Core Layers
|
||||
|
||||
**Registry** (`unilabos/registry/`): Singleton `Registry` class discovers and catalogs all device types, resource types, and communication devices from YAML definitions. Device types live in `registry/devices/*.yaml`, resources in `registry/resources/`, comms in `registry/device_comms/`. The registry resolves class paths to actual Python classes via `utils/import_manager.py`.
|
||||
|
||||
**Resource Tracking** (`unilabos/resources/resource_tracker.py`): Pydantic-based `ResourceDict` → `ResourceDictInstance` → `ResourceTreeSet` hierarchy. `ResourceTreeSet` is the canonical in-memory representation of all devices and resources, used throughout the system. Graph I/O is in `resources/graphio.py` (reads JSON/GraphML device topology files into `nx.Graph` + `ResourceTreeSet`).
|
||||
|
||||
**Device Drivers** (`unilabos/devices/`): 30+ hardware drivers organized by device type (liquid_handling, hplc, balance, arm, etc.). Each driver is a Python class that gets wrapped by `ros/device_node_wrapper.py:ros2_device_node()` to become a ROS2 node with publishers, subscribers, and action servers.
|
||||
|
||||
**ROS2 Layer** (`unilabos/ros/`): `device_node_wrapper.py` dynamically wraps any device class into `ROS2DeviceNode` (defined in `ros/nodes/base_device_node.py`). Preset node types in `ros/nodes/presets/` include `host_node`, `controller_node`, `workstation`, `serial_node`, `camera`. Messages use custom `unilabos_msgs` (pre-built, distributed via releases).
|
||||
|
||||
**Protocol Compilation** (`unilabos/compile/`): 20+ protocol compilers (add, centrifuge, dissolve, filter, heatchill, stir, pump, etc.) that transform YAML protocol definitions into executable sequences.
|
||||
|
||||
**Communication** (`unilabos/device_comms/`): Hardware communication adapters — OPC-UA client, Modbus PLC, RPC, and a universal driver. `app/communication.py` provides a factory pattern for WebSocket client connections to the cloud.
|
||||
|
||||
**Web/API** (`unilabos/app/web/`): FastAPI server with REST API (`api.py`), Jinja2 template pages (`pages.py`), and HTTP client for cloud communication (`client.py`). Runs on port 8002 by default.
|
||||
|
||||
### Configuration System
|
||||
|
||||
- **Config classes** in `unilabos/config/config.py`: `BasicConfig`, `WSConfig`, `HTTPConfig`, `ROSConfig` — all class-level attributes, loaded from Python config files
|
||||
- Config files are `.py` files with matching class names (see `config/example_config.py`)
|
||||
- Environment variables override with prefix `UNILABOS_` (e.g., `UNILABOS_BASICCONFIG_PORT=9000`)
|
||||
- Device topology defined in graph files (JSON with node-link format, or GraphML)
|
||||
|
||||
### Key Data Flow
|
||||
|
||||
1. Graph file → `graphio.read_node_link_json()` → `(nx.Graph, ResourceTreeSet, resource_links)`
|
||||
2. `ResourceTreeSet` + `Registry` → `initialize_device.initialize_device_from_dict()` → `ROS2DeviceNode` instances
|
||||
3. Device nodes communicate via ROS2 topics/actions or direct Python calls (simple backend)
|
||||
4. Cloud sync via WebSocket (`app/ws_client.py`) and HTTP (`app/web/client.py`)
|
||||
|
||||
### Test Data
|
||||
|
||||
Example device graphs and experiment configs are in `unilabos/test/experiments/` (not `tests/`). Registry test fixtures in `unilabos/test/registry/`.
|
||||
|
||||
## Code Conventions
|
||||
|
||||
- Code comments and log messages in simplified Chinese
|
||||
- Python 3.11+, type hints expected
|
||||
- Pydantic models for data validation (`resource_tracker.py`)
|
||||
- Singleton pattern via `@singleton` decorator (`utils/decorator.py`)
|
||||
- Dynamic class loading via `utils/import_manager.py` — device classes resolved at runtime from registry YAML paths
|
||||
- CLI argument dashes auto-converted to underscores for consistency
|
||||
|
||||
## Licensing
|
||||
|
||||
- Framework code: GPL-3.0
|
||||
- Device drivers (`unilabos/devices/`): DP Technology Proprietary License — do not redistribute
|
||||
@@ -1,4 +1,5 @@
|
||||
recursive-include unilabos/test *
|
||||
recursive-include unilabos/utils *
|
||||
recursive-include unilabos/registry *.yaml
|
||||
recursive-include unilabos/app/web/static *
|
||||
recursive-include unilabos/app/web/templates *
|
||||
|
||||
17
NOTICE
Normal file
@@ -0,0 +1,17 @@
|
||||
# Uni-Lab-OS Licensing Notice
|
||||
|
||||
This project uses a dual licensing structure:
|
||||
|
||||
## 1. Main Framework - GPL-3.0
|
||||
|
||||
- unilabos/ (except unilabos/devices/)
|
||||
- docs/
|
||||
- tests/
|
||||
|
||||
See [LICENSE](LICENSE) for details.
|
||||
|
||||
## 2. Device Drivers - DP Technology Proprietary License
|
||||
|
||||
- unilabos/devices/
|
||||
|
||||
See [unilabos/devices/LICENSE](unilabos/devices/LICENSE) for details.
|
||||
90
README.md
@@ -8,17 +8,13 @@
|
||||
|
||||
**English** | [中文](README_zh.md)
|
||||
|
||||
[](https://github.com/dptech-corp/Uni-Lab-OS/stargazers)
|
||||
[](https://github.com/dptech-corp/Uni-Lab-OS/network/members)
|
||||
[](https://github.com/dptech-corp/Uni-Lab-OS/issues)
|
||||
[](https://github.com/dptech-corp/Uni-Lab-OS/blob/main/LICENSE)
|
||||
[](https://github.com/deepmodeling/Uni-Lab-OS/stargazers)
|
||||
[](https://github.com/deepmodeling/Uni-Lab-OS/network/members)
|
||||
[](https://github.com/deepmodeling/Uni-Lab-OS/issues)
|
||||
[](https://github.com/deepmodeling/Uni-Lab-OS/blob/main/LICENSE)
|
||||
|
||||
Uni-Lab-OS is a platform for laboratory automation, designed to connect and control various experimental equipment, enabling automation and standardization of experimental workflows.
|
||||
|
||||
## 🏆 Competition
|
||||
|
||||
Join the [Intelligent Organic Chemistry Synthesis Competition](https://bohrium.dp.tech/competitions/1451645258) to explore automated synthesis with Uni-Lab-OS!
|
||||
|
||||
## Key Features
|
||||
|
||||
- Multi-device integration management
|
||||
@@ -31,41 +27,89 @@ Join the [Intelligent Organic Chemistry Synthesis Competition](https://bohrium.d
|
||||
|
||||
Detailed documentation can be found at:
|
||||
|
||||
- [Online Documentation](https://xuwznln.github.io/Uni-Lab-OS-Doc/)
|
||||
- [Online Documentation](https://deepmodeling.github.io/Uni-Lab-OS/)
|
||||
|
||||
## Quick Start
|
||||
|
||||
Uni-Lab-OS recommends using `mamba` for environment management. Choose the appropriate environment file for your operating system:
|
||||
### 1. Setup Conda Environment
|
||||
|
||||
Uni-Lab-OS recommends using `mamba` for environment management. Choose the package that fits your needs:
|
||||
|
||||
| Package | Use Case | Contents |
|
||||
|---------|----------|----------|
|
||||
| `unilabos` | **Recommended for most users** | Complete package, ready to use |
|
||||
| `unilabos-env` | Developers (editable install) | Environment only, install unilabos via pip |
|
||||
| `unilabos-full` | Simulation/Visualization | unilabos + ROS2 Desktop + Gazebo + MoveIt |
|
||||
|
||||
```bash
|
||||
# Create new environment
|
||||
mamba create -n unilab python=3.11.11
|
||||
mamba create -n unilab python=3.11.14
|
||||
mamba activate unilab
|
||||
mamba install -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
|
||||
|
||||
# Option A: Standard installation (recommended for most users)
|
||||
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
|
||||
|
||||
# Option B: For developers (editable mode development)
|
||||
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
|
||||
# Then install unilabos and dependencies:
|
||||
git clone https://github.com/deepmodeling/Uni-Lab-OS.git && cd Uni-Lab-OS
|
||||
pip install -e .
|
||||
uv pip install -r unilabos/utils/requirements.txt
|
||||
|
||||
# Option C: Full installation (simulation/visualization)
|
||||
mamba install uni-lab::unilabos-full -c robostack-staging -c conda-forge
|
||||
```
|
||||
|
||||
## Install Dev Uni-Lab-OS
|
||||
**When to use which?**
|
||||
- **unilabos**: Standard installation for production deployment and general usage (recommended)
|
||||
- **unilabos-env**: For developers who need `pip install -e .` editable mode, modify source code
|
||||
- **unilabos-full**: For simulation (Gazebo), visualization (rviz2), and Jupyter notebooks
|
||||
|
||||
### 2. Clone Repository (Optional, for developers)
|
||||
|
||||
```bash
|
||||
# Clone the repository
|
||||
git clone https://github.com/dptech-corp/Uni-Lab-OS.git
|
||||
# Clone the repository (only needed for development or examples)
|
||||
git clone https://github.com/deepmodeling/Uni-Lab-OS.git
|
||||
cd Uni-Lab-OS
|
||||
|
||||
# Install Uni-Lab-OS
|
||||
pip install .
|
||||
```
|
||||
|
||||
3. Start Uni-Lab System:
|
||||
3. Start Uni-Lab System
|
||||
|
||||
Please refer to [Documentation - Boot Examples](https://xuwznln.github.io/Uni-Lab-OS-Doc/boot_examples/index.html)
|
||||
Please refer to [Documentation - Boot Examples](https://deepmodeling.github.io/Uni-Lab-OS/boot_examples/index.html)
|
||||
|
||||
4. Best Practice
|
||||
|
||||
See [Best Practice Guide](https://deepmodeling.github.io/Uni-Lab-OS/user_guide/best_practice.html)
|
||||
|
||||
## Message Format
|
||||
|
||||
Uni-Lab-OS uses pre-built `unilabos_msgs` for system communication. You can find the built versions on the [GitHub Releases](https://github.com/dptech-corp/Uni-Lab-OS/releases) page.
|
||||
Uni-Lab-OS uses pre-built `unilabos_msgs` for system communication. You can find the built versions on the [GitHub Releases](https://github.com/deepmodeling/Uni-Lab-OS/releases) page.
|
||||
|
||||
## Citation
|
||||
|
||||
If you use [Uni-Lab-OS](https://arxiv.org/abs/2512.21766) in academic research, please cite:
|
||||
|
||||
```bibtex
|
||||
@article{gao2025unilabos,
|
||||
title = {UniLabOS: An AI-Native Operating System for Autonomous Laboratories},
|
||||
doi = {10.48550/arXiv.2512.21766},
|
||||
publisher = {arXiv},
|
||||
author = {Gao, Jing and Chang, Junhan and Que, Haohui and Xiong, Yanfei and
|
||||
Zhang, Shixiang and Qi, Xianwei and Liu, Zhen and Wang, Jun-Jie and
|
||||
Ding, Qianjun and Li, Xinyu and Pan, Ziwei and Xie, Qiming and
|
||||
Yan, Zhuang and Yan, Junchi and Zhang, Linfeng},
|
||||
year = {2025}
|
||||
}
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under GPL-3.0 - see the [LICENSE](LICENSE) file for details.
|
||||
This project uses a dual licensing structure:
|
||||
|
||||
- **Main Framework**: GPL-3.0 - see [LICENSE](LICENSE)
|
||||
- **Device Drivers** (`unilabos/devices/`): DP Technology Proprietary License
|
||||
|
||||
See [NOTICE](NOTICE) for complete licensing details.
|
||||
|
||||
## Project Statistics
|
||||
|
||||
@@ -77,4 +121,4 @@ This project is licensed under GPL-3.0 - see the [LICENSE](LICENSE) file for det
|
||||
|
||||
## Contact Us
|
||||
|
||||
- GitHub Issues: [https://github.com/dptech-corp/Uni-Lab-OS/issues](https://github.com/dptech-corp/Uni-Lab-OS/issues)
|
||||
- GitHub Issues: [https://github.com/deepmodeling/Uni-Lab-OS/issues](https://github.com/deepmodeling/Uni-Lab-OS/issues)
|
||||
|
||||
90
README_zh.md
@@ -8,17 +8,13 @@
|
||||
|
||||
[English](README.md) | **中文**
|
||||
|
||||
[](https://github.com/dptech-corp/Uni-Lab-OS/stargazers)
|
||||
[](https://github.com/dptech-corp/Uni-Lab-OS/network/members)
|
||||
[](https://github.com/dptech-corp/Uni-Lab-OS/issues)
|
||||
[](https://github.com/dptech-corp/Uni-Lab-OS/blob/main/LICENSE)
|
||||
[](https://github.com/deepmodeling/Uni-Lab-OS/stargazers)
|
||||
[](https://github.com/deepmodeling/Uni-Lab-OS/network/members)
|
||||
[](https://github.com/deepmodeling/Uni-Lab-OS/issues)
|
||||
[](https://github.com/deepmodeling/Uni-Lab-OS/blob/main/LICENSE)
|
||||
|
||||
Uni-Lab-OS 是一个用于实验室自动化的综合平台,旨在连接和控制各种实验设备,实现实验流程的自动化和标准化。
|
||||
|
||||
## 🏆 比赛
|
||||
|
||||
欢迎参加[有机化学合成智能实验大赛](https://bohrium.dp.tech/competitions/1451645258),使用 Uni-Lab-OS 探索自动化合成!
|
||||
|
||||
## 核心特点
|
||||
|
||||
- 多设备集成管理
|
||||
@@ -31,43 +27,89 @@ Uni-Lab-OS 是一个用于实验室自动化的综合平台,旨在连接和控
|
||||
|
||||
详细文档可在以下位置找到:
|
||||
|
||||
- [在线文档](https://xuwznln.github.io/Uni-Lab-OS-Doc/)
|
||||
- [在线文档](https://deepmodeling.github.io/Uni-Lab-OS/)
|
||||
|
||||
## 快速开始
|
||||
|
||||
1. 配置 Conda 环境
|
||||
### 1. 配置 Conda 环境
|
||||
|
||||
Uni-Lab-OS 建议使用 `mamba` 管理环境。根据您的操作系统选择适当的环境文件:
|
||||
Uni-Lab-OS 建议使用 `mamba` 管理环境。根据您的需求选择合适的安装包:
|
||||
|
||||
| 安装包 | 适用场景 | 包含内容 |
|
||||
|--------|----------|----------|
|
||||
| `unilabos` | **推荐大多数用户** | 完整安装包,开箱即用 |
|
||||
| `unilabos-env` | 开发者(可编辑安装) | 仅环境依赖,通过 pip 安装 unilabos |
|
||||
| `unilabos-full` | 仿真/可视化 | unilabos + ROS2 桌面版 + Gazebo + MoveIt |
|
||||
|
||||
```bash
|
||||
# 创建新环境
|
||||
mamba create -n unilab python=3.11.11
|
||||
mamba create -n unilab python=3.11.14
|
||||
mamba activate unilab
|
||||
mamba install -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
|
||||
|
||||
# 方案 A:标准安装(推荐大多数用户)
|
||||
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
|
||||
|
||||
# 方案 B:开发者环境(可编辑模式开发)
|
||||
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
|
||||
# 然后安装 unilabos 和依赖:
|
||||
git clone https://github.com/deepmodeling/Uni-Lab-OS.git && cd Uni-Lab-OS
|
||||
pip install -e .
|
||||
uv pip install -r unilabos/utils/requirements.txt
|
||||
|
||||
# 方案 C:完整安装(仿真/可视化)
|
||||
mamba install uni-lab::unilabos-full -c robostack-staging -c conda-forge
|
||||
```
|
||||
|
||||
2. 安装开发版 Uni-Lab-OS:
|
||||
**如何选择?**
|
||||
- **unilabos**:标准安装,适用于生产部署和日常使用(推荐)
|
||||
- **unilabos-env**:开发者使用,支持 `pip install -e .` 可编辑模式,可修改源代码
|
||||
- **unilabos-full**:需要仿真(Gazebo)、可视化(rviz2)或 Jupyter Notebook
|
||||
|
||||
### 2. 克隆仓库(可选,供开发者使用)
|
||||
|
||||
```bash
|
||||
# 克隆仓库
|
||||
git clone https://github.com/dptech-corp/Uni-Lab-OS.git
|
||||
# 克隆仓库(仅开发或查看示例时需要)
|
||||
git clone https://github.com/deepmodeling/Uni-Lab-OS.git
|
||||
cd Uni-Lab-OS
|
||||
|
||||
# 安装 Uni-Lab-OS
|
||||
pip install .
|
||||
```
|
||||
|
||||
3. 启动 Uni-Lab 系统:
|
||||
3. 启动 Uni-Lab 系统
|
||||
|
||||
请见[文档-启动样例](https://xuwznln.github.io/Uni-Lab-OS-Doc/boot_examples/index.html)
|
||||
请见[文档-启动样例](https://deepmodeling.github.io/Uni-Lab-OS/boot_examples/index.html)
|
||||
|
||||
4. 最佳实践
|
||||
|
||||
请见[最佳实践指南](https://deepmodeling.github.io/Uni-Lab-OS/user_guide/best_practice.html)
|
||||
|
||||
## 消息格式
|
||||
|
||||
Uni-Lab-OS 使用预构建的 `unilabos_msgs` 进行系统通信。您可以在 [GitHub Releases](https://github.com/dptech-corp/Uni-Lab-OS/releases) 页面找到已构建的版本。
|
||||
Uni-Lab-OS 使用预构建的 `unilabos_msgs` 进行系统通信。您可以在 [GitHub Releases](https://github.com/deepmodeling/Uni-Lab-OS/releases) 页面找到已构建的版本。
|
||||
|
||||
## 引用
|
||||
|
||||
如果您在学术研究中使用 [Uni-Lab-OS](https://arxiv.org/abs/2512.21766),请引用:
|
||||
|
||||
```bibtex
|
||||
@article{gao2025unilabos,
|
||||
title = {UniLabOS: An AI-Native Operating System for Autonomous Laboratories},
|
||||
doi = {10.48550/arXiv.2512.21766},
|
||||
publisher = {arXiv},
|
||||
author = {Gao, Jing and Chang, Junhan and Que, Haohui and Xiong, Yanfei and
|
||||
Zhang, Shixiang and Qi, Xianwei and Liu, Zhen and Wang, Jun-Jie and
|
||||
Ding, Qianjun and Li, Xinyu and Pan, Ziwei and Xie, Qiming and
|
||||
Yan, Zhuang and Yan, Junchi and Zhang, Linfeng},
|
||||
year = {2025}
|
||||
}
|
||||
```
|
||||
|
||||
## 许可证
|
||||
|
||||
此项目采用 GPL-3.0 许可 - 详情请参阅 [LICENSE](LICENSE) 文件。
|
||||
本项目采用双许可证结构:
|
||||
|
||||
- **主框架**:GPL-3.0 - 详见 [LICENSE](LICENSE)
|
||||
- **设备驱动** (`unilabos/devices/`):深势科技专有许可证
|
||||
|
||||
完整许可证说明请参阅 [NOTICE](NOTICE)。
|
||||
|
||||
## 项目统计
|
||||
|
||||
@@ -79,4 +121,4 @@ Uni-Lab-OS 使用预构建的 `unilabos_msgs` 进行系统通信。您可以在
|
||||
|
||||
## 联系我们
|
||||
|
||||
- GitHub Issues: [https://github.com/dptech-corp/Uni-Lab-OS/issues](https://github.com/dptech-corp/Uni-Lab-OS/issues)
|
||||
- GitHub Issues: [https://github.com/deepmodeling/Uni-Lab-OS/issues](https://github.com/deepmodeling/Uni-Lab-OS/issues)
|
||||
|
||||
@@ -24,7 +24,7 @@ extensions = [
|
||||
"sphinx.ext.autodoc",
|
||||
"sphinx.ext.napoleon", # 如果您使用 Google 或 NumPy 风格的 docstrings
|
||||
"sphinx_rtd_theme",
|
||||
"sphinxcontrib.mermaid"
|
||||
"sphinxcontrib.mermaid",
|
||||
]
|
||||
|
||||
source_suffix = {
|
||||
@@ -58,7 +58,7 @@ html_theme = "sphinx_rtd_theme"
|
||||
|
||||
# sphinx-book-theme 主题选项
|
||||
html_theme_options = {
|
||||
"repository_url": "https://github.com/用户名/Uni-Lab",
|
||||
"repository_url": "https://github.com/deepmodeling/Uni-Lab-OS",
|
||||
"use_repository_button": True,
|
||||
"use_issues_button": True,
|
||||
"use_edit_page_button": True,
|
||||
|
||||
@@ -15,6 +15,9 @@ Python 类设备驱动在完成注册表后可以直接在 Uni-Lab 中使用,
|
||||
**示例:**
|
||||
|
||||
```python
|
||||
from unilabos.registry.decorators import device, topic_config
|
||||
|
||||
@device(id="mock_gripper", category=["gripper"], description="Mock Gripper")
|
||||
class MockGripper:
|
||||
def __init__(self):
|
||||
self._position: float = 0.0
|
||||
@@ -23,19 +26,23 @@ class MockGripper:
|
||||
self._status = "Idle"
|
||||
|
||||
@property
|
||||
@topic_config() # 添加 @topic_config 才会定时广播
|
||||
def position(self) -> float:
|
||||
return self._position
|
||||
|
||||
@property
|
||||
@topic_config()
|
||||
def velocity(self) -> float:
|
||||
return self._velocity
|
||||
|
||||
@property
|
||||
@topic_config()
|
||||
def torque(self) -> float:
|
||||
return self._torque
|
||||
|
||||
# 会被自动识别的设备属性,接入 Uni-Lab 时会定时对外广播
|
||||
# 使用 @topic_config 装饰的属性,接入 Uni-Lab 时会定时对外广播
|
||||
@property
|
||||
@topic_config(period=2.0) # 可自定义发布周期
|
||||
def status(self) -> str:
|
||||
return self._status
|
||||
|
||||
@@ -149,7 +156,7 @@ my_device: # 设备唯一标识符
|
||||
|
||||
系统会自动分析您的 Python 驱动类并生成:
|
||||
|
||||
- `status_types`:从 `@property` 装饰的方法自动识别状态属性
|
||||
- `status_types`:从 `@topic_config` 装饰的 `@property` 或方法自动识别状态属性
|
||||
- `action_value_mappings`:从类方法自动生成动作映射
|
||||
- `init_param_schema`:从 `__init__` 方法分析初始化参数
|
||||
- `schema`:前端显示用的属性类型定义
|
||||
@@ -179,7 +186,9 @@ Uni-Lab 设备驱动是一个 Python 类,需要遵循以下结构:
|
||||
|
||||
```python
|
||||
from typing import Dict, Any
|
||||
from unilabos.registry.decorators import device, topic_config
|
||||
|
||||
@device(id="my_device", category=["general"], description="My Device")
|
||||
class MyDevice:
|
||||
"""设备类文档字符串
|
||||
|
||||
@@ -198,8 +207,9 @@ class MyDevice:
|
||||
# 初始化硬件连接
|
||||
|
||||
@property
|
||||
@topic_config() # 必须添加 @topic_config 才会广播
|
||||
def status(self) -> str:
|
||||
"""设备状态(会自动广播)"""
|
||||
"""设备状态(通过 @topic_config 广播)"""
|
||||
return self._status
|
||||
|
||||
def my_action(self, param: float) -> Dict[str, Any]:
|
||||
@@ -217,34 +227,61 @@ class MyDevice:
|
||||
|
||||
## 状态属性 vs 动作方法
|
||||
|
||||
### 状态属性(@property)
|
||||
### 状态属性(@property + @topic_config)
|
||||
|
||||
状态属性会被自动识别并定期广播:
|
||||
状态属性需要同时使用 `@property` 和 `@topic_config` 装饰器才会被识别并定期广播:
|
||||
|
||||
```python
|
||||
from unilabos.registry.decorators import topic_config
|
||||
|
||||
@property
|
||||
@topic_config() # 必须添加,否则不会广播
|
||||
def temperature(self) -> float:
|
||||
"""当前温度"""
|
||||
return self._read_temperature()
|
||||
|
||||
@property
|
||||
@topic_config(period=2.0) # 可自定义发布周期(秒)
|
||||
def status(self) -> str:
|
||||
"""设备状态: idle, running, error"""
|
||||
return self._status
|
||||
|
||||
@property
|
||||
@topic_config(name="ready") # 可自定义发布名称
|
||||
def is_ready(self) -> bool:
|
||||
"""设备是否就绪"""
|
||||
return self._status == "idle"
|
||||
```
|
||||
|
||||
也可以使用普通方法(非 @property)配合 `@topic_config`:
|
||||
|
||||
```python
|
||||
@topic_config(period=10.0)
|
||||
def get_sensor_data(self) -> Dict[str, float]:
|
||||
"""获取传感器数据(get_ 前缀会自动去除,发布名为 sensor_data)"""
|
||||
return {"temp": self._temp, "humidity": self._humidity}
|
||||
```
|
||||
|
||||
**`@topic_config` 参数**:
|
||||
|
||||
| 参数 | 类型 | 默认值 | 说明 |
|
||||
|------|------|--------|------|
|
||||
| `period` | float | 5.0 | 发布周期(秒) |
|
||||
| `print_publish` | bool | 节点默认 | 是否打印发布日志 |
|
||||
| `qos` | int | 10 | QoS 深度 |
|
||||
| `name` | str | None | 自定义发布名称 |
|
||||
|
||||
**发布名称优先级**:`@topic_config(name=...)` > `get_` 前缀去除 > 方法名
|
||||
|
||||
**特点**:
|
||||
|
||||
- 使用`@property`装饰器
|
||||
- 只读,不能有参数
|
||||
- 自动添加到注册表的`status_types`
|
||||
- 必须使用 `@topic_config` 装饰器
|
||||
- 支持 `@property` 和普通方法
|
||||
- 添加到注册表的 `status_types`
|
||||
- 定期发布到 ROS2 topic
|
||||
|
||||
> **⚠️ 重要:** 仅有 `@property` 装饰器而没有 `@topic_config` 的属性**不会**被广播。这是一个 Breaking Change。
|
||||
|
||||
### 动作方法
|
||||
|
||||
动作方法是设备可以执行的操作:
|
||||
@@ -497,6 +534,7 @@ class LiquidHandler:
|
||||
self._status = "idle"
|
||||
|
||||
@property
|
||||
@topic_config()
|
||||
def status(self) -> str:
|
||||
return self._status
|
||||
|
||||
@@ -886,7 +924,52 @@ class MyDevice:
|
||||
|
||||
## 最佳实践
|
||||
|
||||
### 1. 类型注解
|
||||
### 1. 使用 `@device` 装饰器标识设备类
|
||||
|
||||
```python
|
||||
from unilabos.registry.decorators import device
|
||||
|
||||
@device(id="my_device", category=["heating"], description="My Heating Device", icon="heater.webp")
|
||||
class MyDevice:
|
||||
...
|
||||
```
|
||||
|
||||
- `id`:设备唯一标识符,用于注册表匹配
|
||||
- `category`:分类列表,前端用于分组显示
|
||||
- `description`:设备描述
|
||||
- `icon`:图标文件名(可选)
|
||||
|
||||
### 2. 使用 `@topic_config` 声明需要广播的状态
|
||||
|
||||
```python
|
||||
from unilabos.registry.decorators import topic_config
|
||||
|
||||
# ✓ @property + @topic_config → 会广播
|
||||
@property
|
||||
@topic_config(period=2.0)
|
||||
def temperature(self) -> float:
|
||||
return self._temp
|
||||
|
||||
# ✓ 普通方法 + @topic_config → 会广播(get_ 前缀自动去除)
|
||||
@topic_config(period=10.0)
|
||||
def get_sensor_data(self) -> Dict[str, float]:
|
||||
return {"temp": self._temp}
|
||||
|
||||
# ✓ 使用 name 参数自定义发布名称
|
||||
@property
|
||||
@topic_config(name="ready")
|
||||
def is_ready(self) -> bool:
|
||||
return self._status == "idle"
|
||||
|
||||
# ✗ 仅有 @property,没有 @topic_config → 不会广播
|
||||
@property
|
||||
def internal_state(self) -> str:
|
||||
return self._state
|
||||
```
|
||||
|
||||
> **注意:** 与 `@property` 连用时,`@topic_config` 必须放在 `@property` 下面。
|
||||
|
||||
### 3. 类型注解
|
||||
|
||||
```python
|
||||
from typing import Dict, Any, Optional, List
|
||||
@@ -901,7 +984,7 @@ def method(
|
||||
pass
|
||||
```
|
||||
|
||||
### 2. 文档字符串
|
||||
### 4. 文档字符串
|
||||
|
||||
```python
|
||||
def method(self, param: float) -> Dict[str, Any]:
|
||||
@@ -923,7 +1006,7 @@ def method(self, param: float) -> Dict[str, Any]:
|
||||
pass
|
||||
```
|
||||
|
||||
### 3. 配置验证
|
||||
### 5. 配置验证
|
||||
|
||||
```python
|
||||
def __init__(self, config: Dict[str, Any]):
|
||||
@@ -937,7 +1020,7 @@ def __init__(self, config: Dict[str, Any]):
|
||||
self.baudrate = config['baudrate']
|
||||
```
|
||||
|
||||
### 4. 资源清理
|
||||
### 6. 资源清理
|
||||
|
||||
```python
|
||||
def __del__(self):
|
||||
@@ -946,7 +1029,7 @@ def __del__(self):
|
||||
self.connection.close()
|
||||
```
|
||||
|
||||
### 5. 设计前端友好的返回值
|
||||
### 7. 设计前端友好的返回值
|
||||
|
||||
**记住:返回值会直接显示在 Web 界面**
|
||||
|
||||
|
||||
@@ -422,18 +422,20 @@ placeholder_keys:
|
||||
|
||||
### status_types
|
||||
|
||||
系统会扫描你的 Python 类,从状态方法(property 或 get\_方法)自动生成这部分:
|
||||
系统会扫描你的 Python 类,从带有 `@topic_config` 装饰器的 `@property` 或方法自动生成这部分:
|
||||
|
||||
```yaml
|
||||
status_types:
|
||||
current_temperature: float # 从 get_current_temperature() 或 @property current_temperature
|
||||
is_heating: bool # 从 get_is_heating() 或 @property is_heating
|
||||
status: str # 从 get_status() 或 @property status
|
||||
current_temperature: float # 从 @topic_config 装饰的 @property 或方法
|
||||
is_heating: bool
|
||||
status: str
|
||||
```
|
||||
|
||||
**注意事项**:
|
||||
|
||||
- 系统会查找所有 `get_` 开头的方法和 `@property` 装饰的属性
|
||||
- 仅有带 `@topic_config` 装饰器的 `@property` 或方法才会被识别为状态属性
|
||||
- 没有 `@topic_config` 的 `@property` 不会生成 status_types,也不会广播
|
||||
- `get_` 前缀的方法名会自动去除前缀(如 `get_temperature` → `temperature`)
|
||||
- 类型会自动转成相应的类型(如 `str`、`float`、`bool`)
|
||||
- 如果类型是 `Any`、`None` 或未知的,默认使用 `String`
|
||||
|
||||
@@ -537,11 +539,13 @@ class AdvancedLiquidHandler:
|
||||
self._temperature = 25.0
|
||||
|
||||
@property
|
||||
@topic_config()
|
||||
def status(self) -> str:
|
||||
"""设备状态"""
|
||||
return self._status
|
||||
|
||||
@property
|
||||
@topic_config()
|
||||
def temperature(self) -> float:
|
||||
"""当前温度"""
|
||||
return self._temperature
|
||||
@@ -809,21 +813,23 @@ my_temperature_controller:
|
||||
你的设备类需要符合以下要求:
|
||||
|
||||
```python
|
||||
from unilabos.common.device_base import DeviceBase
|
||||
from unilabos.registry.decorators import device, topic_config
|
||||
|
||||
class MyDevice(DeviceBase):
|
||||
@device(id="my_device", category=["temperature"], description="My Device")
|
||||
class MyDevice:
|
||||
def __init__(self, config):
|
||||
"""初始化,参数会自动分析到 init_param_schema.config"""
|
||||
super().__init__(config)
|
||||
self.port = config.get('port', '/dev/ttyUSB0')
|
||||
|
||||
# 状态方法(会自动生成到 status_types)
|
||||
# 状态方法(必须添加 @topic_config 才会生成到 status_types 并广播)
|
||||
@property
|
||||
@topic_config()
|
||||
def status(self):
|
||||
"""返回设备状态"""
|
||||
return "idle"
|
||||
|
||||
@property
|
||||
@topic_config()
|
||||
def temperature(self):
|
||||
"""返回当前温度"""
|
||||
return 25.0
|
||||
@@ -1039,7 +1045,34 @@ resource.type # "resource"
|
||||
|
||||
### 代码规范
|
||||
|
||||
1. **始终使用类型注解**
|
||||
1. **使用 `@device` 装饰器标识设备类**
|
||||
|
||||
```python
|
||||
from unilabos.registry.decorators import device
|
||||
|
||||
@device(id="my_device", category=["heating"], description="My Device")
|
||||
class MyDevice:
|
||||
...
|
||||
```
|
||||
|
||||
2. **使用 `@topic_config` 声明广播属性**
|
||||
|
||||
```python
|
||||
from unilabos.registry.decorators import topic_config
|
||||
|
||||
# ✓ 需要广播的状态属性
|
||||
@property
|
||||
@topic_config(period=2.0)
|
||||
def temperature(self) -> float:
|
||||
return self._temp
|
||||
|
||||
# ✗ 仅有 @property 不会广播
|
||||
@property
|
||||
def internal_counter(self) -> int:
|
||||
return self._counter
|
||||
```
|
||||
|
||||
3. **始终使用类型注解**
|
||||
|
||||
```python
|
||||
# ✓ 好
|
||||
@@ -1051,7 +1084,7 @@ def method(self, resource, device):
|
||||
pass
|
||||
```
|
||||
|
||||
2. **提供有意义的参数名**
|
||||
4. **提供有意义的参数名**
|
||||
|
||||
```python
|
||||
# ✓ 好 - 清晰的参数名
|
||||
@@ -1063,7 +1096,7 @@ def transfer(self, r1: ResourceSlot, r2: ResourceSlot):
|
||||
pass
|
||||
```
|
||||
|
||||
3. **使用 Optional 表示可选参数**
|
||||
5. **使用 Optional 表示可选参数**
|
||||
|
||||
```python
|
||||
from typing import Optional
|
||||
@@ -1076,7 +1109,7 @@ def method(
|
||||
pass
|
||||
```
|
||||
|
||||
4. **添加详细的文档字符串**
|
||||
6. **添加详细的文档字符串**
|
||||
|
||||
```python
|
||||
def method(
|
||||
@@ -1096,13 +1129,13 @@ def method(
|
||||
pass
|
||||
```
|
||||
|
||||
5. **方法命名规范**
|
||||
7. **方法命名规范**
|
||||
|
||||
- 状态方法使用 `@property` 装饰器或 `get_` 前缀
|
||||
- 状态方法使用 `@property` + `@topic_config` 装饰器,或普通方法 + `@topic_config`
|
||||
- 动作方法使用动词开头
|
||||
- 保持命名清晰、一致
|
||||
|
||||
6. **完善的错误处理**
|
||||
8. **完善的错误处理**
|
||||
- 实现完善的错误处理
|
||||
- 添加日志记录
|
||||
- 提供有意义的错误信息
|
||||
|
||||
@@ -221,10 +221,10 @@ Laboratory A Laboratory B
|
||||
|
||||
```bash
|
||||
# 实验室A
|
||||
unilab --ak your_ak --sk your_sk --upload_registry --use_remote_resource
|
||||
unilab --ak your_ak --sk your_sk --upload_registry
|
||||
|
||||
# 实验室B
|
||||
unilab --ak your_ak --sk your_sk --upload_registry --use_remote_resource
|
||||
unilab --ak your_ak --sk your_sk --upload_registry
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -12,3 +12,7 @@ sphinx-copybutton>=0.5.0
|
||||
|
||||
# 用于自动摘要生成
|
||||
sphinx-autobuild>=2024.2.4
|
||||
|
||||
# 用于PDF导出 (rinohtype方案,纯Python无需LaTeX)
|
||||
rinohtype>=0.5.4
|
||||
sphinx-simplepdf>=1.6.0
|
||||
@@ -31,6 +31,14 @@
|
||||
|
||||
详细的安装步骤请参考 [安装指南](installation.md)。
|
||||
|
||||
**选择合适的安装包:**
|
||||
|
||||
| 安装包 | 适用场景 | 包含组件 |
|
||||
|--------|----------|----------|
|
||||
| `unilabos` | **推荐大多数用户**,生产部署 | 完整安装包,开箱即用 |
|
||||
| `unilabos-env` | 开发者(可编辑安装) | 仅环境依赖,通过 pip 安装 unilabos |
|
||||
| `unilabos-full` | 仿真/可视化 | unilabos + 完整 ROS2 桌面版 + Gazebo + MoveIt |
|
||||
|
||||
**关键步骤:**
|
||||
|
||||
```bash
|
||||
@@ -38,15 +46,30 @@
|
||||
# 下载 Miniforge: https://github.com/conda-forge/miniforge/releases
|
||||
|
||||
# 2. 创建 Conda 环境
|
||||
mamba create -n unilab python=3.11.11
|
||||
mamba create -n unilab python=3.11.14
|
||||
|
||||
# 3. 激活环境
|
||||
mamba activate unilab
|
||||
|
||||
# 4. 安装 Uni-Lab-OS
|
||||
# 4. 安装 Uni-Lab-OS(选择其一)
|
||||
|
||||
# 方案 A:标准安装(推荐大多数用户)
|
||||
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
|
||||
|
||||
# 方案 B:开发者环境(可编辑模式开发)
|
||||
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
|
||||
pip install -e /path/to/Uni-Lab-OS # 可编辑安装
|
||||
uv pip install -r unilabos/utils/requirements.txt # 安装 pip 依赖
|
||||
|
||||
# 方案 C:完整版(仿真/可视化)
|
||||
mamba install uni-lab::unilabos-full -c robostack-staging -c conda-forge
|
||||
```
|
||||
|
||||
**选择建议:**
|
||||
- **日常使用/生产部署**:使用 `unilabos`(推荐),完整功能,开箱即用
|
||||
- **开发者**:使用 `unilabos-env` + `pip install -e .` + `uv pip install -r unilabos/utils/requirements.txt`,代码修改立即生效
|
||||
- **仿真/可视化**:使用 `unilabos-full`,含 Gazebo、rviz2、MoveIt
|
||||
|
||||
#### 1.2 验证安装
|
||||
|
||||
```bash
|
||||
@@ -416,6 +439,9 @@ unilab --ak your_ak --sk your_sk -g test/experiments/mock_devices/mock_all.json
|
||||
1. 访问 Web 界面,进入"仪器耗材"模块
|
||||
2. 在"仪器设备"区域找到并添加上述设备
|
||||
3. 在"物料耗材"区域找到并添加容器
|
||||
4. 在workstation中配置protocol_type包含PumpTransferProtocol
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
@@ -426,8 +452,9 @@ unilab --ak your_ak --sk your_sk -g test/experiments/mock_devices/mock_all.json
|
||||
**操作步骤:**
|
||||
|
||||
1. 将两个 `container` 拖拽到 `workstation` 中
|
||||
2. 将 `virtual_transfer_pump` 拖拽到 `workstation` 中
|
||||
3. 在画布上连接它们(建立父子关系)
|
||||
2. 将 `virtual_multiway_valve` 拖拽到 `workstation` 中
|
||||
3. 将 `virtual_transfer_pump` 拖拽到 `workstation` 中
|
||||
4. 在画布上连接它们(建立父子关系)
|
||||
|
||||

|
||||
|
||||
@@ -768,7 +795,43 @@ Waiting for host service...
|
||||
|
||||
详细的设备驱动编写指南请参考 [添加设备驱动](../developer_guide/add_device.md)。
|
||||
|
||||
#### 9.1 为什么需要自定义设备?
|
||||
#### 9.1 开发环境准备
|
||||
|
||||
**推荐使用 `unilabos-env` + `pip install -e .` + `uv pip install`** 进行设备开发:
|
||||
|
||||
```bash
|
||||
# 1. 创建环境并安装 unilabos-env(ROS2 + conda 依赖 + uv)
|
||||
mamba create -n unilab python=3.11.14
|
||||
conda activate unilab
|
||||
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
|
||||
|
||||
# 2. 克隆代码
|
||||
git clone https://github.com/deepmodeling/Uni-Lab-OS.git
|
||||
cd Uni-Lab-OS
|
||||
|
||||
# 3. 以可编辑模式安装(推荐使用脚本,自动检测中文环境)
|
||||
python scripts/dev_install.py
|
||||
|
||||
# 或手动安装:
|
||||
pip install -e .
|
||||
uv pip install -r unilabos/utils/requirements.txt
|
||||
```
|
||||
|
||||
**为什么使用这种方式?**
|
||||
- `unilabos-env` 提供 ROS2 核心组件和 uv(通过 conda 安装,避免编译)
|
||||
- `unilabos/utils/requirements.txt` 包含所有运行时需要的 pip 依赖
|
||||
- `dev_install.py` 自动检测中文环境,中文系统自动使用清华镜像
|
||||
- 使用 `uv` 替代 `pip`,安装速度更快
|
||||
- 可编辑模式:代码修改**立即生效**,无需重新安装
|
||||
|
||||
**如果安装失败或速度太慢**,可以手动执行(使用清华镜像):
|
||||
|
||||
```bash
|
||||
pip install -e . -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
|
||||
uv pip install -r unilabos/utils/requirements.txt -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
|
||||
```
|
||||
|
||||
#### 9.2 为什么需要自定义设备?
|
||||
|
||||
Uni-Lab-OS 内置了常见设备,但您的实验室可能有特殊设备需要集成:
|
||||
|
||||
@@ -777,7 +840,7 @@ Uni-Lab-OS 内置了常见设备,但您的实验室可能有特殊设备需要
|
||||
- 特殊的实验流程
|
||||
- 第三方设备集成
|
||||
|
||||
#### 9.2 创建 Python 包
|
||||
#### 9.3 创建 Python 包
|
||||
|
||||
为了方便开发和管理,建议为您的实验室创建独立的 Python 包。
|
||||
|
||||
@@ -814,7 +877,7 @@ touch my_lab_devices/my_lab_devices/__init__.py
|
||||
touch my_lab_devices/my_lab_devices/devices/__init__.py
|
||||
```
|
||||
|
||||
#### 9.3 创建 setup.py
|
||||
#### 9.4 创建 setup.py
|
||||
|
||||
```python
|
||||
# my_lab_devices/setup.py
|
||||
@@ -845,7 +908,7 @@ setup(
|
||||
)
|
||||
```
|
||||
|
||||
#### 9.4 开发安装
|
||||
#### 9.5 开发安装
|
||||
|
||||
使用 `-e` 参数进行可编辑安装,这样代码修改后立即生效:
|
||||
|
||||
@@ -860,7 +923,7 @@ pip install -e . -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
|
||||
- 方便调试和测试
|
||||
- 支持版本控制(git)
|
||||
|
||||
#### 9.5 编写设备驱动
|
||||
#### 9.6 编写设备驱动
|
||||
|
||||
创建设备驱动文件:
|
||||
|
||||
@@ -1001,7 +1064,7 @@ class MyPump:
|
||||
- **返回 Dict**:所有动作方法返回字典类型
|
||||
- **文档字符串**:详细说明参数和功能
|
||||
|
||||
#### 9.6 测试设备驱动
|
||||
#### 9.7 测试设备驱动
|
||||
|
||||
创建简单的测试脚本:
|
||||
|
||||
@@ -1807,7 +1870,7 @@ unilab --ak your_ak --sk your_sk -g graph.json \
|
||||
|
||||
#### 14.5 社区支持
|
||||
|
||||
- **GitHub Issues**:[https://github.com/dptech-corp/Uni-Lab-OS/issues](https://github.com/dptech-corp/Uni-Lab-OS/issues)
|
||||
- **GitHub Issues**:[https://github.com/deepmodeling/Uni-Lab-OS/issues](https://github.com/deepmodeling/Uni-Lab-OS/issues)
|
||||
- **官方网站**:[https://uni-lab.bohrium.com](https://uni-lab.bohrium.com)
|
||||
|
||||
---
|
||||
|
||||
@@ -463,7 +463,7 @@ Uni-Lab 使用 `ResourceDictInstance.get_resource_instance_from_dict()` 方法
|
||||
### 使用示例
|
||||
|
||||
```python
|
||||
from unilabos.ros.nodes.resource_tracker import ResourceDictInstance
|
||||
from unilabos.resources.resource_tracker import ResourceDictInstance
|
||||
|
||||
# 旧格式节点
|
||||
old_format_node = {
|
||||
@@ -477,10 +477,10 @@ old_format_node = {
|
||||
instance = ResourceDictInstance.get_resource_instance_from_dict(old_format_node)
|
||||
|
||||
# 访问标准化后的数据
|
||||
print(instance.res_content.id) # "pump_1"
|
||||
print(instance.res_content.uuid) # 自动生成的 UUID
|
||||
print(instance.res_content.id) # "pump_1"
|
||||
print(instance.res_content.uuid) # 自动生成的 UUID
|
||||
print(instance.res_content.config) # {}
|
||||
print(instance.res_content.data) # {}
|
||||
print(instance.res_content.data) # {}
|
||||
```
|
||||
|
||||
### 格式迁移建议
|
||||
@@ -857,4 +857,4 @@ class ResourceDictPosition(BaseModel):
|
||||
- 在 Web 界面中使用模板创建
|
||||
- 参考示例文件:`test/experiments/` 目录
|
||||
- 查看 ResourceDict 源码了解完整定义
|
||||
- [GitHub 讨论区](https://github.com/dptech-corp/Uni-Lab-OS/discussions)
|
||||
- [GitHub 讨论区](https://github.com/deepmodeling/Uni-Lab-OS/discussions)
|
||||
|
||||
BIN
docs/user_guide/image/add_protocol.png
Normal file
|
After Width: | Height: | Size: 81 KiB |
|
Before Width: | Height: | Size: 275 KiB After Width: | Height: | Size: 415 KiB |
@@ -13,15 +13,26 @@
|
||||
- 开发者需要 Git 和基本的 Python 开发知识
|
||||
- 自定义 msgs 需要 GitHub 账号
|
||||
|
||||
## 安装包选择
|
||||
|
||||
Uni-Lab-OS 提供三个安装包版本,根据您的需求选择:
|
||||
|
||||
| 安装包 | 适用场景 | 包含组件 | 磁盘占用 |
|
||||
|--------|----------|----------|----------|
|
||||
| **unilabos** | **推荐大多数用户**,生产部署 | 完整安装包,开箱即用 | ~2-3 GB |
|
||||
| **unilabos-env** | 开发者环境(可编辑安装) | 仅环境依赖,通过 pip 安装 unilabos | ~2 GB |
|
||||
| **unilabos-full** | 仿真可视化、完整功能体验 | unilabos + 完整 ROS2 桌面版 + Gazebo + MoveIt | ~8-10 GB |
|
||||
|
||||
## 安装方式选择
|
||||
|
||||
根据您的使用场景,选择合适的安装方式:
|
||||
|
||||
| 安装方式 | 适用人群 | 特点 | 安装时间 |
|
||||
| ---------------------- | -------------------- | ------------------------------ | ---------------------------- |
|
||||
| **方式一:一键安装** | 实验室用户、快速体验 | 预打包环境,离线可用,无需配置 | 5-10 分钟 (网络良好的情况下) |
|
||||
| **方式二:手动安装** | 标准用户、生产环境 | 灵活配置,版本可控 | 10-20 分钟 |
|
||||
| **方式三:开发者安装** | 开发者、需要修改源码 | 可编辑模式,支持自定义 msgs | 20-30 分钟 |
|
||||
| 安装方式 | 适用人群 | 推荐安装包 | 特点 | 安装时间 |
|
||||
| ---------------------- | -------------------- | ----------------- | ------------------------------ | ---------------------------- |
|
||||
| **方式一:一键安装** | 快速体验、演示 | 预打包环境 | 离线可用,无需配置 | 5-10 分钟 (网络良好的情况下) |
|
||||
| **方式二:手动安装** | **大多数用户** | `unilabos` | 完整功能,开箱即用 | 10-20 分钟 |
|
||||
| **方式三:开发者安装** | 开发者、需要修改源码 | `unilabos-env` | 可编辑模式,支持自定义开发 | 20-30 分钟 |
|
||||
| **仿真/可视化** | 仿真测试、可视化调试 | `unilabos-full` | 含 Gazebo、rviz2、MoveIt | 30-60 分钟 |
|
||||
|
||||
---
|
||||
|
||||
@@ -37,7 +48,7 @@
|
||||
|
||||
#### 第一步:下载预打包环境
|
||||
|
||||
1. 访问 [GitHub Actions - Conda Pack Build](https://github.com/dptech-corp/Uni-Lab-OS/actions/workflows/conda-pack-build.yml)
|
||||
1. 访问 [GitHub Actions - Conda Pack Build](https://github.com/deepmodeling/Uni-Lab-OS/actions/workflows/conda-pack-build.yml)
|
||||
|
||||
2. 选择最新的成功构建记录(绿色勾号 ✓)
|
||||
|
||||
@@ -144,17 +155,38 @@ bash Miniforge3-$(uname)-$(uname -m).sh
|
||||
使用以下命令创建 Uni-Lab 专用环境:
|
||||
|
||||
```bash
|
||||
mamba create -n unilab python=3.11.11 # 目前ros2组件依赖版本大多为3.11.11
|
||||
mamba create -n unilab python=3.11.14 # 目前ros2组件依赖版本大多为3.11.14
|
||||
mamba activate unilab
|
||||
mamba install -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
|
||||
|
||||
# 选择安装包(三选一):
|
||||
|
||||
# 方案 A:标准安装(推荐大多数用户)
|
||||
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
|
||||
|
||||
# 方案 B:开发者环境(可编辑模式开发)
|
||||
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
|
||||
# 然后安装 unilabos 和 pip 依赖:
|
||||
git clone https://github.com/deepmodeling/Uni-Lab-OS.git && cd Uni-Lab-OS
|
||||
pip install -e .
|
||||
uv pip install -r unilabos/utils/requirements.txt
|
||||
|
||||
# 方案 C:完整版(含仿真和可视化工具)
|
||||
mamba install uni-lab::unilabos-full -c robostack-staging -c conda-forge
|
||||
```
|
||||
|
||||
**参数说明**:
|
||||
|
||||
- `-n unilab`: 创建名为 "unilab" 的环境
|
||||
- `uni-lab::unilabos`: 从 uni-lab channel 安装 unilabos 包
|
||||
- `uni-lab::unilabos`: 安装 unilabos 完整包,开箱即用(推荐)
|
||||
- `uni-lab::unilabos-env`: 仅安装环境依赖,适合开发者使用 `pip install -e .`
|
||||
- `uni-lab::unilabos-full`: 安装完整包(含 ROS2 Desktop、Gazebo、MoveIt 等)
|
||||
- `-c robostack-staging -c conda-forge`: 添加额外的软件源
|
||||
|
||||
**包选择建议**:
|
||||
- **日常使用/生产部署**:安装 `unilabos`(推荐,完整功能,开箱即用)
|
||||
- **开发者**:安装 `unilabos-env`,然后使用 `uv pip install -r unilabos/utils/requirements.txt` 安装依赖,再 `pip install -e .` 进行可编辑安装
|
||||
- **仿真/可视化**:安装 `unilabos-full`(Gazebo、rviz2、MoveIt)
|
||||
|
||||
**如果遇到网络问题**,可以使用清华镜像源加速下载:
|
||||
|
||||
```bash
|
||||
@@ -163,8 +195,14 @@ mamba config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/m
|
||||
mamba config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/free/
|
||||
mamba config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/cloud/conda-forge/
|
||||
|
||||
# 然后重新执行安装命令
|
||||
# 然后重新执行安装命令(推荐标准安装)
|
||||
mamba create -n unilab uni-lab::unilabos -c robostack-staging
|
||||
|
||||
# 或完整版(仿真/可视化)
|
||||
mamba create -n unilab uni-lab::unilabos-full -c robostack-staging
|
||||
|
||||
# pip 安装时使用清华镜像(开发者安装时使用)
|
||||
uv pip install -r unilabos/utils/requirements.txt -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
|
||||
```
|
||||
|
||||
### 第三步:激活环境
|
||||
@@ -189,13 +227,13 @@ conda activate unilab
|
||||
### 第一步:克隆仓库
|
||||
|
||||
```bash
|
||||
git clone https://github.com/dptech-corp/Uni-Lab-OS.git
|
||||
git clone https://github.com/deepmodeling/Uni-Lab-OS.git
|
||||
cd Uni-Lab-OS
|
||||
```
|
||||
|
||||
如果您需要贡献代码,建议先 Fork 仓库:
|
||||
|
||||
1. 访问 https://github.com/dptech-corp/Uni-Lab-OS
|
||||
1. 访问 https://github.com/deepmodeling/Uni-Lab-OS
|
||||
2. 点击右上角的 "Fork" 按钮
|
||||
3. Clone 您的 Fork 版本:
|
||||
```bash
|
||||
@@ -203,58 +241,87 @@ cd Uni-Lab-OS
|
||||
cd Uni-Lab-OS
|
||||
```
|
||||
|
||||
### 第二步:安装基础环境
|
||||
### 第二步:安装开发环境(unilabos-env)
|
||||
|
||||
**推荐方式**:先通过**方式一(一键安装)**或**方式二(手动安装)**完成基础环境的安装,这将包含所有必需的依赖项(ROS2、msgs 等)。
|
||||
|
||||
#### 选项 A:通过一键安装(推荐)
|
||||
|
||||
参考上文"方式一:一键安装",完成基础环境的安装后,激活环境:
|
||||
**重要**:开发者请使用 `unilabos-env` 包,它专为开发者设计:
|
||||
- 包含 ROS2 核心组件和消息包(ros-humble-ros-core、std-msgs、geometry-msgs 等)
|
||||
- 包含 transforms3d、cv-bridge、tf2 等 conda 依赖
|
||||
- 包含 `uv` 工具,用于快速安装 pip 依赖
|
||||
- **不包含** pip 依赖和 unilabos 包(由 `pip install -e .` 和 `uv pip install` 安装)
|
||||
|
||||
```bash
|
||||
# 创建并激活环境
|
||||
mamba create -n unilab python=3.11.14
|
||||
conda activate unilab
|
||||
|
||||
# 安装开发者环境包(ROS2 + conda 依赖 + uv)
|
||||
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
|
||||
```
|
||||
|
||||
#### 选项 B:通过手动安装
|
||||
### 第三步:安装 pip 依赖和可编辑模式安装
|
||||
|
||||
参考上文"方式二:手动安装",创建并安装环境:
|
||||
|
||||
```bash
|
||||
mamba create -n unilab python=3.11.11
|
||||
conda activate unilab
|
||||
mamba install -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
|
||||
```
|
||||
|
||||
**说明**:这会安装包括 Python 3.11.11、ROS2 Humble、ros-humble-unilabos-msgs 和所有必需依赖
|
||||
|
||||
### 第三步:切换到开发版本
|
||||
|
||||
现在你已经有了一个完整可用的 Uni-Lab 环境,接下来将 unilabos 包切换为开发版本:
|
||||
克隆代码并安装依赖:
|
||||
|
||||
```bash
|
||||
# 确保环境已激活
|
||||
conda activate unilab
|
||||
|
||||
# 卸载 pip 安装的 unilabos(保留所有 conda 依赖)
|
||||
pip uninstall unilabos -y
|
||||
|
||||
# 克隆 dev 分支(如果还未克隆)
|
||||
cd /path/to/your/workspace
|
||||
git clone -b dev https://github.com/dptech-corp/Uni-Lab-OS.git
|
||||
# 或者如果已经克隆,切换到 dev 分支
|
||||
# 克隆仓库(如果还未克隆)
|
||||
git clone https://github.com/deepmodeling/Uni-Lab-OS.git
|
||||
cd Uni-Lab-OS
|
||||
|
||||
# 切换到 dev 分支(可选)
|
||||
git checkout dev
|
||||
git pull
|
||||
|
||||
# 以可编辑模式安装开发版 unilabos
|
||||
pip install -e . -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
|
||||
```
|
||||
|
||||
**参数说明**:
|
||||
**推荐:使用安装脚本**(自动检测中文环境,使用 uv 加速):
|
||||
|
||||
- `-e`: editable mode(可编辑模式),代码修改立即生效,无需重新安装
|
||||
- `-i`: 使用清华镜像源加速下载
|
||||
- `pip uninstall unilabos`: 只卸载 pip 安装的 unilabos 包,不影响 conda 安装的其他依赖(如 ROS2、msgs 等)
|
||||
```bash
|
||||
# 自动检测中文环境,如果是中文系统则使用清华镜像
|
||||
python scripts/dev_install.py
|
||||
|
||||
# 或者手动指定:
|
||||
python scripts/dev_install.py --china # 强制使用清华镜像
|
||||
python scripts/dev_install.py --no-mirror # 强制使用 PyPI
|
||||
python scripts/dev_install.py --skip-deps # 跳过 pip 依赖安装
|
||||
python scripts/dev_install.py --use-pip # 使用 pip 而非 uv
|
||||
```
|
||||
|
||||
**手动安装**(如果脚本安装失败或速度太慢):
|
||||
|
||||
```bash
|
||||
# 1. 安装 unilabos(可编辑模式)
|
||||
pip install -e .
|
||||
|
||||
# 2. 使用 uv 安装 pip 依赖(推荐,速度更快)
|
||||
uv pip install -r unilabos/utils/requirements.txt
|
||||
|
||||
# 国内用户使用清华镜像:
|
||||
pip install -e . -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
|
||||
uv pip install -r unilabos/utils/requirements.txt -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
|
||||
```
|
||||
|
||||
**注意**:
|
||||
- `uv` 已包含在 `unilabos-env` 中,无需单独安装
|
||||
- `unilabos/utils/requirements.txt` 包含运行 unilabos 所需的所有 pip 依赖
|
||||
- 部分特殊包(如 pylabrobot)会在运行时由 unilabos 自动检测并安装
|
||||
|
||||
**为什么使用可编辑模式?**
|
||||
|
||||
- `-e` (editable mode):代码修改**立即生效**,无需重新安装
|
||||
- 适合开发调试:修改代码后直接运行测试
|
||||
- 与 `unilabos-env` 配合:环境依赖由 conda 管理,unilabos 代码由 pip 管理
|
||||
|
||||
**验证安装**:
|
||||
|
||||
```bash
|
||||
# 检查 unilabos 版本
|
||||
python -c "import unilabos; print(unilabos.__version__)"
|
||||
|
||||
# 检查安装位置(应该指向你的代码目录)
|
||||
pip show unilabos | grep Location
|
||||
```
|
||||
|
||||
### 第四步:安装或自定义 ros-humble-unilabos-msgs(可选)
|
||||
|
||||
@@ -464,7 +531,45 @@ cd $CONDA_PREFIX/envs/unilab
|
||||
|
||||
### 问题 8: 环境很大,有办法减小吗?
|
||||
|
||||
**解决方案**: 预打包的环境包含所有依赖,通常较大(压缩后 2-5GB)。这是为了确保离线安装和完整功能。如果空间有限,考虑使用方式二手动安装,只安装需要的组件。
|
||||
**解决方案**:
|
||||
|
||||
1. **使用 `unilabos` 标准版**(推荐大多数用户):
|
||||
```bash
|
||||
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
|
||||
```
|
||||
标准版包含完整功能,环境大小约 2-3GB(相比完整版的 8-10GB)。
|
||||
|
||||
2. **使用 `unilabos-env` 开发者版**(最小化):
|
||||
```bash
|
||||
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
|
||||
# 然后手动安装依赖
|
||||
pip install -e .
|
||||
uv pip install -r unilabos/utils/requirements.txt
|
||||
```
|
||||
开发者版只包含环境依赖,体积最小约 2GB。
|
||||
|
||||
3. **按需安装额外组件**:
|
||||
如果后续需要特定功能,可以单独安装:
|
||||
```bash
|
||||
# 需要 Jupyter
|
||||
mamba install jupyter jupyros
|
||||
|
||||
# 需要可视化
|
||||
mamba install matplotlib opencv
|
||||
|
||||
# 需要仿真(注意:这会安装大量依赖)
|
||||
mamba install ros-humble-gazebo-ros
|
||||
```
|
||||
|
||||
4. **预打包环境问题**:
|
||||
预打包环境(方式一)包含所有依赖,通常较大(压缩后 2-5GB)。这是为了确保离线安装和完整功能。
|
||||
|
||||
**包选择建议**:
|
||||
| 需求 | 推荐包 | 预估大小 |
|
||||
|------|--------|----------|
|
||||
| 日常使用/生产部署 | `unilabos` | ~2-3 GB |
|
||||
| 开发调试(可编辑模式) | `unilabos-env` | ~2 GB |
|
||||
| 仿真/可视化 | `unilabos-full` | ~8-10 GB |
|
||||
|
||||
### 问题 9: 如何更新到最新版本?
|
||||
|
||||
@@ -503,14 +608,15 @@ mamba update ros-humble-unilabos-msgs -c uni-lab -c robostack-staging -c conda-f
|
||||
## 需要帮助?
|
||||
|
||||
- **故障排查**: 查看更详细的故障排查信息
|
||||
- **GitHub Issues**: [报告问题](https://github.com/dptech-corp/Uni-Lab-OS/issues)
|
||||
- **GitHub Issues**: [报告问题](https://github.com/deepmodeling/Uni-Lab-OS/issues)
|
||||
- **开发者文档**: 查看开发者指南获取更多技术细节
|
||||
- **社区讨论**: [GitHub Discussions](https://github.com/dptech-corp/Uni-Lab-OS/discussions)
|
||||
- **社区讨论**: [GitHub Discussions](https://github.com/deepmodeling/Uni-Lab-OS/discussions)
|
||||
|
||||
---
|
||||
|
||||
**提示**:
|
||||
|
||||
- 生产环境推荐使用方式二(手动安装)的稳定版本
|
||||
- 开发和测试推荐使用方式三(开发者安装)
|
||||
- 快速体验和演示推荐使用方式一(一键安装)
|
||||
- **大多数用户**推荐使用方式二(手动安装)的 `unilabos` 标准版
|
||||
- **开发者**推荐使用方式三(开发者安装),安装 `unilabos-env` 后使用 `uv pip install -r unilabos/utils/requirements.txt` 安装依赖
|
||||
- **仿真/可视化**推荐安装 `unilabos-full` 完整版
|
||||
- **快速体验和演示**推荐使用方式一(一键安装)
|
||||
|
||||
@@ -22,7 +22,6 @@ options:
|
||||
--is_slave Run the backend as slave node (without host privileges).
|
||||
--slave_no_host Skip waiting for host service in slave mode
|
||||
--upload_registry Upload registry information when starting unilab
|
||||
--use_remote_resource Use remote resources when starting unilab
|
||||
--config CONFIG Configuration file path, supports .py format Python config files
|
||||
--port PORT Port for web service information page
|
||||
--disable_browser Disable opening information page on startup
|
||||
@@ -85,7 +84,7 @@ Uni-Lab 的启动过程分为以下几个阶段:
|
||||
支持两种方式:
|
||||
|
||||
- **本地文件**:使用 `-g` 指定图谱文件(支持 JSON 和 GraphML 格式)
|
||||
- **远程资源**:使用 `--use_remote_resource` 从云端获取
|
||||
- **远程资源**:不指定本地文件即可
|
||||
|
||||
### 7. 注册表构建
|
||||
|
||||
@@ -196,7 +195,7 @@ unilab --config path/to/your/config.py
|
||||
unilab --ak your_ak --sk your_sk -g path/to/graph.json --upload_registry
|
||||
|
||||
# 使用远程资源启动
|
||||
unilab --ak your_ak --sk your_sk --use_remote_resource
|
||||
unilab --ak your_ak --sk your_sk
|
||||
|
||||
# 更新注册表
|
||||
unilab --ak your_ak --sk your_sk --complete_registry
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
package:
|
||||
name: ros-humble-unilabos-msgs
|
||||
version: 0.10.12
|
||||
version: 0.10.19
|
||||
source:
|
||||
path: ../../unilabos_msgs
|
||||
target_directory: src
|
||||
@@ -17,7 +17,7 @@ build:
|
||||
- bash $SRC_DIR/build_ament_cmake.sh
|
||||
|
||||
about:
|
||||
repository: https://github.com/dptech-corp/Uni-Lab-OS
|
||||
repository: https://github.com/deepmodeling/Uni-Lab-OS
|
||||
license: BSD-3-Clause
|
||||
description: "ros-humble-unilabos-msgs is a package that provides message definitions for Uni-Lab-OS."
|
||||
|
||||
@@ -25,7 +25,7 @@ requirements:
|
||||
build:
|
||||
- ${{ compiler('cxx') }}
|
||||
- ${{ compiler('c') }}
|
||||
- python ==3.11.11
|
||||
- python ==3.11.14
|
||||
- numpy
|
||||
- if: build_platform != target_platform
|
||||
then:
|
||||
@@ -63,14 +63,14 @@ requirements:
|
||||
- robostack-staging::ros-humble-rosidl-default-generators
|
||||
- robostack-staging::ros-humble-std-msgs
|
||||
- robostack-staging::ros-humble-geometry-msgs
|
||||
- robostack-staging::ros2-distro-mutex=0.6
|
||||
- robostack-staging::ros2-distro-mutex=0.7
|
||||
run:
|
||||
- robostack-staging::ros-humble-action-msgs
|
||||
- robostack-staging::ros-humble-ros-workspace
|
||||
- robostack-staging::ros-humble-rosidl-default-runtime
|
||||
- robostack-staging::ros-humble-std-msgs
|
||||
- robostack-staging::ros-humble-geometry-msgs
|
||||
- robostack-staging::ros2-distro-mutex=0.6
|
||||
- robostack-staging::ros2-distro-mutex=0.7
|
||||
- if: osx and x86_64
|
||||
then:
|
||||
- __osx >=${{ MACOSX_DEPLOYMENT_TARGET|default('10.14') }}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
package:
|
||||
name: unilabos
|
||||
version: "0.10.12"
|
||||
version: "0.10.19"
|
||||
|
||||
source:
|
||||
path: ../..
|
||||
|
||||
@@ -85,7 +85,7 @@ Verification:
|
||||
-------------
|
||||
|
||||
The verify_installation.py script will check:
|
||||
- Python version (3.11.11)
|
||||
- Python version (3.11.14)
|
||||
- ROS2 rclpy installation
|
||||
- UniLabOS installation and dependencies
|
||||
|
||||
@@ -104,7 +104,7 @@ Build Information:
|
||||
|
||||
Branch: {branch}
|
||||
Platform: {platform}
|
||||
Python: 3.11.11
|
||||
Python: 3.11.14
|
||||
Date: {build_date}
|
||||
|
||||
Troubleshooting:
|
||||
@@ -126,7 +126,7 @@ If installation fails:
|
||||
For more help:
|
||||
- Documentation: docs/user_guide/installation.md
|
||||
- Quick Start: QUICK_START_CONDA_PACK.md
|
||||
- Issues: https://github.com/dptech-corp/Uni-Lab-OS/issues
|
||||
- Issues: https://github.com/deepmodeling/Uni-Lab-OS/issues
|
||||
|
||||
License:
|
||||
--------
|
||||
@@ -134,7 +134,7 @@ License:
|
||||
UniLabOS is licensed under GPL-3.0-only.
|
||||
See LICENSE file for details.
|
||||
|
||||
Repository: https://github.com/dptech-corp/Uni-Lab-OS
|
||||
Repository: https://github.com/deepmodeling/Uni-Lab-OS
|
||||
"""
|
||||
|
||||
return readme
|
||||
|
||||
214
scripts/dev_install.py
Normal file
@@ -0,0 +1,214 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Development installation script for UniLabOS.
|
||||
Auto-detects Chinese locale and uses appropriate mirror.
|
||||
|
||||
Usage:
|
||||
python scripts/dev_install.py
|
||||
python scripts/dev_install.py --no-mirror # Force no mirror
|
||||
python scripts/dev_install.py --china # Force China mirror
|
||||
python scripts/dev_install.py --skip-deps # Skip pip dependencies installation
|
||||
|
||||
Flow:
|
||||
1. pip install -e . (install unilabos in editable mode)
|
||||
2. Detect Chinese locale
|
||||
3. Use uv to install pip dependencies from requirements.txt
|
||||
4. Special packages (like pylabrobot) are handled by environment_check.py at runtime
|
||||
"""
|
||||
|
||||
import locale
|
||||
import subprocess
|
||||
import sys
|
||||
import argparse
|
||||
from pathlib import Path
|
||||
|
||||
# Tsinghua mirror URL
|
||||
TSINGHUA_MIRROR = "https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple"
|
||||
|
||||
|
||||
def is_chinese_locale() -> bool:
    """Detect whether the system appears to use a Chinese locale.

    Mirrors EnvironmentChecker._is_chinese_locale(), but avoids
    ``locale.getdefaultlocale()``, which is deprecated since Python 3.11
    (the Python version this project targets). The active locale is
    checked first, then the usual POSIX locale environment variables.

    Returns:
        True if a Chinese locale is detected, False otherwise (including
        when locale detection fails for any reason).
    """
    import os  # local import: keeps the module-level import block unchanged

    candidates = []
    try:
        # getlocale() can raise ValueError on some platforms for unknown locales
        candidates.append(locale.getlocale()[0])
    except Exception:
        pass
    candidates.extend(os.environ.get(var) for var in ("LC_ALL", "LC_CTYPE", "LANG"))
    for lang in candidates:
        if lang and ("zh" in lang.lower() or "chinese" in lang.lower()):
            return True
    return False
|
||||
|
||||
|
||||
def run_command(cmd: list, description: str, retry: int = 2) -> bool:
    """Run *cmd* as a subprocess, retrying on non-zero exit.

    Args:
        cmd: Command and arguments, passed to ``subprocess.run`` as a list
            (no shell involved).
        description: Human-readable label used in the log output.
        retry: Number of additional attempts after the first failure.

    Returns:
        True if the command exited with status 0; False after all retries
        fail or on timeout. Note that timeouts are NOT retried — a hung
        command is assumed to hang again.
    """
    print(f"[INFO] {description}")
    print(f"[CMD] {' '.join(cmd)}")

    for attempt in range(retry + 1):
        try:
            # check=True raises CalledProcessError on non-zero exit;
            # the CompletedProcess result itself is not needed.
            subprocess.run(cmd, check=True, timeout=600)
            print(f"[OK] {description}")
            return True
        except subprocess.CalledProcessError as e:
            if attempt < retry:
                print(f"[WARN] Attempt {attempt + 1} failed, retrying...")
            else:
                print(f"[ERROR] {description} failed: {e}")
                return False
        except subprocess.TimeoutExpired:
            print(f"[ERROR] {description} timed out")
            return False
    return False
|
||||
|
||||
|
||||
def install_editable(project_root: Path, use_mirror: bool) -> bool:
    """Install unilabos from *project_root* in editable mode via pip.

    When *use_mirror* is true, the Tsinghua PyPI mirror is appended as
    the index URL.
    """
    pip_cmd = [sys.executable, "-m", "pip", "install", "-e", str(project_root)]
    if use_mirror:
        pip_cmd += ["-i", TSINGHUA_MIRROR]
    return run_command(pip_cmd, "Installing unilabos in editable mode")
|
||||
|
||||
|
||||
def install_requirements_uv(requirements_file: Path, use_mirror: bool) -> bool:
    """Install the pip requirements with uv (provided via conda-forge::uv).

    Retries up to twice on failure; optionally uses the Tsinghua mirror.
    """
    uv_cmd = ["uv", "pip", "install", "-r", str(requirements_file)]
    if use_mirror:
        uv_cmd += ["-i", TSINGHUA_MIRROR]
    return run_command(uv_cmd, "Installing pip dependencies with uv", retry=2)
|
||||
|
||||
|
||||
def install_requirements_pip(requirements_file: Path, use_mirror: bool) -> bool:
    """Fallback path: install the pip requirements with plain pip.

    Used when uv is not available; behavior otherwise matches the uv path.
    """
    pip_cmd = [sys.executable, "-m", "pip", "install", "-r", str(requirements_file)]
    if use_mirror:
        pip_cmd += ["-i", TSINGHUA_MIRROR]
    return run_command(pip_cmd, "Installing pip dependencies with pip", retry=2)
|
||||
|
||||
|
||||
def check_uv_available() -> bool:
    """Return True when the ``uv`` executable can be invoked.

    Probes by running ``uv --version``; a missing binary or a non-zero
    exit status both count as "not available".
    """
    try:
        subprocess.run(["uv", "--version"], capture_output=True, check=True)
    except (subprocess.CalledProcessError, FileNotFoundError):
        return False
    return True
|
||||
|
||||
|
||||
def main():
    """CLI entry point: install unilabos editable, then its pip dependencies.

    Steps:
        1. ``pip install -e`` of the project itself (fatal on failure).
        2. Install requirements via uv when available, falling back to pip
           (best-effort; prints manual instructions on failure).

    Mirror selection: --no-mirror / --china flags win; otherwise the locale
    decides (is_chinese_locale).
    """
    parser = argparse.ArgumentParser(description="Development installation script for UniLabOS")
    parser.add_argument("--china", action="store_true", help="Force use China mirror (Tsinghua)")
    parser.add_argument("--no-mirror", action="store_true", help="Force use default PyPI (no mirror)")
    parser.add_argument(
        "--skip-deps", action="store_true", help="Skip pip dependencies installation (only install unilabos)"
    )
    parser.add_argument("--use-pip", action="store_true", help="Use pip instead of uv for dependencies")
    args = parser.parse_args()

    # Resolve the project layout relative to this script's location.
    project_root = Path(__file__).parent.parent
    requirements_file = project_root / "unilabos" / "utils" / "requirements.txt"

    if not (project_root / "setup.py").exists():
        print(f"[ERROR] setup.py not found in {project_root}")
        sys.exit(1)

    banner = "=" * 60
    print(banner)
    print("UniLabOS Development Installation")
    print(banner)
    print(f"Project root: {project_root}")
    print()

    # Decide whether to route installs through the Tsinghua mirror.
    if args.no_mirror:
        mirror_enabled = False
        print("[INFO] Mirror disabled by --no-mirror flag")
    elif args.china:
        mirror_enabled = True
        print("[INFO] China mirror enabled by --china flag")
    else:
        mirror_enabled = is_chinese_locale()
        if mirror_enabled:
            print("[INFO] Chinese locale detected, using Tsinghua mirror")
        else:
            print("[INFO] Non-Chinese locale detected, using default PyPI")

    print()

    # Suffix appended to the manual-fallback command hints below.
    mirror_suffix = f" -i {TSINGHUA_MIRROR}" if mirror_enabled else ""

    # Step 1: editable install of the package itself (fatal on failure).
    print("[STEP 1] Installing unilabos in editable mode...")
    if not install_editable(project_root, mirror_enabled):
        print("[ERROR] Failed to install unilabos")
        print()
        print("Manual fallback:")
        print(f" pip install -e {project_root}{mirror_suffix}")
        sys.exit(1)

    print()

    # Step 2: pip dependencies (best-effort, optional).
    if args.skip_deps:
        print("[INFO] Skipping pip dependencies installation (--skip-deps)")
    else:
        print("[STEP 2] Installing pip dependencies...")

        if not requirements_file.exists():
            print(f"[WARN] Requirements file not found: {requirements_file}")
            print("[INFO] Skipping dependencies installation")
        else:
            # Prefer uv (faster); fall back to pip on failure or absence.
            if args.use_pip:
                print("[INFO] Using pip (--use-pip flag)")
                deps_ok = install_requirements_pip(requirements_file, mirror_enabled)
            elif check_uv_available():
                print("[INFO] Using uv (installed via conda-forge::uv)")
                deps_ok = install_requirements_uv(requirements_file, mirror_enabled)
                if not deps_ok:
                    print("[WARN] uv failed, falling back to pip...")
                    deps_ok = install_requirements_pip(requirements_file, mirror_enabled)
            else:
                print("[WARN] uv not available (should be installed via: mamba install conda-forge::uv)")
                print("[INFO] Falling back to pip...")
                deps_ok = install_requirements_pip(requirements_file, mirror_enabled)

            if not deps_ok:
                print()
                print("[WARN] Failed to install some dependencies automatically.")
                print("You can manually install them:")
                print(f" uv pip install -r {requirements_file}{mirror_suffix}")
                print(" or:")
                print(f" pip install -r {requirements_file}{mirror_suffix}")

    print()
    print(banner)
    print("Installation complete!")
    print(banner)
    print()
    print("Note: Some special packages (like pylabrobot) are installed")
    print("automatically at runtime by unilabos if needed.")
    print()
    print("Verify installation:")
    print(' python -c "import unilabos; print(unilabos.__version__)"')
    print()
    print("If you encounter issues, you can manually install dependencies:")
    if mirror_enabled:
        print(f" uv pip install -r unilabos/utils/requirements.txt -i {TSINGHUA_MIRROR}")
    else:
        print(" uv pip install -r unilabos/utils/requirements.txt")
    print()
|
||||
|
||||
|
||||
# Allow running this installer directly as a script.
if __name__ == "__main__":
    main()
|
||||
2
setup.py
@@ -4,7 +4,7 @@ package_name = 'unilabos'
|
||||
|
||||
setup(
|
||||
name=package_name,
|
||||
version='0.10.12',
|
||||
version='0.10.19',
|
||||
packages=find_packages(),
|
||||
include_package_data=True,
|
||||
install_requires=['setuptools'],
|
||||
|
||||
7
tests/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
"""
|
||||
测试包根目录。
|
||||
|
||||
让 `tests.*` 模块可以被正常 import(例如给 `unilabos` 下的测试入口使用)。
|
||||
"""
|
||||
|
||||
|
||||
296
tests/compile/test_batch_transfer_protocol.py
Normal file
@@ -0,0 +1,296 @@
|
||||
"""
|
||||
批量转运编译器测试
|
||||
|
||||
覆盖:单物料退化、刚好一批、多批次、空操作、AGV 配置发现、children dict 状态。
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import networkx as nx
|
||||
|
||||
from unilabos.compile.batch_transfer_protocol import generate_batch_transfer_protocol
|
||||
from unilabos.compile.agv_transfer_protocol import generate_agv_transfer_protocol
|
||||
from unilabos.compile._agv_utils import find_agv_config, get_agv_capacity, split_batches
|
||||
|
||||
|
||||
# ============ 构建测试用设备图 ============
|
||||
|
||||
def _make_graph(capacity_x=2, capacity_y=1, capacity_z=1):
    """Build a test device graph containing an AGV node."""
    # Every route in the table carries the same nav/pick/place commands.
    route_commands = {
        "nav_command": '{"target": "LM1"}',
        "arm_pick": '{"task_name": "pick.urp"}',
        "arm_place": '{"task_name": "place.urp"}',
    }
    route_names = ("StationA->StationB", "AGV->StationA", "StationA->StationA")

    graph = nx.DiGraph()

    # AGV node with its protocol configuration.
    graph.add_node("AGV", **{
        "type": "device",
        "class_": "agv_transport_station",
        "config": {
            "protocol_type": ["AGVTransferProtocol", "BatchTransferProtocol"],
            "device_roles": {
                "navigator": "zhixing_agv",
                "arm": "zhixing_ur_arm",
            },
            # Each route gets its own copy of the shared commands.
            "route_table": {name: dict(route_commands) for name in route_names},
        },
    })

    # AGV child devices (node id doubles as class_ name).
    for child_id in ("zhixing_agv", "zhixing_ur_arm"):
        graph.add_node(child_id, type="device", class_=child_id)
        graph.add_edge("AGV", child_id)

    # AGV warehouse child resource.
    graph.add_node("agv_platform", **{
        "type": "warehouse",
        "config": {
            "name": "agv_platform",
            "num_items_x": capacity_x,
            "num_items_y": capacity_y,
            "num_items_z": capacity_z,
        },
    })
    graph.add_edge("AGV", "agv_platform")

    # Source / target workstations.
    for station_id in ("StationA", "StationB"):
        graph.add_node(station_id, type="device", class_="workstation")

    return graph
|
||||
|
||||
|
||||
def _make_repos(items_count=2):
|
||||
"""构建测试用的 from_repo 和 to_repo dict"""
|
||||
children = {}
|
||||
for i in range(items_count):
|
||||
pos = f"A{i + 1:02d}"
|
||||
children[pos] = {
|
||||
"id": f"resource_{i + 1}",
|
||||
"name": f"R{i + 1}",
|
||||
"parent": "StationA",
|
||||
"type": "resource",
|
||||
}
|
||||
|
||||
from_repo = {
|
||||
"StationA": {
|
||||
"id": "StationA",
|
||||
"name": "StationA",
|
||||
"children": children,
|
||||
}
|
||||
}
|
||||
to_repo = {
|
||||
"StationB": {
|
||||
"id": "StationB",
|
||||
"name": "StationB",
|
||||
"children": {},
|
||||
}
|
||||
}
|
||||
return from_repo, to_repo
|
||||
|
||||
|
||||
def _make_items(count=2):
|
||||
"""构建 transfer_resources / from_positions / to_positions"""
|
||||
resources = [
|
||||
{
|
||||
"id": f"resource_{i + 1}",
|
||||
"name": f"R{i + 1}",
|
||||
"sample_id": f"uuid-{i + 1}",
|
||||
"parent": "StationA",
|
||||
"type": "resource",
|
||||
}
|
||||
for i in range(count)
|
||||
]
|
||||
from_positions = [f"A{i + 1:02d}" for i in range(count)]
|
||||
to_positions = [f"A{i + 1:02d}" for i in range(count)]
|
||||
return resources, from_positions, to_positions
|
||||
|
||||
|
||||
# ============ _agv_utils 测试 ============
|
||||
|
||||
class TestAGVUtils:
    """Unit tests for the _agv_utils helpers."""

    def test_find_agv_config(self):
        config = find_agv_config(_make_graph())
        assert config["agv_id"] == "AGV"
        roles = config["device_roles"]
        assert roles["navigator"] == "zhixing_agv"
        assert roles["arm"] == "zhixing_ur_arm"
        assert "StationA->StationB" in config["route_table"]

    def test_find_agv_config_by_id(self):
        assert find_agv_config(_make_graph(), agv_id="AGV")["agv_id"] == "AGV"

    def test_find_agv_config_not_found(self):
        graph = nx.DiGraph()
        graph.add_node("SomeDevice", type="device", class_="pump")
        with pytest.raises(ValueError, match="未找到 AGV"):
            find_agv_config(graph)

    def test_get_agv_capacity(self):
        graph = _make_graph(capacity_x=2, capacity_y=1, capacity_z=1)
        assert get_agv_capacity(graph, "AGV") == 2

    def test_get_agv_capacity_multi_layer(self):
        graph = _make_graph(capacity_x=1, capacity_y=2, capacity_z=3)
        assert get_agv_capacity(graph, "AGV") == 6

    def test_split_batches_exact(self):
        assert split_batches([1, 2], 2) == [[1, 2]]

    def test_split_batches_overflow(self):
        assert split_batches([1, 2, 3], 2) == [[1, 2], [3]]

    def test_split_batches_single(self):
        assert split_batches([1], 4) == [[1]]

    def test_split_batches_zero_capacity(self):
        with pytest.raises(ValueError):
            split_batches([1], 0)
|
||||
|
||||
|
||||
# ============ 批量转运编译器测试 ============
|
||||
|
||||
class TestBatchTransferProtocol:
    """Tests for the batch transfer compiler."""

    @staticmethod
    def _steps_in_phase(steps, phase):
        """Filter steps whose transfer metadata marks the given phase."""
        return [s for s in steps if s.get("_transfer_meta", {}).get("phase") == phase]

    def test_empty_items(self):
        """An empty item list compiles to an empty step list."""
        from_repo, to_repo = _make_repos(0)
        steps = generate_batch_transfer_protocol(_make_graph(), from_repo, to_repo, [], [], [])
        assert steps == []

    def test_single_item(self):
        """A single item makes BatchTransfer degrade to the single-item path."""
        from_repo, to_repo = _make_repos(1)
        resources, src_pos, dst_pos = _make_items(1)
        steps = generate_batch_transfer_protocol(
            _make_graph(capacity_x=2), from_repo, to_repo, resources, src_pos, dst_pos
        )

        # Expected: nav to source + 1 pick + nav to target + 1 place = 4 steps.
        assert len(steps) == 4
        assert [s["action_name"] for s in steps] == [
            "send_nav_task", "move_pos_task", "send_nav_task", "move_pos_task",
        ]
        assert steps[1]["_transfer_meta"]["phase"] == "pick"
        assert steps[3]["_transfer_meta"]["phase"] == "place"

    def test_exact_capacity(self):
        """Item count == AGV capacity: exactly one batch."""
        from_repo, to_repo = _make_repos(2)
        resources, src_pos, dst_pos = _make_items(2)
        steps = generate_batch_transfer_protocol(
            _make_graph(capacity_x=2), from_repo, to_repo, resources, src_pos, dst_pos
        )

        # nav + 2 pick + nav + 2 place = 6 steps.
        assert len(steps) == 6
        assert len(self._steps_in_phase(steps, "pick")) == 2
        assert len(self._steps_in_phase(steps, "place")) == 2

    def test_multi_batch(self):
        """Item count > AGV capacity triggers automatic batching."""
        from_repo, to_repo = _make_repos(3)
        resources, src_pos, dst_pos = _make_items(3)
        steps = generate_batch_transfer_protocol(
            _make_graph(capacity_x=2), from_repo, to_repo, resources, src_pos, dst_pos
        )

        # Batch 1: nav + 2 pick + nav + 2 place + nav(return) = 7.
        # Batch 2: nav + 1 pick + nav + 1 place = 4.
        # Total: 11 steps.
        assert len(steps) == 11

        # Batch 1: 2 nav (source + target) + 1 nav (return); batch 2: 2 nav.
        nav_count = sum(1 for s in steps if s["action_name"] == "send_nav_task")
        assert nav_count == 5

    def test_children_dict_updated(self):
        """Both children dicts hold the correct state after compiling."""
        graph = _make_graph(capacity_x=2)
        from_repo, to_repo = _make_repos(2)
        resources, src_pos, dst_pos = _make_items(2)

        assert "A01" in from_repo["StationA"]["children"]
        assert "A02" in from_repo["StationA"]["children"]
        assert len(to_repo["StationB"]["children"]) == 0

        generate_batch_transfer_protocol(graph, from_repo, to_repo, resources, src_pos, dst_pos)

        # Compiling pops the source children...
        assert "A01" not in from_repo["StationA"]["children"]
        assert "A02" not in from_repo["StationA"]["children"]
        # ...and the target now holds the moved items.
        assert "A01" in to_repo["StationB"]["children"]
        assert "A02" in to_repo["StationB"]["children"]
        assert to_repo["StationB"]["children"]["A01"]["id"] == "resource_1"

    def test_device_ids_from_config(self):
        """All device IDs come from configuration, none are hard-coded."""
        from_repo, to_repo = _make_repos(1)
        resources, src_pos, dst_pos = _make_items(1)
        steps = generate_batch_transfer_protocol(_make_graph(), from_repo, to_repo, resources, src_pos, dst_pos)

        used_devices = {s["device_id"] for s in steps}
        assert "zhixing_agv" in used_devices
        assert "zhixing_ur_arm" in used_devices

    def test_route_not_found(self):
        """A route missing from the route table raises an error."""
        from_repo = {"Unknown": {"id": "Unknown", "children": {"A01": {"id": "R1", "parent": "Unknown"}}}}
        to_repo = {"Other": {"id": "Other", "children": {}}}
        with pytest.raises(KeyError, match="路由表"):
            generate_batch_transfer_protocol(
                _make_graph(), from_repo, to_repo, [{"id": "R1", "name": "R1"}], ["A01"], ["B01"]
            )

    def test_length_mismatch(self):
        """Mismatched lengths of the three arrays raise an error."""
        from_repo, to_repo = _make_repos(2)
        with pytest.raises(ValueError, match="长度不一致"):
            generate_batch_transfer_protocol(
                _make_graph(), from_repo, to_repo, [{"id": "R1"}], ["A01", "A02"], ["B01"]
            )
|
||||
|
||||
|
||||
# ============ 改造后的 AGV 单物料编译器测试 ============
|
||||
|
||||
class TestAGVTransferProtocol:
    """Tests for the refactored single-item AGV compiler."""

    @staticmethod
    def _repos():
        """Fresh source/target repos with one item at StationA/A01."""
        source = {"StationA": {"id": "StationA", "children": {"A01": {"id": "R1", "parent": "StationA"}}}}
        target = {"StationB": {"id": "StationB", "children": {}}}
        return source, target

    def test_single_transfer_from_config(self):
        """The refactored single-item compiler reads its config from G."""
        source, target = self._repos()
        steps = generate_agv_transfer_protocol(_make_graph(), source, "A01", target, "B01")

        assert len(steps) == 2
        nav_step, arm_step = steps
        assert nav_step["device_id"] == "zhixing_agv"
        assert nav_step["action_name"] == "send_nav_task"
        assert arm_step["device_id"] == "zhixing_ur_arm"
        assert arm_step["action_name"] == "move_pos_task"

    def test_children_updated(self):
        """The children dicts are updated correctly after compiling."""
        source, target = self._repos()
        generate_agv_transfer_protocol(_make_graph(), source, "A01", target, "B01")

        assert "A01" not in source["StationA"]["children"]
        assert "B01" in target["StationB"]["children"]
        assert target["StationB"]["children"]["B01"]["parent"] == "StationB"
|
||||
706
tests/compile/test_full_chain_conversion_to_compile.py
Normal file
@@ -0,0 +1,706 @@
|
||||
"""
|
||||
全链路集成测试:ROS Goal 转换 → ResourceTreeSet → get_plr_nested_dict → 编译器 → 动作列表
|
||||
|
||||
模拟 workstation.py 中的完整路径:
|
||||
1. host 返回 raw_data(模拟 resource_get 响应)
|
||||
2. ResourceTreeSet.from_raw_dict_list(raw_data) 构建资源树
|
||||
3. tree.root_node.get_plr_nested_dict() 生成嵌套 dict
|
||||
4. protocol_kwargs 传给编译器
|
||||
5. 编译器返回 action_list,验证结构和关键字段
|
||||
"""
|
||||
|
||||
import copy
|
||||
import json
|
||||
import pytest
|
||||
import networkx as nx
|
||||
|
||||
from unilabos.resources.resource_tracker import (
|
||||
ResourceDictInstance,
|
||||
ResourceTreeSet,
|
||||
)
|
||||
from unilabos.compile.utils.resource_helper import (
|
||||
ensure_resource_instance,
|
||||
resource_to_dict,
|
||||
get_resource_id,
|
||||
get_resource_data,
|
||||
)
|
||||
from unilabos.compile.utils.vessel_parser import get_vessel
|
||||
|
||||
# ============ 构建模拟设备图 ============
|
||||
|
||||
def _build_test_graph():
    """Build a test graph containing the commonly used device nodes."""
    graph = nx.DiGraph()

    def add_node(node_id, node_type, klass, data=None, config=None):
        """Register one node with the standard attribute shape."""
        graph.add_node(node_id, **{
            "id": node_id,
            "name": node_id,
            "type": node_type,
            "class": klass,
            "data": {} if data is None else data,
            "config": {} if config is None else config,
        })

    # Vessel.
    add_node("reactor_01", "device", "virtual_stirrer")

    # Stirring device.
    add_node("stirrer_1", "device", "virtual_stirrer")
    graph.add_edge("stirrer_1", "reactor_01")

    # Heating device.
    add_node("heatchill_1", "device", "virtual_heatchill")
    graph.add_edge("heatchill_1", "reactor_01")

    # Reagent container (liquid).
    add_node(
        "flask_water",
        "container",
        "",
        data={"reagent_name": "water", "liquid": [{"liquid_type": "water", "volume": 500.0}]},
        config={"reagent": "water"},
    )

    # Solid dispenser.
    add_node("solid_dispenser_1", "device", "solid_dispenser")

    # Pump wired between the flask and the reactor.
    add_node("pump_1", "device", "virtual_pump")
    graph.add_edge("flask_water", "pump_1")
    graph.add_edge("pump_1", "reactor_01")

    return graph
|
||||
|
||||
|
||||
# ============ 构建模拟 host 返回数据 ============
|
||||
|
||||
def _make_raw_resource(
|
||||
id="reactor_01",
|
||||
uuid="uuid-reactor-01",
|
||||
name="reactor_01",
|
||||
klass="virtual_stirrer",
|
||||
type_="device",
|
||||
parent=None,
|
||||
parent_uuid=None,
|
||||
data=None,
|
||||
config=None,
|
||||
extra=None,
|
||||
):
|
||||
"""模拟 host 返回的单个资源 dict(与 resource_get 服务响应一致)"""
|
||||
return {
|
||||
"id": id,
|
||||
"uuid": uuid,
|
||||
"name": name,
|
||||
"class": klass,
|
||||
"type": type_,
|
||||
"parent": parent,
|
||||
"parent_uuid": parent_uuid or "",
|
||||
"description": "",
|
||||
"config": config or {},
|
||||
"data": data or {},
|
||||
"extra": extra or {},
|
||||
"position": {"x": 0.0, "y": 0.0, "z": 0.0},
|
||||
}
|
||||
|
||||
|
||||
def _simulate_workstation_resource_enrichment(raw_data_list, field_type="unilabos_msgs/Resource"):
    """
    Reproduce the core resource-enrichment logic of workstation.py:
    raw_data -> ResourceTreeSet.from_raw_dict_list -> get_plr_nested_dict -> protocol_kwargs[k]
    """
    trees = ResourceTreeSet.from_raw_dict_list(raw_data_list).trees

    if field_type != "unilabos_msgs/Resource":
        # sequence<Resource>: one nested dict per tree.
        return [tree.root_node.get_plr_nested_dict() for tree in trees]

    # Single Resource: root node of the first tree, or {} when no trees exist.
    if not trees:
        return {}
    return trees[0].root_node.get_plr_nested_dict()
|
||||
|
||||
|
||||
# ============ 全链路测试:Stir 协议 ============
|
||||
|
||||
class TestStirProtocolFullChain:
    """Stir protocol full chain: host raw_data -> enriched dict -> compiler -> action_list."""

    def test_stir_with_enriched_resource_dict(self):
        """A single Resource passes enrichment and then the stir compiler."""
        from unilabos.compile.stir_protocol import generate_stir_protocol

        vessel = _simulate_workstation_resource_enrichment([
            _make_raw_resource(
                id="reactor_01", uuid="uuid-reactor-01",
                klass="virtual_stirrer", type_="device",
            )
        ])

        assert vessel["id"] == "reactor_01"
        assert vessel["uuid"] == "uuid-reactor-01"
        assert vessel["class"] == "virtual_stirrer"

        # Feed the enriched dict into the compiler.
        actions = generate_stir_protocol(
            G=_build_test_graph(),
            vessel=vessel,
            time="60",
            stir_speed=300.0,
        )

        assert isinstance(actions, list)
        assert len(actions) >= 1
        first = actions[0]
        assert first["device_id"] == "stirrer_1"
        assert first["action_name"] == "stir"
        assert "vessel" in first["action_kwargs"]
        assert first["action_kwargs"]["vessel"]["id"] == "reactor_01"

    def test_stir_with_resource_dict_instance(self):
        """A ResourceDictInstance reaches the stir compiler via get_plr_nested_dict."""
        from unilabos.compile.stir_protocol import generate_stir_protocol

        tree_set = ResourceTreeSet.from_raw_dict_list([_make_raw_resource(id="reactor_01")])
        root = tree_set.trees[0].root_node

        # Convert via resource_to_dict (resource_helper compatibility layer).
        vessel_dict = resource_to_dict(root)
        assert isinstance(vessel_dict, dict)
        assert vessel_dict["id"] == "reactor_01"

        actions = generate_stir_protocol(G=_build_test_graph(), vessel=vessel_dict, time="30")

        assert len(actions) >= 1
        assert actions[0]["action_name"] == "stir"

    def test_stir_with_string_vessel(self):
        """Legacy mode compatibility: vessel passed as a plain string."""
        from unilabos.compile.stir_protocol import generate_stir_protocol

        actions = generate_stir_protocol(G=_build_test_graph(), vessel="reactor_01", time="30")

        assert len(actions) >= 1
        assert actions[0]["device_id"] == "stirrer_1"
        assert actions[0]["action_kwargs"]["vessel"]["id"] == "reactor_01"
|
||||
|
||||
|
||||
# ============ 全链路测试:HeatChill 协议 ============
|
||||
|
||||
class TestHeatChillProtocolFullChain:
    """HeatChill protocol full chain."""

    def test_heatchill_with_enriched_resource(self):
        from unilabos.compile.heatchill_protocol import generate_heat_chill_protocol

        vessel = _simulate_workstation_resource_enrichment(
            [_make_raw_resource(id="reactor_01", klass="virtual_stirrer")]
        )
        actions = generate_heat_chill_protocol(
            G=_build_test_graph(),
            vessel=vessel,
            temp=80.0,
            time="300",
        )

        assert isinstance(actions, list)
        assert len(actions) >= 1
        head = actions[0]
        assert head["device_id"] == "heatchill_1"
        assert head["action_name"] == "heat_chill"
        assert head["action_kwargs"]["temp"] == 80.0

    def test_heatchill_start_with_enriched_resource(self):
        from unilabos.compile.heatchill_protocol import generate_heat_chill_start_protocol

        vessel = _simulate_workstation_resource_enrichment([_make_raw_resource(id="reactor_01")])
        actions = generate_heat_chill_start_protocol(
            G=_build_test_graph(),
            vessel=vessel,
            temp=60.0,
        )

        assert len(actions) >= 1
        assert actions[0]["action_name"] == "heat_chill_start"
        assert actions[0]["action_kwargs"]["temp"] == 60.0

    def test_heatchill_stop_with_enriched_resource(self):
        from unilabos.compile.heatchill_protocol import generate_heat_chill_stop_protocol

        vessel = _simulate_workstation_resource_enrichment([_make_raw_resource(id="reactor_01")])
        actions = generate_heat_chill_stop_protocol(G=_build_test_graph(), vessel=vessel)

        assert len(actions) >= 1
        assert actions[0]["action_name"] == "heat_chill_stop"
|
||||
|
||||
|
||||
# ============ 全链路测试:Add 协议 ============
|
||||
|
||||
class TestAddProtocolFullChain:
    """Add protocol full chain: vessel enrichment + reagent lookup + pump transfer."""

    def test_add_solid_with_enriched_resource(self):
        from unilabos.compile.add_protocol import generate_add_protocol

        vessel = _simulate_workstation_resource_enrichment([_make_raw_resource(id="reactor_01")])
        actions = generate_add_protocol(
            G=_build_test_graph(),
            vessel=vessel,
            reagent="NaCl",
            mass="5 g",
        )

        assert isinstance(actions, list)
        assert len(actions) >= 1
        # At least one add_solid or log_message action must be present.
        names = {a.get("action_name", "") for a in actions}
        assert names & {"add_solid", "log_message"}

    def test_add_liquid_with_enriched_resource(self):
        from unilabos.compile.add_protocol import generate_add_protocol

        vessel = _simulate_workstation_resource_enrichment([_make_raw_resource(id="reactor_01")])
        actions = generate_add_protocol(
            G=_build_test_graph(),
            vessel=vessel,
            reagent="water",
            volume="10 mL",
        )

        assert isinstance(actions, list)
        assert len(actions) >= 1
|
||||
|
||||
|
||||
# ============ 全链路测试:ResourceDictInstance 兼容层 ============
|
||||
|
||||
class TestResourceDictInstanceCompatibility:
    """Verify the compiler compatibility layer's handling of ResourceDictInstance."""

    def test_get_vessel_from_enriched_dict(self):
        """get_vessel handles an enriched dict."""
        enriched = _simulate_workstation_resource_enrichment([
            _make_raw_resource(
                id="reactor_01",
                data={"temperature": 25.0, "liquid": [{"liquid_type": "water", "volume": 10.0}]},
            )
        ])

        vessel_id, payload = get_vessel(enriched)
        assert vessel_id == "reactor_01"
        assert payload["temperature"] == 25.0
        assert len(payload["liquid"]) == 1

    def test_get_vessel_from_resource_instance(self):
        """get_vessel handles a ResourceDictInstance directly."""
        tree_set = ResourceTreeSet.from_raw_dict_list([
            _make_raw_resource(id="reactor_01", data={"temperature": 25.0})
        ])

        vessel_id, payload = get_vessel(tree_set.trees[0].root_node)
        assert vessel_id == "reactor_01"
        assert payload["temperature"] == 25.0

    def test_ensure_resource_instance_round_trip(self):
        """ensure_resource_instance -> resource_to_dict is a lossless round trip."""
        enriched = _simulate_workstation_resource_enrichment([
            _make_raw_resource(
                id="reactor_01", uuid="uuid-r01", klass="virtual_stirrer",
                data={"temp": 25.0},
            )
        ])

        # dict -> ResourceDictInstance
        instance = ensure_resource_instance(enriched)
        assert isinstance(instance, ResourceDictInstance)
        assert instance.res_content.id == "reactor_01"
        assert instance.res_content.uuid == "uuid-r01"

        # ResourceDictInstance -> dict
        round_tripped = resource_to_dict(instance)
        assert isinstance(round_tripped, dict)
        assert round_tripped["id"] == "reactor_01"
        assert round_tripped["uuid"] == "uuid-r01"
        assert round_tripped["class"] == "virtual_stirrer"
|
||||
|
||||
|
||||
# ============ 全链路测试:带 children 的资源树 ============
|
||||
|
||||
class TestResourceTreeWithChildren:
    """Resource trees with a children structure passing through the compiler path."""

    def _make_tree_with_children(self):
        """Build a StationA -> [Flask1, Flask2] resource tree."""
        station = _make_raw_resource(
            id="StationA", uuid="uuid-station-a",
            name="StationA", klass="workstation", type_="device",
        )
        flasks = [
            _make_raw_resource(
                id=f"Flask{n}", uuid=f"uuid-flask-{n}",
                name=f"Flask{n}", klass="", type_="resource",
                parent="StationA", parent_uuid="uuid-station-a",
                data={"liquid": [liquid]},
            )
            for n, liquid in (
                (1, {"liquid_type": "water", "volume": 10.0}),
                (2, {"liquid_type": "ethanol", "volume": 5.0}),
            )
        ]
        return [station] + flasks

    def test_enrichment_preserves_children_structure(self):
        """After enrichment, children is a nested dict."""
        enriched = _simulate_workstation_resource_enrichment(self._make_tree_with_children())

        assert enriched["id"] == "StationA"
        assert "children" in enriched
        assert isinstance(enriched["children"], dict)
        assert "Flask1" in enriched["children"]
        assert "Flask2" in enriched["children"]

    def test_children_preserve_uuid_and_data(self):
        """uuid and data inside children survive enrichment intact."""
        enriched = _simulate_workstation_resource_enrichment(self._make_tree_with_children())

        first = enriched["children"]["Flask1"]
        assert first["uuid"] == "uuid-flask-1"
        first_liquid = first["data"]["liquid"][0]
        assert first_liquid["liquid_type"] == "water"
        assert first_liquid["volume"] == 10.0

        second = enriched["children"]["Flask2"]
        assert second["uuid"] == "uuid-flask-2"
        assert second["data"]["liquid"][0]["liquid_type"] == "ethanol"

    def test_children_dict_can_be_popped(self):
        """Simulate batch_transfer_protocol popping from children."""
        enriched = _simulate_workstation_resource_enrichment(self._make_tree_with_children())

        # batch_transfer_protocol pops children during compilation.
        removed = enriched["children"].pop("Flask1")
        assert removed["id"] == "Flask1"
        assert "Flask1" not in enriched["children"]
        assert "Flask2" in enriched["children"]

    def test_children_dict_usable_as_from_repo(self):
        """Simulate the from_repo parameter of batch_transfer_protocol."""
        enriched = _simulate_workstation_resource_enrichment(self._make_tree_with_children())

        # The shape the compiler receives as from_repo.
        from_repo = {"StationA": enriched}
        station_entry = next(iter(from_repo.values()))

        assert station_entry["id"] == "StationA"
        assert "Flask1" in station_entry["children"]
        assert station_entry["children"]["Flask1"]["uuid"] == "uuid-flask-1"

    def test_sequence_resource_enrichment(self):
        """sequence<Resource>: multiple independent resource trees."""
        tree_sets = [
            ResourceTreeSet.from_raw_dict_list([_make_raw_resource(id="R1", uuid="uuid-r1")]),
            ResourceTreeSet.from_raw_dict_list([_make_raw_resource(id="R2", uuid="uuid-r2")]),
        ]
        nested = [
            tree.root_node.get_plr_nested_dict()
            for tree_set in tree_sets
            for tree in tree_set.trees
        ]

        assert len(nested) == 2
        assert [item["id"] for item in nested] == ["R1", "R2"]
|
||||
|
||||
|
||||
# ============ 全链路测试:动作列表结构验证 ============
|
||||
|
||||
class TestActionListStructure:
    """Verify that the action_list returned by compilers matches workstation expectations."""

    def _validate_action(self, action):
        """Validate the structure of a single action dict."""
        if action.get("action_name") == "wait":
            # The "wait" pseudo-action needs no device_id, only a time kwarg.
            assert "action_kwargs" in action
            assert "time" in action["action_kwargs"]
            return

        if action.get("action_name") == "log_message":
            # The "log_message" pseudo-action only needs kwargs.
            assert "action_kwargs" in action
            return

        # Regular device action: must carry device_id / action_name / action_kwargs.
        assert "device_id" in action, f"action 缺少 device_id: {action}"
        assert "action_name" in action, f"action 缺少 action_name: {action}"
        assert "action_kwargs" in action, f"action 缺少 action_kwargs: {action}"
        assert isinstance(action["action_kwargs"], dict)

    def test_stir_action_list_structure(self):
        # Compile a stir protocol and structurally validate every emitted action.
        from unilabos.compile.stir_protocol import generate_stir_protocol

        raw_data = [_make_raw_resource(id="reactor_01")]
        enriched = _simulate_workstation_resource_enrichment(raw_data)

        G = _build_test_graph()
        actions = generate_stir_protocol(G=G, vessel=enriched, time="60")

        for action in actions:
            if isinstance(action, list):
                # Parallel action group: validate each sub-action.
                for sub_action in action:
                    self._validate_action(sub_action)
            else:
                self._validate_action(action)

    def test_heatchill_action_list_structure(self):
        # Same structural validation for the heat/chill compiler output.
        from unilabos.compile.heatchill_protocol import generate_heat_chill_protocol

        raw_data = [_make_raw_resource(id="reactor_01")]
        enriched = _simulate_workstation_resource_enrichment(raw_data)

        G = _build_test_graph()
        actions = generate_heat_chill_protocol(G=G, vessel=enriched, temp=80.0, time="60")

        for action in actions:
            if isinstance(action, list):
                for sub_action in action:
                    self._validate_action(sub_action)
            else:
                self._validate_action(action)

    def test_add_action_list_structure(self):
        # Same structural validation for the add-reagent compiler output.
        from unilabos.compile.add_protocol import generate_add_protocol

        raw_data = [_make_raw_resource(id="reactor_01")]
        enriched = _simulate_workstation_resource_enrichment(raw_data)

        G = _build_test_graph()
        actions = generate_add_protocol(G=G, vessel=enriched, reagent="NaCl", mass="5 g")

        for action in actions:
            if isinstance(action, list):
                for sub_action in action:
                    self._validate_action(sub_action)
            else:
                self._validate_action(action)
|
||||
|
||||
|
||||
# ============ 全链路测试:message_converter 到 enrichment ============
|
||||
|
||||
class TestMessageConverterToEnrichment:
    """Simulate the full chain from a ROS-message-converted dict to enrichment."""

    def test_ros_goal_conversion_simulation(self):
        """
        Simulate the complete flow in workstation.py:
        1. The vessel field in the ROS goal is converted by convert_from_ros_msg
           into a shallow dict
        2. The workstation requests the full resource data from the host by resource_id
        3. ResourceTreeSet.from_raw_dict_list builds the resource tree
        4. get_plr_nested_dict produces the nested dict that replaces protocol_kwargs[k]
        """
        # Step 1: simulated convert_from_ros_msg output (shallow dict, basic fields only)
        shallow_vessel = {
            "id": "reactor_01",
            "uuid": "uuid-reactor-01",
            "name": "reactor_01",
            "type": "device",
            "category": "virtual_stirrer",
            "children": [],
            "parent": "",
            "parent_uuid": "",
            "config": {},
            "data": {},
            "extra": {},
            "position": {"x": 0.0, "y": 0.0, "z": 0.0},
        }

        protocol_kwargs = {
            "vessel": shallow_vessel,
            "time": "300",
            "stir_speed": 300.0,
        }

        # Step 2: extract resource_id
        resource_id = protocol_kwargs["vessel"]["id"]
        assert resource_id == "reactor_01"

        # Step 3: simulated host response with complete data (incl. data/config)
        host_response = [
            _make_raw_resource(
                id="reactor_01", uuid="uuid-reactor-01",
                klass="virtual_stirrer", type_="device",
                data={"temperature": 25.0, "pressure": 1.0},
                config={"max_temp": 300.0},
            ),
        ]

        # Step 4: enrichment replaces the shallow vessel in protocol_kwargs
        enriched = _simulate_workstation_resource_enrichment(host_response)
        protocol_kwargs["vessel"] = enriched

        # Validate protocol_kwargs after enrichment
        assert protocol_kwargs["vessel"]["id"] == "reactor_01"
        assert protocol_kwargs["vessel"]["uuid"] == "uuid-reactor-01"
        assert protocol_kwargs["vessel"]["class"] == "virtual_stirrer"
        assert protocol_kwargs["vessel"]["data"]["temperature"] == 25.0
        assert protocol_kwargs["vessel"]["config"]["max_temp"] == 300.0

        # Step 5: feed the enriched kwargs to the compiler
        from unilabos.compile.stir_protocol import generate_stir_protocol
        G = _build_test_graph()
        actions = generate_stir_protocol(G=G, **protocol_kwargs)

        assert len(actions) >= 1
        assert actions[0]["device_id"] == "stirrer_1"
        assert actions[0]["action_name"] == "stir"

    def test_ros_goal_with_children_enrichment(self):
        """ROS goal → enrichment scenario with children (batch transfer)."""
        # Simulated host response containing children of StationA
        host_response = [
            _make_raw_resource(
                id="StationA", uuid="uuid-sa", klass="workstation", type_="device",
                config={"num_items_x": 4, "num_items_y": 2},
            ),
            _make_raw_resource(
                id="Plate1", uuid="uuid-p1", type_="resource",
                parent="StationA", parent_uuid="uuid-sa",
                data={"sample": "sample_A"},
            ),
            _make_raw_resource(
                id="Plate2", uuid="uuid-p2", type_="resource",
                parent="StationA", parent_uuid="uuid-sa",
                data={"sample": "sample_B"},
            ),
        ]

        enriched = _simulate_workstation_resource_enrichment(host_response)

        assert enriched["id"] == "StationA"
        assert enriched["class"] == "workstation"
        assert len(enriched["children"]) == 2
        assert enriched["children"]["Plate1"]["data"]["sample"] == "sample_A"
        assert enriched["children"]["Plate2"]["uuid"] == "uuid-p2"

        # Simulate batch_transfer's from_repo format
        from_repo = {"StationA": enriched}
        from_repo_ = list(from_repo.values())[0]
        assert "Plate1" in from_repo_["children"]
        assert from_repo_["children"]["Plate1"]["uuid"] == "uuid-p1"
|
||||
|
||||
|
||||
# ============ 全链路测试:多协议连续调用 ============
|
||||
|
||||
class TestMultiProtocolChain:
    """Simulate sequential execution of multiple protocols (add → stir → heatchill)."""

    def test_sequential_protocol_execution(self):
        """Typical synthesis path: add → stir → heatchill."""
        from unilabos.compile.stir_protocol import generate_stir_protocol
        from unilabos.compile.heatchill_protocol import generate_heat_chill_protocol
        from unilabos.compile.add_protocol import generate_add_protocol

        raw_data = [_make_raw_resource(
            id="reactor_01", uuid="uuid-reactor-01",
            klass="virtual_stirrer", type_="device",
        )]
        enriched = _simulate_workstation_resource_enrichment(raw_data)
        G = _build_test_graph()

        # Pass a deep copy of `enriched` to each call so compilers cannot
        # mutate the shared original data.
        all_actions = []

        # Step 1: add reagent
        add_actions = generate_add_protocol(
            G=G, vessel=copy.deepcopy(enriched),
            reagent="NaCl", mass="5 g",
        )
        all_actions.extend(add_actions)

        # Step 2: stir
        stir_actions = generate_stir_protocol(
            G=G, vessel=copy.deepcopy(enriched),
            time="60", stir_speed=300.0,
        )
        all_actions.extend(stir_actions)

        # Step 3: heat
        heat_actions = generate_heat_chill_protocol(
            G=G, vessel=copy.deepcopy(enriched),
            temp=80.0, time="300",
        )
        all_actions.extend(heat_actions)

        # Validate the combined action list
        assert len(all_actions) >= 3
        # Each protocol contributes at least one core action
        action_names = [a.get("action_name", "") for a in all_actions if isinstance(a, dict)]
        assert "stir" in action_names
        assert "heat_chill" in action_names

    def test_enriched_resource_not_mutated(self):
        """Compilers must not mutate the enriched dict passed in (they should deepcopy)."""
        from unilabos.compile.stir_protocol import generate_stir_protocol

        raw_data = [_make_raw_resource(id="reactor_01")]
        enriched = _simulate_workstation_resource_enrichment(raw_data)
        original_id = enriched["id"]
        original_uuid = enriched["uuid"]

        G = _build_test_graph()
        generate_stir_protocol(G=G, vessel=enriched, time="60")

        # Core fields of the enriched dict must be unchanged after compilation
        assert enriched["id"] == original_id
        assert enriched["uuid"] == original_uuid
|
||||
538
tests/compile/test_pump_separate_full_chain.py
Normal file
@@ -0,0 +1,538 @@
|
||||
"""
|
||||
PumpTransfer 和 Separate 全链路测试
|
||||
|
||||
构建包含泵/阀门/分液漏斗的完整设备图,
|
||||
输出完整的中间数据(最短路径、泵骨架、动作列表等)。
|
||||
"""
|
||||
|
||||
import copy
|
||||
import json
|
||||
import pprint
|
||||
import pytest
|
||||
import networkx as nx
|
||||
|
||||
from unilabos.resources.resource_tracker import ResourceTreeSet
|
||||
from unilabos.compile.utils.resource_helper import get_resource_id, get_resource_data
|
||||
from unilabos.compile.utils.vessel_parser import get_vessel
|
||||
|
||||
|
||||
def _make_raw_resource(id, uuid=None, name=None, klass="", type_="device",
|
||||
parent=None, parent_uuid=None, data=None, config=None, extra=None):
|
||||
return {
|
||||
"id": id,
|
||||
"uuid": uuid or f"uuid-{id}",
|
||||
"name": name or id,
|
||||
"class": klass,
|
||||
"type": type_,
|
||||
"parent": parent,
|
||||
"parent_uuid": parent_uuid or "",
|
||||
"description": "",
|
||||
"config": config or {},
|
||||
"data": data or {},
|
||||
"extra": extra or {},
|
||||
"position": {"x": 0.0, "y": 0.0, "z": 0.0},
|
||||
}
|
||||
|
||||
|
||||
def _simulate_enrichment(raw_data_list):
    """Build a ResourceTreeSet from raw dicts and return the first root's nested dict.

    Returns an empty dict when the raw data produces no trees.
    """
    tree_set = ResourceTreeSet.from_raw_dict_list(raw_data_list)
    if not tree_set.trees:
        return {}
    return tree_set.trees[0].root_node.get_plr_nested_dict()
|
||||
|
||||
|
||||
def _build_pump_transfer_graph():
    """
    Build a device graph with pumps/valves for PumpTransfer tests:

    flask_water (container)
        ↓
    valve_1 (multiway_valve, connected to pump_1)
        ↓
    reactor_01 (device)

    Also present: stirrer_1, heatchill_1, separator_1
    """
    G = nx.DiGraph()

    # Source container (pre-filled with 200 mL of water)
    G.add_node("flask_water", **{
        "id": "flask_water", "name": "flask_water",
        "type": "container", "class": "",
        "data": {"reagent_name": "water", "liquid": [{"liquid_type": "water", "volume": 200.0}]},
        "config": {"reagent": "water"},
    })

    # Multiway valve
    G.add_node("valve_1", **{
        "id": "valve_1", "name": "valve_1",
        "type": "device", "class": "multiway_valve",
        "data": {}, "config": {},
    })

    # Syringe pump (attached to the valve); max_volume drives batch splitting
    G.add_node("pump_1", **{
        "id": "pump_1", "name": "pump_1",
        "type": "device", "class": "virtual_pump",
        "data": {}, "config": {"max_volume": 25.0},
    })

    # Target container
    G.add_node("reactor_01", **{
        "id": "reactor_01", "name": "reactor_01",
        "type": "device", "class": "virtual_stirrer",
        "data": {"liquid": [{"liquid_type": "water", "volume": 50.0}]},
        "config": {},
    })

    # Stirrer
    G.add_node("stirrer_1", **{
        "id": "stirrer_1", "name": "stirrer_1",
        "type": "device", "class": "virtual_stirrer",
        "data": {}, "config": {},
    })

    # Heater/chiller
    G.add_node("heatchill_1", **{
        "id": "heatchill_1", "name": "heatchill_1",
        "type": "device", "class": "virtual_heatchill",
        "data": {}, "config": {},
    })

    # Separator
    G.add_node("separator_1", **{
        "id": "separator_1", "name": "separator_1",
        "type": "device", "class": "separator_controller",
        "data": {}, "config": {},
    })

    # Waste container
    G.add_node("waste_workup", **{
        "id": "waste_workup", "name": "waste_workup",
        "type": "container", "class": "",
        "data": {}, "config": {},
    })

    # Product collection flask
    G.add_node("product_flask", **{
        "id": "product_flask", "name": "product_flask",
        "type": "container", "class": "",
        "data": {}, "config": {},
    })

    # DCM solvent bottle
    G.add_node("flask_dcm", **{
        "id": "flask_dcm", "name": "flask_dcm",
        "type": "container", "class": "",
        "data": {"reagent_name": "dcm", "liquid": [{"liquid_type": "dcm", "volume": 500.0}]},
        "config": {"reagent": "dcm"},
    })

    # Edges —— flask_water → valve_1 → reactor_01
    G.add_edge("flask_water", "valve_1", port={"valve_1": "port_1"})
    G.add_edge("valve_1", "reactor_01", port={"valve_1": "port_2"})
    # Valve ↔ pump
    G.add_edge("valve_1", "pump_1")
    G.add_edge("pump_1", "valve_1")
    # Stirrer → reactor
    G.add_edge("stirrer_1", "reactor_01")
    # Heater → reactor
    G.add_edge("heatchill_1", "reactor_01")
    # Separator ↔ reactor
    G.add_edge("separator_1", "reactor_01")
    G.add_edge("reactor_01", "separator_1")
    # DCM → valve → reactor (shares the same pump line)
    G.add_edge("flask_dcm", "valve_1", port={"valve_1": "port_3"})
    # reactor → valve → product/waste
    G.add_edge("valve_1", "product_flask", port={"valve_1": "port_4"})
    G.add_edge("valve_1", "waste_workup", port={"valve_1": "port_5"})

    return G
|
||||
|
||||
|
||||
def _format_action(action, indent=0):
|
||||
"""格式化单个 action 为可读字符串"""
|
||||
prefix = " " * indent
|
||||
if isinstance(action, list):
|
||||
# 并行动作
|
||||
lines = [f"{prefix}[PARALLEL]"]
|
||||
for sub in action:
|
||||
lines.append(_format_action(sub, indent + 1))
|
||||
return "\n".join(lines)
|
||||
|
||||
name = action.get("action_name", "?")
|
||||
device = action.get("device_id", "")
|
||||
kwargs = action.get("action_kwargs", {})
|
||||
comment = action.get("_comment", "")
|
||||
meta = action.get("_transfer_meta", "")
|
||||
|
||||
parts = [f"{prefix}→ {device}::{name}"]
|
||||
if kwargs:
|
||||
# 精简输出
|
||||
kw_str = ", ".join(f"{k}={v}" for k, v in kwargs.items()
|
||||
if k not in ("progress_message",))
|
||||
if kw_str:
|
||||
parts.append(f" kwargs: {{{kw_str}}}")
|
||||
if comment:
|
||||
parts.append(f" # {comment}")
|
||||
if meta:
|
||||
parts.append(f" meta: {meta}")
|
||||
return "\n".join(f"{prefix}{p}" if i > 0 else p for i, p in enumerate(parts))
|
||||
|
||||
|
||||
def _dump_actions(actions, title=""):
    """Print the full action list inside a framed header (debug helper)."""
    rule = "=" * 70
    print(f"\n{rule}")
    print(f" {title}")
    print(f" 总动作数: {len(actions)}")
    print(f"{rule}")
    for index, action in enumerate(actions):
        print(f"\n [{index:02d}] {_format_action(action, indent=2)}")
    print(f"\n{rule}\n")
|
||||
|
||||
|
||||
# ==================== PumpTransfer 全链路 ====================
|
||||
|
||||
class TestPumpTransferFullChain:
    """PumpTransfer: graph path finding, pump backbone construction, action generation."""

    def test_pump_transfer_basic(self):
        """Basic pump transfer: flask_water → valve_1 → reactor_01."""
        from unilabos.compile.pump_protocol import generate_pump_protocol

        G = _build_pump_transfer_graph()

        # Sanity-check the shortest path before compiling
        path = nx.shortest_path(G, "flask_water", "reactor_01")
        print(f"\n最短路径: {path}")
        assert "valve_1" in path

        # Invoke the compiler
        actions = generate_pump_protocol(
            G=G,
            from_vessel_id="flask_water",
            to_vessel_id="reactor_01",
            volume=10.0,
            flowrate=2.5,
            transfer_flowrate=0.5,
        )

        _dump_actions(actions, "PumpTransfer: flask_water → reactor_01, 10mL")

        # Validate: sequence must contain valve switching and plunger moves
        assert isinstance(actions, list)
        assert len(actions) > 0
        # Expect set_valve_position and set_position actions
        flat = [a for a in actions if isinstance(a, dict)]
        action_names = [a.get("action_name") for a in flat]
        print(f"动作名称列表: {action_names}")
        assert "set_valve_position" in action_names
        assert "set_position" in action_names

    def test_pump_transfer_with_rinsing_enriched_vessel(self):
        """pump_with_rinsing accepts enriched vessel dicts."""
        from unilabos.compile.pump_protocol import generate_pump_protocol_with_rinsing

        G = _build_pump_transfer_graph()

        # Simulate enrichment for both endpoints
        from_raw = [_make_raw_resource(
            id="flask_water", klass="", type_="container",
            data={"reagent_name": "water", "liquid": [{"liquid_type": "water", "volume": 200.0}]},
        )]
        to_raw = [_make_raw_resource(
            id="reactor_01", klass="virtual_stirrer", type_="device",
        )]

        from_enriched = _simulate_enrichment(from_raw)
        to_enriched = _simulate_enrichment(to_raw)

        print(f"\nfrom_vessel enriched: {json.dumps(from_enriched, indent=2, ensure_ascii=False)[:300]}...")
        print(f"to_vessel enriched: {json.dumps(to_enriched, indent=2, ensure_ascii=False)[:300]}...")

        # get_vessel must accept the enriched form
        fid, fdata = get_vessel(from_enriched)
        tid, tdata = get_vessel(to_enriched)
        print(f"from_vessel_id={fid}, to_vessel_id={tid}")
        assert fid == "flask_water"
        assert tid == "reactor_01"

        actions = generate_pump_protocol_with_rinsing(
            G=G,
            from_vessel=from_enriched,
            to_vessel=to_enriched,
            volume=15.0,
            flowrate=2.5,
            transfer_flowrate=0.5,
        )

        _dump_actions(actions, "PumpTransferWithRinsing: flask_water → reactor_01, 15mL (enriched)")

        assert isinstance(actions, list)
        assert len(actions) > 0

    def test_pump_transfer_multi_batch(self):
        """Volumes above max_volume are split into batches automatically."""
        from unilabos.compile.pump_protocol import generate_pump_protocol

        G = _build_pump_transfer_graph()

        # pump_1 has max_volume = 25mL; transferring 60mL should take 3 batches
        actions = generate_pump_protocol(
            G=G,
            from_vessel_id="flask_water",
            to_vessel_id="reactor_01",
            volume=60.0,
            flowrate=2.5,
            transfer_flowrate=0.5,
        )

        _dump_actions(actions, "PumpTransfer 分批: 60mL (max_volume=25mL, 预期 3 批)")

        assert len(actions) > 0
        # Multiple rounds of set_position are expected
        flat = [a for a in actions if isinstance(a, dict)]
        set_position_count = sum(1 for a in flat if a.get("action_name") == "set_position")
        print(f"set_position 动作数: {set_position_count}")
        # 3 batches × 2 strokes (aspirate + dispense) = 6 set_position actions
        assert set_position_count >= 6

    def test_pump_transfer_no_path(self):
        """Returns an empty list when no path exists."""
        from unilabos.compile.pump_protocol import generate_pump_protocol

        G = _build_pump_transfer_graph()
        G.add_node("isolated_flask", type="container")

        actions = generate_pump_protocol(
            G=G,
            from_vessel_id="isolated_flask",
            to_vessel_id="reactor_01",
            volume=10.0,
        )

        print(f"\n无路径时的动作列表: {actions}")
        assert actions == []

    def test_pump_backbone_filtering(self):
        """Pump backbone filtering: solenoid valves on the path are skipped."""
        from unilabos.compile.pump_protocol import generate_pump_protocol

        G = _build_pump_transfer_graph()
        # Insert a solenoid valve into the path
        G.add_node("solenoid_valve_1", **{
            "type": "device", "class": "solenoid_valve",
            "data": {}, "config": {},
        })
        # flask_water → solenoid_valve_1 → valve_1 → reactor_01
        G.remove_edge("flask_water", "valve_1")
        G.add_edge("flask_water", "solenoid_valve_1")
        G.add_edge("solenoid_valve_1", "valve_1")

        path = nx.shortest_path(G, "flask_water", "reactor_01")
        print(f"\n含电磁阀的路径: {path}")
        assert "solenoid_valve_1" in path

        actions = generate_pump_protocol(
            G=G,
            from_vessel_id="flask_water",
            to_vessel_id="reactor_01",
            volume=10.0,
        )

        _dump_actions(actions, "PumpTransfer 含电磁阀: flask_water → solenoid → valve_1 → reactor_01")
        # The solenoid valve should be skipped; the pump backbone has only valve_1
        assert len(actions) > 0
|
||||
|
||||
|
||||
# ==================== Separate 全链路 ====================
|
||||
|
||||
class TestSeparateProtocolFullChain:
    """Separate: bug-regression confirmation plus happy-path sub-function tests."""

    def test_separate_bug_line_128_fixed(self):
        """Regression: the separate_protocol.py:128 bug is fixed (no more crash)."""
        from unilabos.compile.separate_protocol import generate_separate_protocol

        G = _build_pump_transfer_graph()

        raw_data = [_make_raw_resource(
            id="reactor_01", klass="virtual_stirrer",
            data={"liquid": [{"liquid_type": "water", "volume": 100.0}]},
        )]
        enriched = _simulate_enrichment(raw_data)

        # Before the fix: `final_vessel_id, _ = vessel_id` crashed (string unpacking).
        # After the fix: `final_vessel_id = vessel_id` returns an action list normally.
        result = generate_separate_protocol(
            G=G,
            vessel=enriched,
            purpose="extract",
            product_phase="top",
            product_vessel="product_flask",
            waste_vessel="waste_workup",
            solvent="dcm",
            volume="100 mL",
        )
        assert isinstance(result, list)
        assert len(result) > 0

    def test_separate_manual_workaround(self):
        """
        Bypass the line-128 bug and manually exercise the separate-compiler
        sub-functions that do work in isolation.
        """
        from unilabos.compile.separate_protocol import (
            find_separator_device,
            find_separation_vessel_bottom,
        )
        from unilabos.compile.utils.vessel_parser import (
            find_connected_stirrer,
            find_solvent_vessel,
        )
        from unilabos.compile.utils.unit_parser import parse_volume_input
        from unilabos.compile.utils.resource_helper import get_resource_liquid_volume as get_vessel_liquid_volume

        G = _build_pump_transfer_graph()

        # 1. get_vessel parses the enriched dict
        raw_data = [_make_raw_resource(
            id="reactor_01", klass="virtual_stirrer",
            data={"liquid": [{"liquid_type": "water", "volume": 100.0}]},
        )]
        enriched = _simulate_enrichment(raw_data)
        vessel_id, vessel_data = get_vessel(enriched)
        print(f"\nvessel_id: {vessel_id}")
        print(f"vessel_data: {vessel_data}")
        assert vessel_id == "reactor_01"
        assert vessel_data["liquid"][0]["volume"] == 100.0

        # 2. find_separator_device
        sep = find_separator_device(G, vessel_id)
        print(f"分离器设备: {sep}")
        assert sep == "separator_1"

        # 3. find_connected_stirrer
        stirrer = find_connected_stirrer(G, vessel_id)
        print(f"搅拌器设备: {stirrer}")
        assert stirrer == "stirrer_1"

        # 4. find_solvent_vessel
        solvent_v = find_solvent_vessel(G, "dcm")
        print(f"DCM溶剂容器: {solvent_v}")
        assert solvent_v == "flask_dcm"

        # 5. parse_volume_input
        vol = parse_volume_input("200 mL")
        print(f"体积解析: '200 mL' → {vol}")
        assert vol == 200.0

        vol2 = parse_volume_input("1.5 L")
        print(f"体积解析: '1.5 L' → {vol2}")
        assert vol2 == 1500.0

        # 6. get_vessel_liquid_volume
        liq_vol = get_vessel_liquid_volume(enriched)
        print(f"液体体积 (enriched dict): {liq_vol}")
        assert liq_vol == 100.0

        # 7. find_separation_vessel_bottom
        bottom = find_separation_vessel_bottom(G, vessel_id)
        print(f"分离容器底部: {bottom}")
        # The current graph has no bottom vessel matching by name

    def test_pump_transfer_for_separate_subflow(self):
        """Pump sub-flows invoked by separate (solvent addition → separating funnel)."""
        from unilabos.compile.pump_protocol import generate_pump_protocol_with_rinsing

        G = _build_pump_transfer_graph()

        # Solvent addition step before separation
        actions = generate_pump_protocol_with_rinsing(
            G=G,
            from_vessel="flask_dcm",
            to_vessel="reactor_01",
            volume=100.0,
            flowrate=2.5,
            transfer_flowrate=0.5,
        )

        _dump_actions(actions, "Separate 子流程: flask_dcm → reactor_01, 100mL DCM")

        assert isinstance(actions, list)
        assert len(actions) > 0

        # Product transfer after separation
        actions2 = generate_pump_protocol_with_rinsing(
            G=G,
            from_vessel="reactor_01",
            to_vessel="product_flask",
            volume=50.0,
            flowrate=2.5,
            transfer_flowrate=0.5,
        )

        _dump_actions(actions2, "Separate 子流程: reactor_01 → product_flask, 50mL 产物")

        assert len(actions2) > 0

        # Waste transfer
        actions3 = generate_pump_protocol_with_rinsing(
            G=G,
            from_vessel="reactor_01",
            to_vessel="waste_workup",
            volume=50.0,
            flowrate=2.5,
            transfer_flowrate=0.5,
        )

        _dump_actions(actions3, "Separate 子流程: reactor_01 → waste_workup, 50mL 废液")

        assert len(actions3) > 0
|
||||
|
||||
|
||||
# ==================== 图路径可视化 ====================
|
||||
|
||||
class TestGraphPathVisualization:
    """Print key path information of the device graph (human inspection aid)."""

    def test_all_shortest_paths(self):
        """Print shortest paths between all relevant container pairs."""
        G = _build_pump_transfer_graph()

        containers = [n for n in G.nodes() if G.nodes[n].get("type") == "container"]
        devices = [n for n in G.nodes() if G.nodes[n].get("type") == "device"]

        print(f"\n{'='*70}")
        print(f" 设备图概览")
        print(f"{'='*70}")
        print(f" 容器节点 ({len(containers)}): {containers}")
        print(f" 设备节点 ({len(devices)}): {devices}")
        print(f" 边数: {G.number_of_edges()}")
        print(f" 边列表:")
        for u, v, data in G.edges(data=True):
            port_info = data.get("port", "")
            print(f" {u} → {v} {port_info if port_info else ''}")

        print(f"\n 关键路径:")
        pairs = [
            ("flask_water", "reactor_01"),
            ("flask_dcm", "reactor_01"),
            ("reactor_01", "product_flask"),
            ("reactor_01", "waste_workup"),
            ("flask_water", "product_flask"),
        ]
        for src, dst in pairs:
            try:
                path = nx.shortest_path(G, src, dst)
                length = len(path) - 1
                # Annotate each node on the path with its type/class
                annotated = []
                for n in path:
                    ntype = G.nodes[n].get("type", "?")
                    nclass = G.nodes[n].get("class", "")
                    annotated.append(f"{n}({ntype}{'/' + nclass if nclass else ''})")
                print(f" {src} → {dst}: 距离={length}")
                print(f" 路径: {' → '.join(annotated)}")
            except nx.NetworkXNoPath:
                print(f" {src} → {dst}: 无路径!")

        print(f"{'='*70}\n")
|
||||
324
tests/compile/test_resource_conversion_path.py
Normal file
@@ -0,0 +1,324 @@
|
||||
"""
|
||||
ROS Goal → Resource 转换 → 编译器路径的集成测试
|
||||
|
||||
覆盖:
|
||||
1. Resource.msg 新字段(uuid, klass, extra)的往返转换
|
||||
2. dict → ROS Resource → dict 往返无损
|
||||
3. ResourceTreeSet → get_plr_nested_dict 保留 children 结构
|
||||
4. resource_helper 兼容 dict / ResourceDictInstance
|
||||
5. vessel_parser.get_vessel 兼容 ResourceDictInstance
|
||||
"""
|
||||
|
||||
import json
|
||||
import pytest
|
||||
|
||||
# 不依赖 ROS 的测试 —— 直接测试 resource 处理路径
|
||||
from unilabos.resources.resource_tracker import (
|
||||
ResourceDict,
|
||||
ResourceDictInstance,
|
||||
ResourceTreeInstance,
|
||||
ResourceTreeSet,
|
||||
)
|
||||
from unilabos.compile.utils.resource_helper import (
|
||||
ensure_resource_instance,
|
||||
resource_to_dict,
|
||||
get_resource_id,
|
||||
get_resource_data,
|
||||
get_resource_display_info,
|
||||
get_resource_liquid_volume,
|
||||
)
|
||||
from unilabos.compile.utils.vessel_parser import get_vessel
|
||||
|
||||
|
||||
# ============ 构建测试数据 ============
|
||||
|
||||
|
||||
def _make_resource_dict(
|
||||
id="reactor_01",
|
||||
uuid="uuid-reactor-01",
|
||||
name="reactor_01",
|
||||
klass="virtual_stirrer",
|
||||
type_="device",
|
||||
parent=None,
|
||||
parent_uuid=None,
|
||||
data=None,
|
||||
config=None,
|
||||
extra=None,
|
||||
):
|
||||
return {
|
||||
"id": id,
|
||||
"uuid": uuid,
|
||||
"name": name,
|
||||
"class": klass,
|
||||
"type": type_,
|
||||
"parent": parent,
|
||||
"parent_uuid": parent_uuid or "",
|
||||
"description": "",
|
||||
"config": config or {},
|
||||
"data": data or {},
|
||||
"extra": extra or {},
|
||||
"position": {"x": 1.0, "y": 2.0, "z": 3.0},
|
||||
}
|
||||
|
||||
|
||||
def _make_resource_instance(id="reactor_01", **kwargs):
    """Build a ResourceDictInstance from the same fields as _make_resource_dict."""
    raw = _make_resource_dict(id=id, **kwargs)
    return ResourceDictInstance.get_resource_instance_from_dict(raw)
|
||||
|
||||
|
||||
def _make_tree_with_children():
    """Build a StationA -> [R1, R2] resource tree set."""
    station = _make_resource_dict(
        id="StationA",
        uuid="uuid-station-a",
        name="StationA",
        klass="workstation",
        type_="device",
    )
    child_r1 = _make_resource_dict(
        id="R1",
        uuid="uuid-r1",
        name="R1",
        klass="",
        type_="resource",
        parent="StationA",
        parent_uuid="uuid-station-a",
        data={"liquid": [{"liquid_type": "water", "volume": 10.0}]},
    )
    child_r2 = _make_resource_dict(
        id="R2",
        uuid="uuid-r2",
        name="R2",
        klass="",
        type_="resource",
        parent="StationA",
        parent_uuid="uuid-station-a",
        data={"liquid": [{"liquid_type": "ethanol", "volume": 5.0}]},
    )
    return ResourceTreeSet.from_raw_dict_list([station, child_r1, child_r2])
|
||||
|
||||
|
||||
# ============ resource_helper 测试 ============
|
||||
|
||||
|
||||
class TestResourceHelper:
    """Test resource_helper compatibility with dict / ResourceDictInstance inputs."""

    def test_ensure_resource_instance_from_dict(self):
        # A plain dict is wrapped into a ResourceDictInstance
        d = _make_resource_dict()
        inst = ensure_resource_instance(d)
        assert isinstance(inst, ResourceDictInstance)
        assert inst.res_content.id == "reactor_01"
        assert inst.res_content.uuid == "uuid-reactor-01"

    def test_ensure_resource_instance_passthrough(self):
        # An existing instance passes through unchanged
        inst = _make_resource_instance()
        result = ensure_resource_instance(inst)
        assert result is inst  # same object, no copy

    def test_ensure_resource_instance_none(self):
        assert ensure_resource_instance(None) is None

    def test_get_resource_id_from_dict(self):
        d = _make_resource_dict(id="my_device")
        assert get_resource_id(d) == "my_device"

    def test_get_resource_id_from_instance(self):
        inst = _make_resource_instance(id="my_device")
        assert get_resource_id(inst) == "my_device"

    def test_get_resource_id_from_string(self):
        # Bare string IDs pass through
        assert get_resource_id("my_device") == "my_device"

    def test_get_resource_id_from_wrapped_dict(self):
        """Compatible with the {station_id: {...}} wrapper format."""
        d = {"StationA": {"id": "StationA", "name": "StationA"}}
        assert get_resource_id(d) == "StationA"

    def test_get_resource_data_from_dict(self):
        d = _make_resource_dict(data={"temperature": 25.0})
        assert get_resource_data(d) == {"temperature": 25.0}

    def test_get_resource_data_from_instance(self):
        inst = _make_resource_instance(data={"temperature": 25.0})
        data = get_resource_data(inst)
        assert data["temperature"] == 25.0

    def test_get_resource_display_info_from_dict(self):
        d = _make_resource_dict(id="reactor_01", name="Reactor #1")
        info = get_resource_display_info(d)
        assert "reactor_01" in info
        assert "Reactor #1" in info

    def test_get_resource_display_info_from_instance(self):
        inst = _make_resource_instance(id="reactor_01", name="Reactor #1")
        info = get_resource_display_info(inst)
        assert "reactor_01" in info

    def test_get_resource_display_info_from_string(self):
        assert get_resource_display_info("reactor_01") == "reactor_01"

    def test_get_resource_liquid_volume(self):
        d = _make_resource_dict(data={"liquid": [{"liquid_type": "water", "volume": 15.5}]})
        assert get_resource_liquid_volume(d) == pytest.approx(15.5)

    def test_resource_to_dict_from_instance(self):
        inst = _make_resource_instance(id="reactor_01", klass="virtual_stirrer")
        d = resource_to_dict(inst)
        assert isinstance(d, dict)
        assert d["id"] == "reactor_01"
        assert d["class"] == "virtual_stirrer"

    def test_resource_to_dict_passthrough(self):
        # A plain dict passes through without copying
        d = _make_resource_dict()
        result = resource_to_dict(d)
        assert result is d  # same dict
|
||||
|
||||
|
||||
# ============ vessel_parser 兼容性测试 ============
|
||||
|
||||
|
||||
class TestVesselParser:
|
||||
"""测试 vessel_parser.get_vessel 对 ResourceDictInstance 的兼容"""
|
||||
|
||||
def test_get_vessel_from_dict(self):
|
||||
d = _make_resource_dict(id="reactor_01", data={"temperature": 25.0})
|
||||
vessel_id, vessel_data = get_vessel(d)
|
||||
assert vessel_id == "reactor_01"
|
||||
assert vessel_data["temperature"] == 25.0
|
||||
|
||||
def test_get_vessel_from_string(self):
|
||||
vessel_id, vessel_data = get_vessel("reactor_01")
|
||||
assert vessel_id == "reactor_01"
|
||||
assert vessel_data == {}
|
||||
|
||||
def test_get_vessel_from_resource_instance(self):
|
||||
inst = _make_resource_instance(id="reactor_01", data={"temperature": 25.0})
|
||||
vessel_id, vessel_data = get_vessel(inst)
|
||||
assert vessel_id == "reactor_01"
|
||||
assert vessel_data["temperature"] == 25.0
|
||||
|
||||
def test_get_vessel_from_wrapped_dict(self):
|
||||
"""兼容 {station_id: {id: ..., data: {...}}} 格式"""
|
||||
d = {"StationA": {"id": "StationA", "data": {"vol": 100}}}
|
||||
vessel_id, vessel_data = get_vessel(d)
|
||||
assert vessel_id == "StationA"
|
||||
|
||||
|
||||
# ============ ResourceTreeSet → get_plr_nested_dict 测试 ============
|
||||
|
||||
|
||||
class TestResourceTreeRoundTrip:
|
||||
"""测试 ResourceTreeSet → get_plr_nested_dict 保留树结构和关键字段"""
|
||||
|
||||
def test_tree_preserves_children(self):
|
||||
tree_set = _make_tree_with_children()
|
||||
assert len(tree_set.trees) == 1
|
||||
root = tree_set.trees[0].root_node
|
||||
assert root.res_content.id == "StationA"
|
||||
assert len(root.children) == 2
|
||||
|
||||
def test_plr_nested_dict_has_children(self):
|
||||
tree_set = _make_tree_with_children()
|
||||
root = tree_set.trees[0].root_node
|
||||
nested = root.get_plr_nested_dict()
|
||||
assert isinstance(nested, dict)
|
||||
assert "children" in nested
|
||||
assert isinstance(nested["children"], dict)
|
||||
assert "R1" in nested["children"]
|
||||
assert "R2" in nested["children"]
|
||||
|
||||
def test_plr_nested_dict_preserves_uuid(self):
|
||||
tree_set = _make_tree_with_children()
|
||||
root = tree_set.trees[0].root_node
|
||||
nested = root.get_plr_nested_dict()
|
||||
assert nested["uuid"] == "uuid-station-a"
|
||||
assert nested["children"]["R1"]["uuid"] == "uuid-r1"
|
||||
|
||||
def test_plr_nested_dict_preserves_klass(self):
|
||||
tree_set = _make_tree_with_children()
|
||||
root = tree_set.trees[0].root_node
|
||||
nested = root.get_plr_nested_dict()
|
||||
assert nested["class"] == "workstation"
|
||||
|
||||
def test_plr_nested_dict_preserves_data(self):
|
||||
tree_set = _make_tree_with_children()
|
||||
root = tree_set.trees[0].root_node
|
||||
nested = root.get_plr_nested_dict()
|
||||
r1_data = nested["children"]["R1"]["data"]
|
||||
assert "liquid" in r1_data
|
||||
assert r1_data["liquid"][0]["volume"] == 10.0
|
||||
|
||||
def test_plr_nested_dict_usable_by_get_vessel(self):
|
||||
"""get_plr_nested_dict 的结果可以直接传给 get_vessel"""
|
||||
tree_set = _make_tree_with_children()
|
||||
root = tree_set.trees[0].root_node
|
||||
nested = root.get_plr_nested_dict()
|
||||
vessel_id, vessel_data = get_vessel(nested)
|
||||
assert vessel_id == "StationA"
|
||||
|
||||
def test_dump_vs_plr_nested_dict(self):
|
||||
"""dump() 是扁平化的,get_plr_nested_dict 保留树结构"""
|
||||
tree_set = _make_tree_with_children()
|
||||
# dump 返回扁平列表
|
||||
dumped = tree_set.dump()
|
||||
assert isinstance(dumped[0], list)
|
||||
assert len(dumped[0]) == 3 # StationA + R1 + R2,全部扁平
|
||||
|
||||
# get_plr_nested_dict 保留嵌套
|
||||
root = tree_set.trees[0].root_node
|
||||
nested = root.get_plr_nested_dict()
|
||||
assert isinstance(nested["children"], dict)
|
||||
assert len(nested["children"]) == 2 # 嵌套的 children
|
||||
|
||||
|
||||
# ============ 模拟 workstation 路径测试 ============
|
||||
|
||||
|
||||
class TestWorkstationPath:
|
||||
"""模拟 workstation.py 中的关键路径:
|
||||
raw_data → ResourceTreeSet.from_raw_dict_list → get_plr_nested_dict → compiler
|
||||
"""
|
||||
|
||||
def test_single_resource_path(self):
|
||||
"""单个 Resource: 取第一棵树的根节点"""
|
||||
raw_data = [
|
||||
_make_resource_dict(id="reactor_01", uuid="uuid-r01", klass="virtual_stirrer"),
|
||||
]
|
||||
tree_set = ResourceTreeSet.from_raw_dict_list(raw_data)
|
||||
root = tree_set.trees[0].root_node
|
||||
result = root.get_plr_nested_dict()
|
||||
assert result["id"] == "reactor_01"
|
||||
assert result["uuid"] == "uuid-r01"
|
||||
assert result["class"] == "virtual_stirrer"
|
||||
|
||||
def test_resource_with_children_path(self):
|
||||
"""Resource 带 children: AGV/batch transfer 场景"""
|
||||
tree_set = _make_tree_with_children()
|
||||
root = tree_set.trees[0].root_node
|
||||
nested = root.get_plr_nested_dict()
|
||||
|
||||
# 模拟编译器接收到的参数
|
||||
from_repo = {"StationA": nested}
|
||||
assert "A01" not in from_repo["StationA"]["children"] # children 按 id 索引
|
||||
assert "R1" in from_repo["StationA"]["children"]
|
||||
assert from_repo["StationA"]["children"]["R1"]["uuid"] == "uuid-r1"
|
||||
|
||||
def test_multiple_resource_path(self):
|
||||
"""多个 Resource: 每棵树取根节点"""
|
||||
raw_data1 = [_make_resource_dict(id="R1", uuid="uuid-r1")]
|
||||
raw_data2 = [_make_resource_dict(id="R2", uuid="uuid-r2")]
|
||||
# 模拟 host 返回多棵树
|
||||
tree_set1 = ResourceTreeSet.from_raw_dict_list(raw_data1)
|
||||
tree_set2 = ResourceTreeSet.from_raw_dict_list(raw_data2)
|
||||
results = [
|
||||
tree.root_node.get_plr_nested_dict()
|
||||
for ts in [tree_set1, tree_set2]
|
||||
for tree in ts.trees
|
||||
]
|
||||
assert len(results) == 2
|
||||
assert results[0]["id"] == "R1"
|
||||
assert results[1]["id"] == "R2"
|
||||
1
tests/devices/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
5
tests/devices/liquid_handling/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
||||
"""
|
||||
液体处理设备相关测试。
|
||||
"""
|
||||
|
||||
|
||||
505
tests/devices/liquid_handling/test_transfer_liquid.py
Normal file
@@ -0,0 +1,505 @@
|
||||
import asyncio
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Iterable, List, Optional, Sequence, Tuple
|
||||
|
||||
import pytest
|
||||
|
||||
from unilabos.devices.liquid_handling.liquid_handler_abstract import LiquidHandlerAbstract
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class DummyContainer:
|
||||
name: str
|
||||
|
||||
def __repr__(self) -> str: # pragma: no cover
|
||||
return f"DummyContainer({self.name})"
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class DummyTipSpot:
|
||||
name: str
|
||||
|
||||
def __repr__(self) -> str: # pragma: no cover
|
||||
return f"DummyTipSpot({self.name})"
|
||||
|
||||
|
||||
def make_tip_iter(n: int = 256) -> Iterable[List[DummyTipSpot]]:
|
||||
"""Yield lists so code can safely call `tip.extend(next(self.current_tip))`."""
|
||||
for i in range(n):
|
||||
yield [DummyTipSpot(f"tip_{i}")]
|
||||
|
||||
|
||||
class FakeLiquidHandler(LiquidHandlerAbstract):
|
||||
"""不初始化真实 backend/deck;仅用来记录 transfer_liquid 内部调用序列。"""
|
||||
|
||||
def __init__(self, channel_num: int = 8):
|
||||
# 不调用 super().__init__,避免真实硬件/后端依赖
|
||||
self.channel_num = channel_num
|
||||
self.support_touch_tip = True
|
||||
self.current_tip = iter(make_tip_iter())
|
||||
self.calls: List[Tuple[str, Any]] = []
|
||||
|
||||
async def pick_up_tips(self, tip_spots, use_channels=None, offsets=None, **backend_kwargs):
|
||||
self.calls.append(("pick_up_tips", {"tips": list(tip_spots), "use_channels": use_channels}))
|
||||
|
||||
async def aspirate(
|
||||
self,
|
||||
resources: Sequence[Any],
|
||||
vols: List[float],
|
||||
use_channels: Optional[List[int]] = None,
|
||||
flow_rates: Optional[List[Optional[float]]] = None,
|
||||
offsets: Any = None,
|
||||
liquid_height: Any = None,
|
||||
blow_out_air_volume: Any = None,
|
||||
spread: str = "wide",
|
||||
**backend_kwargs,
|
||||
):
|
||||
self.calls.append(
|
||||
(
|
||||
"aspirate",
|
||||
{
|
||||
"resources": list(resources),
|
||||
"vols": list(vols),
|
||||
"use_channels": list(use_channels) if use_channels is not None else None,
|
||||
"flow_rates": list(flow_rates) if flow_rates is not None else None,
|
||||
"offsets": list(offsets) if offsets is not None else None,
|
||||
"liquid_height": list(liquid_height) if liquid_height is not None else None,
|
||||
"blow_out_air_volume": list(blow_out_air_volume) if blow_out_air_volume is not None else None,
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
async def dispense(
|
||||
self,
|
||||
resources: Sequence[Any],
|
||||
vols: List[float],
|
||||
use_channels: Optional[List[int]] = None,
|
||||
flow_rates: Optional[List[Optional[float]]] = None,
|
||||
offsets: Any = None,
|
||||
liquid_height: Any = None,
|
||||
blow_out_air_volume: Any = None,
|
||||
spread: str = "wide",
|
||||
**backend_kwargs,
|
||||
):
|
||||
self.calls.append(
|
||||
(
|
||||
"dispense",
|
||||
{
|
||||
"resources": list(resources),
|
||||
"vols": list(vols),
|
||||
"use_channels": list(use_channels) if use_channels is not None else None,
|
||||
"flow_rates": list(flow_rates) if flow_rates is not None else None,
|
||||
"offsets": list(offsets) if offsets is not None else None,
|
||||
"liquid_height": list(liquid_height) if liquid_height is not None else None,
|
||||
"blow_out_air_volume": list(blow_out_air_volume) if blow_out_air_volume is not None else None,
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
async def discard_tips(self, use_channels=None, *args, **kwargs):
|
||||
# 有的分支是 discard_tips(use_channels=[0]),有的分支是 discard_tips([0..7])(位置参数)
|
||||
self.calls.append(("discard_tips", {"use_channels": list(use_channels) if use_channels is not None else None}))
|
||||
|
||||
async def custom_delay(self, seconds=0, msg=None):
|
||||
self.calls.append(("custom_delay", {"seconds": seconds, "msg": msg}))
|
||||
|
||||
async def touch_tip(self, targets):
|
||||
# 原实现会访问 targets.get_size_x() 等;测试里只记录调用
|
||||
self.calls.append(("touch_tip", {"targets": targets}))
|
||||
|
||||
async def mix(self, targets, mix_time=None, mix_vol=None, height_to_bottom=None, offsets=None, mix_rate=None, none_keys=None):
|
||||
self.calls.append(
|
||||
(
|
||||
"mix",
|
||||
{
|
||||
"targets": targets,
|
||||
"mix_time": mix_time,
|
||||
"mix_vol": mix_vol,
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def run(coro):
|
||||
return asyncio.run(coro)
|
||||
|
||||
|
||||
def test_one_to_one_single_channel_basic_calls():
|
||||
lh = FakeLiquidHandler(channel_num=1)
|
||||
lh.current_tip = iter(make_tip_iter(64))
|
||||
|
||||
sources = [DummyContainer(f"S{i}") for i in range(3)]
|
||||
targets = [DummyContainer(f"T{i}") for i in range(3)]
|
||||
|
||||
run(
|
||||
lh.transfer_liquid(
|
||||
sources=sources,
|
||||
targets=targets,
|
||||
tip_racks=[],
|
||||
use_channels=[0],
|
||||
asp_vols=[1, 2, 3],
|
||||
dis_vols=[4, 5, 6],
|
||||
mix_times=None, # 应该仍能执行(不 mix)
|
||||
)
|
||||
)
|
||||
|
||||
assert [c[0] for c in lh.calls].count("pick_up_tips") == 3
|
||||
assert [c[0] for c in lh.calls].count("aspirate") == 3
|
||||
assert [c[0] for c in lh.calls].count("dispense") == 3
|
||||
assert [c[0] for c in lh.calls].count("discard_tips") == 3
|
||||
|
||||
# 每次 aspirate/dispense 都是单孔列表
|
||||
aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
|
||||
assert aspirates[0]["resources"] == [sources[0]]
|
||||
assert aspirates[0]["vols"] == [1.0]
|
||||
|
||||
dispenses = [payload for name, payload in lh.calls if name == "dispense"]
|
||||
assert dispenses[2]["resources"] == [targets[2]]
|
||||
assert dispenses[2]["vols"] == [6.0]
|
||||
|
||||
|
||||
def test_one_to_one_single_channel_before_stage_mixes_prior_to_aspirate():
|
||||
lh = FakeLiquidHandler(channel_num=1)
|
||||
lh.current_tip = iter(make_tip_iter(16))
|
||||
|
||||
source = DummyContainer("S0")
|
||||
target = DummyContainer("T0")
|
||||
|
||||
run(
|
||||
lh.transfer_liquid(
|
||||
sources=[source],
|
||||
targets=[target],
|
||||
tip_racks=[],
|
||||
use_channels=[0],
|
||||
asp_vols=[5],
|
||||
dis_vols=[5],
|
||||
mix_stage="before",
|
||||
mix_times=1,
|
||||
mix_vol=3,
|
||||
)
|
||||
)
|
||||
|
||||
names = [name for name, _ in lh.calls]
|
||||
assert names.count("mix") == 1
|
||||
assert names.index("mix") < names.index("aspirate")
|
||||
|
||||
|
||||
def test_one_to_one_eight_channel_groups_by_8():
|
||||
lh = FakeLiquidHandler(channel_num=8)
|
||||
lh.current_tip = iter(make_tip_iter(256))
|
||||
|
||||
sources = [DummyContainer(f"S{i}") for i in range(16)]
|
||||
targets = [DummyContainer(f"T{i}") for i in range(16)]
|
||||
asp_vols = list(range(1, 17))
|
||||
dis_vols = list(range(101, 117))
|
||||
|
||||
run(
|
||||
lh.transfer_liquid(
|
||||
sources=sources,
|
||||
targets=targets,
|
||||
tip_racks=[],
|
||||
use_channels=list(range(8)),
|
||||
asp_vols=asp_vols,
|
||||
dis_vols=dis_vols,
|
||||
mix_times=0, # 触发逻辑但不 mix
|
||||
)
|
||||
)
|
||||
|
||||
# 16 个任务 -> 2 组,每组 8 通道一起做
|
||||
assert [c[0] for c in lh.calls].count("pick_up_tips") == 2
|
||||
aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
|
||||
dispenses = [payload for name, payload in lh.calls if name == "dispense"]
|
||||
assert len(aspirates) == 2
|
||||
assert len(dispenses) == 2
|
||||
|
||||
assert aspirates[0]["resources"] == sources[0:8]
|
||||
assert aspirates[0]["vols"] == [float(v) for v in asp_vols[0:8]]
|
||||
assert dispenses[1]["resources"] == targets[8:16]
|
||||
assert dispenses[1]["vols"] == [float(v) for v in dis_vols[8:16]]
|
||||
|
||||
|
||||
def test_one_to_one_eight_channel_requires_multiple_of_8_targets():
|
||||
lh = FakeLiquidHandler(channel_num=8)
|
||||
lh.current_tip = iter(make_tip_iter(64))
|
||||
|
||||
sources = [DummyContainer(f"S{i}") for i in range(9)]
|
||||
targets = [DummyContainer(f"T{i}") for i in range(9)]
|
||||
|
||||
with pytest.raises(ValueError, match="multiple of 8"):
|
||||
run(
|
||||
lh.transfer_liquid(
|
||||
sources=sources,
|
||||
targets=targets,
|
||||
tip_racks=[],
|
||||
use_channels=list(range(8)),
|
||||
asp_vols=[1] * 9,
|
||||
dis_vols=[1] * 9,
|
||||
mix_times=0,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def test_one_to_one_eight_channel_parameter_lists_are_chunked_per_8():
|
||||
lh = FakeLiquidHandler(channel_num=8)
|
||||
lh.current_tip = iter(make_tip_iter(512))
|
||||
|
||||
sources = [DummyContainer(f"S{i}") for i in range(16)]
|
||||
targets = [DummyContainer(f"T{i}") for i in range(16)]
|
||||
asp_vols = [i + 1 for i in range(16)]
|
||||
dis_vols = [200 + i for i in range(16)]
|
||||
asp_flow_rates = [0.1 * (i + 1) for i in range(16)]
|
||||
dis_flow_rates = [0.2 * (i + 1) for i in range(16)]
|
||||
offsets = [f"offset_{i}" for i in range(16)]
|
||||
liquid_heights = [i * 0.5 for i in range(16)]
|
||||
blow_out_air_volume = [i + 0.05 for i in range(16)]
|
||||
|
||||
run(
|
||||
lh.transfer_liquid(
|
||||
sources=sources,
|
||||
targets=targets,
|
||||
tip_racks=[],
|
||||
use_channels=list(range(8)),
|
||||
asp_vols=asp_vols,
|
||||
dis_vols=dis_vols,
|
||||
asp_flow_rates=asp_flow_rates,
|
||||
dis_flow_rates=dis_flow_rates,
|
||||
offsets=offsets,
|
||||
liquid_height=liquid_heights,
|
||||
blow_out_air_volume=blow_out_air_volume,
|
||||
mix_times=0,
|
||||
)
|
||||
)
|
||||
|
||||
aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
|
||||
dispenses = [payload for name, payload in lh.calls if name == "dispense"]
|
||||
assert len(aspirates) == len(dispenses) == 2
|
||||
|
||||
for batch_idx in range(2):
|
||||
start = batch_idx * 8
|
||||
end = start + 8
|
||||
asp_call = aspirates[batch_idx]
|
||||
dis_call = dispenses[batch_idx]
|
||||
assert asp_call["resources"] == sources[start:end]
|
||||
assert asp_call["flow_rates"] == asp_flow_rates[start:end]
|
||||
assert asp_call["offsets"] == offsets[start:end]
|
||||
assert asp_call["liquid_height"] == liquid_heights[start:end]
|
||||
assert asp_call["blow_out_air_volume"] == blow_out_air_volume[start:end]
|
||||
assert dis_call["flow_rates"] == dis_flow_rates[start:end]
|
||||
assert dis_call["offsets"] == offsets[start:end]
|
||||
assert dis_call["liquid_height"] == liquid_heights[start:end]
|
||||
assert dis_call["blow_out_air_volume"] == blow_out_air_volume[start:end]
|
||||
|
||||
|
||||
def test_one_to_one_eight_channel_handles_32_tasks_four_batches():
|
||||
lh = FakeLiquidHandler(channel_num=8)
|
||||
lh.current_tip = iter(make_tip_iter(1024))
|
||||
|
||||
sources = [DummyContainer(f"S{i}") for i in range(32)]
|
||||
targets = [DummyContainer(f"T{i}") for i in range(32)]
|
||||
asp_vols = [i + 1 for i in range(32)]
|
||||
dis_vols = [300 + i for i in range(32)]
|
||||
|
||||
run(
|
||||
lh.transfer_liquid(
|
||||
sources=sources,
|
||||
targets=targets,
|
||||
tip_racks=[],
|
||||
use_channels=list(range(8)),
|
||||
asp_vols=asp_vols,
|
||||
dis_vols=dis_vols,
|
||||
mix_times=0,
|
||||
)
|
||||
)
|
||||
|
||||
pick_calls = [name for name, _ in lh.calls if name == "pick_up_tips"]
|
||||
aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
|
||||
dispenses = [payload for name, payload in lh.calls if name == "dispense"]
|
||||
assert len(pick_calls) == 4
|
||||
assert len(aspirates) == len(dispenses) == 4
|
||||
assert aspirates[0]["resources"] == sources[0:8]
|
||||
assert aspirates[-1]["resources"] == sources[24:32]
|
||||
assert dispenses[0]["resources"] == targets[0:8]
|
||||
assert dispenses[-1]["resources"] == targets[24:32]
|
||||
|
||||
|
||||
def test_one_to_many_single_channel_aspirates_total_when_asp_vol_too_small():
|
||||
lh = FakeLiquidHandler(channel_num=1)
|
||||
lh.current_tip = iter(make_tip_iter(64))
|
||||
|
||||
source = DummyContainer("SRC")
|
||||
targets = [DummyContainer(f"T{i}") for i in range(3)]
|
||||
dis_vols = [10, 20, 30] # sum=60
|
||||
|
||||
run(
|
||||
lh.transfer_liquid(
|
||||
sources=[source],
|
||||
targets=targets,
|
||||
tip_racks=[],
|
||||
use_channels=[0],
|
||||
asp_vols=10, # 小于 sum(dis_vols) -> 应吸 60
|
||||
dis_vols=dis_vols,
|
||||
mix_times=0,
|
||||
)
|
||||
)
|
||||
|
||||
aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
|
||||
assert len(aspirates) == 1
|
||||
assert aspirates[0]["resources"] == [source]
|
||||
assert aspirates[0]["vols"] == [60.0]
|
||||
assert aspirates[0]["use_channels"] == [0]
|
||||
dispenses = [payload for name, payload in lh.calls if name == "dispense"]
|
||||
assert [d["vols"][0] for d in dispenses] == [10.0, 20.0, 30.0]
|
||||
|
||||
|
||||
def test_one_to_many_eight_channel_basic():
|
||||
lh = FakeLiquidHandler(channel_num=8)
|
||||
lh.current_tip = iter(make_tip_iter(128))
|
||||
|
||||
source = DummyContainer("SRC")
|
||||
targets = [DummyContainer(f"T{i}") for i in range(8)]
|
||||
dis_vols = [i + 1 for i in range(8)]
|
||||
|
||||
run(
|
||||
lh.transfer_liquid(
|
||||
sources=[source],
|
||||
targets=targets,
|
||||
tip_racks=[],
|
||||
use_channels=list(range(8)),
|
||||
asp_vols=999, # one-to-many 8ch 会按 dis_vols 吸(每通道各自)
|
||||
dis_vols=dis_vols,
|
||||
mix_times=0,
|
||||
)
|
||||
)
|
||||
|
||||
aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
|
||||
assert aspirates[0]["resources"] == [source] * 8
|
||||
assert aspirates[0]["vols"] == [float(v) for v in dis_vols]
|
||||
dispenses = [payload for name, payload in lh.calls if name == "dispense"]
|
||||
assert dispenses[0]["resources"] == targets
|
||||
assert dispenses[0]["vols"] == [float(v) for v in dis_vols]
|
||||
|
||||
|
||||
def test_many_to_one_single_channel_standard_dispense_equals_asp_by_default():
|
||||
lh = FakeLiquidHandler(channel_num=1)
|
||||
lh.current_tip = iter(make_tip_iter(128))
|
||||
|
||||
sources = [DummyContainer(f"S{i}") for i in range(3)]
|
||||
target = DummyContainer("T")
|
||||
asp_vols = [5, 6, 7]
|
||||
|
||||
run(
|
||||
lh.transfer_liquid(
|
||||
sources=sources,
|
||||
targets=[target],
|
||||
tip_racks=[],
|
||||
use_channels=[0],
|
||||
asp_vols=asp_vols,
|
||||
dis_vols=1, # many-to-one 允许标量;非比例模式下实际每次分液=对应 asp_vol
|
||||
mix_times=0,
|
||||
)
|
||||
)
|
||||
|
||||
dispenses = [payload for name, payload in lh.calls if name == "dispense"]
|
||||
assert [d["vols"][0] for d in dispenses] == [float(v) for v in asp_vols]
|
||||
assert all(d["resources"] == [target] for d in dispenses)
|
||||
|
||||
|
||||
def test_many_to_one_single_channel_before_stage_mixes_target_once():
|
||||
lh = FakeLiquidHandler(channel_num=1)
|
||||
lh.current_tip = iter(make_tip_iter(128))
|
||||
|
||||
sources = [DummyContainer("S0"), DummyContainer("S1")]
|
||||
target = DummyContainer("T")
|
||||
|
||||
run(
|
||||
lh.transfer_liquid(
|
||||
sources=sources,
|
||||
targets=[target],
|
||||
tip_racks=[],
|
||||
use_channels=[0],
|
||||
asp_vols=[5, 6],
|
||||
dis_vols=1,
|
||||
mix_stage="before",
|
||||
mix_times=2,
|
||||
mix_vol=4,
|
||||
)
|
||||
)
|
||||
|
||||
names = [name for name, _ in lh.calls]
|
||||
assert names[0] == "mix"
|
||||
assert names.count("mix") == 1
|
||||
|
||||
|
||||
def test_many_to_one_single_channel_proportional_mixing_uses_dis_vols_per_source():
|
||||
lh = FakeLiquidHandler(channel_num=1)
|
||||
lh.current_tip = iter(make_tip_iter(128))
|
||||
|
||||
sources = [DummyContainer(f"S{i}") for i in range(3)]
|
||||
target = DummyContainer("T")
|
||||
asp_vols = [5, 6, 7]
|
||||
dis_vols = [1, 2, 3]
|
||||
|
||||
run(
|
||||
lh.transfer_liquid(
|
||||
sources=sources,
|
||||
targets=[target],
|
||||
tip_racks=[],
|
||||
use_channels=[0],
|
||||
asp_vols=asp_vols,
|
||||
dis_vols=dis_vols, # 比例模式
|
||||
mix_times=0,
|
||||
)
|
||||
)
|
||||
|
||||
dispenses = [payload for name, payload in lh.calls if name == "dispense"]
|
||||
assert [d["vols"][0] for d in dispenses] == [float(v) for v in dis_vols]
|
||||
|
||||
|
||||
def test_many_to_one_eight_channel_basic():
|
||||
lh = FakeLiquidHandler(channel_num=8)
|
||||
lh.current_tip = iter(make_tip_iter(256))
|
||||
|
||||
sources = [DummyContainer(f"S{i}") for i in range(8)]
|
||||
target = DummyContainer("T")
|
||||
asp_vols = [10 + i for i in range(8)]
|
||||
|
||||
run(
|
||||
lh.transfer_liquid(
|
||||
sources=sources,
|
||||
targets=[target],
|
||||
tip_racks=[],
|
||||
use_channels=list(range(8)),
|
||||
asp_vols=asp_vols,
|
||||
dis_vols=999, # 非比例模式下每通道分液=对应 asp_vol
|
||||
mix_times=0,
|
||||
)
|
||||
)
|
||||
|
||||
aspirates = [payload for name, payload in lh.calls if name == "aspirate"]
|
||||
dispenses = [payload for name, payload in lh.calls if name == "dispense"]
|
||||
assert aspirates[0]["resources"] == sources
|
||||
assert aspirates[0]["vols"] == [float(v) for v in asp_vols]
|
||||
assert dispenses[0]["resources"] == [target] * 8
|
||||
assert dispenses[0]["vols"] == [float(v) for v in asp_vols]
|
||||
|
||||
|
||||
def test_transfer_liquid_mode_detection_unsupported_shape_raises():
|
||||
lh = FakeLiquidHandler(channel_num=8)
|
||||
lh.current_tip = iter(make_tip_iter(64))
|
||||
|
||||
sources = [DummyContainer("S0"), DummyContainer("S1")]
|
||||
targets = [DummyContainer("T0"), DummyContainer("T1"), DummyContainer("T2")]
|
||||
|
||||
with pytest.raises(ValueError, match="Unsupported transfer mode"):
|
||||
run(
|
||||
lh.transfer_liquid(
|
||||
sources=sources,
|
||||
targets=targets,
|
||||
tip_racks=[],
|
||||
use_channels=[0],
|
||||
asp_vols=[1, 1],
|
||||
dis_vols=[1, 1, 1],
|
||||
mix_times=0,
|
||||
)
|
||||
)
|
||||
|
||||
137
tests/devices/test_agv_transport_station.py
Normal file
@@ -0,0 +1,137 @@
|
||||
"""
|
||||
AGVTransportStation driver 测试
|
||||
|
||||
覆盖:初始化、carrier property、slot 查询、路由查询、capacity 计算。
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from unilabos.devices.transport.agv_workstation import AGVTransportStation
|
||||
from unilabos.resources.warehouse import WareHouse, warehouse_factory
|
||||
|
||||
|
||||
class TestAGVTransportStation:
|
||||
def _make_driver(self, route_table=None, device_roles=None):
|
||||
"""创建一个 AGVTransportStation 实例"""
|
||||
return AGVTransportStation(
|
||||
deck=None,
|
||||
route_table=route_table or {
|
||||
"A->B": {"nav_command": '{"target":"LM1"}', "arm_pick": "pick.urp", "arm_place": "place.urp"}
|
||||
},
|
||||
device_roles=device_roles or {"navigator": "agv_nav", "arm": "agv_arm"},
|
||||
)
|
||||
|
||||
def _make_warehouse(self, name="agv_platform", nx=2, ny=1, nz=1):
|
||||
"""创建一个测试用 Warehouse"""
|
||||
return warehouse_factory(name=name, num_items_x=nx, num_items_y=ny, num_items_z=nz)
|
||||
|
||||
def test_init_deck_none(self):
|
||||
"""AGVTransportStation 初始化时 deck=None"""
|
||||
driver = self._make_driver()
|
||||
assert driver.deck is None
|
||||
|
||||
def test_init_route_table(self):
|
||||
"""路由表正确存储"""
|
||||
driver = self._make_driver()
|
||||
assert "A->B" in driver.route_table
|
||||
|
||||
def test_init_device_roles(self):
|
||||
"""设备角色正确存储"""
|
||||
driver = self._make_driver()
|
||||
assert driver.device_roles["navigator"] == "agv_nav"
|
||||
assert driver.device_roles["arm"] == "agv_arm"
|
||||
|
||||
def test_carrier_without_ros_node(self):
|
||||
"""未 post_init 时 carrier 返回 None"""
|
||||
driver = self._make_driver()
|
||||
assert driver.carrier is None
|
||||
|
||||
def test_carrier_with_warehouse(self):
|
||||
"""post_init 后 carrier 返回正确的 WareHouse"""
|
||||
driver = self._make_driver()
|
||||
wh = self._make_warehouse()
|
||||
|
||||
# 模拟 ros_node 和 resource_tracker
|
||||
mock_ros_node = MagicMock()
|
||||
mock_ros_node.resource_tracker.resources = [wh]
|
||||
mock_ros_node.device_id = "AGV"
|
||||
driver.post_init(mock_ros_node)
|
||||
|
||||
assert driver.carrier is wh
|
||||
assert isinstance(driver.carrier, WareHouse)
|
||||
|
||||
def test_capacity(self):
|
||||
"""容量计算正确"""
|
||||
driver = self._make_driver()
|
||||
wh = self._make_warehouse(nx=2, ny=1, nz=1)
|
||||
mock_ros_node = MagicMock()
|
||||
mock_ros_node.resource_tracker.resources = [wh]
|
||||
mock_ros_node.device_id = "AGV"
|
||||
driver.post_init(mock_ros_node)
|
||||
|
||||
assert driver.capacity == 2
|
||||
|
||||
def test_capacity_multi_layer(self):
|
||||
"""多层 Warehouse 容量"""
|
||||
driver = self._make_driver()
|
||||
wh = self._make_warehouse(nx=1, ny=2, nz=3)
|
||||
mock_ros_node = MagicMock()
|
||||
mock_ros_node.resource_tracker.resources = [wh]
|
||||
mock_ros_node.device_id = "AGV"
|
||||
driver.post_init(mock_ros_node)
|
||||
|
||||
assert driver.capacity == 6
|
||||
|
||||
def test_capacity_no_carrier(self):
|
||||
"""无 carrier 时容量为 0"""
|
||||
driver = self._make_driver()
|
||||
assert driver.capacity == 0
|
||||
|
||||
def test_free_slots(self):
|
||||
"""空载时所有 slot 为空闲"""
|
||||
driver = self._make_driver()
|
||||
wh = self._make_warehouse(nx=2, ny=1, nz=1)
|
||||
mock_ros_node = MagicMock()
|
||||
mock_ros_node.resource_tracker.resources = [wh]
|
||||
mock_ros_node.device_id = "AGV"
|
||||
driver.post_init(mock_ros_node)
|
||||
|
||||
free = driver.free_slots
|
||||
assert len(free) == 2
|
||||
|
||||
def test_occupied_slots_empty(self):
|
||||
"""空载时 occupied_slots 为空"""
|
||||
driver = self._make_driver()
|
||||
wh = self._make_warehouse(nx=2, ny=1, nz=1)
|
||||
mock_ros_node = MagicMock()
|
||||
mock_ros_node.resource_tracker.resources = [wh]
|
||||
mock_ros_node.device_id = "AGV"
|
||||
driver.post_init(mock_ros_node)
|
||||
|
||||
assert len(driver.occupied_slots) == 0
|
||||
|
||||
def test_resolve_route(self):
|
||||
"""路由查询返回正确的指令"""
|
||||
driver = self._make_driver()
|
||||
route = driver.resolve_route("A", "B")
|
||||
assert route["nav_command"] == '{"target":"LM1"}'
|
||||
assert route["arm_pick"] == "pick.urp"
|
||||
|
||||
def test_resolve_route_not_found(self):
|
||||
"""查询不存在的路线时抛出 KeyError"""
|
||||
driver = self._make_driver()
|
||||
with pytest.raises(KeyError, match="路由表"):
|
||||
driver.resolve_route("X", "Y")
|
||||
|
||||
def test_get_device_id(self):
|
||||
"""获取子设备 ID"""
|
||||
driver = self._make_driver()
|
||||
assert driver.get_device_id("navigator") == "agv_nav"
|
||||
assert driver.get_device_id("arm") == "agv_arm"
|
||||
|
||||
def test_get_device_id_not_found(self):
|
||||
"""获取不存在的角色时抛出 KeyError"""
|
||||
driver = self._make_driver()
|
||||
with pytest.raises(KeyError, match="未配置设备角色"):
|
||||
driver.get_device_id("gripper")
|
||||
@@ -2,9 +2,8 @@ import pytest
|
||||
import json
|
||||
import os
|
||||
|
||||
from pylabrobot.resources import Resource as ResourcePLR
|
||||
from unilabos.resources.graphio import resource_bioyond_to_plr
|
||||
from unilabos.ros.nodes.resource_tracker import ResourceTreeSet
|
||||
from unilabos.resources.resource_tracker import ResourceTreeSet
|
||||
from unilabos.registry.registry import lab_registry
|
||||
|
||||
from unilabos.resources.bioyond.decks import BIOYOND_PolymerReactionStation_Deck
|
||||
@@ -11,10 +11,10 @@ import os
|
||||
# 添加项目根目录到路径
|
||||
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))))
|
||||
|
||||
# 导入测试模块
|
||||
from test.ros.msgs.test_basic import TestBasicFunctionality
|
||||
from test.ros.msgs.test_conversion import TestBasicConversion, TestMappingConversion
|
||||
from test.ros.msgs.test_mapping import TestTypeMapping, TestFieldMapping
|
||||
# 导入测试模块(统一从 tests 包获取)
|
||||
from tests.ros.msgs.test_basic import TestBasicFunctionality
|
||||
from tests.ros.msgs.test_conversion import TestBasicConversion, TestMappingConversion
|
||||
from tests.ros.msgs.test_mapping import TestTypeMapping, TestFieldMapping
|
||||
|
||||
|
||||
def run_tests():
|
||||
|
Before Width: | Height: | Size: 148 KiB After Width: | Height: | Size: 148 KiB |
|
Before Width: | Height: | Size: 140 KiB After Width: | Height: | Size: 140 KiB |
|
Before Width: | Height: | Size: 117 KiB After Width: | Height: | Size: 117 KiB |
213
tests/workflow/test.json
Normal file
@@ -0,0 +1,213 @@
|
||||
{
|
||||
"workflow": [
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "cell_lines",
|
||||
"targets": "Liquid_1",
|
||||
"asp_vol": 100.0,
|
||||
"dis_vol": 74.75,
|
||||
"asp_flow_rate": 94.0,
|
||||
"dis_flow_rate": 95.5
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "cell_lines",
|
||||
"targets": "Liquid_2",
|
||||
"asp_vol": 100.0,
|
||||
"dis_vol": 74.75,
|
||||
"asp_flow_rate": 94.0,
|
||||
"dis_flow_rate": 95.5
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "cell_lines",
|
||||
"targets": "Liquid_3",
|
||||
"asp_vol": 100.0,
|
||||
"dis_vol": 74.75,
|
||||
"asp_flow_rate": 94.0,
|
||||
"dis_flow_rate": 95.5
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "cell_lines_2",
|
||||
"targets": "Liquid_4",
|
||||
"asp_vol": 100.0,
|
||||
"dis_vol": 74.75,
|
||||
"asp_flow_rate": 94.0,
|
||||
"dis_flow_rate": 95.5
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "cell_lines_2",
|
||||
"targets": "Liquid_5",
|
||||
"asp_vol": 100.0,
|
||||
"dis_vol": 74.75,
|
||||
"asp_flow_rate": 94.0,
|
||||
"dis_flow_rate": 95.5
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "cell_lines_2",
|
||||
"targets": "Liquid_6",
|
||||
"asp_vol": 100.0,
|
||||
"dis_vol": 74.75,
|
||||
"asp_flow_rate": 94.0,
|
||||
"dis_flow_rate": 95.5
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "cell_lines_3",
|
||||
"targets": "dest_set",
|
||||
"asp_vol": 100.0,
|
||||
"dis_vol": 74.75,
|
||||
"asp_flow_rate": 94.0,
|
||||
"dis_flow_rate": 95.5
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "cell_lines_3",
|
||||
"targets": "dest_set_2",
|
||||
"asp_vol": 100.0,
|
||||
"dis_vol": 74.75,
|
||||
"asp_flow_rate": 94.0,
|
||||
"dis_flow_rate": 95.5
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "transfer_liquid",
|
||||
"action_args": {
|
||||
"sources": "cell_lines_3",
|
||||
"targets": "dest_set_3",
|
||||
"asp_vol": 100.0,
|
||||
"dis_vol": 74.75,
|
||||
"asp_flow_rate": 94.0,
|
||||
"dis_flow_rate": 95.5
|
||||
}
|
||||
}
|
||||
],
|
||||
"reagent": {
|
||||
"Liquid_1": {
|
||||
"slot": 1,
|
||||
"well": [
|
||||
"A4",
|
||||
"A7",
|
||||
"A10"
|
||||
],
|
||||
"labware": "rep 1"
|
||||
},
|
||||
"Liquid_4": {
|
||||
"slot": 1,
|
||||
"well": [
|
||||
"A4",
|
||||
"A7",
|
||||
"A10"
|
||||
],
|
||||
"labware": "rep 1"
|
||||
},
|
||||
"dest_set": {
|
||||
"slot": 1,
|
||||
"well": [
|
||||
"A4",
|
||||
"A7",
|
||||
"A10"
|
||||
],
|
||||
"labware": "rep 1"
|
||||
},
|
||||
"Liquid_2": {
|
||||
"slot": 2,
|
||||
"well": [
|
||||
"A3",
|
||||
"A5",
|
||||
"A8"
|
||||
],
|
||||
"labware": "rep 2"
|
||||
},
|
||||
"Liquid_5": {
|
||||
"slot": 2,
|
||||
"well": [
|
||||
"A3",
|
||||
"A5",
|
||||
"A8"
|
||||
],
|
||||
"labware": "rep 2"
|
||||
},
|
||||
"dest_set_2": {
|
||||
"slot": 2,
|
||||
"well": [
|
||||
"A3",
|
||||
"A5",
|
||||
"A8"
|
||||
],
|
||||
"labware": "rep 2"
|
||||
},
|
||||
"Liquid_3": {
|
||||
"slot": 3,
|
||||
"well": [
|
||||
"A4",
|
||||
"A6",
|
||||
"A10"
|
||||
],
|
||||
"labware": "rep 3"
|
||||
},
|
||||
"Liquid_6": {
|
||||
"slot": 3,
|
||||
"well": [
|
||||
"A4",
|
||||
"A6",
|
||||
"A10"
|
||||
],
|
||||
"labware": "rep 3"
|
||||
},
|
||||
"dest_set_3": {
|
||||
"slot": 3,
|
||||
"well": [
|
||||
"A4",
|
||||
"A6",
|
||||
"A10"
|
||||
],
|
||||
"labware": "rep 3"
|
||||
},
|
||||
"cell_lines": {
|
||||
"slot": 4,
|
||||
"well": [
|
||||
"A1",
|
||||
"A3",
|
||||
"A5"
|
||||
],
|
||||
"labware": "DRUG + YOYO-MEDIA"
|
||||
},
|
||||
"cell_lines_2": {
|
||||
"slot": 4,
|
||||
"well": [
|
||||
"A1",
|
||||
"A3",
|
||||
"A5"
|
||||
],
|
||||
"labware": "DRUG + YOYO-MEDIA"
|
||||
},
|
||||
"cell_lines_3": {
|
||||
"slot": 4,
|
||||
"well": [
|
||||
"A1",
|
||||
"A3",
|
||||
"A5"
|
||||
],
|
||||
"labware": "DRUG + YOYO-MEDIA"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1 +1 @@
|
||||
__version__ = "0.10.12"
|
||||
__version__ = "0.10.19"
|
||||
|
||||
6
unilabos/__main__.py
Normal file
@@ -0,0 +1,6 @@
|
||||
"""Entry point for `python -m unilabos`."""
|
||||
|
||||
from unilabos.app.main import main
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -1,6 +1,6 @@
|
||||
import threading
|
||||
|
||||
from unilabos.ros.nodes.resource_tracker import ResourceTreeSet
|
||||
from unilabos.resources.resource_tracker import ResourceTreeSet
|
||||
from unilabos.utils import logger
|
||||
|
||||
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
import argparse
|
||||
import asyncio
|
||||
import os
|
||||
import platform
|
||||
import shutil
|
||||
import signal
|
||||
import subprocess
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
from typing import Dict, Any, List
|
||||
|
||||
import networkx as nx
|
||||
import yaml
|
||||
|
||||
@@ -17,9 +18,92 @@ unilabos_dir = os.path.dirname(os.path.dirname(current_dir))
|
||||
if unilabos_dir not in sys.path:
|
||||
sys.path.append(unilabos_dir)
|
||||
|
||||
from unilabos.app.utils import cleanup_for_restart
|
||||
from unilabos.utils.banner_print import print_status, print_unilab_banner
|
||||
from unilabos.config.config import load_config, BasicConfig, HTTPConfig
|
||||
|
||||
# Global restart flags (used by ws_client and web/server)
|
||||
_restart_requested: bool = False
|
||||
_restart_reason: str = ""
|
||||
|
||||
RESTART_EXIT_CODE = 42
|
||||
|
||||
|
||||
def _build_child_argv():
|
||||
"""Build sys.argv for child process, stripping supervisor-only arguments."""
|
||||
result = []
|
||||
skip_next = False
|
||||
for arg in sys.argv:
|
||||
if skip_next:
|
||||
skip_next = False
|
||||
continue
|
||||
if arg in ("--restart_mode", "--restart-mode"):
|
||||
continue
|
||||
if arg in ("--auto_restart_count", "--auto-restart-count"):
|
||||
skip_next = True
|
||||
continue
|
||||
if arg.startswith("--auto_restart_count=") or arg.startswith("--auto-restart-count="):
|
||||
continue
|
||||
result.append(arg)
|
||||
return result
|
||||
|
||||
|
||||
def _run_as_supervisor(max_restarts: int):
|
||||
"""
|
||||
Supervisor process that spawns and monitors child processes.
|
||||
|
||||
Similar to Uvicorn's --reload: the supervisor itself does no heavy work,
|
||||
it only launches the real process as a child and restarts it when the child
|
||||
exits with RESTART_EXIT_CODE.
|
||||
"""
|
||||
child_argv = [sys.executable] + _build_child_argv()
|
||||
restart_count = 0
|
||||
|
||||
print_status(
|
||||
f"[Supervisor] Restart mode enabled (max restarts: {max_restarts}), "
|
||||
f"child command: {' '.join(child_argv)}",
|
||||
"info",
|
||||
)
|
||||
|
||||
while True:
|
||||
print_status(
|
||||
f"[Supervisor] Launching process (restart {restart_count}/{max_restarts})...",
|
||||
"info",
|
||||
)
|
||||
|
||||
try:
|
||||
process = subprocess.Popen(child_argv)
|
||||
exit_code = process.wait()
|
||||
except KeyboardInterrupt:
|
||||
print_status("[Supervisor] Interrupted, terminating child process...", "info")
|
||||
process.terminate()
|
||||
try:
|
||||
process.wait(timeout=10)
|
||||
except subprocess.TimeoutExpired:
|
||||
process.kill()
|
||||
process.wait()
|
||||
sys.exit(1)
|
||||
|
||||
if exit_code == RESTART_EXIT_CODE:
|
||||
restart_count += 1
|
||||
if restart_count > max_restarts:
|
||||
print_status(
|
||||
f"[Supervisor] Maximum restart count ({max_restarts}) reached, exiting",
|
||||
"warning",
|
||||
)
|
||||
sys.exit(1)
|
||||
print_status(
|
||||
f"[Supervisor] Child requested restart ({restart_count}/{max_restarts}), restarting in 2s...",
|
||||
"info",
|
||||
)
|
||||
time.sleep(2)
|
||||
else:
|
||||
if exit_code != 0:
|
||||
print_status(f"[Supervisor] Child exited with code {exit_code}", "warning")
|
||||
else:
|
||||
print_status("[Supervisor] Child exited normally", "info")
|
||||
sys.exit(exit_code)
|
||||
|
||||
|
||||
def load_config_from_file(config_path):
|
||||
if config_path is None:
|
||||
@@ -61,6 +145,13 @@ def parse_args():
|
||||
action="append",
|
||||
help="Path to the registry directory",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--devices",
|
||||
type=str,
|
||||
default=None,
|
||||
action="append",
|
||||
help="Path to Python code directory for AST-based device/resource scanning",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--working_dir",
|
||||
type=str,
|
||||
@@ -150,11 +241,52 @@ def parse_args():
|
||||
action="store_true",
|
||||
help="Skip environment dependency check on startup",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--check_mode",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Run in check mode for CI: validates registry imports and ensures no file changes",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--complete_registry",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Complete registry information",
|
||||
help="Complete and rewrite YAML registry files using AST analysis results",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no_update_feedback",
|
||||
action="store_true",
|
||||
help="Disable sending update feedback to server",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--test_mode",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Test mode: all actions simulate execution and return mock results without running real hardware",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--external_devices_only",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Only load external device packages (--devices), skip built-in unilabos/devices/ scanning and YAML device registry",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--extra_resource",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Load extra lab_ prefixed labware resources (529 auto-generated definitions from lab_resources.py)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--restart_mode",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Enable supervisor mode: automatically restart the process when triggered via WebSocket",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--auto_restart_count",
|
||||
type=int,
|
||||
default=500,
|
||||
help="Maximum number of automatic restarts in restart mode (default: 500)",
|
||||
)
|
||||
# workflow upload subcommand
|
||||
workflow_parser = subparsers.add_parser(
|
||||
@@ -189,6 +321,12 @@ def parse_args():
|
||||
default=False,
|
||||
help="Whether to publish the workflow (default: False)",
|
||||
)
|
||||
workflow_parser.add_argument(
|
||||
"--description",
|
||||
type=str,
|
||||
default="",
|
||||
help="Workflow description, used when publishing the workflow",
|
||||
)
|
||||
return parser
|
||||
|
||||
|
||||
@@ -200,61 +338,102 @@ def main():
|
||||
args = parser.parse_args()
|
||||
args_dict = vars(args)
|
||||
|
||||
# Supervisor mode: spawn child processes and monitor for restart
|
||||
if args_dict.get("restart_mode", False):
|
||||
_run_as_supervisor(args_dict.get("auto_restart_count", 5))
|
||||
return
|
||||
|
||||
# 环境检查 - 检查并自动安装必需的包 (可选)
|
||||
if not args_dict.get("skip_env_check", False):
|
||||
from unilabos.utils.environment_check import check_environment
|
||||
skip_env_check = args_dict.get("skip_env_check", False)
|
||||
check_mode = args_dict.get("check_mode", False)
|
||||
|
||||
if not skip_env_check:
|
||||
from unilabos.utils.environment_check import check_environment, check_device_package_requirements
|
||||
|
||||
if not check_environment(auto_install=True):
|
||||
print_status("环境检查失败,程序退出", "error")
|
||||
os._exit(1)
|
||||
|
||||
# 第一次设备包依赖检查:build_registry 之前,确保 import map 可用
|
||||
devices_dirs_for_req = args_dict.get("devices", None)
|
||||
if devices_dirs_for_req:
|
||||
if not check_device_package_requirements(devices_dirs_for_req):
|
||||
print_status("设备包依赖检查失败,程序退出", "error")
|
||||
os._exit(1)
|
||||
else:
|
||||
print_status("跳过环境依赖检查", "warning")
|
||||
|
||||
# 加载配置文件,优先加载config,然后从env读取
|
||||
config_path = args_dict.get("config")
|
||||
if os.getcwd().endswith("unilabos_data"):
|
||||
working_dir = os.path.abspath(os.getcwd())
|
||||
else:
|
||||
working_dir = os.path.abspath(os.path.join(os.getcwd(), "unilabos_data"))
|
||||
|
||||
if args_dict.get("working_dir"):
|
||||
working_dir = args_dict.get("working_dir", "")
|
||||
if config_path and not os.path.exists(config_path):
|
||||
config_path = os.path.join(working_dir, "local_config.py")
|
||||
if not os.path.exists(config_path):
|
||||
print_status(
|
||||
f"当前工作目录 {working_dir} 未找到local_config.py,请通过 --config 传入 local_config.py 文件路径",
|
||||
"error",
|
||||
)
|
||||
os._exit(1)
|
||||
# === 解析 working_dir ===
|
||||
# 规则1: working_dir 传入 → 检测 unilabos_data 子目录,已是则不修改
|
||||
# 规则2: 仅 config_path 传入 → 用其父目录作为 working_dir
|
||||
# 规则4: 两者都传入 → 各用各的,但 working_dir 仍做 unilabos_data 子目录检测
|
||||
raw_working_dir = args_dict.get("working_dir")
|
||||
if raw_working_dir:
|
||||
working_dir = os.path.abspath(raw_working_dir)
|
||||
elif config_path and os.path.exists(config_path):
|
||||
working_dir = os.path.dirname(config_path)
|
||||
elif os.path.exists(working_dir) and os.path.exists(os.path.join(working_dir, "local_config.py")):
|
||||
config_path = os.path.join(working_dir, "local_config.py")
|
||||
elif not config_path and (
|
||||
not os.path.exists(working_dir) or not os.path.exists(os.path.join(working_dir, "local_config.py"))
|
||||
):
|
||||
print_status(f"未指定config路径,可通过 --config 传入 local_config.py 文件路径", "info")
|
||||
print_status(f"您是否为第一次使用?并将当前路径 {working_dir} 作为工作目录? (Y/n)", "info")
|
||||
if input() != "n":
|
||||
os.makedirs(working_dir, exist_ok=True)
|
||||
config_path = os.path.join(working_dir, "local_config.py")
|
||||
shutil.copy(
|
||||
os.path.join(os.path.dirname(os.path.dirname(__file__)), "config", "example_config.py"), config_path
|
||||
)
|
||||
print_status(f"已创建 local_config.py 路径: {config_path}", "info")
|
||||
working_dir = os.path.dirname(os.path.abspath(config_path))
|
||||
else:
|
||||
working_dir = os.path.abspath(os.getcwd())
|
||||
|
||||
# unilabos_data 子目录自动检测
|
||||
if os.path.basename(working_dir) != "unilabos_data":
|
||||
unilabos_data_sub = os.path.join(working_dir, "unilabos_data")
|
||||
if os.path.isdir(unilabos_data_sub):
|
||||
working_dir = unilabos_data_sub
|
||||
elif not raw_working_dir and not (config_path and os.path.exists(config_path)):
|
||||
# 未显式指定路径,默认使用 cwd/unilabos_data
|
||||
working_dir = os.path.abspath(os.path.join(os.getcwd(), "unilabos_data"))
|
||||
|
||||
# === 解析 config_path ===
|
||||
if config_path and not os.path.exists(config_path):
|
||||
# config_path 传入但不存在,尝试在 working_dir 中查找
|
||||
candidate = os.path.join(working_dir, "local_config.py")
|
||||
if os.path.exists(candidate):
|
||||
config_path = candidate
|
||||
print_status(f"在工作目录中发现配置文件: {config_path}", "info")
|
||||
else:
|
||||
print_status(
|
||||
f"配置文件 {config_path} 不存在,工作目录 {working_dir} 中也未找到 local_config.py,"
|
||||
f"请通过 --config 传入 local_config.py 文件路径",
|
||||
"error",
|
||||
)
|
||||
os._exit(1)
|
||||
# 加载配置文件
|
||||
elif not config_path:
|
||||
# 规则3: 未传入 config_path,尝试 working_dir/local_config.py
|
||||
candidate = os.path.join(working_dir, "local_config.py")
|
||||
if os.path.exists(candidate):
|
||||
config_path = candidate
|
||||
print_status(f"发现本地配置文件: {config_path}", "info")
|
||||
else:
|
||||
print_status(f"未指定config路径,可通过 --config 传入 local_config.py 文件路径", "info")
|
||||
print_status(f"您是否为第一次使用?并将当前路径 {working_dir} 作为工作目录? (Y/n)", "info")
|
||||
if check_mode or input() != "n":
|
||||
os.makedirs(working_dir, exist_ok=True)
|
||||
config_path = os.path.join(working_dir, "local_config.py")
|
||||
shutil.copy(
|
||||
os.path.join(os.path.dirname(os.path.dirname(__file__)), "config", "example_config.py"),
|
||||
config_path,
|
||||
)
|
||||
print_status(f"已创建 local_config.py 路径: {config_path}", "info")
|
||||
else:
|
||||
os._exit(1)
|
||||
|
||||
# 加载配置文件 (check_mode 跳过)
|
||||
print_status(f"当前工作目录为 {working_dir}", "info")
|
||||
load_config_from_file(config_path)
|
||||
if not check_mode:
|
||||
load_config_from_file(config_path)
|
||||
|
||||
# 根据配置重新设置日志级别
|
||||
from unilabos.utils.log import configure_logger, logger
|
||||
|
||||
if hasattr(BasicConfig, "log_level"):
|
||||
logger.info(f"Log level set to '{BasicConfig.log_level}' from config file.")
|
||||
configure_logger(loglevel=BasicConfig.log_level, working_dir=working_dir)
|
||||
file_path = configure_logger(loglevel=BasicConfig.log_level, working_dir=working_dir)
|
||||
if file_path is not None:
|
||||
logger.info(f"[LOG_FILE] {file_path}")
|
||||
|
||||
if args.addr != parser.get_default("addr"):
|
||||
if args.addr == "test":
|
||||
@@ -297,41 +476,67 @@ def main():
|
||||
BasicConfig.is_host_mode = not args_dict.get("is_slave", False)
|
||||
BasicConfig.slave_no_host = args_dict.get("slave_no_host", False)
|
||||
BasicConfig.upload_registry = args_dict.get("upload_registry", False)
|
||||
BasicConfig.no_update_feedback = args_dict.get("no_update_feedback", False)
|
||||
BasicConfig.test_mode = args_dict.get("test_mode", False)
|
||||
if BasicConfig.test_mode:
|
||||
print_status("启用测试模式:所有动作将模拟执行,不调用真实硬件", "warning")
|
||||
BasicConfig.extra_resource = args_dict.get("extra_resource", False)
|
||||
if BasicConfig.extra_resource:
|
||||
print_status("启用额外资源加载:将加载lab_开头的labware资源定义", "info")
|
||||
BasicConfig.communication_protocol = "websocket"
|
||||
machine_name = os.popen("hostname").read().strip()
|
||||
machine_name = platform.node()
|
||||
machine_name = "".join([c if c.isalnum() or c == "_" else "_" for c in machine_name])
|
||||
BasicConfig.machine_name = machine_name
|
||||
BasicConfig.vis_2d_enable = args_dict["2d_vis"]
|
||||
BasicConfig.check_mode = check_mode
|
||||
|
||||
from unilabos.resources.graphio import (
|
||||
read_node_link_json,
|
||||
read_graphml,
|
||||
dict_from_graph,
|
||||
)
|
||||
from unilabos.app.communication import get_communication_client
|
||||
from unilabos.registry.registry import build_registry
|
||||
from unilabos.app.backend import start_backend
|
||||
from unilabos.app.web import http_client
|
||||
from unilabos.app.web import start_server
|
||||
from unilabos.app.register import register_devices_and_resources
|
||||
from unilabos.resources.graphio import modify_to_backend_format
|
||||
from unilabos.ros.nodes.resource_tracker import ResourceTreeSet, ResourceDict
|
||||
|
||||
# 显示启动横幅
|
||||
print_unilab_banner(args_dict)
|
||||
|
||||
# 注册表
|
||||
# Step 0: AST 分析优先 + YAML 注册表加载
|
||||
# check_mode 和 upload_registry 都会执行实际 import 验证
|
||||
devices_dirs = args_dict.get("devices", None)
|
||||
complete_registry = args_dict.get("complete_registry", False) or check_mode
|
||||
external_only = args_dict.get("external_devices_only", False)
|
||||
lab_registry = build_registry(
|
||||
args_dict["registry_path"], args_dict.get("complete_registry", False), BasicConfig.upload_registry
|
||||
registry_paths=args_dict["registry_path"],
|
||||
devices_dirs=devices_dirs,
|
||||
upload_registry=BasicConfig.upload_registry,
|
||||
check_mode=check_mode,
|
||||
complete_registry=complete_registry,
|
||||
external_only=external_only,
|
||||
)
|
||||
|
||||
# Check mode: 注册表验证完成后直接退出
|
||||
if check_mode:
|
||||
device_count = len(lab_registry.device_type_registry)
|
||||
resource_count = len(lab_registry.resource_type_registry)
|
||||
print_status(f"Check mode: 注册表验证完成 ({device_count} 设备, {resource_count} 资源),退出", "info")
|
||||
os._exit(0)
|
||||
|
||||
# 以下导入依赖 ROS2 环境,check_mode 已退出不需要
|
||||
from unilabos.resources.graphio import (
|
||||
read_node_link_json,
|
||||
read_graphml,
|
||||
dict_from_graph,
|
||||
modify_to_backend_format,
|
||||
)
|
||||
from unilabos.app.communication import get_communication_client
|
||||
from unilabos.app.backend import start_backend
|
||||
from unilabos.app.web import http_client
|
||||
from unilabos.app.web import start_server
|
||||
from unilabos.app.register import register_devices_and_resources
|
||||
from unilabos.resources.resource_tracker import ResourceTreeSet, ResourceDict
|
||||
|
||||
# Step 1: 上传全部注册表到服务端,同步保存到 unilabos_data
|
||||
if BasicConfig.upload_registry:
|
||||
# 设备注册到服务端 - 需要 ak 和 sk
|
||||
if BasicConfig.ak and BasicConfig.sk:
|
||||
print_status("开始注册设备到服务端...", "info")
|
||||
# print_status("开始注册设备到服务端...", "info")
|
||||
try:
|
||||
register_devices_and_resources(lab_registry)
|
||||
print_status("设备注册完成", "info")
|
||||
# print_status("设备注册完成", "info")
|
||||
except Exception as e:
|
||||
print_status(f"设备注册失败: {e}", "error")
|
||||
else:
|
||||
@@ -348,8 +553,13 @@ def main():
|
||||
os._exit(0)
|
||||
|
||||
if not BasicConfig.ak or not BasicConfig.sk:
|
||||
print_status("后续运行必须拥有一个实验室,请前往 https://uni-lab.bohrium.com 注册实验室!", "warning")
|
||||
os._exit(1)
|
||||
if BasicConfig.test_mode:
|
||||
print_status("测试模式:跳过 ak/sk 检查,使用占位凭据", "warning")
|
||||
BasicConfig.ak = BasicConfig.ak or "test_ak"
|
||||
BasicConfig.sk = BasicConfig.sk or "test_sk"
|
||||
else:
|
||||
print_status("后续运行必须拥有一个实验室,请前往 https://uni-lab.bohrium.com 注册实验室!", "warning")
|
||||
os._exit(1)
|
||||
graph: nx.Graph
|
||||
resource_tree_set: ResourceTreeSet
|
||||
resource_links: List[Dict[str, Any]]
|
||||
@@ -416,12 +626,16 @@ def main():
|
||||
continue
|
||||
|
||||
# 如果从远端获取了物料信息,则与本地物料进行同步
|
||||
if request_startup_json and "nodes" in request_startup_json:
|
||||
if file_path is not None and request_startup_json and "nodes" in request_startup_json:
|
||||
print_status("开始同步远端物料到本地...", "info")
|
||||
remote_tree_set = ResourceTreeSet.from_raw_list(request_startup_json["nodes"])
|
||||
remote_tree_set = ResourceTreeSet.from_raw_dict_list(request_startup_json["nodes"])
|
||||
resource_tree_set.merge_remote_resources(remote_tree_set)
|
||||
print_status("远端物料同步完成", "info")
|
||||
|
||||
# 第二次设备包依赖检查:云端物料同步后,community 包可能引入新的 requirements
|
||||
# TODO: 当 community device package 功能上线后,在这里调用
|
||||
# install_requirements_txt(community_pkg_path / "requirements.txt", label="community.xxx")
|
||||
|
||||
# 使用 ResourceTreeSet 代替 list
|
||||
args_dict["resources_config"] = resource_tree_set
|
||||
args_dict["devices_config"] = resource_tree_set
|
||||
@@ -497,16 +711,26 @@ def main():
|
||||
time.sleep(1)
|
||||
else:
|
||||
start_backend(**args_dict)
|
||||
start_server(
|
||||
restart_requested = start_server(
|
||||
open_browser=not args_dict["disable_browser"],
|
||||
port=BasicConfig.port,
|
||||
)
|
||||
if restart_requested:
|
||||
print_status("[Main] Restart requested, cleaning up...", "info")
|
||||
cleanup_for_restart()
|
||||
return
|
||||
else:
|
||||
start_backend(**args_dict)
|
||||
start_server(
|
||||
|
||||
# 启动服务器(默认支持WebSocket触发重启)
|
||||
restart_requested = start_server(
|
||||
open_browser=not args_dict["disable_browser"],
|
||||
port=BasicConfig.port,
|
||||
)
|
||||
if restart_requested:
|
||||
print_status("[Main] Restart requested, cleaning up...", "info")
|
||||
cleanup_for_restart()
|
||||
os._exit(RESTART_EXIT_CODE)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -54,6 +54,7 @@ class JobAddReq(BaseModel):
|
||||
action_type: str = Field(
|
||||
examples=["unilabos_msgs.action._str_single_input.StrSingleInput"], description="action type", default=""
|
||||
)
|
||||
sample_material: dict = Field(examples=[{"string": "string"}], description="sample uuid to material uuid")
|
||||
action_args: dict = Field(examples=[{"string": "string"}], description="action arguments", default_factory=dict)
|
||||
task_id: str = Field(examples=["task_id"], description="task uuid (auto-generated if empty)", default="")
|
||||
job_id: str = Field(examples=["job_id"], description="goal uuid (auto-generated if empty)", default="")
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import json
|
||||
import time
|
||||
from typing import Optional, Tuple, Dict, Any
|
||||
from typing import Any, Dict, Optional, Tuple
|
||||
|
||||
from unilabos.utils.log import logger
|
||||
from unilabos.utils.type_check import TypeEncoder
|
||||
from unilabos.utils.tools import normalize_json as _normalize_device
|
||||
|
||||
|
||||
def register_devices_and_resources(lab_registry, gather_only=False) -> Optional[Tuple[Dict[str, Any], Dict[str, Any]]]:
|
||||
@@ -11,50 +10,63 @@ def register_devices_and_resources(lab_registry, gather_only=False) -> Optional[
|
||||
注册设备和资源到服务器(仅支持HTTP)
|
||||
"""
|
||||
|
||||
# 注册资源信息 - 使用HTTP方式
|
||||
from unilabos.app.web.client import http_client
|
||||
|
||||
logger.info("[UniLab Register] 开始注册设备和资源...")
|
||||
|
||||
# 注册设备信息
|
||||
devices_to_register = {}
|
||||
for device_info in lab_registry.obtain_registry_device_info():
|
||||
devices_to_register[device_info["id"]] = json.loads(
|
||||
json.dumps(device_info, ensure_ascii=False, cls=TypeEncoder)
|
||||
)
|
||||
logger.debug(f"[UniLab Register] 收集设备: {device_info['id']}")
|
||||
devices_to_register[device_info["id"]] = _normalize_device(device_info)
|
||||
logger.trace(f"[UniLab Register] 收集设备: {device_info['id']}")
|
||||
|
||||
resources_to_register = {}
|
||||
for resource_info in lab_registry.obtain_registry_resource_info():
|
||||
resources_to_register[resource_info["id"]] = resource_info
|
||||
logger.debug(f"[UniLab Register] 收集资源: {resource_info['id']}")
|
||||
logger.trace(f"[UniLab Register] 收集资源: {resource_info['id']}")
|
||||
|
||||
if gather_only:
|
||||
return devices_to_register, resources_to_register
|
||||
# 注册设备
|
||||
|
||||
if devices_to_register:
|
||||
try:
|
||||
start_time = time.time()
|
||||
response = http_client.resource_registry({"resources": list(devices_to_register.values())})
|
||||
response = http_client.resource_registry(
|
||||
{"resources": list(devices_to_register.values())},
|
||||
tag="device_registry",
|
||||
)
|
||||
cost_time = time.time() - start_time
|
||||
if response.status_code in [200, 201]:
|
||||
logger.info(f"[UniLab Register] 成功注册 {len(devices_to_register)} 个设备 {cost_time}ms")
|
||||
res_data = response.json() if response.status_code == 200 else {}
|
||||
skipped = res_data.get("data", {}).get("skipped", False)
|
||||
if skipped:
|
||||
logger.info(
|
||||
f"[UniLab Register] 设备注册跳过(内容未变化)"
|
||||
f" {len(devices_to_register)} 个 {cost_time:.3f}s"
|
||||
)
|
||||
elif response.status_code in [200, 201]:
|
||||
logger.info(f"[UniLab Register] 成功注册 {len(devices_to_register)} 个设备 {cost_time:.3f}s")
|
||||
else:
|
||||
logger.error(f"[UniLab Register] 设备注册失败: {response.status_code}, {response.text} {cost_time}ms")
|
||||
logger.error(f"[UniLab Register] 设备注册失败: {response.status_code}, {response.text} {cost_time:.3f}s")
|
||||
except Exception as e:
|
||||
logger.error(f"[UniLab Register] 设备注册异常: {e}")
|
||||
|
||||
# 注册资源
|
||||
if resources_to_register:
|
||||
try:
|
||||
start_time = time.time()
|
||||
response = http_client.resource_registry({"resources": list(resources_to_register.values())})
|
||||
response = http_client.resource_registry(
|
||||
{"resources": list(resources_to_register.values())},
|
||||
tag="resource_registry",
|
||||
)
|
||||
cost_time = time.time() - start_time
|
||||
if response.status_code in [200, 201]:
|
||||
logger.info(f"[UniLab Register] 成功注册 {len(resources_to_register)} 个资源 {cost_time}ms")
|
||||
res_data = response.json() if response.status_code == 200 else {}
|
||||
skipped = res_data.get("data", {}).get("skipped", False)
|
||||
if skipped:
|
||||
logger.info(
|
||||
f"[UniLab Register] 资源注册跳过(内容未变化)"
|
||||
f" {len(resources_to_register)} 个 {cost_time:.3f}s"
|
||||
)
|
||||
elif response.status_code in [200, 201]:
|
||||
logger.info(f"[UniLab Register] 成功注册 {len(resources_to_register)} 个资源 {cost_time:.3f}s")
|
||||
else:
|
||||
logger.error(f"[UniLab Register] 资源注册失败: {response.status_code}, {response.text} {cost_time}ms")
|
||||
logger.error(f"[UniLab Register] 资源注册失败: {response.status_code}, {response.text} {cost_time:.3f}s")
|
||||
except Exception as e:
|
||||
logger.error(f"[UniLab Register] 资源注册异常: {e}")
|
||||
|
||||
logger.info("[UniLab Register] 设备和资源注册完成.")
|
||||
|
||||
176
unilabos/app/utils.py
Normal file
@@ -0,0 +1,176 @@
|
||||
"""
|
||||
UniLabOS 应用工具函数
|
||||
|
||||
提供清理、重启等工具函数
|
||||
"""
|
||||
|
||||
import glob
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
|
||||
def patch_rclpy_dll_windows():
|
||||
"""在 Windows + conda 环境下为 rclpy 打 DLL 加载补丁"""
|
||||
if sys.platform != "win32" or not os.environ.get("CONDA_PREFIX"):
|
||||
return
|
||||
try:
|
||||
import rclpy
|
||||
|
||||
return
|
||||
except ImportError as e:
|
||||
if not str(e).startswith("DLL load failed"):
|
||||
return
|
||||
cp = os.environ["CONDA_PREFIX"]
|
||||
impl = os.path.join(cp, "Lib", "site-packages", "rclpy", "impl", "implementation_singleton.py")
|
||||
pyd = glob.glob(os.path.join(cp, "Lib", "site-packages", "rclpy", "_rclpy_pybind11*.pyd"))
|
||||
if not os.path.exists(impl) or not pyd:
|
||||
return
|
||||
with open(impl, "r", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
lib_bin = os.path.join(cp, "Library", "bin").replace("\\", "/")
|
||||
patch = f'# UniLabOS DLL Patch\nimport os,ctypes\nos.add_dll_directory("{lib_bin}") if hasattr(os,"add_dll_directory") else None\ntry: ctypes.CDLL("{pyd[0].replace(chr(92),"/")}")\nexcept: pass\n# End Patch\n'
|
||||
shutil.copy2(impl, impl + ".bak")
|
||||
with open(impl, "w", encoding="utf-8") as f:
|
||||
f.write(patch + content)
|
||||
|
||||
|
||||
patch_rclpy_dll_windows()
|
||||
|
||||
import gc
|
||||
import threading
|
||||
import time
|
||||
|
||||
from unilabos.utils.banner_print import print_status
|
||||
|
||||
|
||||
def cleanup_for_restart() -> bool:
|
||||
"""
|
||||
Clean up all resources for restart without exiting the process.
|
||||
|
||||
This function prepares the system for re-initialization by:
|
||||
1. Stopping all communication clients
|
||||
2. Destroying ROS nodes
|
||||
3. Resetting singletons
|
||||
4. Waiting for threads to finish
|
||||
|
||||
Returns:
|
||||
bool: True if cleanup was successful, False otherwise
|
||||
"""
|
||||
print_status("[Restart] Starting cleanup for restart...", "info")
|
||||
|
||||
# Step 1: Stop WebSocket communication client
|
||||
print_status("[Restart] Step 1: Stopping WebSocket client...", "info")
|
||||
try:
|
||||
from unilabos.app.communication import get_communication_client
|
||||
|
||||
comm_client = get_communication_client()
|
||||
if comm_client is not None:
|
||||
comm_client.stop()
|
||||
print_status("[Restart] WebSocket client stopped", "info")
|
||||
except Exception as e:
|
||||
print_status(f"[Restart] Error stopping WebSocket: {e}", "warning")
|
||||
|
||||
# Step 2: Get HostNode and cleanup ROS
|
||||
print_status("[Restart] Step 2: Cleaning up ROS nodes...", "info")
|
||||
try:
|
||||
from unilabos.ros.nodes.presets.host_node import HostNode
|
||||
import rclpy
|
||||
from rclpy.timer import Timer
|
||||
|
||||
host_instance = HostNode.get_instance(timeout=5)
|
||||
if host_instance is not None:
|
||||
print_status(f"[Restart] Found HostNode: {host_instance.device_id}", "info")
|
||||
|
||||
# Gracefully shutdown background threads
|
||||
print_status("[Restart] Shutting down background threads...", "info")
|
||||
HostNode.shutdown_background_threads(timeout=5.0)
|
||||
print_status("[Restart] Background threads shutdown complete", "info")
|
||||
|
||||
# Stop discovery timer
|
||||
if hasattr(host_instance, "_discovery_timer") and isinstance(host_instance._discovery_timer, Timer):
|
||||
host_instance._discovery_timer.cancel()
|
||||
print_status("[Restart] Discovery timer cancelled", "info")
|
||||
|
||||
# Destroy device nodes
|
||||
device_count = len(host_instance.devices_instances)
|
||||
print_status(f"[Restart] Destroying {device_count} device instances...", "info")
|
||||
for device_id, device_node in list(host_instance.devices_instances.items()):
|
||||
try:
|
||||
if hasattr(device_node, "ros_node_instance") and device_node.ros_node_instance is not None:
|
||||
device_node.ros_node_instance.destroy_node()
|
||||
print_status(f"[Restart] Device {device_id} destroyed", "info")
|
||||
except Exception as e:
|
||||
print_status(f"[Restart] Error destroying device {device_id}: {e}", "warning")
|
||||
|
||||
# Clear devices instances
|
||||
host_instance.devices_instances.clear()
|
||||
host_instance.devices_names.clear()
|
||||
|
||||
# Destroy host node
|
||||
try:
|
||||
host_instance.destroy_node()
|
||||
print_status("[Restart] HostNode destroyed", "info")
|
||||
except Exception as e:
|
||||
print_status(f"[Restart] Error destroying HostNode: {e}", "warning")
|
||||
|
||||
# Reset HostNode state
|
||||
HostNode.reset_state()
|
||||
print_status("[Restart] HostNode state reset", "info")
|
||||
|
||||
# Shutdown executor first (to stop executor.spin() gracefully)
|
||||
if hasattr(rclpy, "__executor") and rclpy.__executor is not None:
|
||||
try:
|
||||
rclpy.__executor.shutdown()
|
||||
rclpy.__executor = None # Clear for restart
|
||||
print_status("[Restart] ROS executor shutdown complete", "info")
|
||||
except Exception as e:
|
||||
print_status(f"[Restart] Error shutting down executor: {e}", "warning")
|
||||
|
||||
# Shutdown rclpy
|
||||
if rclpy.ok():
|
||||
rclpy.shutdown()
|
||||
print_status("[Restart] rclpy shutdown complete", "info")
|
||||
|
||||
except ImportError as e:
|
||||
print_status(f"[Restart] ROS modules not available: {e}", "warning")
|
||||
except Exception as e:
|
||||
print_status(f"[Restart] Error in ROS cleanup: {e}", "warning")
|
||||
return False
|
||||
|
||||
# Step 3: Reset communication client singleton
|
||||
print_status("[Restart] Step 3: Resetting singletons...", "info")
|
||||
try:
|
||||
from unilabos.app import communication
|
||||
|
||||
if hasattr(communication, "_communication_client"):
|
||||
communication._communication_client = None
|
||||
print_status("[Restart] Communication client singleton reset", "info")
|
||||
except Exception as e:
|
||||
print_status(f"[Restart] Error resetting communication singleton: {e}", "warning")
|
||||
|
||||
# Step 4: Wait for threads to finish
|
||||
print_status("[Restart] Step 4: Waiting for threads to finish...", "info")
|
||||
time.sleep(3) # Give threads time to finish
|
||||
|
||||
# Check remaining threads
|
||||
remaining_threads = []
|
||||
for t in threading.enumerate():
|
||||
if t.name != "MainThread" and t.is_alive():
|
||||
remaining_threads.append(t.name)
|
||||
|
||||
if remaining_threads:
|
||||
print_status(
|
||||
f"[Restart] Warning: {len(remaining_threads)} threads still running: {remaining_threads}", "warning"
|
||||
)
|
||||
else:
|
||||
print_status("[Restart] All threads stopped", "info")
|
||||
|
||||
# Step 5: Force garbage collection
|
||||
print_status("[Restart] Step 5: Running garbage collection...", "info")
|
||||
gc.collect()
|
||||
gc.collect() # Run twice for weak references
|
||||
print_status("[Restart] Garbage collection complete", "info")
|
||||
|
||||
print_status("[Restart] Cleanup complete. Ready for re-initialization.", "info")
|
||||
return True
|
||||
@@ -1052,7 +1052,7 @@ async def handle_file_import(websocket: WebSocket, request_data: dict):
|
||||
"result": {},
|
||||
"schema": lab_registry._generate_unilab_json_command_schema(v["args"], k),
|
||||
"goal_default": {i["name"]: i["default"] for i in v["args"]},
|
||||
"handles": [],
|
||||
"handles": {},
|
||||
}
|
||||
# 不生成已配置action的动作
|
||||
for k, v in enhanced_info["action_methods"].items()
|
||||
@@ -1340,5 +1340,5 @@ def setup_api_routes(app):
|
||||
# 启动广播任务
|
||||
@app.on_event("startup")
|
||||
async def startup_event():
|
||||
asyncio.create_task(broadcast_device_status())
|
||||
asyncio.create_task(broadcast_status_page_data())
|
||||
asyncio.create_task(broadcast_device_status(), name="web-api-startup-device")
|
||||
asyncio.create_task(broadcast_status_page_data(), name="web-api-startup-status")
|
||||
|
||||
@@ -3,15 +3,15 @@ HTTP客户端模块
|
||||
|
||||
提供与远程服务器通信的客户端功能,只有host需要用
|
||||
"""
|
||||
|
||||
import gzip
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
from threading import Thread
|
||||
from typing import List, Dict, Any, Optional
|
||||
|
||||
from unilabos.utils.tools import fast_dumps as _fast_dumps, fast_dumps_pretty as _fast_dumps_pretty
|
||||
|
||||
import requests
|
||||
from unilabos.ros.nodes.resource_tracker import ResourceTreeSet
|
||||
from unilabos.resources.resource_tracker import ResourceTreeSet
|
||||
from unilabos.utils.log import info
|
||||
from unilabos.config.config import HTTPConfig, BasicConfig
|
||||
from unilabos.utils import logger
|
||||
@@ -282,22 +282,54 @@ class HTTPClient:
|
||||
)
|
||||
return response
|
||||
|
||||
def resource_registry(self, registry_data: Dict[str, Any] | List[Dict[str, Any]]) -> requests.Response:
|
||||
def resource_registry(
|
||||
self, registry_data: Dict[str, Any] | List[Dict[str, Any]], tag: str = "registry",
|
||||
) -> requests.Response:
|
||||
"""
|
||||
注册资源到服务器
|
||||
注册资源到服务器,同步保存请求/响应到 unilabos_data
|
||||
|
||||
Args:
|
||||
registry_data: 注册表数据,格式为 {resource_id: resource_info} / [{resource_info}]
|
||||
tag: 保存文件的标签后缀 (如 "device_registry" / "resource_registry")
|
||||
|
||||
Returns:
|
||||
Response: API响应对象
|
||||
"""
|
||||
# 序列化一次,同时用于保存和发送
|
||||
json_bytes = _fast_dumps(registry_data)
|
||||
|
||||
# 保存请求数据到 unilabos_data
|
||||
req_path = os.path.join(BasicConfig.working_dir, f"req_{tag}_upload.json")
|
||||
try:
|
||||
os.makedirs(BasicConfig.working_dir, exist_ok=True)
|
||||
with open(req_path, "wb") as f:
|
||||
f.write(_fast_dumps_pretty(registry_data))
|
||||
logger.trace(f"注册表请求数据已保存: {req_path}")
|
||||
except Exception as e:
|
||||
logger.warning(f"保存注册表请求数据失败: {e}")
|
||||
|
||||
compressed_body = gzip.compress(json_bytes)
|
||||
headers = {
|
||||
"Authorization": f"Lab {self.auth}",
|
||||
"Content-Type": "application/json",
|
||||
"Content-Encoding": "gzip",
|
||||
}
|
||||
response = requests.post(
|
||||
f"{self.remote_addr}/lab/resource",
|
||||
json=registry_data,
|
||||
headers={"Authorization": f"Lab {self.auth}"},
|
||||
data=compressed_body,
|
||||
headers=headers,
|
||||
timeout=30,
|
||||
)
|
||||
|
||||
# 保存响应数据到 unilabos_data
|
||||
res_path = os.path.join(BasicConfig.working_dir, f"res_{tag}_upload.json")
|
||||
try:
|
||||
with open(res_path, "w", encoding="utf-8") as f:
|
||||
f.write(f"{response.status_code}\n{response.text}")
|
||||
logger.trace(f"注册表响应数据已保存: {res_path}")
|
||||
except Exception as e:
|
||||
logger.warning(f"保存注册表响应数据失败: {e}")
|
||||
|
||||
if response.status_code not in [200, 201]:
|
||||
logger.error(f"注册资源失败: {response.status_code}, {response.text}")
|
||||
if response.status_code == 200:
|
||||
@@ -345,9 +377,10 @@ class HTTPClient:
|
||||
edges: List[Dict[str, Any]],
|
||||
tags: Optional[List[str]] = None,
|
||||
published: bool = False,
|
||||
description: str = "",
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
导入工作流到服务器
|
||||
导入工作流到服务器,如果 published 为 True,则额外发起发布请求
|
||||
|
||||
Args:
|
||||
name: 工作流名称(顶层)
|
||||
@@ -357,13 +390,12 @@ class HTTPClient:
|
||||
edges: 工作流边列表
|
||||
tags: 工作流标签列表,默认为空列表
|
||||
published: 是否发布工作流,默认为False
|
||||
description: 工作流描述,发布时使用
|
||||
|
||||
Returns:
|
||||
Dict: API响应数据,包含 code 和 data (uuid, name)
|
||||
"""
|
||||
# target_lab_uuid 暂时使用默认值,后续由后端根据 ak/sk 获取
|
||||
payload = {
|
||||
"target_lab_uuid": "28c38bb0-63f6-4352-b0d8-b5b8eb1766d5",
|
||||
"name": name,
|
||||
"data": {
|
||||
"workflow_uuid": workflow_uuid,
|
||||
@@ -371,7 +403,6 @@ class HTTPClient:
|
||||
"nodes": nodes,
|
||||
"edges": edges,
|
||||
"tags": tags if tags is not None else [],
|
||||
"published": published,
|
||||
},
|
||||
}
|
||||
# 保存请求到文件
|
||||
@@ -392,11 +423,51 @@ class HTTPClient:
|
||||
res = response.json()
|
||||
if "code" in res and res["code"] != 0:
|
||||
logger.error(f"导入工作流失败: {response.text}")
|
||||
return res
|
||||
# 导入成功后,如果需要发布则额外发起发布请求
|
||||
if published:
|
||||
imported_uuid = res.get("data", {}).get("uuid", workflow_uuid)
|
||||
publish_res = self.workflow_publish(imported_uuid, description)
|
||||
res["publish_result"] = publish_res
|
||||
return res
|
||||
else:
|
||||
logger.error(f"导入工作流失败: {response.status_code}, {response.text}")
|
||||
return {"code": response.status_code, "message": response.text}
|
||||
|
||||
def workflow_publish(self, workflow_uuid: str, description: str = "") -> Dict[str, Any]:
|
||||
"""
|
||||
发布工作流
|
||||
|
||||
Args:
|
||||
workflow_uuid: 工作流UUID
|
||||
description: 工作流描述
|
||||
|
||||
Returns:
|
||||
Dict: API响应数据
|
||||
"""
|
||||
payload = {
|
||||
"uuid": workflow_uuid,
|
||||
"description": description,
|
||||
"published": True,
|
||||
}
|
||||
logger.info(f"正在发布工作流: {workflow_uuid}")
|
||||
response = requests.patch(
|
||||
f"{self.remote_addr}/lab/workflow/owner",
|
||||
json=payload,
|
||||
headers={"Authorization": f"Lab {self.auth}"},
|
||||
timeout=60,
|
||||
)
|
||||
if response.status_code == 200:
|
||||
res = response.json()
|
||||
if "code" in res and res["code"] != 0:
|
||||
logger.error(f"发布工作流失败: {response.text}")
|
||||
else:
|
||||
logger.info(f"工作流发布成功: {workflow_uuid}")
|
||||
return res
|
||||
else:
|
||||
logger.error(f"发布工作流失败: {response.status_code}, {response.text}")
|
||||
return {"code": response.status_code, "message": response.text}
|
||||
|
||||
|
||||
# 创建默认客户端实例
|
||||
http_client = HTTPClient()
|
||||
|
||||
@@ -58,14 +58,14 @@ class JobResultStore:
|
||||
feedback=feedback or {},
|
||||
timestamp=time.time(),
|
||||
)
|
||||
logger.debug(f"[JobResultStore] Stored result for job {job_id[:8]}, status={status}")
|
||||
logger.trace(f"[JobResultStore] Stored result for job {job_id[:8]}, status={status}")
|
||||
|
||||
def get_and_remove(self, job_id: str) -> Optional[JobResult]:
|
||||
"""获取并删除任务结果"""
|
||||
with self._results_lock:
|
||||
result = self._results.pop(job_id, None)
|
||||
if result:
|
||||
logger.debug(f"[JobResultStore] Retrieved and removed result for job {job_id[:8]}")
|
||||
logger.trace(f"[JobResultStore] Retrieved and removed result for job {job_id[:8]}")
|
||||
return result
|
||||
|
||||
def get_result(self, job_id: str) -> Optional[JobResult]:
|
||||
@@ -327,6 +327,7 @@ def job_add(req: JobAddReq) -> JobData:
|
||||
queue_item,
|
||||
action_type=action_type,
|
||||
action_kwargs=action_args,
|
||||
sample_material=req.sample_material,
|
||||
server_info=server_info,
|
||||
)
|
||||
|
||||
|
||||
@@ -6,7 +6,6 @@ Web服务器模块
|
||||
|
||||
import webbrowser
|
||||
|
||||
import uvicorn
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from starlette.responses import Response
|
||||
@@ -87,7 +86,7 @@ def setup_server() -> FastAPI:
|
||||
# 设置页面路由
|
||||
try:
|
||||
setup_web_pages(pages)
|
||||
info("[Web] 已加载Web UI模块")
|
||||
# info("[Web] 已加载Web UI模块")
|
||||
except ImportError as e:
|
||||
info(f"[Web] 未找到Web页面模块: {str(e)}")
|
||||
except Exception as e:
|
||||
@@ -96,7 +95,7 @@ def setup_server() -> FastAPI:
|
||||
return app
|
||||
|
||||
|
||||
def start_server(host: str = "0.0.0.0", port: int = 8002, open_browser: bool = True) -> None:
|
||||
def start_server(host: str = "0.0.0.0", port: int = 8002, open_browser: bool = True) -> bool:
|
||||
"""
|
||||
启动服务器
|
||||
|
||||
@@ -104,7 +103,14 @@ def start_server(host: str = "0.0.0.0", port: int = 8002, open_browser: bool = T
|
||||
host: 服务器主机
|
||||
port: 服务器端口
|
||||
open_browser: 是否自动打开浏览器
|
||||
|
||||
Returns:
|
||||
bool: True if restart was requested, False otherwise
|
||||
"""
|
||||
import threading
|
||||
import time
|
||||
from uvicorn import Config, Server
|
||||
|
||||
# 设置服务器
|
||||
setup_server()
|
||||
|
||||
@@ -123,7 +129,37 @@ def start_server(host: str = "0.0.0.0", port: int = 8002, open_browser: bool = T
|
||||
|
||||
# 启动服务器
|
||||
info(f"[Web] 启动FastAPI服务器: {host}:{port}")
|
||||
uvicorn.run(app, host=host, port=port, log_config=log_config)
|
||||
|
||||
# 使用支持重启的模式
|
||||
config = Config(app=app, host=host, port=port, log_config=log_config)
|
||||
server = Server(config)
|
||||
|
||||
# 启动服务器线程
|
||||
server_thread = threading.Thread(target=server.run, daemon=True, name="uvicorn_server")
|
||||
server_thread.start()
|
||||
|
||||
# info("[Web] Server started, monitoring for restart requests...")
|
||||
|
||||
# 监控重启标志
|
||||
import unilabos.app.main as main_module
|
||||
|
||||
while server_thread.is_alive():
|
||||
if hasattr(main_module, "_restart_requested") and main_module._restart_requested:
|
||||
info(
|
||||
f"[Web] Restart requested via WebSocket, reason: {getattr(main_module, '_restart_reason', 'unknown')}"
|
||||
)
|
||||
main_module._restart_requested = False
|
||||
|
||||
# 停止服务器
|
||||
server.should_exit = True
|
||||
server_thread.join(timeout=5)
|
||||
|
||||
info("[Web] Server stopped, ready for restart")
|
||||
return True
|
||||
|
||||
time.sleep(1)
|
||||
|
||||
return False
|
||||
|
||||
|
||||
# 当脚本直接运行时启动服务器
|
||||
|
||||
@@ -23,9 +23,10 @@ from typing import Optional, Dict, Any, List
|
||||
from urllib.parse import urlparse
|
||||
from enum import Enum
|
||||
|
||||
from jedi.inference.gradual.typing import TypedDict
|
||||
from typing_extensions import TypedDict
|
||||
|
||||
from unilabos.app.model import JobAddReq
|
||||
from unilabos.resources.resource_tracker import ResourceDictType
|
||||
from unilabos.ros.nodes.presets.host_node import HostNode
|
||||
from unilabos.utils.type_check import serialize_result_info
|
||||
from unilabos.app.communication import BaseCommunicationClient
|
||||
@@ -76,6 +77,7 @@ class JobInfo:
|
||||
start_time: float
|
||||
last_update_time: float = field(default_factory=time.time)
|
||||
ready_timeout: Optional[float] = None # READY状态的超时时间
|
||||
always_free: bool = False # 是否为永久闲置动作(不受排队限制)
|
||||
|
||||
def update_timestamp(self):
|
||||
"""更新最后更新时间"""
|
||||
@@ -127,6 +129,15 @@ class DeviceActionManager:
|
||||
# 总是将job添加到all_jobs中
|
||||
self.all_jobs[job_info.job_id] = job_info
|
||||
|
||||
# always_free的动作不受排队限制,直接设为READY
|
||||
if job_info.always_free:
|
||||
job_info.status = JobStatus.READY
|
||||
job_info.update_timestamp()
|
||||
job_info.set_ready_timeout(10)
|
||||
job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
|
||||
logger.trace(f"[DeviceActionManager] Job {job_log} always_free, start immediately")
|
||||
return True
|
||||
|
||||
# 检查是否有正在执行或准备执行的任务
|
||||
if device_key in self.active_jobs:
|
||||
# 有正在执行或准备执行的任务,加入队列
|
||||
@@ -154,7 +165,7 @@ class DeviceActionManager:
|
||||
job_info.set_ready_timeout(10) # 设置10秒超时
|
||||
self.active_jobs[device_key] = job_info
|
||||
job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
|
||||
logger.info(f"[DeviceActionManager] Job {job_log} can start immediately for {device_key}")
|
||||
logger.trace(f"[DeviceActionManager] Job {job_log} can start immediately for {device_key}")
|
||||
return True
|
||||
|
||||
def start_job(self, job_id: str) -> bool:
|
||||
@@ -176,11 +187,15 @@ class DeviceActionManager:
|
||||
logger.error(f"[DeviceActionManager] Job {job_log} is not in READY status, current: {job_info.status}")
|
||||
return False
|
||||
|
||||
# 检查设备上是否是这个job
|
||||
if device_key not in self.active_jobs or self.active_jobs[device_key].job_id != job_id:
|
||||
job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
|
||||
logger.error(f"[DeviceActionManager] Job {job_log} is not the active job for {device_key}")
|
||||
return False
|
||||
# always_free的job不需要检查active_jobs
|
||||
if not job_info.always_free:
|
||||
# 检查设备上是否是这个job
|
||||
if device_key not in self.active_jobs or self.active_jobs[device_key].job_id != job_id:
|
||||
job_log = format_job_log(
|
||||
job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name
|
||||
)
|
||||
logger.error(f"[DeviceActionManager] Job {job_log} is not the active job for {device_key}")
|
||||
return False
|
||||
|
||||
# 开始执行任务,将状态从READY转换为STARTED
|
||||
job_info.status = JobStatus.STARTED
|
||||
@@ -203,6 +218,13 @@ class DeviceActionManager:
|
||||
job_info = self.all_jobs[job_id]
|
||||
device_key = job_info.device_action_key
|
||||
|
||||
# always_free的job直接清理,不影响队列
|
||||
if job_info.always_free:
|
||||
job_info.status = JobStatus.ENDED
|
||||
job_info.update_timestamp()
|
||||
del self.all_jobs[job_id]
|
||||
return None
|
||||
|
||||
# 移除活跃任务
|
||||
if device_key in self.active_jobs and self.active_jobs[device_key].job_id == job_id:
|
||||
del self.active_jobs[device_key]
|
||||
@@ -210,8 +232,9 @@ class DeviceActionManager:
|
||||
job_info.update_timestamp()
|
||||
# 从all_jobs中移除已结束的job
|
||||
del self.all_jobs[job_id]
|
||||
job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
|
||||
logger.info(f"[DeviceActionManager] Job {job_log} ended for {device_key}")
|
||||
# job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
|
||||
# logger.debug(f"[DeviceActionManager] Job {job_log} ended for {device_key}")
|
||||
pass
|
||||
else:
|
||||
job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
|
||||
logger.warning(f"[DeviceActionManager] Job {job_log} was not active for {device_key}")
|
||||
@@ -227,15 +250,20 @@ class DeviceActionManager:
|
||||
next_job_log = format_job_log(
|
||||
next_job.job_id, next_job.task_id, next_job.device_id, next_job.action_name
|
||||
)
|
||||
logger.info(f"[DeviceActionManager] Next job {next_job_log} can start for {device_key}")
|
||||
logger.trace(f"[DeviceActionManager] Next job {next_job_log} can start for {device_key}")
|
||||
return next_job
|
||||
|
||||
return None
|
||||
|
||||
def get_active_jobs(self) -> List[JobInfo]:
|
||||
"""获取所有正在执行的任务"""
|
||||
"""获取所有正在执行的任务(含active_jobs和always_free的STARTED job)"""
|
||||
with self.lock:
|
||||
return list(self.active_jobs.values())
|
||||
jobs = list(self.active_jobs.values())
|
||||
# 补充 always_free 的 STARTED job(它们不在 active_jobs 中)
|
||||
for job in self.all_jobs.values():
|
||||
if job.always_free and job.status == JobStatus.STARTED and job not in jobs:
|
||||
jobs.append(job)
|
||||
return jobs
|
||||
|
||||
def get_queued_jobs(self) -> List[JobInfo]:
|
||||
"""获取所有排队中的任务"""
|
||||
@@ -260,6 +288,14 @@ class DeviceActionManager:
|
||||
job_info = self.all_jobs[job_id]
|
||||
device_key = job_info.device_action_key
|
||||
|
||||
# always_free的job直接清理
|
||||
if job_info.always_free:
|
||||
job_info.status = JobStatus.ENDED
|
||||
del self.all_jobs[job_id]
|
||||
job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
|
||||
logger.trace(f"[DeviceActionManager] Always-free job {job_log} cancelled")
|
||||
return True
|
||||
|
||||
# 如果是正在执行的任务
|
||||
if device_key in self.active_jobs and self.active_jobs[device_key].job_id == job_id:
|
||||
# 清理active job状态
|
||||
@@ -268,7 +304,7 @@ class DeviceActionManager:
|
||||
# 从all_jobs中移除
|
||||
del self.all_jobs[job_id]
|
||||
job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
|
||||
logger.info(f"[DeviceActionManager] Active job {job_log} cancelled for {device_key}")
|
||||
logger.trace(f"[DeviceActionManager] Active job {job_log} cancelled for {device_key}")
|
||||
|
||||
# 启动下一个任务
|
||||
if device_key in self.device_queues and self.device_queues[device_key]:
|
||||
@@ -281,7 +317,7 @@ class DeviceActionManager:
|
||||
next_job_log = format_job_log(
|
||||
next_job.job_id, next_job.task_id, next_job.device_id, next_job.action_name
|
||||
)
|
||||
logger.info(f"[DeviceActionManager] Next job {next_job_log} can start after cancel")
|
||||
logger.trace(f"[DeviceActionManager] Next job {next_job_log} can start after cancel")
|
||||
return True
|
||||
|
||||
# 如果是排队中的任务
|
||||
@@ -295,7 +331,7 @@ class DeviceActionManager:
|
||||
job_log = format_job_log(
|
||||
job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name
|
||||
)
|
||||
logger.info(f"[DeviceActionManager] Queued job {job_log} cancelled for {device_key}")
|
||||
logger.trace(f"[DeviceActionManager] Queued job {job_log} cancelled for {device_key}")
|
||||
return True
|
||||
|
||||
job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
|
||||
@@ -333,13 +369,18 @@ class DeviceActionManager:
|
||||
timeout_jobs = []
|
||||
|
||||
with self.lock:
|
||||
# 统计READY状态的任务数量
|
||||
ready_jobs_count = sum(1 for job in self.active_jobs.values() if job.status == JobStatus.READY)
|
||||
# 收集所有需要检查的 READY 任务(active_jobs + always_free READY jobs)
|
||||
ready_candidates = list(self.active_jobs.values())
|
||||
for job in self.all_jobs.values():
|
||||
if job.always_free and job.status == JobStatus.READY and job not in ready_candidates:
|
||||
ready_candidates.append(job)
|
||||
|
||||
ready_jobs_count = sum(1 for job in ready_candidates if job.status == JobStatus.READY)
|
||||
if ready_jobs_count > 0:
|
||||
logger.trace(f"[DeviceActionManager] Checking {ready_jobs_count} READY jobs for timeout") # type: ignore # noqa: E501
|
||||
|
||||
# 找到所有超时的READY任务(只检测,不处理)
|
||||
for job_info in self.active_jobs.values():
|
||||
for job_info in ready_candidates:
|
||||
if job_info.is_ready_timeout():
|
||||
timeout_jobs.append(job_info)
|
||||
job_log = format_job_log(
|
||||
@@ -359,7 +400,7 @@ class MessageProcessor:
|
||||
self.device_manager = device_manager
|
||||
self.queue_processor = None # 延迟设置
|
||||
self.websocket_client = None # 延迟设置
|
||||
self.session_id = ""
|
||||
self.session_id = str(uuid.uuid4())[:6] # 产生一个随机的session_id
|
||||
|
||||
# WebSocket连接
|
||||
self.websocket = None
|
||||
@@ -368,6 +409,7 @@ class MessageProcessor:
|
||||
# 线程控制
|
||||
self.is_running = False
|
||||
self.thread = None
|
||||
self._loop = None # asyncio event loop引用,用于外部关闭websocket
|
||||
self.reconnect_count = 0
|
||||
|
||||
logger.info(f"[MessageProcessor] Initialized for URL: {websocket_url}")
|
||||
@@ -394,22 +436,31 @@ class MessageProcessor:
|
||||
def stop(self) -> None:
|
||||
"""停止消息处理线程"""
|
||||
self.is_running = False
|
||||
# 主动关闭websocket以快速中断消息接收循环
|
||||
ws = self.websocket
|
||||
loop = self._loop
|
||||
if ws and loop and loop.is_running():
|
||||
try:
|
||||
asyncio.run_coroutine_threadsafe(ws.close(), loop)
|
||||
except Exception:
|
||||
pass
|
||||
if self.thread and self.thread.is_alive():
|
||||
self.thread.join(timeout=2)
|
||||
logger.info("[MessageProcessor] Stopped")
|
||||
|
||||
def _run(self):
|
||||
"""运行消息处理主循环"""
|
||||
loop = asyncio.new_event_loop()
|
||||
self._loop = asyncio.new_event_loop()
|
||||
try:
|
||||
asyncio.set_event_loop(loop)
|
||||
loop.run_until_complete(self._connection_handler())
|
||||
asyncio.set_event_loop(self._loop)
|
||||
self._loop.run_until_complete(self._connection_handler())
|
||||
except Exception as e:
|
||||
logger.error(f"[MessageProcessor] Thread error: {str(e)}")
|
||||
logger.error(traceback.format_exc())
|
||||
finally:
|
||||
if loop:
|
||||
loop.close()
|
||||
if self._loop:
|
||||
self._loop.close()
|
||||
self._loop = None
|
||||
|
||||
async def _connection_handler(self):
|
||||
"""处理WebSocket连接和重连逻辑"""
|
||||
@@ -426,8 +477,10 @@ class MessageProcessor:
|
||||
async with websockets.connect(
|
||||
self.websocket_url,
|
||||
ssl=ssl_context,
|
||||
open_timeout=20,
|
||||
ping_interval=WSConfig.ping_interval,
|
||||
ping_timeout=10,
|
||||
close_timeout=5,
|
||||
additional_headers={
|
||||
"Authorization": f"Lab {BasicConfig.auth_secret()}",
|
||||
"EdgeSession": f"{self.session_id}",
|
||||
@@ -438,68 +491,94 @@ class MessageProcessor:
|
||||
self.connected = True
|
||||
self.reconnect_count = 0
|
||||
|
||||
logger.trace(f"[MessageProcessor] Connected to {self.websocket_url}")
|
||||
logger.info(f"[MessageProcessor] 已连接到 {self.websocket_url}")
|
||||
|
||||
# 启动发送协程
|
||||
send_task = asyncio.create_task(self._send_handler())
|
||||
send_task = asyncio.create_task(self._send_handler(), name="websocket-send_task")
|
||||
|
||||
# 每次连接(含重连)后重新向服务端注册,
|
||||
# 否则服务端不知道客户端已上线,不会推送消息。
|
||||
if self.websocket_client:
|
||||
self.websocket_client.publish_host_ready()
|
||||
|
||||
try:
|
||||
# 接收消息循环
|
||||
await self._message_handler()
|
||||
finally:
|
||||
# 必须在 async with __aexit__ 之前停止 send_task,
|
||||
# 否则 send_task 会在关闭握手期间继续发送数据,
|
||||
# 干扰 websockets 库的内部清理,导致 task 泄漏。
|
||||
self.connected = False
|
||||
send_task.cancel()
|
||||
try:
|
||||
await send_task
|
||||
except asyncio.CancelledError:
|
||||
pass
|
||||
self.connected = False
|
||||
|
||||
except websockets.exceptions.ConnectionClosed:
|
||||
logger.warning("[MessageProcessor] Connection closed")
|
||||
self.connected = False
|
||||
logger.warning("[MessageProcessor] 与服务端连接中断")
|
||||
except TimeoutError:
|
||||
logger.warning(
|
||||
f"[MessageProcessor] 与服务端连接通信超时 (已尝试 {self.reconnect_count + 1} 次),请检查您的网络状况"
|
||||
)
|
||||
except websockets.exceptions.InvalidStatus as e:
|
||||
logger.warning(
|
||||
f"[MessageProcessor] 收到服务端注册码 {e.response.status_code}, 上一进程可能还未退出"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"[MessageProcessor] Connection error: {str(e)}")
|
||||
logger.error(traceback.format_exc())
|
||||
self.connected = False
|
||||
logger.error(f"[MessageProcessor] 尝试重连时出错 {str(e)}")
|
||||
finally:
|
||||
self.connected = False
|
||||
self.websocket = None
|
||||
|
||||
# 重连逻辑
|
||||
if self.is_running and self.reconnect_count < WSConfig.max_reconnect_attempts:
|
||||
if not self.is_running:
|
||||
break
|
||||
if self.reconnect_count < WSConfig.max_reconnect_attempts:
|
||||
self.reconnect_count += 1
|
||||
backoff = WSConfig.reconnect_interval
|
||||
logger.info(
|
||||
f"[MessageProcessor] Reconnecting in {WSConfig.reconnect_interval}s "
|
||||
f"(attempt {self.reconnect_count}/{WSConfig.max_reconnect_attempts})"
|
||||
f"[MessageProcessor] 即将在 {backoff} 秒后重连 (已尝试 {self.reconnect_count}/{WSConfig.max_reconnect_attempts})"
|
||||
)
|
||||
await asyncio.sleep(WSConfig.reconnect_interval)
|
||||
elif self.reconnect_count >= WSConfig.max_reconnect_attempts:
|
||||
await asyncio.sleep(backoff)
|
||||
else:
|
||||
logger.error("[MessageProcessor] Max reconnection attempts reached")
|
||||
break
|
||||
else:
|
||||
self.reconnect_count -= 1
|
||||
|
||||
async def _message_handler(self):
|
||||
"""处理接收到的消息"""
|
||||
"""处理接收到的消息。
|
||||
|
||||
ConnectionClosed 不在此处捕获,让其向上传播到 _connection_handler,
|
||||
以便 async with websockets.connect() 的 __aexit__ 能感知连接已断,
|
||||
正确清理内部 task,避免 task 泄漏。
|
||||
"""
|
||||
if not self.websocket:
|
||||
logger.error("[MessageProcessor] WebSocket connection is None")
|
||||
return
|
||||
|
||||
try:
|
||||
async for message in self.websocket:
|
||||
try:
|
||||
data = json.loads(message)
|
||||
await self._process_message(data)
|
||||
except json.JSONDecodeError:
|
||||
logger.error(f"[MessageProcessor] Invalid JSON received: {message}")
|
||||
except Exception as e:
|
||||
logger.error(f"[MessageProcessor] Error processing message: {str(e)}")
|
||||
logger.error(traceback.format_exc())
|
||||
|
||||
except websockets.exceptions.ConnectionClosed:
|
||||
logger.info("[MessageProcessor] Message handler stopped - connection closed")
|
||||
except Exception as e:
|
||||
logger.error(f"[MessageProcessor] Message handler error: {str(e)}")
|
||||
logger.error(traceback.format_exc())
|
||||
async for message in self.websocket:
|
||||
try:
|
||||
data = json.loads(message)
|
||||
message_type = data.get("action", "")
|
||||
message_data = data.get("data")
|
||||
if self.session_id and self.session_id == data.get("edge_session"):
|
||||
await self._process_message(message_type, message_data)
|
||||
else:
|
||||
if message_type.endswith("_material"):
|
||||
logger.trace(
|
||||
f"[MessageProcessor] 收到一条归属 {data.get('edge_session')} 的旧消息:{data}"
|
||||
)
|
||||
logger.debug(
|
||||
f"[MessageProcessor] 跳过了一条归属 {data.get('edge_session')} 的旧消息: {data.get('action')}"
|
||||
)
|
||||
else:
|
||||
await self._process_message(message_type, message_data)
|
||||
except json.JSONDecodeError:
|
||||
logger.error(f"[MessageProcessor] Invalid JSON received: {message}")
|
||||
except Exception as e:
|
||||
logger.error(f"[MessageProcessor] Error processing message: {str(e)}")
|
||||
logger.error(traceback.format_exc())
|
||||
|
||||
async def _send_handler(self):
|
||||
"""处理发送队列中的消息"""
|
||||
@@ -531,7 +610,7 @@ class MessageProcessor:
|
||||
try:
|
||||
message_str = json.dumps(msg, ensure_ascii=False)
|
||||
await self.websocket.send(message_str)
|
||||
logger.trace(f"[MessageProcessor] Message sent: {msg.get('action', 'unknown')}") # type: ignore # noqa: E501
|
||||
# logger.trace(f"[MessageProcessor] Message sent: {msg.get('action', 'unknown')}") # type: ignore # noqa: E501
|
||||
except Exception as e:
|
||||
logger.error(f"[MessageProcessor] Failed to send message: {str(e)}")
|
||||
logger.error(traceback.format_exc())
|
||||
@@ -548,18 +627,16 @@ class MessageProcessor:
|
||||
|
||||
except asyncio.CancelledError:
|
||||
logger.debug("[MessageProcessor] Send handler cancelled")
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"[MessageProcessor] Fatal error in send handler: {str(e)}")
|
||||
logger.error(traceback.format_exc())
|
||||
finally:
|
||||
logger.debug("[MessageProcessor] Send handler stopped")
|
||||
|
||||
async def _process_message(self, data: Dict[str, Any]):
|
||||
async def _process_message(self, message_type: str, message_data: Dict[str, Any]):
|
||||
"""处理收到的消息"""
|
||||
message_type = data.get("action", "")
|
||||
message_data = data.get("data")
|
||||
|
||||
logger.debug(f"[MessageProcessor] Processing message: {message_type}")
|
||||
logger.trace(f"[MessageProcessor] Processing message: {message_type}")
|
||||
|
||||
try:
|
||||
if message_type == "pong":
|
||||
@@ -571,14 +648,23 @@ class MessageProcessor:
|
||||
elif message_type == "cancel_action" or message_type == "cancel_task":
|
||||
await self._handle_cancel_action(message_data)
|
||||
elif message_type == "add_material":
|
||||
# noinspection PyTypeChecker
|
||||
await self._handle_resource_tree_update(message_data, "add")
|
||||
elif message_type == "update_material":
|
||||
# noinspection PyTypeChecker
|
||||
await self._handle_resource_tree_update(message_data, "update")
|
||||
elif message_type == "remove_material":
|
||||
# noinspection PyTypeChecker
|
||||
await self._handle_resource_tree_update(message_data, "remove")
|
||||
elif message_type == "session_id":
|
||||
self.session_id = message_data.get("session_id")
|
||||
logger.info(f"[MessageProcessor] Session ID: {self.session_id}")
|
||||
# elif message_type == "session_id":
|
||||
# self.session_id = message_data.get("session_id")
|
||||
# logger.info(f"[MessageProcessor] Session ID: {self.session_id}")
|
||||
elif message_type == "add_device":
|
||||
await self._handle_device_manage(message_data, "add")
|
||||
elif message_type == "remove_device":
|
||||
await self._handle_device_manage(message_data, "remove")
|
||||
elif message_type == "request_restart":
|
||||
await self._handle_request_restart(message_data)
|
||||
else:
|
||||
logger.debug(f"[MessageProcessor] Unknown message type: {message_type}")
|
||||
|
||||
@@ -592,6 +678,24 @@ class MessageProcessor:
|
||||
if host_node:
|
||||
host_node.handle_pong_response(pong_data)
|
||||
|
||||
def _check_action_always_free(self, device_id: str, action_name: str) -> bool:
    """Return True when the action is flagged always_free in HostNode's unified _action_value_mappings."""
    try:
        node = HostNode.get_instance(0)
        if not node:
            return False
        # noinspection PyProtectedMember
        mappings = node._action_value_mappings.get(device_id)
        if not mappings:
            return False
        # Match either the plain action name or its "auto-" prefixed variant.
        candidates = (action_name, f"auto-{action_name}")
        matched = next((mappings[c] for c in candidates if c in mappings), None)
        if matched is None:
            return False
        return matched.get("always_free", False)
    except Exception:
        return False
|
||||
|
||||
async def _handle_query_action_state(self, data: Dict[str, Any]):
|
||||
"""处理query_action_state消息"""
|
||||
device_id = data.get("device_id", "")
|
||||
@@ -606,6 +710,9 @@ class MessageProcessor:
|
||||
|
||||
device_action_key = f"/devices/{device_id}/{action_name}"
|
||||
|
||||
# 检查action是否为always_free
|
||||
action_always_free = self._check_action_always_free(device_id, action_name)
|
||||
|
||||
# 创建任务信息
|
||||
job_info = JobInfo(
|
||||
job_id=job_id,
|
||||
@@ -615,6 +722,7 @@ class MessageProcessor:
|
||||
device_action_key=device_action_key,
|
||||
status=JobStatus.QUEUE,
|
||||
start_time=time.time(),
|
||||
always_free=action_always_free,
|
||||
)
|
||||
|
||||
# 添加到设备管理器
|
||||
@@ -626,13 +734,13 @@ class MessageProcessor:
|
||||
await self._send_action_state_response(
|
||||
device_id, action_name, task_id, job_id, "query_action_status", True, 0
|
||||
)
|
||||
logger.info(f"[MessageProcessor] Job {job_log} can start immediately")
|
||||
logger.trace(f"[MessageProcessor] Job {job_log} can start immediately")
|
||||
else:
|
||||
# 需要排队
|
||||
await self._send_action_state_response(
|
||||
device_id, action_name, task_id, job_id, "query_action_status", False, 10
|
||||
)
|
||||
logger.info(f"[MessageProcessor] Job {job_log} queued")
|
||||
logger.trace(f"[MessageProcessor] Job {job_log} queued")
|
||||
|
||||
# 通知QueueProcessor有新的队列更新
|
||||
if self.queue_processor:
|
||||
@@ -641,9 +749,37 @@ class MessageProcessor:
|
||||
async def _handle_job_start(self, data: Dict[str, Any]):
|
||||
"""处理job_start消息"""
|
||||
try:
|
||||
if not data.get("sample_material"):
|
||||
data["sample_material"] = {}
|
||||
req = JobAddReq(**data)
|
||||
|
||||
job_log = format_job_log(req.job_id, req.task_id, req.device_id, req.action)
|
||||
|
||||
# 服务端对always_free动作可能跳过query_action_state直接发job_start,
|
||||
# 此时job尚未注册,需要自动补注册
|
||||
existing_job = self.device_manager.get_job_info(req.job_id)
|
||||
if not existing_job:
|
||||
action_name = req.action
|
||||
device_action_key = f"/devices/{req.device_id}/{action_name}"
|
||||
action_always_free = self._check_action_always_free(req.device_id, action_name)
|
||||
|
||||
if action_always_free:
|
||||
job_info = JobInfo(
|
||||
job_id=req.job_id,
|
||||
task_id=req.task_id,
|
||||
device_id=req.device_id,
|
||||
action_name=action_name,
|
||||
device_action_key=device_action_key,
|
||||
status=JobStatus.QUEUE,
|
||||
start_time=time.time(),
|
||||
always_free=True,
|
||||
)
|
||||
self.device_manager.add_queue_request(job_info)
|
||||
logger.info(f"[MessageProcessor] Job {job_log} always_free, auto-registered from direct job_start")
|
||||
else:
|
||||
logger.error(f"[MessageProcessor] Job {job_log} not registered (missing query_action_state)")
|
||||
return
|
||||
|
||||
success = self.device_manager.start_job(req.job_id)
|
||||
if not success:
|
||||
logger.error(f"[MessageProcessor] Failed to start job {job_log}")
|
||||
@@ -672,6 +808,7 @@ class MessageProcessor:
|
||||
queue_item,
|
||||
action_type=req.action_type,
|
||||
action_kwargs=req.action_args,
|
||||
sample_material=req.sample_material,
|
||||
server_info=req.server_info,
|
||||
)
|
||||
|
||||
@@ -836,9 +973,7 @@ class MessageProcessor:
|
||||
device_action_groups[key_add] = []
|
||||
device_action_groups[key_add].append(item["uuid"])
|
||||
|
||||
logger.info(
|
||||
f"[MessageProcessor] Resource migrated: {item['uuid'][:8]} from {device_old_id} to {device_id}"
|
||||
)
|
||||
logger.info(f"[资源同步] 跨站Transfer: {item['uuid'][:8]} from {device_old_id} to {device_id}")
|
||||
else:
|
||||
# 正常update
|
||||
key = (device_id, "update")
|
||||
@@ -852,11 +987,13 @@ class MessageProcessor:
|
||||
device_action_groups[key] = []
|
||||
device_action_groups[key].append(item["uuid"])
|
||||
|
||||
logger.info(f"触发物料更新 {action} 分组数量: {len(device_action_groups)}, 总数量: {len(resource_uuid_list)}")
|
||||
logger.trace(
|
||||
f"[资源同步] 动作 {action} 分组数量: {len(device_action_groups)}, 总数量: {len(resource_uuid_list)}"
|
||||
)
|
||||
|
||||
# 为每个(device_id, action)创建独立的更新线程
|
||||
for (device_id, actual_action), items in device_action_groups.items():
|
||||
logger.info(f"设备 {device_id} 物料更新 {actual_action} 数量: {len(items)}")
|
||||
logger.trace(f"[资源同步] {device_id} 物料动作 {actual_action} 数量: {len(items)}")
|
||||
|
||||
def _notify_resource_tree(dev_id, act, item_list):
|
||||
try:
|
||||
@@ -888,6 +1025,81 @@ class MessageProcessor:
|
||||
)
|
||||
thread.start()
|
||||
|
||||
async def _handle_device_manage(self, device_list: list[ResourceDictType], action: str):
    """Handle add_device / remove_device from LabGo server."""
    if not device_list:
        return

    def _notify(target_id: str, act: str, cfg: ResourceDictType):
        # Runs on a worker thread: look up the HostNode and forward the request.
        try:
            host_node = HostNode.get_instance(timeout=5)
            if not host_node:
                logger.error(f"[DeviceManage] HostNode not available for {act}_device")
                return
            success = host_node.notify_device_manage(target_id, act, cfg)
            if success:
                logger.info(f"[DeviceManage] {act}_device completed on {target_id}")
            else:
                logger.warning(f"[DeviceManage] {act}_device failed on {target_id}")
        except Exception as e:
            logger.error(f"[DeviceManage] Error in {act}_device: {e}")
            logger.error(traceback.format_exc())

    # One daemon thread per device entry so a slow node cannot block the event loop.
    for entry in device_list:
        worker = threading.Thread(
            target=_notify,
            args=(entry.get("target_node_id", "host_node"), action, entry),
            daemon=True,
            name=f"DeviceManage-{action}-{entry.get('id', '')}",
        )
        worker.start()
|
||||
|
||||
async def _handle_request_restart(self, data: Dict[str, Any]):
    """
    Handle a restart request.

    When LabGo sends request_restart, acknowledge it, set the global restart
    flags, then run cleanup on a background thread and let main() restart.
    """
    reason = data.get("reason", "unknown")
    delay = data.get("delay", 2)  # default delay: 2 seconds
    logger.info(f"[MessageProcessor] Received restart request, reason: {reason}, delay: {delay}s")

    # Acknowledge the request back to the server before anything else.
    self.send_message(
        {"action": "restart_acknowledged", "data": {"reason": reason, "delay": delay}}
    )

    # Set the global restart flags read by the application entry point.
    import unilabos.app.main as main_module

    main_module._restart_requested = True
    main_module._restart_reason = reason

    # Wait the requested delay before cleaning up.
    await asyncio.sleep(delay)

    # Run cleanup on a new thread to avoid blocking the current event loop.
    def do_cleanup():
        import time

        time.sleep(0.5)  # give the current message handling time to finish
        logger.info(f"[MessageProcessor] Starting cleanup for restart, reason: {reason}")
        try:
            from unilabos.app.utils import cleanup_for_restart

            if cleanup_for_restart():
                logger.info("[MessageProcessor] Cleanup successful, main() will restart")
            else:
                logger.error("[MessageProcessor] Cleanup failed")
        except Exception as e:
            logger.error(f"[MessageProcessor] Error during cleanup: {e}")

    cleanup_thread = threading.Thread(target=do_cleanup, name="RestartCleanupThread", daemon=True)
    cleanup_thread.start()
    logger.info(f"[MessageProcessor] Restart cleanup scheduled")
|
||||
|
||||
async def _send_action_state_response(
|
||||
self, device_id: str, action_name: str, task_id: str, job_id: str, typ: str, free: bool, need_more: int
|
||||
):
|
||||
@@ -959,6 +1171,7 @@ class QueueProcessor:
|
||||
def stop(self) -> None:
    """Stop the queue-processing thread."""
    self.is_running = False
    # Wake any thread blocked on the event so it can observe is_running=False.
    self.queue_update_event.set()
    worker = self.thread
    if worker and worker.is_alive():
        worker.join(timeout=2)
    logger.info("[QueueProcessor] Stopped")
|
||||
@@ -1059,6 +1272,11 @@ class QueueProcessor:
|
||||
logger.debug(f"[QueueProcessor] Sending busy status for {len(queued_jobs)} queued jobs")
|
||||
|
||||
for job_info in queued_jobs:
|
||||
# 快照可能已过期:在遍历过程中 end_job() 可能已将此 job 移至 READY,
|
||||
# 此时不应再发送 busy/need_more,否则会覆盖已发出的 free=True 通知
|
||||
if job_info.status != JobStatus.QUEUE:
|
||||
continue
|
||||
|
||||
message = {
|
||||
"action": "report_action_state",
|
||||
"data": {
|
||||
@@ -1074,7 +1292,7 @@ class QueueProcessor:
|
||||
success = self.message_processor.send_message(message)
|
||||
job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
|
||||
if success:
|
||||
logger.debug(f"[QueueProcessor] Sent busy/need_more for queued job {job_log}")
|
||||
logger.trace(f"[QueueProcessor] Sent busy/need_more for queued job {job_log}")
|
||||
else:
|
||||
logger.warning(f"[QueueProcessor] Failed to send busy status for job {job_log}")
|
||||
|
||||
@@ -1097,7 +1315,7 @@ class QueueProcessor:
|
||||
job_info.action_name,
|
||||
)
|
||||
|
||||
logger.info(f"[QueueProcessor] Job {job_log} completed with status: {status}")
|
||||
logger.trace(f"[QueueProcessor] Job {job_log} completed with status: {status}")
|
||||
|
||||
# 结束任务,获取下一个可执行的任务
|
||||
next_job = self.device_manager.end_job(job_id)
|
||||
@@ -1117,8 +1335,8 @@ class QueueProcessor:
|
||||
},
|
||||
}
|
||||
self.message_processor.send_message(message)
|
||||
next_job_log = format_job_log(next_job.job_id, next_job.task_id, next_job.device_id, next_job.action_name)
|
||||
logger.info(f"[QueueProcessor] Notified next job {next_job_log} can start")
|
||||
# next_job_log = format_job_log(next_job.job_id, next_job.task_id, next_job.device_id, next_job.action_name)
|
||||
# logger.debug(f"[QueueProcessor] Notified next job {next_job_log} can start")
|
||||
|
||||
# 立即触发下一轮状态检查
|
||||
self.notify_queue_update()
|
||||
@@ -1207,8 +1425,8 @@ class WebSocketClient(BaseCommunicationClient):
|
||||
message = {"action": "normal_exit", "data": {"session_id": session_id}}
|
||||
self.message_processor.send_message(message)
|
||||
logger.info(f"[WebSocketClient] Sent normal_exit message with session_id: {session_id}")
|
||||
# 给一点时间让消息发送出去
|
||||
time.sleep(1)
|
||||
# send_handler 每100ms检查一次队列,等300ms足以让消息发出
|
||||
time.sleep(0.3)
|
||||
except Exception as e:
|
||||
logger.warning(f"[WebSocketClient] Failed to send normal_exit message: {str(e)}")
|
||||
|
||||
@@ -1240,7 +1458,7 @@ class WebSocketClient(BaseCommunicationClient):
|
||||
},
|
||||
}
|
||||
self.message_processor.send_message(message)
|
||||
logger.debug(f"[WebSocketClient] Device status published: {device_id}.{property_name}")
|
||||
# logger.trace(f"[WebSocketClient] Device status published: {device_id}.{property_name}")
|
||||
|
||||
def publish_job_status(
|
||||
self, feedback_data: dict, item: QueueItem, status: str, return_info: Optional[dict] = None
|
||||
@@ -1260,7 +1478,7 @@ class WebSocketClient(BaseCommunicationClient):
|
||||
except (KeyError, AttributeError):
|
||||
logger.warning(f"[WebSocketClient] Failed to remove job {item.job_id} from HostNode status")
|
||||
|
||||
logger.info(f"[WebSocketClient] Intercepting final status for job_id: {item.job_id} - {status}")
|
||||
# logger.debug(f"[WebSocketClient] Intercepting final status for job_id: {item.job_id} - {status}")
|
||||
|
||||
# 通知队列处理器job完成(包括timeout的job)
|
||||
self.queue_processor.handle_job_completed(item.job_id, status)
|
||||
@@ -1282,7 +1500,7 @@ class WebSocketClient(BaseCommunicationClient):
|
||||
self.message_processor.send_message(message)
|
||||
|
||||
job_log = format_job_log(item.job_id, item.task_id, item.device_id, item.action_name)
|
||||
logger.debug(f"[WebSocketClient] Job status published: {job_log} - {status}")
|
||||
logger.trace(f"[WebSocketClient] Job status published: {job_log} - {status}")
|
||||
|
||||
def send_ping(self, ping_id: str, timestamp: float) -> None:
|
||||
"""发送ping消息"""
|
||||
@@ -1313,17 +1531,59 @@ class WebSocketClient(BaseCommunicationClient):
|
||||
logger.warning(f"[WebSocketClient] Failed to cancel job {job_log}")
|
||||
|
||||
def publish_host_ready(self) -> None:
    """Publish the host_node ready signal, including device and action info.

    Collects every known device from the HostNode singleton (online state,
    owning machine name and the actions registered for it) and sends one
    ``host_node_ready`` message. Collection is best-effort: failures are
    logged and an empty device list is sent instead.
    """
    # Fix: the previous version carried a stale duplicate docstring and a
    # duplicate final log statement (diff residue); only the updated copies
    # of each are kept.
    if self.is_disabled or not self.is_connected():
        logger.debug("[WebSocketClient] Not connected, cannot publish host ready signal")
        return

    # Collect device information
    devices = []
    machine_name = BasicConfig.machine_name

    try:
        host_node = HostNode.get_instance(0)
        if host_node:
            for device_id, namespace in host_node.devices_names.items():
                device_key = (
                    f"{namespace}/{device_id}" if namespace.startswith("/") else f"/{namespace}/{device_id}"
                )
                is_online = device_key in host_node._online_devices

                # Gather the actions registered for this device.
                # action_id format: /namespace/device_id/action_name
                # NOTE(review): substring match may over-match when one
                # device_id is a prefix of another — confirm against callers.
                actions = {}
                for action_id, client in host_node._action_clients.items():
                    if device_id in action_id:
                        action_name = action_id.split("/")[-1]
                        actions[action_name] = {
                            "action_path": action_id,
                            "action_type": str(type(client).__name__),
                        }

                devices.append(
                    {
                        "device_id": device_id,
                        "namespace": namespace,
                        "device_key": device_key,
                        "is_online": is_online,
                        "machine_name": host_node.device_machine_names.get(device_id, machine_name),
                        "actions": actions,
                    }
                )

            logger.info(f"[WebSocketClient] Collected {len(devices)} devices for host_ready")
    except Exception as e:
        logger.warning(f"[WebSocketClient] Error collecting device info: {e}")

    message = {
        "action": "host_node_ready",
        "data": {
            "status": "ready",
            "timestamp": time.time(),
            "machine_name": machine_name,
            "devices": devices,
        },
    }
    self.message_processor.send_message(message)
    logger.info(f"[WebSocketClient] Host node ready signal published with {len(devices)} devices")
|
||||
|
||||
@@ -5,6 +5,7 @@ from .separate_protocol import generate_separate_protocol
|
||||
from .evaporate_protocol import generate_evaporate_protocol
|
||||
from .evacuateandrefill_protocol import generate_evacuateandrefill_protocol
|
||||
from .agv_transfer_protocol import generate_agv_transfer_protocol
|
||||
from .batch_transfer_protocol import generate_batch_transfer_protocol
|
||||
from .add_protocol import generate_add_protocol
|
||||
from .centrifuge_protocol import generate_centrifuge_protocol
|
||||
from .filter_protocol import generate_filter_protocol
|
||||
@@ -31,6 +32,7 @@ from .hydrogenate_protocol import generate_hydrogenate_protocol
|
||||
action_protocol_generators = {
|
||||
AddProtocol: generate_add_protocol,
|
||||
AGVTransferProtocol: generate_agv_transfer_protocol,
|
||||
BatchTransferProtocol: generate_batch_transfer_protocol,
|
||||
AdjustPHProtocol: generate_adjust_ph_protocol,
|
||||
CentrifugeProtocol: generate_centrifuge_protocol,
|
||||
CleanProtocol: generate_clean_protocol,
|
||||
|
||||
127
unilabos/compile/_agv_utils.py
Normal file
@@ -0,0 +1,127 @@
|
||||
"""
|
||||
AGV 编译器共用工具函数
|
||||
|
||||
从 physical_setup_graph 中发现 AGV 节点配置,
|
||||
供 agv_transfer_protocol 和 batch_transfer_protocol 复用。
|
||||
"""
|
||||
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
import networkx as nx
|
||||
|
||||
|
||||
def find_agv_config(G: nx.Graph, agv_id: Optional[str] = None) -> Dict[str, Any]:
    """Locate the AGV node in the physical setup graph and return its config.

    Lookup order:
    1. If *agv_id* is given and its node carries a ``device_roles`` config, use it.
    2. Otherwise take the first node whose class is ``agv_transport_station``.
    3. Fall back to the first ``workstation`` node whose config has ``device_roles``.

    Returns:
        {
            "agv_id": str,
            "device_roles": {"navigator": "...", "arm": "..."},
            "route_table": {"A->B": {"nav_command": ..., "arm_pick": ..., "arm_place": ...}},
            "capacity": int,
        }

    Raises:
        ValueError: when no suitable AGV node exists in the graph.
    """
    if agv_id and agv_id in G.nodes:
        cfg = _extract_config(G.nodes[agv_id])
        if cfg and "device_roles" in cfg:
            return _build_agv_cfg(agv_id, cfg, G)

    # Preferred: an explicit agv_transport_station node.
    for node_id, data in G.nodes(data=True):
        if _get_node_class(data) == "agv_transport_station":
            return _build_agv_cfg(node_id, _extract_config(data) or {}, G)

    # Fallback: a workstation that declares device_roles in its config.
    for node_id, data in G.nodes(data=True):
        if _get_node_class(data) != "workstation":
            continue
        cfg = _extract_config(data)
        if cfg and "device_roles" in cfg:
            return _build_agv_cfg(node_id, cfg, G)

    raise ValueError("设备图中未找到 AGV 节点(需 class=agv_transport_station 或 config.device_roles)")
|
||||
|
||||
|
||||
def get_agv_capacity(G: nx.Graph, agv_id: str) -> int:
    """Derive the AGV carrier capacity from its Warehouse child node.

    Scans the AGV's out-neighbours (successors on a directed graph, plain
    neighbours otherwise) for the first node typed ``warehouse`` and
    multiplies its grid dimensions. Returns 0 when no warehouse child with
    a usable config is found.
    """
    neighbours = G.successors(agv_id) if G.is_directed() else G.neighbors(agv_id)
    for child in neighbours:
        data = G.nodes[child]
        if _get_node_type(data) != "warehouse":
            continue
        cfg = _extract_config(data)
        if cfg:
            nx_items = cfg.get("num_items_x", 1)
            ny_items = cfg.get("num_items_y", 1)
            nz_items = cfg.get("num_items_z", 1)
            return nx_items * ny_items * nz_items
    # No warehouse child: caller may try reading capacity from config instead.
    return 0
|
||||
|
||||
|
||||
def split_batches(items: list, capacity: int) -> List[list]:
    """Split *items* into consecutive batches of at most *capacity* elements.

    Args:
        items: materials queued for transfer.
        capacity: per-trip AGV capacity; must be strictly positive.

    Returns:
        A list of batches preserving the original order; the final batch may
        be shorter than *capacity*.

    Raises:
        ValueError: when *capacity* is not strictly positive.
    """
    if capacity <= 0:
        raise ValueError(f"AGV 容量必须 > 0,当前: {capacity}")
    batches: List[list] = []
    cursor = 0
    total = len(items)
    while cursor < total:
        batches.append(items[cursor:cursor + capacity])
        cursor += capacity
    return batches
|
||||
|
||||
|
||||
def _extract_config(node_data: dict) -> Optional[dict]:
|
||||
"""从节点数据中提取 config 字段,兼容多种格式"""
|
||||
# 直接 config 字段
|
||||
config = node_data.get("config")
|
||||
if isinstance(config, dict):
|
||||
return config
|
||||
# res_content 嵌套格式
|
||||
res_content = node_data.get("res_content")
|
||||
if hasattr(res_content, "config"):
|
||||
return res_content.config if isinstance(res_content.config, dict) else None
|
||||
if isinstance(res_content, dict):
|
||||
return res_content.get("config")
|
||||
return None
|
||||
|
||||
|
||||
def _get_node_class(node_data: dict) -> str:
|
||||
"""获取节点的 class 字段"""
|
||||
res_content = node_data.get("res_content")
|
||||
if hasattr(res_content, "model_dump"):
|
||||
d = res_content.model_dump()
|
||||
return d.get("class_", d.get("class", ""))
|
||||
if isinstance(res_content, dict):
|
||||
return res_content.get("class_", res_content.get("class", ""))
|
||||
return node_data.get("class_", node_data.get("class", ""))
|
||||
|
||||
|
||||
def _get_node_type(node_data: dict) -> str:
|
||||
"""获取节点的 type 字段"""
|
||||
res_content = node_data.get("res_content")
|
||||
if hasattr(res_content, "type"):
|
||||
return res_content.type or ""
|
||||
if isinstance(res_content, dict):
|
||||
return res_content.get("type", "")
|
||||
return node_data.get("type", "")
|
||||
|
||||
|
||||
def _build_agv_cfg(agv_id: str, config: dict, G: nx.Graph) -> Dict[str, Any]:
    """Assemble the normalized AGV configuration dict for *agv_id*."""
    roles = config.get("device_roles", {})
    routes = config.get("route_table", {})
    # Capacity comes from the AGV's warehouse child, not from the config itself.
    return {
        "agv_id": agv_id,
        "device_roles": roles,
        "route_table": routes,
        "capacity": get_agv_capacity(G, agv_id),
    }
|
||||
@@ -2,20 +2,13 @@ from functools import partial
|
||||
|
||||
import networkx as nx
|
||||
import re
|
||||
import logging
|
||||
from typing import List, Dict, Any, Union
|
||||
|
||||
from .utils.unit_parser import parse_volume_input, parse_mass_input, parse_time_input
|
||||
from .utils.vessel_parser import get_vessel, find_solid_dispenser, find_connected_stirrer, find_reagent_vessel
|
||||
from .utils.logger_util import action_log
|
||||
from .utils.logger_util import action_log, debug_print
|
||||
from .pump_protocol import generate_pump_protocol_with_rinsing
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def debug_print(message):
|
||||
"""调试输出"""
|
||||
logger.info(f"[ADD] {message}")
|
||||
|
||||
|
||||
# 🆕 创建进度日志动作
|
||||
create_action_log = partial(action_log, prefix="[ADD]")
|
||||
|
||||
@@ -1,14 +1,12 @@
|
||||
from functools import partial
|
||||
|
||||
import networkx as nx
|
||||
import logging
|
||||
from typing import List, Dict, Any, Union
|
||||
from .utils.vessel_parser import get_vessel
|
||||
from .utils.vessel_parser import get_vessel, find_connected_stirrer
|
||||
from .utils.logger_util import action_log, debug_print
|
||||
from .pump_protocol import generate_pump_protocol_with_rinsing
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def debug_print(message):
|
||||
"""调试输出"""
|
||||
logger.info(f"[ADJUST_PH] {message}")
|
||||
create_action_log = partial(action_log, prefix="[ADJUST_PH]")
|
||||
|
||||
def find_acid_base_vessel(G: nx.DiGraph, reagent: str) -> str:
|
||||
"""
|
||||
@@ -21,8 +19,6 @@ def find_acid_base_vessel(G: nx.DiGraph, reagent: str) -> str:
|
||||
Returns:
|
||||
str: 试剂容器ID
|
||||
"""
|
||||
debug_print(f"🔍 正在查找试剂 '{reagent}' 的容器...")
|
||||
|
||||
# 常见酸碱试剂的别名映射
|
||||
reagent_aliases = {
|
||||
"hydrochloric acid": ["HCl", "hydrochloric_acid", "hcl", "muriatic_acid"],
|
||||
@@ -36,17 +32,13 @@ def find_acid_base_vessel(G: nx.DiGraph, reagent: str) -> str:
|
||||
|
||||
# 构建搜索名称列表
|
||||
search_names = [reagent.lower()]
|
||||
debug_print(f"📋 基础搜索名称: {reagent.lower()}")
|
||||
|
||||
|
||||
# 添加别名
|
||||
for base_name, aliases in reagent_aliases.items():
|
||||
if reagent.lower() in base_name.lower() or base_name.lower() in reagent.lower():
|
||||
search_names.extend([alias.lower() for alias in aliases])
|
||||
debug_print(f"🔗 添加别名: {aliases}")
|
||||
break
|
||||
|
||||
debug_print(f"📝 完整搜索列表: {search_names}")
|
||||
|
||||
# 构建可能的容器名称
|
||||
possible_names = []
|
||||
for name in search_names:
|
||||
@@ -61,17 +53,15 @@ def find_acid_base_vessel(G: nx.DiGraph, reagent: str) -> str:
|
||||
name_clean
|
||||
])
|
||||
|
||||
debug_print(f"🎯 可能的容器名称 (前5个): {possible_names[:5]}... (共{len(possible_names)}个)")
|
||||
|
||||
debug_print(f"搜索容器: {len(possible_names)} 个候选名称")
|
||||
|
||||
# 第一步:通过容器名称匹配
|
||||
debug_print(f"📋 方法1: 精确名称匹配...")
|
||||
for vessel_name in possible_names:
|
||||
if vessel_name in G.nodes():
|
||||
debug_print(f"✅ 通过名称匹配找到容器: {vessel_name} 🎯")
|
||||
debug_print(f"通过名称匹配找到容器: {vessel_name}")
|
||||
return vessel_name
|
||||
|
||||
|
||||
# 第二步:通过模糊匹配
|
||||
debug_print(f"📋 方法2: 模糊名称匹配...")
|
||||
for node_id in G.nodes():
|
||||
if G.nodes[node_id].get('type') == 'container':
|
||||
node_name = G.nodes[node_id].get('name', '').lower()
|
||||
@@ -79,11 +69,10 @@ def find_acid_base_vessel(G: nx.DiGraph, reagent: str) -> str:
|
||||
# 检查是否包含任何搜索名称
|
||||
for search_name in search_names:
|
||||
if search_name in node_id.lower() or search_name in node_name:
|
||||
debug_print(f"✅ 通过模糊匹配找到容器: {node_id} 🔍")
|
||||
debug_print(f"通过模糊匹配找到容器: {node_id}")
|
||||
return node_id
|
||||
|
||||
|
||||
# 第三步:通过液体类型匹配
|
||||
debug_print(f"📋 方法3: 液体类型匹配...")
|
||||
for node_id in G.nodes():
|
||||
if G.nodes[node_id].get('type') == 'container':
|
||||
vessel_data = G.nodes[node_id].get('data', {})
|
||||
@@ -96,56 +85,15 @@ def find_acid_base_vessel(G: nx.DiGraph, reagent: str) -> str:
|
||||
|
||||
for search_name in search_names:
|
||||
if search_name in liquid_type or search_name in reagent_name:
|
||||
debug_print(f"✅ 通过液体类型匹配找到容器: {node_id} 💧")
|
||||
debug_print(f"通过液体类型匹配找到容器: {node_id}")
|
||||
return node_id
|
||||
|
||||
# 列出可用容器帮助调试
|
||||
debug_print(f"📊 列出可用容器帮助调试...")
|
||||
available_containers = []
|
||||
for node_id in G.nodes():
|
||||
if G.nodes[node_id].get('type') == 'container':
|
||||
vessel_data = G.nodes[node_id].get('data', {})
|
||||
liquids = vessel_data.get('liquid', [])
|
||||
liquid_types = [liquid.get('liquid_type', '') or liquid.get('name', '')
|
||||
for liquid in liquids if isinstance(liquid, dict)]
|
||||
|
||||
available_containers.append({
|
||||
'id': node_id,
|
||||
'name': G.nodes[node_id].get('name', ''),
|
||||
'liquids': liquid_types,
|
||||
'reagent_name': vessel_data.get('reagent_name', '')
|
||||
})
|
||||
|
||||
debug_print(f"📋 可用容器列表:")
|
||||
for container in available_containers:
|
||||
debug_print(f" - 🧪 {container['id']}: {container['name']}")
|
||||
debug_print(f" 💧 液体: {container['liquids']}")
|
||||
debug_print(f" 🏷️ 试剂: {container['reagent_name']}")
|
||||
|
||||
debug_print(f"❌ 所有匹配方法都失败了")
|
||||
available_containers = [node_id for node_id in G.nodes()
|
||||
if G.nodes[node_id].get('type') == 'container']
|
||||
debug_print(f"所有匹配方法失败,可用容器: {available_containers}")
|
||||
raise ValueError(f"找不到试剂 '{reagent}' 对应的容器。尝试了: {possible_names[:10]}...")
|
||||
|
||||
def find_connected_stirrer(G: nx.DiGraph, vessel: str) -> str:
|
||||
"""查找与容器相连的搅拌器"""
|
||||
debug_print(f"🔍 查找连接到容器 '{vessel}' 的搅拌器...")
|
||||
|
||||
stirrer_nodes = [node for node in G.nodes()
|
||||
if (G.nodes[node].get('class') or '') == 'virtual_stirrer']
|
||||
|
||||
debug_print(f"📊 发现 {len(stirrer_nodes)} 个搅拌器: {stirrer_nodes}")
|
||||
|
||||
for stirrer in stirrer_nodes:
|
||||
if G.has_edge(stirrer, vessel) or G.has_edge(vessel, stirrer):
|
||||
debug_print(f"✅ 找到连接的搅拌器: {stirrer} 🔗")
|
||||
return stirrer
|
||||
|
||||
if stirrer_nodes:
|
||||
debug_print(f"⚠️ 未找到直接连接的搅拌器,使用第一个: {stirrer_nodes[0]} 🔄")
|
||||
return stirrer_nodes[0]
|
||||
|
||||
debug_print(f"❌ 未找到任何搅拌器")
|
||||
return None
|
||||
|
||||
def calculate_reagent_volume(target_ph_value: float, reagent: str, vessel_volume: float = 100.0) -> float:
|
||||
"""
|
||||
估算需要的试剂体积来调节pH
|
||||
@@ -158,44 +106,30 @@ def calculate_reagent_volume(target_ph_value: float, reagent: str, vessel_volume
|
||||
Returns:
|
||||
float: 估算的试剂体积 (mL)
|
||||
"""
|
||||
debug_print(f"🧮 计算试剂体积...")
|
||||
debug_print(f" 📍 目标pH: {target_ph_value}")
|
||||
debug_print(f" 🧪 试剂: {reagent}")
|
||||
debug_print(f" 📏 容器体积: {vessel_volume}mL")
|
||||
|
||||
# 简化的pH调节体积估算(实际应用中需要更精确的计算)
|
||||
debug_print(f"计算试剂体积: pH={target_ph_value}, reagent={reagent}, vessel={vessel_volume}mL")
|
||||
|
||||
# 简化的pH调节体积估算
|
||||
if "acid" in reagent.lower() or "hcl" in reagent.lower():
|
||||
debug_print(f"🍋 检测到酸性试剂")
|
||||
# 酸性试剂:pH越低需要的体积越大
|
||||
if target_ph_value < 3:
|
||||
volume = vessel_volume * 0.05 # 5%
|
||||
debug_print(f" 💪 强酸性 (pH<3): 使用 5% 体积")
|
||||
volume = vessel_volume * 0.05
|
||||
elif target_ph_value < 5:
|
||||
volume = vessel_volume * 0.02 # 2%
|
||||
debug_print(f" 🔸 中酸性 (pH<5): 使用 2% 体积")
|
||||
volume = vessel_volume * 0.02
|
||||
else:
|
||||
volume = vessel_volume * 0.01 # 1%
|
||||
debug_print(f" 🔹 弱酸性 (pH≥5): 使用 1% 体积")
|
||||
|
||||
volume = vessel_volume * 0.01
|
||||
|
||||
elif "hydroxide" in reagent.lower() or "naoh" in reagent.lower():
|
||||
debug_print(f"🧂 检测到碱性试剂")
|
||||
# 碱性试剂:pH越高需要的体积越大
|
||||
if target_ph_value > 11:
|
||||
volume = vessel_volume * 0.05 # 5%
|
||||
debug_print(f" 💪 强碱性 (pH>11): 使用 5% 体积")
|
||||
volume = vessel_volume * 0.05
|
||||
elif target_ph_value > 9:
|
||||
volume = vessel_volume * 0.02 # 2%
|
||||
debug_print(f" 🔸 中碱性 (pH>9): 使用 2% 体积")
|
||||
volume = vessel_volume * 0.02
|
||||
else:
|
||||
volume = vessel_volume * 0.01 # 1%
|
||||
debug_print(f" 🔹 弱碱性 (pH≤9): 使用 1% 体积")
|
||||
|
||||
volume = vessel_volume * 0.01
|
||||
|
||||
else:
|
||||
# 未知试剂,使用默认值
|
||||
volume = vessel_volume * 0.01
|
||||
debug_print(f"❓ 未知试剂类型,使用默认 1% 体积")
|
||||
|
||||
debug_print(f"📊 计算结果: {volume:.2f}mL")
|
||||
|
||||
debug_print(f"估算试剂体积: {volume:.2f}mL")
|
||||
return volume
|
||||
|
||||
def generate_adjust_ph_protocol(
|
||||
@@ -220,96 +154,67 @@ def generate_adjust_ph_protocol(
|
||||
"""
|
||||
|
||||
vessel_id, vessel_data = get_vessel(vessel)
|
||||
|
||||
|
||||
if not vessel_id:
|
||||
debug_print(f"❌ vessel 参数无效,必须包含id字段或直接提供容器ID. vessel: {vessel}")
|
||||
raise ValueError("vessel 参数无效,必须包含id字段或直接提供容器ID")
|
||||
|
||||
debug_print("=" * 60)
|
||||
debug_print("🧪 开始生成pH调节协议")
|
||||
debug_print(f"📋 原始参数:")
|
||||
debug_print(f" 🥼 vessel: {vessel} (ID: {vessel_id})")
|
||||
debug_print(f" 📊 ph_value: {ph_value}")
|
||||
debug_print(f" 🧪 reagent: '{reagent}'")
|
||||
debug_print(f" 📦 kwargs: {kwargs}")
|
||||
debug_print("=" * 60)
|
||||
|
||||
|
||||
debug_print(f"pH调节协议: vessel={vessel_id}, ph={ph_value}, reagent='{reagent}'")
|
||||
|
||||
action_sequence = []
|
||||
|
||||
# 从kwargs中获取可选参数,如果没有则使用默认值
|
||||
volume = kwargs.get('volume', 0.0) # 自动估算体积
|
||||
stir = kwargs.get('stir', True) # 默认搅拌
|
||||
stir_speed = kwargs.get('stir_speed', 300.0) # 默认搅拌速度
|
||||
stir_time = kwargs.get('stir_time', 60.0) # 默认搅拌时间
|
||||
settling_time = kwargs.get('settling_time', 30.0) # 默认平衡时间
|
||||
|
||||
debug_print(f"🔧 处理后的参数:")
|
||||
debug_print(f" 📏 volume: {volume}mL (0.0表示自动估算)")
|
||||
debug_print(f" 🌪️ stir: {stir}")
|
||||
debug_print(f" 🔄 stir_speed: {stir_speed}rpm")
|
||||
debug_print(f" ⏱️ stir_time: {stir_time}s")
|
||||
debug_print(f" ⏳ settling_time: {settling_time}s")
|
||||
|
||||
|
||||
# 从kwargs中获取可选参数
|
||||
volume = kwargs.get('volume', 0.0)
|
||||
stir = kwargs.get('stir', True)
|
||||
stir_speed = kwargs.get('stir_speed', 300.0)
|
||||
stir_time = kwargs.get('stir_time', 60.0)
|
||||
settling_time = kwargs.get('settling_time', 30.0)
|
||||
|
||||
# 开始处理
|
||||
action_sequence.append(create_action_log(f"开始调节pH至 {ph_value}", "🧪"))
|
||||
action_sequence.append(create_action_log(f"目标容器: {vessel_id}", "🥼"))
|
||||
action_sequence.append(create_action_log(f"使用试剂: {reagent}", "⚗️"))
|
||||
|
||||
|
||||
# 1. 验证目标容器存在
|
||||
debug_print(f"🔍 步骤1: 验证目标容器...")
|
||||
if vessel_id not in G.nodes():
|
||||
debug_print(f"❌ 目标容器 '{vessel_id}' 不存在于系统中")
|
||||
raise ValueError(f"目标容器 '{vessel_id}' 不存在于系统中")
|
||||
|
||||
debug_print(f"✅ 目标容器验证通过")
|
||||
|
||||
action_sequence.append(create_action_log("目标容器验证通过", "✅"))
|
||||
|
||||
|
||||
# 2. 查找酸碱试剂容器
|
||||
debug_print(f"🔍 步骤2: 查找试剂容器...")
|
||||
action_sequence.append(create_action_log("正在查找试剂容器...", "🔍"))
|
||||
|
||||
try:
|
||||
reagent_vessel = find_acid_base_vessel(G, reagent)
|
||||
debug_print(f"✅ 找到试剂容器: {reagent_vessel}")
|
||||
action_sequence.append(create_action_log(f"找到试剂容器: {reagent_vessel}", "🧪"))
|
||||
except ValueError as e:
|
||||
debug_print(f"❌ 无法找到试剂容器: {str(e)}")
|
||||
action_sequence.append(create_action_log(f"试剂容器查找失败: {str(e)}", "❌"))
|
||||
raise ValueError(f"无法找到试剂 '{reagent}': {str(e)}")
|
||||
|
||||
|
||||
# 3. 体积估算
|
||||
debug_print(f"🔍 步骤3: 体积处理...")
|
||||
if volume <= 0:
|
||||
action_sequence.append(create_action_log("开始自动估算试剂体积", "🧮"))
|
||||
|
||||
# 获取目标容器的体积信息
|
||||
vessel_data = G.nodes[vessel_id].get('data', {})
|
||||
vessel_volume = vessel_data.get('max_volume', 100.0) # 默认100mL
|
||||
debug_print(f"📏 容器最大体积: {vessel_volume}mL")
|
||||
|
||||
vessel_volume = vessel_data.get('max_volume', 100.0)
|
||||
|
||||
estimated_volume = calculate_reagent_volume(ph_value, reagent, vessel_volume)
|
||||
volume = estimated_volume
|
||||
debug_print(f"✅ 自动估算试剂体积: {volume:.2f} mL")
|
||||
action_sequence.append(create_action_log(f"估算试剂体积: {volume:.2f}mL", "📊"))
|
||||
else:
|
||||
debug_print(f"📏 使用指定体积: {volume}mL")
|
||||
action_sequence.append(create_action_log(f"使用指定体积: {volume}mL", "📏"))
|
||||
|
||||
|
||||
# 4. 验证路径存在
|
||||
debug_print(f"🔍 步骤4: 路径验证...")
|
||||
action_sequence.append(create_action_log("验证转移路径...", "🛤️"))
|
||||
|
||||
try:
|
||||
path = nx.shortest_path(G, source=reagent_vessel, target=vessel_id)
|
||||
debug_print(f"✅ 找到路径: {' → '.join(path)}")
|
||||
action_sequence.append(create_action_log(f"找到转移路径: {' → '.join(path)}", "🛤️"))
|
||||
action_sequence.append(create_action_log(f"找到转移路径: {' -> '.join(path)}", "🛤️"))
|
||||
except nx.NetworkXNoPath:
|
||||
debug_print(f"❌ 无法找到转移路径")
|
||||
action_sequence.append(create_action_log("转移路径不存在", "❌"))
|
||||
raise ValueError(f"从试剂容器 '{reagent_vessel}' 到目标容器 '{vessel_id}' 没有可用路径")
|
||||
|
||||
|
||||
# 5. 搅拌器设置
|
||||
debug_print(f"🔍 步骤5: 搅拌器设置...")
|
||||
stirrer_id = None
|
||||
if stir:
|
||||
action_sequence.append(create_action_log("准备启动搅拌器", "🌪️"))
|
||||
@@ -318,7 +223,6 @@ def generate_adjust_ph_protocol(
|
||||
stirrer_id = find_connected_stirrer(G, vessel_id)
|
||||
|
||||
if stirrer_id:
|
||||
debug_print(f"✅ 找到搅拌器 {stirrer_id},启动搅拌")
|
||||
action_sequence.append(create_action_log(f"启动搅拌器 {stirrer_id} (速度: {stir_speed}rpm)", "🔄"))
|
||||
|
||||
action_sequence.append({
|
||||
@@ -338,23 +242,18 @@ def generate_adjust_ph_protocol(
|
||||
"action_kwargs": {"time": 5}
|
||||
})
|
||||
else:
|
||||
debug_print(f"⚠️ 未找到搅拌器,继续执行")
|
||||
action_sequence.append(create_action_log("未找到搅拌器,跳过搅拌", "⚠️"))
|
||||
|
||||
|
||||
except Exception as e:
|
||||
debug_print(f"❌ 搅拌器配置出错: {str(e)}")
|
||||
action_sequence.append(create_action_log(f"搅拌器配置失败: {str(e)}", "❌"))
|
||||
else:
|
||||
debug_print(f"📋 跳过搅拌设置")
|
||||
action_sequence.append(create_action_log("跳过搅拌设置", "⏭️"))
|
||||
|
||||
|
||||
# 6. 试剂添加
|
||||
debug_print(f"🔍 步骤6: 试剂添加...")
|
||||
action_sequence.append(create_action_log(f"开始添加试剂 {volume:.2f}mL", "🚰"))
|
||||
|
||||
# 计算添加时间(pH调节需要缓慢添加)
|
||||
addition_time = max(30.0, volume * 2.0) # 至少30秒,每mL需要2秒
|
||||
debug_print(f"⏱️ 计算添加时间: {addition_time}s (缓慢注入)")
|
||||
addition_time = max(30.0, volume * 2.0)
|
||||
action_sequence.append(create_action_log(f"设置添加时间: {addition_time:.0f}s (缓慢注入)", "⏱️"))
|
||||
|
||||
try:
|
||||
@@ -377,35 +276,28 @@ def generate_adjust_ph_protocol(
|
||||
)
|
||||
|
||||
action_sequence.extend(pump_actions)
|
||||
debug_print(f"✅ 泵协议生成完成,添加了 {len(pump_actions)} 个动作")
|
||||
action_sequence.append(create_action_log(f"试剂转移完成 ({len(pump_actions)} 个操作)", "✅"))
|
||||
|
||||
# 🔧 修复体积运算 - 试剂添加成功后更新容器液体体积
|
||||
debug_print(f"🔧 更新容器液体体积...")
|
||||
|
||||
# 体积运算 - 试剂添加成功后更新容器液体体积
|
||||
if "data" in vessel and "liquid_volume" in vessel["data"]:
|
||||
current_volume = vessel["data"]["liquid_volume"]
|
||||
debug_print(f"📊 添加前容器体积: {current_volume}")
|
||||
|
||||
|
||||
# 处理不同的体积数据格式
|
||||
if isinstance(current_volume, list):
|
||||
if len(current_volume) > 0:
|
||||
# 增加体积(添加试剂)
|
||||
vessel["data"]["liquid_volume"][0] += volume
|
||||
debug_print(f"📊 添加后容器体积: {vessel['data']['liquid_volume'][0]:.2f}mL (+{volume:.2f}mL)")
|
||||
else:
|
||||
# 如果列表为空,创建新的体积记录
|
||||
vessel["data"]["liquid_volume"] = [volume]
|
||||
debug_print(f"📊 初始化容器体积: {volume:.2f}mL")
|
||||
elif isinstance(current_volume, (int, float)):
|
||||
# 直接数值类型
|
||||
vessel["data"]["liquid_volume"] += volume
|
||||
debug_print(f"📊 添加后容器体积: {vessel['data']['liquid_volume']:.2f}mL (+{volume:.2f}mL)")
|
||||
else:
|
||||
debug_print(f"⚠️ 未知的体积数据格式: {type(current_volume)}")
|
||||
debug_print(f"未知的体积数据格式: {type(current_volume)}")
|
||||
# 创建新的体积记录
|
||||
vessel["data"]["liquid_volume"] = volume
|
||||
else:
|
||||
debug_print(f"📊 容器无液体体积数据,创建新记录: {volume:.2f}mL")
|
||||
# 确保vessel有data字段
|
||||
if "data" not in vessel:
|
||||
vessel["data"] = {}
|
||||
@@ -423,19 +315,16 @@ def generate_adjust_ph_protocol(
|
||||
G.nodes[vessel_id]['data']['liquid_volume'] = [volume]
|
||||
else:
|
||||
G.nodes[vessel_id]['data']['liquid_volume'] = current_node_volume + volume
|
||||
|
||||
debug_print(f"✅ 图节点体积数据已更新")
|
||||
|
||||
|
||||
action_sequence.append(create_action_log(f"容器体积已更新 (+{volume:.2f}mL)", "📊"))
|
||||
|
||||
except Exception as e:
|
||||
debug_print(f"❌ 生成泵协议时出错: {str(e)}")
|
||||
debug_print(f"生成泵协议时出错: {str(e)}")
|
||||
action_sequence.append(create_action_log(f"泵协议生成失败: {str(e)}", "❌"))
|
||||
raise ValueError(f"生成泵协议时出错: {str(e)}")
|
||||
|
||||
# 7. 混合搅拌
|
||||
if stir and stirrer_id:
|
||||
debug_print(f"🔍 步骤7: 混合搅拌...")
|
||||
action_sequence.append(create_action_log(f"开始混合搅拌 {stir_time:.0f}s", "🌀"))
|
||||
|
||||
action_sequence.append({
|
||||
@@ -448,14 +337,10 @@ def generate_adjust_ph_protocol(
|
||||
"purpose": f"pH调节: 混合试剂,目标pH={ph_value}"
|
||||
}
|
||||
})
|
||||
|
||||
debug_print(f"✅ 混合搅拌设置完成")
|
||||
else:
|
||||
debug_print(f"⏭️ 跳过混合搅拌")
|
||||
action_sequence.append(create_action_log("跳过混合搅拌", "⏭️"))
|
||||
|
||||
|
||||
# 8. 等待平衡
|
||||
debug_print(f"🔍 步骤8: 反应平衡...")
|
||||
action_sequence.append(create_action_log(f"等待pH平衡 {settling_time:.0f}s", "⚖️"))
|
||||
|
||||
action_sequence.append({
|
||||
@@ -468,17 +353,7 @@ def generate_adjust_ph_protocol(
|
||||
|
||||
# 9. 完成总结
|
||||
total_time = addition_time + stir_time + settling_time
|
||||
|
||||
debug_print("=" * 60)
|
||||
debug_print(f"🎉 pH调节协议生成完成")
|
||||
debug_print(f"📊 协议统计:")
|
||||
debug_print(f" 📋 总动作数: {len(action_sequence)}")
|
||||
debug_print(f" ⏱️ 预计总时间: {total_time:.0f}s ({total_time/60:.1f}分钟)")
|
||||
debug_print(f" 🧪 试剂: {reagent}")
|
||||
debug_print(f" 📏 体积: {volume:.2f}mL")
|
||||
debug_print(f" 📊 目标pH: {ph_value}")
|
||||
debug_print(f" 🥼 目标容器: {vessel_id}")
|
||||
debug_print("=" * 60)
|
||||
debug_print(f"pH调节协议完成: {len(action_sequence)} 个动作, {total_time:.0f}s, {volume:.2f}mL {reagent} → {vessel_id} pH {ph_value}")
|
||||
|
||||
# 添加完成日志
|
||||
summary_msg = f"pH调节协议完成: {vessel_id} → pH {ph_value} (使用 {volume:.2f}mL {reagent})"
|
||||
@@ -510,28 +385,18 @@ def generate_adjust_ph_protocol_stepwise(
|
||||
"""
|
||||
# 🔧 核心修改:从字典中提取容器ID
|
||||
vessel_id = vessel["id"]
|
||||
|
||||
debug_print("=" * 60)
|
||||
debug_print(f"🔄 开始分步pH调节")
|
||||
debug_print(f"📋 分步参数:")
|
||||
debug_print(f" 🥼 vessel: {vessel} (ID: {vessel_id})")
|
||||
debug_print(f" 📊 ph_value: {ph_value}")
|
||||
debug_print(f" 🧪 reagent: {reagent}")
|
||||
debug_print(f" 📏 max_volume: {max_volume}mL")
|
||||
debug_print(f" 🔢 steps: {steps}")
|
||||
debug_print("=" * 60)
|
||||
|
||||
debug_print(f"分步pH调节: vessel={vessel_id}, ph={ph_value}, reagent={reagent}, max_volume={max_volume}mL, steps={steps}")
|
||||
|
||||
action_sequence = []
|
||||
|
||||
# 每步添加的体积
|
||||
step_volume = max_volume / steps
|
||||
debug_print(f"📊 每步体积: {step_volume:.2f}mL")
|
||||
|
||||
action_sequence.append(create_action_log(f"开始分步pH调节 ({steps}步)", "🔄"))
|
||||
action_sequence.append(create_action_log(f"每步添加: {step_volume:.2f}mL", "📏"))
|
||||
|
||||
for i in range(steps):
|
||||
debug_print(f"🔄 执行第 {i+1}/{steps} 步,添加 {step_volume:.2f}mL")
|
||||
action_sequence.append(create_action_log(f"第 {i+1}/{steps} 步开始", "🚀"))
|
||||
|
||||
# 生成单步协议
|
||||
@@ -548,12 +413,10 @@ def generate_adjust_ph_protocol_stepwise(
|
||||
)
|
||||
|
||||
action_sequence.extend(step_actions)
|
||||
debug_print(f"✅ 第 {i+1}/{steps} 步完成,添加了 {len(step_actions)} 个动作")
|
||||
action_sequence.append(create_action_log(f"第 {i+1}/{steps} 步完成", "✅"))
|
||||
|
||||
# 步骤间等待
|
||||
if i < steps - 1:
|
||||
debug_print(f"⏳ 步骤间等待30s")
|
||||
action_sequence.append(create_action_log("步骤间等待...", "⏳"))
|
||||
action_sequence.append({
|
||||
"action_name": "wait",
|
||||
@@ -563,7 +426,7 @@ def generate_adjust_ph_protocol_stepwise(
|
||||
}
|
||||
})
|
||||
|
||||
debug_print(f"🎉 分步pH调节完成,共 {len(action_sequence)} 个动作")
|
||||
debug_print(f"分步pH调节完成: {len(action_sequence)} 个动作")
|
||||
action_sequence.append(create_action_log("分步pH调节全部完成", "🎉"))
|
||||
|
||||
return action_sequence
|
||||
@@ -577,7 +440,7 @@ def generate_acidify_protocol(
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""酸化协议"""
|
||||
vessel_id = vessel["id"]
|
||||
debug_print(f"🍋 生成酸化协议: {vessel_id} → pH {target_ph} (使用 {acid})")
|
||||
debug_print(f"酸化协议: {vessel_id} → pH {target_ph} ({acid})")
|
||||
return generate_adjust_ph_protocol(
|
||||
G, vessel, target_ph, acid
|
||||
)
|
||||
@@ -590,7 +453,7 @@ def generate_basify_protocol(
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""碱化协议"""
|
||||
vessel_id = vessel["id"]
|
||||
debug_print(f"🧂 生成碱化协议: {vessel_id} → pH {target_ph} (使用 {base})")
|
||||
debug_print(f"碱化协议: {vessel_id} → pH {target_ph} ({base})")
|
||||
return generate_adjust_ph_protocol(
|
||||
G, vessel, target_ph, base
|
||||
)
|
||||
@@ -602,7 +465,7 @@ def generate_neutralize_protocol(
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""中和协议(pH=7)"""
|
||||
vessel_id = vessel["id"]
|
||||
debug_print(f"⚖️ 生成中和协议: {vessel_id} → pH 7.0 (使用 {reagent})")
|
||||
debug_print(f"中和协议: {vessel_id} → pH 7.0 ({reagent})")
|
||||
return generate_adjust_ph_protocol(
|
||||
G, vessel, 7.0, reagent
|
||||
)
|
||||
@@ -610,10 +473,7 @@ def generate_neutralize_protocol(
|
||||
# 测试函数
|
||||
def test_adjust_ph_protocol():
|
||||
"""测试pH调节协议"""
|
||||
debug_print("=== ADJUST PH PROTOCOL 增强版测试 ===")
|
||||
|
||||
# 测试体积计算
|
||||
debug_print("🧮 测试体积计算...")
|
||||
test_cases = [
|
||||
(2.0, "hydrochloric acid", 100.0),
|
||||
(4.0, "hydrochloric acid", 100.0),
|
||||
@@ -621,12 +481,12 @@ def test_adjust_ph_protocol():
|
||||
(10.0, "sodium hydroxide", 100.0),
|
||||
(7.0, "unknown reagent", 100.0)
|
||||
]
|
||||
|
||||
|
||||
for ph, reagent, volume in test_cases:
|
||||
result = calculate_reagent_volume(ph, reagent, volume)
|
||||
debug_print(f"📊 {reagent} → pH {ph}: {result:.2f}mL")
|
||||
|
||||
debug_print("✅ 测试完成")
|
||||
debug_print(f"{reagent} → pH {ph}: {result:.2f}mL")
|
||||
|
||||
debug_print("测试完成")
|
||||
|
||||
if __name__ == "__main__":
|
||||
test_adjust_ph_protocol()
|
||||
@@ -1,4 +1,12 @@
|
||||
"""
|
||||
AGV 单物料转运编译器
|
||||
|
||||
从 physical_setup_graph 中查询 AGV 配置(device_roles, route_table),
|
||||
不再硬编码 device_id 和路由表。
|
||||
"""
|
||||
|
||||
import networkx as nx
|
||||
from unilabos.compile._agv_utils import find_agv_config
|
||||
|
||||
|
||||
def generate_agv_transfer_protocol(
|
||||
@@ -17,37 +25,32 @@ def generate_agv_transfer_protocol(
|
||||
from_repo_id = from_repo_["id"]
|
||||
to_repo_id = to_repo_["id"]
|
||||
|
||||
wf_list = {
|
||||
("AiChemEcoHiWo", "zhixing_agv"): {"nav_command" : '{"target" : "LM14"}',
|
||||
"arm_command": '{"task_name" : "camera/250111_biaozhi.urp"}'},
|
||||
("AiChemEcoHiWo", "AGV"): {"nav_command" : '{"target" : "LM14"}',
|
||||
"arm_command": '{"task_name" : "camera/250111_biaozhi.urp"}'},
|
||||
# 从 G 中查询 AGV 配置
|
||||
agv_cfg = find_agv_config(G)
|
||||
device_roles = agv_cfg["device_roles"]
|
||||
route_table = agv_cfg["route_table"]
|
||||
|
||||
("zhixing_agv", "Revvity"): {"nav_command" : '{"target" : "LM13"}',
|
||||
"arm_command": '{"task_name" : "camera/250111_put_board.urp"}'},
|
||||
route_key = f"{from_repo_id}->{to_repo_id}"
|
||||
if route_key not in route_table:
|
||||
raise KeyError(f"AGV 路由表中未找到路线: {route_key},可用路线: {list(route_table.keys())}")
|
||||
|
||||
("AGV", "Revvity"): {"nav_command" : '{"target" : "LM13"}',
|
||||
"arm_command": '{"task_name" : "camera/250111_put_board.urp"}'},
|
||||
route = route_table[route_key]
|
||||
nav_device = device_roles.get("navigator", device_roles.get("nav"))
|
||||
arm_device = device_roles.get("arm")
|
||||
|
||||
("Revvity", "HPLC"): {"nav_command": '{"target" : "LM13"}',
|
||||
"arm_command": '{"task_name" : "camera/250111_hplc.urp"}'},
|
||||
|
||||
("HPLC", "Revvity"): {"nav_command": '{"target" : "LM13"}',
|
||||
"arm_command": '{"task_name" : "camera/250111_lfp.urp"}'},
|
||||
}
|
||||
return [
|
||||
{
|
||||
"device_id": "zhixing_agv",
|
||||
"device_id": nav_device,
|
||||
"action_name": "send_nav_task",
|
||||
"action_kwargs": {
|
||||
"command": wf_list[(from_repo_id, to_repo_id)]["nav_command"]
|
||||
"command": route["nav_command"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"device_id": "zhixing_ur_arm",
|
||||
"device_id": arm_device,
|
||||
"action_name": "move_pos_task",
|
||||
"action_kwargs": {
|
||||
"command": wf_list[(from_repo_id, to_repo_id)]["arm_command"]
|
||||
"command": route.get("arm_command", route.get("arm_place", ""))
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
228
unilabos/compile/batch_transfer_protocol.py
Normal file
@@ -0,0 +1,228 @@
|
||||
"""
|
||||
批量物料转运编译器
|
||||
|
||||
将 BatchTransferProtocol 编译为多批次的 nav → pick × N → nav → place × N 动作序列。
|
||||
自动按 AGV 容量分批,全程维护三方 children dict 的物料系统一致性。
|
||||
"""
|
||||
|
||||
import copy
|
||||
from typing import Any, Dict, List
|
||||
|
||||
import networkx as nx
|
||||
|
||||
from unilabos.compile._agv_utils import find_agv_config, split_batches
|
||||
|
||||
|
||||
def generate_batch_transfer_protocol(
|
||||
G: nx.Graph,
|
||||
from_repo: dict,
|
||||
to_repo: dict,
|
||||
transfer_resources: list,
|
||||
from_positions: list,
|
||||
to_positions: list,
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""编译批量转运协议为可执行的 action steps
|
||||
|
||||
Args:
|
||||
G: 设备图 (physical_setup_graph)
|
||||
from_repo: 来源工站资源 dict({station_id: {..., children: {...}}})
|
||||
to_repo: 目标工站资源 dict(含堆栈和位置信息)
|
||||
transfer_resources: 被转运的物料列表(Resource dict)
|
||||
from_positions: 来源 slot 位置列表(与 transfer_resources 平行)
|
||||
to_positions: 目标 slot 位置列表(与 transfer_resources 平行)
|
||||
|
||||
Returns:
|
||||
action steps 列表,ROS2WorkstationNode 按序执行
|
||||
"""
|
||||
if not transfer_resources:
|
||||
return []
|
||||
|
||||
n = len(transfer_resources)
|
||||
if len(from_positions) != n or len(to_positions) != n:
|
||||
raise ValueError(
|
||||
f"transfer_resources({n}), from_positions({len(from_positions)}), "
|
||||
f"to_positions({len(to_positions)}) 长度不一致"
|
||||
)
|
||||
|
||||
# 组合为内部 transfer_items 便于分批处理
|
||||
transfer_items = []
|
||||
for i in range(n):
|
||||
res = transfer_resources[i] if isinstance(transfer_resources[i], dict) else {}
|
||||
transfer_items.append({
|
||||
"resource_id": res.get("id", res.get("name", "")),
|
||||
"resource_uuid": res.get("sample_id", ""),
|
||||
"from_position": from_positions[i],
|
||||
"to_position": to_positions[i],
|
||||
"resource": res,
|
||||
})
|
||||
|
||||
# 查询 AGV 配置
|
||||
agv_cfg = find_agv_config(G)
|
||||
agv_id = agv_cfg["agv_id"]
|
||||
device_roles = agv_cfg["device_roles"]
|
||||
route_table = agv_cfg["route_table"]
|
||||
capacity = agv_cfg["capacity"]
|
||||
|
||||
if capacity <= 0:
|
||||
raise ValueError(f"AGV {agv_id} 容量为 0,请检查 Warehouse 子节点配置")
|
||||
|
||||
nav_device = device_roles.get("navigator", device_roles.get("nav"))
|
||||
arm_device = device_roles.get("arm")
|
||||
if not nav_device or not arm_device:
|
||||
raise ValueError(f"AGV {agv_id} device_roles 缺少 navigator 或 arm: {device_roles}")
|
||||
|
||||
from_repo_ = list(from_repo.values())[0]
|
||||
to_repo_ = list(to_repo.values())[0]
|
||||
from_station_id = from_repo_["id"]
|
||||
to_station_id = to_repo_["id"]
|
||||
|
||||
# 查找路由
|
||||
route_to_source = _find_route(route_table, agv_id, from_station_id)
|
||||
route_to_target = _find_route(route_table, from_station_id, to_station_id)
|
||||
|
||||
# 构建 AGV carrier 的 children dict(用于 compile 阶段状态追踪)
|
||||
agv_carrier_children: Dict[str, Any] = {}
|
||||
|
||||
# 计算 slot 名称(A01, A02, B01, ...)
|
||||
agv_slot_names = _get_agv_slot_names(G, agv_cfg)
|
||||
|
||||
# 分批
|
||||
batches = split_batches(transfer_items, capacity)
|
||||
|
||||
steps: List[Dict[str, Any]] = []
|
||||
|
||||
for batch_idx, batch in enumerate(batches):
|
||||
is_last_batch = (batch_idx == len(batches) - 1)
|
||||
|
||||
# 阶段 1: AGV 导航到来源工站
|
||||
steps.append({
|
||||
"device_id": nav_device,
|
||||
"action_name": "send_nav_task",
|
||||
"action_kwargs": {
|
||||
"command": route_to_source.get("nav_command", "")
|
||||
},
|
||||
"_comment": f"批次{batch_idx + 1}/{len(batches)}: AGV 导航至来源 {from_station_id}"
|
||||
})
|
||||
|
||||
# 阶段 2: 逐个 pick
|
||||
for item_idx, item in enumerate(batch):
|
||||
from_pos = item["from_position"]
|
||||
slot = agv_slot_names[item_idx] if item_idx < len(agv_slot_names) else f"S{item_idx + 1}"
|
||||
|
||||
# compile 阶段更新 children dict
|
||||
if from_pos in from_repo_.get("children", {}):
|
||||
resource_data = from_repo_["children"].pop(from_pos)
|
||||
resource_data["parent"] = agv_id
|
||||
agv_carrier_children[slot] = resource_data
|
||||
|
||||
steps.append({
|
||||
"device_id": arm_device,
|
||||
"action_name": "move_pos_task",
|
||||
"action_kwargs": {
|
||||
"command": route_to_source.get("arm_pick", route_to_source.get("arm_command", ""))
|
||||
},
|
||||
"_transfer_meta": {
|
||||
"phase": "pick",
|
||||
"resource_uuid": item.get("resource_uuid", ""),
|
||||
"resource_id": item.get("resource_id", ""),
|
||||
"from_parent": from_station_id,
|
||||
"from_position": from_pos,
|
||||
"agv_slot": slot,
|
||||
},
|
||||
"_comment": f"Pick {item.get('resource_id', from_pos)} → AGV.{slot}"
|
||||
})
|
||||
|
||||
# 阶段 3: AGV 导航到目标工站
|
||||
steps.append({
|
||||
"device_id": nav_device,
|
||||
"action_name": "send_nav_task",
|
||||
"action_kwargs": {
|
||||
"command": route_to_target.get("nav_command", "")
|
||||
},
|
||||
"_comment": f"批次{batch_idx + 1}: AGV 导航至目标 {to_station_id}"
|
||||
})
|
||||
|
||||
# 阶段 4: 逐个 place
|
||||
for item_idx, item in enumerate(batch):
|
||||
to_pos = item["to_position"]
|
||||
slot = agv_slot_names[item_idx] if item_idx < len(agv_slot_names) else f"S{item_idx + 1}"
|
||||
|
||||
# compile 阶段更新 children dict
|
||||
if slot in agv_carrier_children:
|
||||
resource_data = agv_carrier_children.pop(slot)
|
||||
resource_data["parent"] = to_repo_["id"]
|
||||
to_repo_["children"][to_pos] = resource_data
|
||||
|
||||
steps.append({
|
||||
"device_id": arm_device,
|
||||
"action_name": "move_pos_task",
|
||||
"action_kwargs": {
|
||||
"command": route_to_target.get("arm_place", route_to_target.get("arm_command", ""))
|
||||
},
|
||||
"_transfer_meta": {
|
||||
"phase": "place",
|
||||
"resource_uuid": item.get("resource_uuid", ""),
|
||||
"resource_id": item.get("resource_id", ""),
|
||||
"to_parent": to_station_id,
|
||||
"to_position": to_pos,
|
||||
"agv_slot": slot,
|
||||
},
|
||||
"_comment": f"Place AGV.{slot} → {to_station_id}.{to_pos}"
|
||||
})
|
||||
|
||||
# 如果还有下一批,AGV 需要返回来源取料
|
||||
if not is_last_batch:
|
||||
steps.append({
|
||||
"device_id": nav_device,
|
||||
"action_name": "send_nav_task",
|
||||
"action_kwargs": {
|
||||
"command": route_to_source.get("nav_command", "")
|
||||
},
|
||||
"_comment": f"AGV 返回来源 {from_station_id} 取下一批"
|
||||
})
|
||||
|
||||
return steps
|
||||
|
||||
|
||||
def _find_route(route_table: Dict[str, Any], from_id: str, to_id: str) -> Dict[str, str]:
|
||||
"""在路由表中查找路线,支持 A->B 和 (A, B) 两种 key 格式"""
|
||||
# 优先 "A->B" 格式
|
||||
key = f"{from_id}->{to_id}"
|
||||
if key in route_table:
|
||||
return route_table[key]
|
||||
# 兼容 tuple key(JSON 中以逗号分隔字符串表示)
|
||||
tuple_key = f"({from_id}, {to_id})"
|
||||
if tuple_key in route_table:
|
||||
return route_table[tuple_key]
|
||||
raise KeyError(f"路由表中未找到: {key},可用路线: {list(route_table.keys())}")
|
||||
|
||||
|
||||
def _get_agv_slot_names(G: nx.Graph, agv_cfg: dict) -> List[str]:
|
||||
"""从设备图中获取 AGV Warehouse 的 slot 名称列表"""
|
||||
agv_id = agv_cfg["agv_id"]
|
||||
neighbors = G.successors(agv_id) if G.is_directed() else G.neighbors(agv_id)
|
||||
for neighbor in neighbors:
|
||||
ndata = G.nodes[neighbor]
|
||||
node_type = ndata.get("type", "")
|
||||
res_content = ndata.get("res_content")
|
||||
if hasattr(res_content, "type"):
|
||||
node_type = res_content.type or node_type
|
||||
elif isinstance(res_content, dict):
|
||||
node_type = res_content.get("type", node_type)
|
||||
if node_type == "warehouse":
|
||||
config = ndata.get("config", {})
|
||||
if hasattr(res_content, "config") and isinstance(res_content.config, dict):
|
||||
config = res_content.config
|
||||
elif isinstance(res_content, dict):
|
||||
config = res_content.get("config", config)
|
||||
num_x = config.get("num_items_x", 1)
|
||||
num_y = config.get("num_items_y", 1)
|
||||
num_z = config.get("num_items_z", 1)
|
||||
# 与 warehouse_factory 一致的命名
|
||||
letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
|
||||
len_x = num_x if num_z == 1 else (num_y if num_x == 1 else num_x)
|
||||
len_y = num_y if num_z == 1 else (num_z if num_x == 1 else num_z)
|
||||
return [f"{letters[j]}{i + 1:02d}" for i in range(len_x) for j in range(len_y)]
|
||||
# 兜底生成通用名称
|
||||
capacity = agv_cfg.get("capacity", 4)
|
||||
return [f"S{i + 1}" for i in range(capacity)]
|
||||
@@ -1,7 +1,9 @@
|
||||
from typing import List, Dict, Any
|
||||
import networkx as nx
|
||||
from .utils.vessel_parser import get_vessel, find_solvent_vessel
|
||||
from .utils.vessel_parser import get_vessel, find_solvent_vessel, find_connected_heatchill
|
||||
from .utils.logger_util import debug_print
|
||||
from .pump_protocol import generate_pump_protocol
|
||||
from .utils.resource_helper import get_resource_liquid_volume
|
||||
|
||||
|
||||
def find_solvent_vessel_by_any_match(G: nx.DiGraph, solvent: str) -> str:
|
||||
@@ -17,43 +19,23 @@ def find_waste_vessel(G: nx.DiGraph) -> str:
|
||||
"""
|
||||
possible_waste_names = [
|
||||
"waste_workup",
|
||||
"flask_waste",
|
||||
"flask_waste",
|
||||
"bottle_waste",
|
||||
"waste",
|
||||
"waste_vessel",
|
||||
"waste_container"
|
||||
]
|
||||
|
||||
|
||||
for waste_name in possible_waste_names:
|
||||
if waste_name in G.nodes():
|
||||
return waste_name
|
||||
|
||||
|
||||
raise ValueError(f"未找到废液容器。尝试了以下名称: {possible_waste_names}")
|
||||
|
||||
|
||||
def find_connected_heatchill(G: nx.DiGraph, vessel: str) -> str:
|
||||
"""
|
||||
查找与指定容器相连的加热冷却设备
|
||||
"""
|
||||
# 查找所有加热冷却设备节点
|
||||
heatchill_nodes = [node for node in G.nodes()
|
||||
if (G.nodes[node].get('class') or '') == 'virtual_heatchill']
|
||||
|
||||
# 检查哪个加热设备与目标容器相连(机械连接)
|
||||
for heatchill in heatchill_nodes:
|
||||
if G.has_edge(heatchill, vessel) or G.has_edge(vessel, heatchill):
|
||||
return heatchill
|
||||
|
||||
# 如果没有直接连接,返回第一个可用的加热设备
|
||||
if heatchill_nodes:
|
||||
return heatchill_nodes[0]
|
||||
|
||||
return None # 没有加热设备也可以工作,只是不能加热
|
||||
|
||||
|
||||
def generate_clean_vessel_protocol(
|
||||
G: nx.DiGraph,
|
||||
vessel: dict, # 🔧 修改:从字符串改为字典类型
|
||||
vessel: dict,
|
||||
solvent: str,
|
||||
volume: float,
|
||||
temp: float,
|
||||
@@ -61,7 +43,7 @@ def generate_clean_vessel_protocol(
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
生成容器清洗操作的协议序列,复用 pump_protocol 的成熟算法
|
||||
|
||||
|
||||
清洗流程:
|
||||
1. 查找溶剂容器和废液容器
|
||||
2. 如果需要加热,启动加热设备
|
||||
@@ -70,63 +52,50 @@ def generate_clean_vessel_protocol(
|
||||
b. (可选) 等待清洗作用时间
|
||||
c. 使用 pump_protocol 将清洗液从目标容器转移到废液容器
|
||||
4. 如果加热了,停止加热
|
||||
|
||||
|
||||
Args:
|
||||
G: 有向图,节点为设备和容器,边为流体管道
|
||||
vessel: 要清洗的容器字典(包含id字段)
|
||||
solvent: 用于清洗的溶剂名称
|
||||
solvent: 用于清洗的溶剂名称
|
||||
volume: 每次清洗使用的溶剂体积
|
||||
temp: 清洗时的温度
|
||||
repeats: 清洗操作的重复次数,默认为 1
|
||||
|
||||
|
||||
Returns:
|
||||
List[Dict[str, Any]]: 容器清洗操作的动作序列
|
||||
|
||||
Raises:
|
||||
ValueError: 当找不到必要的容器或设备时抛出异常
|
||||
|
||||
Examples:
|
||||
clean_protocol = generate_clean_vessel_protocol(G, {"id": "main_reactor"}, "water", 100.0, 60.0, 2)
|
||||
"""
|
||||
# 🔧 核心修改:从字典中提取容器ID
|
||||
vessel_id, vessel_data = get_vessel(vessel)
|
||||
|
||||
|
||||
action_sequence = []
|
||||
|
||||
print(f"CLEAN_VESSEL: 开始生成容器清洗协议")
|
||||
print(f" - 目标容器: {vessel} (ID: {vessel_id})")
|
||||
print(f" - 清洗溶剂: {solvent}")
|
||||
print(f" - 清洗体积: {volume} mL")
|
||||
print(f" - 清洗温度: {temp}°C")
|
||||
print(f" - 重复次数: {repeats}")
|
||||
|
||||
|
||||
debug_print(f"开始生成容器清洗协议: vessel={vessel_id}, solvent={solvent}, volume={volume}mL, temp={temp}°C, repeats={repeats}")
|
||||
|
||||
# 验证目标容器存在
|
||||
if vessel_id not in G.nodes():
|
||||
raise ValueError(f"目标容器 '{vessel_id}' 不存在于系统中")
|
||||
|
||||
|
||||
# 查找溶剂容器
|
||||
try:
|
||||
solvent_vessel = find_solvent_vessel(G, solvent)
|
||||
print(f"CLEAN_VESSEL: 找到溶剂容器: {solvent_vessel}")
|
||||
debug_print(f"找到溶剂容器: {solvent_vessel}")
|
||||
except ValueError as e:
|
||||
raise ValueError(f"无法找到溶剂容器: {str(e)}")
|
||||
|
||||
|
||||
# 查找废液容器
|
||||
try:
|
||||
waste_vessel = find_waste_vessel(G)
|
||||
print(f"CLEAN_VESSEL: 找到废液容器: {waste_vessel}")
|
||||
debug_print(f"找到废液容器: {waste_vessel}")
|
||||
except ValueError as e:
|
||||
raise ValueError(f"无法找到废液容器: {str(e)}")
|
||||
|
||||
|
||||
# 查找加热设备(可选)
|
||||
heatchill_id = find_connected_heatchill(G, vessel_id) # 🔧 使用 vessel_id
|
||||
heatchill_id = find_connected_heatchill(G, vessel_id)
|
||||
if heatchill_id:
|
||||
print(f"CLEAN_VESSEL: 找到加热设备: {heatchill_id}")
|
||||
debug_print(f"找到加热设备: {heatchill_id}")
|
||||
else:
|
||||
print(f"CLEAN_VESSEL: 未找到加热设备,将在室温下清洗")
|
||||
|
||||
# 🔧 新增:记录清洗前的容器状态
|
||||
print(f"CLEAN_VESSEL: 记录清洗前容器状态...")
|
||||
debug_print(f"未找到加热设备,将在室温下清洗")
|
||||
|
||||
# 记录清洗前的容器状态
|
||||
original_liquid_volume = 0.0
|
||||
if "data" in vessel and "liquid_volume" in vessel["data"]:
|
||||
current_volume = vessel["data"]["liquid_volume"]
|
||||
@@ -134,79 +103,69 @@ def generate_clean_vessel_protocol(
|
||||
original_liquid_volume = current_volume[0]
|
||||
elif isinstance(current_volume, (int, float)):
|
||||
original_liquid_volume = current_volume
|
||||
print(f"CLEAN_VESSEL: 清洗前液体体积: {original_liquid_volume:.2f}mL")
|
||||
|
||||
|
||||
# 第一步:如果需要加热且有加热设备,启动加热
|
||||
if temp > 25.0 and heatchill_id:
|
||||
print(f"CLEAN_VESSEL: 启动加热至 {temp}°C")
|
||||
debug_print(f"启动加热至 {temp}°C")
|
||||
heatchill_start_action = {
|
||||
"device_id": heatchill_id,
|
||||
"action_name": "heat_chill_start",
|
||||
"action_kwargs": {
|
||||
"vessel": {"id": vessel_id}, # 🔧 使用 vessel_id
|
||||
"vessel": {"id": vessel_id},
|
||||
"temp": temp,
|
||||
"purpose": f"cleaning with {solvent}"
|
||||
}
|
||||
}
|
||||
action_sequence.append(heatchill_start_action)
|
||||
|
||||
# 等待温度稳定
|
||||
|
||||
wait_action = {
|
||||
"action_name": "wait",
|
||||
"action_kwargs": {"time": 30} # 等待30秒让温度稳定
|
||||
"action_name": "wait",
|
||||
"action_kwargs": {"time": 30}
|
||||
}
|
||||
action_sequence.append(wait_action)
|
||||
|
||||
|
||||
# 第二步:重复清洗操作
|
||||
for repeat in range(repeats):
|
||||
print(f"CLEAN_VESSEL: 执行第 {repeat + 1} 次清洗")
|
||||
|
||||
debug_print(f"执行第 {repeat + 1}/{repeats} 次清洗")
|
||||
|
||||
# 2a. 使用 pump_protocol 将溶剂转移到目标容器
|
||||
print(f"CLEAN_VESSEL: 将 {volume} mL {solvent} 转移到 {vessel_id}")
|
||||
try:
|
||||
# 调用成熟的 pump_protocol 算法
|
||||
add_solvent_actions = generate_pump_protocol(
|
||||
G=G,
|
||||
from_vessel=solvent_vessel,
|
||||
to_vessel=vessel_id, # 🔧 使用 vessel_id
|
||||
to_vessel=vessel_id,
|
||||
volume=volume,
|
||||
flowrate=2.5, # 适中的流速,避免飞溅
|
||||
flowrate=2.5,
|
||||
transfer_flowrate=2.5
|
||||
)
|
||||
action_sequence.extend(add_solvent_actions)
|
||||
|
||||
# 🔧 新增:更新容器体积(添加清洗溶剂)
|
||||
print(f"CLEAN_VESSEL: 更新容器体积 - 添加清洗溶剂 {volume:.2f}mL")
|
||||
|
||||
# 更新容器体积(添加清洗溶剂)
|
||||
if "data" not in vessel:
|
||||
vessel["data"] = {}
|
||||
|
||||
|
||||
if "liquid_volume" in vessel["data"]:
|
||||
current_volume = vessel["data"]["liquid_volume"]
|
||||
if isinstance(current_volume, list):
|
||||
if len(current_volume) > 0:
|
||||
vessel["data"]["liquid_volume"][0] += volume
|
||||
print(f"CLEAN_VESSEL: 添加溶剂后体积: {vessel['data']['liquid_volume'][0]:.2f}mL (+{volume:.2f}mL)")
|
||||
else:
|
||||
vessel["data"]["liquid_volume"] = [volume]
|
||||
print(f"CLEAN_VESSEL: 初始化清洗体积: {volume:.2f}mL")
|
||||
elif isinstance(current_volume, (int, float)):
|
||||
vessel["data"]["liquid_volume"] += volume
|
||||
print(f"CLEAN_VESSEL: 添加溶剂后体积: {vessel['data']['liquid_volume']:.2f}mL (+{volume:.2f}mL)")
|
||||
else:
|
||||
vessel["data"]["liquid_volume"] = volume
|
||||
print(f"CLEAN_VESSEL: 重置体积为: {volume:.2f}mL")
|
||||
else:
|
||||
vessel["data"]["liquid_volume"] = volume
|
||||
print(f"CLEAN_VESSEL: 创建新体积记录: {volume:.2f}mL")
|
||||
|
||||
# 🔧 同时更新图中的容器数据
|
||||
|
||||
# 同时更新图中的容器数据
|
||||
if vessel_id in G.nodes():
|
||||
if 'data' not in G.nodes[vessel_id]:
|
||||
G.nodes[vessel_id]['data'] = {}
|
||||
|
||||
|
||||
vessel_node_data = G.nodes[vessel_id]['data']
|
||||
current_node_volume = vessel_node_data.get('liquid_volume', 0.0)
|
||||
|
||||
|
||||
if isinstance(current_node_volume, list):
|
||||
if len(current_node_volume) > 0:
|
||||
G.nodes[vessel_id]['data']['liquid_volume'][0] += volume
|
||||
@@ -214,58 +173,48 @@ def generate_clean_vessel_protocol(
|
||||
G.nodes[vessel_id]['data']['liquid_volume'] = [volume]
|
||||
else:
|
||||
G.nodes[vessel_id]['data']['liquid_volume'] = current_node_volume + volume
|
||||
|
||||
print(f"CLEAN_VESSEL: 图节点体积数据已更新")
|
||||
|
||||
|
||||
except Exception as e:
|
||||
raise ValueError(f"无法将溶剂转移到容器: {str(e)}")
|
||||
|
||||
# 2b. 等待清洗作用时间(让溶剂充分清洗容器)
|
||||
cleaning_wait_time = 60 if temp > 50.0 else 30 # 高温下等待更久
|
||||
print(f"CLEAN_VESSEL: 等待清洗作用 {cleaning_wait_time} 秒")
|
||||
|
||||
# 2b. 等待清洗作用时间
|
||||
cleaning_wait_time = 60 if temp > 50.0 else 30
|
||||
wait_action = {
|
||||
"action_name": "wait",
|
||||
"action_name": "wait",
|
||||
"action_kwargs": {"time": cleaning_wait_time}
|
||||
}
|
||||
action_sequence.append(wait_action)
|
||||
|
||||
|
||||
# 2c. 使用 pump_protocol 将清洗液转移到废液容器
|
||||
print(f"CLEAN_VESSEL: 将清洗液从 {vessel_id} 转移到废液容器")
|
||||
try:
|
||||
# 调用成熟的 pump_protocol 算法
|
||||
remove_waste_actions = generate_pump_protocol(
|
||||
G=G,
|
||||
from_vessel=vessel_id, # 🔧 使用 vessel_id
|
||||
from_vessel=vessel_id,
|
||||
to_vessel=waste_vessel,
|
||||
volume=volume,
|
||||
flowrate=2.5, # 适中的流速
|
||||
flowrate=2.5,
|
||||
transfer_flowrate=2.5
|
||||
)
|
||||
action_sequence.extend(remove_waste_actions)
|
||||
|
||||
# 🔧 新增:更新容器体积(移除清洗液)
|
||||
print(f"CLEAN_VESSEL: 更新容器体积 - 移除清洗液 {volume:.2f}mL")
|
||||
|
||||
# 更新容器体积(移除清洗液)
|
||||
if "data" in vessel and "liquid_volume" in vessel["data"]:
|
||||
current_volume = vessel["data"]["liquid_volume"]
|
||||
if isinstance(current_volume, list):
|
||||
if len(current_volume) > 0:
|
||||
vessel["data"]["liquid_volume"][0] = max(0.0, vessel["data"]["liquid_volume"][0] - volume)
|
||||
print(f"CLEAN_VESSEL: 移除清洗液后体积: {vessel['data']['liquid_volume'][0]:.2f}mL (-{volume:.2f}mL)")
|
||||
else:
|
||||
vessel["data"]["liquid_volume"] = [0.0]
|
||||
print(f"CLEAN_VESSEL: 重置体积为0mL")
|
||||
elif isinstance(current_volume, (int, float)):
|
||||
vessel["data"]["liquid_volume"] = max(0.0, current_volume - volume)
|
||||
print(f"CLEAN_VESSEL: 移除清洗液后体积: {vessel['data']['liquid_volume']:.2f}mL (-{volume:.2f}mL)")
|
||||
else:
|
||||
vessel["data"]["liquid_volume"] = 0.0
|
||||
print(f"CLEAN_VESSEL: 重置体积为0mL")
|
||||
|
||||
# 🔧 同时更新图中的容器数据
|
||||
|
||||
# 同时更新图中的容器数据
|
||||
if vessel_id in G.nodes():
|
||||
vessel_node_data = G.nodes[vessel_id].get('data', {})
|
||||
current_node_volume = vessel_node_data.get('liquid_volume', 0.0)
|
||||
|
||||
|
||||
if isinstance(current_node_volume, list):
|
||||
if len(current_node_volume) > 0:
|
||||
G.nodes[vessel_id]['data']['liquid_volume'][0] = max(0.0, current_node_volume[0] - volume)
|
||||
@@ -273,34 +222,30 @@ def generate_clean_vessel_protocol(
|
||||
G.nodes[vessel_id]['data']['liquid_volume'] = [0.0]
|
||||
else:
|
||||
G.nodes[vessel_id]['data']['liquid_volume'] = max(0.0, current_node_volume - volume)
|
||||
|
||||
print(f"CLEAN_VESSEL: 图节点体积数据已更新")
|
||||
|
||||
|
||||
except Exception as e:
|
||||
raise ValueError(f"无法将清洗液转移到废液容器: {str(e)}")
|
||||
|
||||
|
||||
# 2d. 清洗循环间的短暂等待
|
||||
if repeat < repeats - 1: # 不是最后一次清洗
|
||||
print(f"CLEAN_VESSEL: 清洗循环间等待")
|
||||
if repeat < repeats - 1:
|
||||
wait_action = {
|
||||
"action_name": "wait",
|
||||
"action_name": "wait",
|
||||
"action_kwargs": {"time": 10}
|
||||
}
|
||||
action_sequence.append(wait_action)
|
||||
|
||||
|
||||
# 第三步:如果加热了,停止加热
|
||||
if temp > 25.0 and heatchill_id:
|
||||
print(f"CLEAN_VESSEL: 停止加热")
|
||||
heatchill_stop_action = {
|
||||
"device_id": heatchill_id,
|
||||
"action_name": "heat_chill_stop",
|
||||
"action_kwargs": {
|
||||
"vessel": {"id": vessel_id}, # 🔧 使用 vessel_id
|
||||
"vessel": {"id": vessel_id},
|
||||
}
|
||||
}
|
||||
action_sequence.append(heatchill_stop_action)
|
||||
|
||||
# 🔧 新增:清洗完成后的状态报告
|
||||
|
||||
# 清洗完成后的状态
|
||||
final_liquid_volume = 0.0
|
||||
if "data" in vessel and "liquid_volume" in vessel["data"]:
|
||||
current_volume = vessel["data"]["liquid_volume"]
|
||||
@@ -308,20 +253,17 @@ def generate_clean_vessel_protocol(
|
||||
final_liquid_volume = current_volume[0]
|
||||
elif isinstance(current_volume, (int, float)):
|
||||
final_liquid_volume = current_volume
|
||||
|
||||
print(f"CLEAN_VESSEL: 清洗完成")
|
||||
print(f" - 清洗前体积: {original_liquid_volume:.2f}mL")
|
||||
print(f" - 清洗后体积: {final_liquid_volume:.2f}mL")
|
||||
print(f" - 生成了 {len(action_sequence)} 个动作")
|
||||
|
||||
|
||||
debug_print(f"清洗完成: {len(action_sequence)} 个动作, 体积 {original_liquid_volume:.2f} -> {final_liquid_volume:.2f}mL")
|
||||
|
||||
return action_sequence
|
||||
|
||||
|
||||
# 便捷函数:常用清洗方案
|
||||
# 便捷函数
|
||||
def generate_quick_clean_protocol(
|
||||
G: nx.DiGraph,
|
||||
vessel: dict, # 🔧 修改:从字符串改为字典类型
|
||||
solvent: str = "water",
|
||||
G: nx.DiGraph,
|
||||
vessel: dict,
|
||||
solvent: str = "water",
|
||||
volume: float = 100.0
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""快速清洗:室温,单次清洗"""
|
||||
@@ -329,9 +271,9 @@ def generate_quick_clean_protocol(
|
||||
|
||||
|
||||
def generate_thorough_clean_protocol(
|
||||
G: nx.DiGraph,
|
||||
vessel: dict, # 🔧 修改:从字符串改为字典类型
|
||||
solvent: str = "water",
|
||||
G: nx.DiGraph,
|
||||
vessel: dict,
|
||||
solvent: str = "water",
|
||||
volume: float = 150.0,
|
||||
temp: float = 60.0
|
||||
) -> List[Dict[str, Any]]:
|
||||
@@ -340,13 +282,13 @@ def generate_thorough_clean_protocol(
|
||||
|
||||
|
||||
def generate_organic_clean_protocol(
|
||||
G: nx.DiGraph,
|
||||
vessel: dict, # 🔧 修改:从字符串改为字典类型
|
||||
G: nx.DiGraph,
|
||||
vessel: dict,
|
||||
volume: float = 100.0
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""有机清洗:先用有机溶剂,再用水清洗"""
|
||||
action_sequence = []
|
||||
|
||||
|
||||
# 第一步:有机溶剂清洗
|
||||
try:
|
||||
organic_actions = generate_clean_vessel_protocol(
|
||||
@@ -354,96 +296,71 @@ def generate_organic_clean_protocol(
|
||||
)
|
||||
action_sequence.extend(organic_actions)
|
||||
except ValueError:
|
||||
# 如果没有丙酮,尝试乙醇
|
||||
try:
|
||||
organic_actions = generate_clean_vessel_protocol(
|
||||
G, vessel, "ethanol", volume, 25.0, 2
|
||||
)
|
||||
action_sequence.extend(organic_actions)
|
||||
except ValueError:
|
||||
print("警告:未找到有机溶剂,跳过有机清洗步骤")
|
||||
|
||||
debug_print("未找到有机溶剂,跳过有机清洗步骤")
|
||||
|
||||
# 第二步:水清洗
|
||||
water_actions = generate_clean_vessel_protocol(
|
||||
G, vessel, "water", volume, 25.0, 2
|
||||
)
|
||||
action_sequence.extend(water_actions)
|
||||
|
||||
|
||||
return action_sequence
|
||||
|
||||
|
||||
def get_vessel_liquid_volume(G: nx.DiGraph, vessel: str) -> float:
|
||||
"""获取容器中的液体体积(修复版)"""
|
||||
if vessel not in G.nodes():
|
||||
return 0.0
|
||||
|
||||
vessel_data = G.nodes[vessel].get('data', {})
|
||||
liquids = vessel_data.get('liquid', [])
|
||||
|
||||
total_volume = 0.0
|
||||
for liquid in liquids:
|
||||
if isinstance(liquid, dict):
|
||||
# 支持两种格式:新格式 (name, volume) 和旧格式 (liquid_type, liquid_volume)
|
||||
volume = liquid.get('volume') or liquid.get('liquid_volume', 0.0)
|
||||
total_volume += volume
|
||||
|
||||
return total_volume
|
||||
|
||||
|
||||
def get_vessel_liquid_types(G: nx.DiGraph, vessel: str) -> List[str]:
|
||||
"""获取容器中所有液体的类型"""
|
||||
if vessel not in G.nodes():
|
||||
return []
|
||||
|
||||
|
||||
vessel_data = G.nodes[vessel].get('data', {})
|
||||
liquids = vessel_data.get('liquid', [])
|
||||
|
||||
|
||||
liquid_types = []
|
||||
for liquid in liquids:
|
||||
if isinstance(liquid, dict):
|
||||
# 支持两种格式的液体类型字段
|
||||
liquid_type = liquid.get('liquid_type') or liquid.get('name', '')
|
||||
if liquid_type:
|
||||
liquid_types.append(liquid_type)
|
||||
|
||||
|
||||
return liquid_types
|
||||
|
||||
|
||||
def find_vessel_by_content(G: nx.DiGraph, content: str) -> List[str]:
|
||||
"""
|
||||
根据内容物查找所有匹配的容器
|
||||
返回匹配容器的ID列表
|
||||
"""
|
||||
matching_vessels = []
|
||||
|
||||
|
||||
for node_id in G.nodes():
|
||||
if G.nodes[node_id].get('type') == 'container':
|
||||
# 检查容器名称匹配
|
||||
node_name = G.nodes[node_id].get('name', '').lower()
|
||||
if content.lower() in node_id.lower() or content.lower() in node_name:
|
||||
matching_vessels.append(node_id)
|
||||
continue
|
||||
|
||||
# 检查液体类型匹配
|
||||
|
||||
vessel_data = G.nodes[node_id].get('data', {})
|
||||
liquids = vessel_data.get('liquid', [])
|
||||
config_data = G.nodes[node_id].get('config', {})
|
||||
|
||||
# 检查 reagent_name 和 config.reagent
|
||||
|
||||
reagent_name = vessel_data.get('reagent_name', '').lower()
|
||||
config_reagent = config_data.get('reagent', '').lower()
|
||||
|
||||
if (content.lower() == reagent_name or
|
||||
|
||||
if (content.lower() == reagent_name or
|
||||
content.lower() == config_reagent):
|
||||
matching_vessels.append(node_id)
|
||||
continue
|
||||
|
||||
# 检查液体列表
|
||||
|
||||
for liquid in liquids:
|
||||
if isinstance(liquid, dict):
|
||||
liquid_type = liquid.get('liquid_type') or liquid.get('name', '')
|
||||
if liquid_type.lower() == content.lower():
|
||||
matching_vessels.append(node_id)
|
||||
break
|
||||
|
||||
return matching_vessels
|
||||
|
||||
return matching_vessels
|
||||
|
||||
@@ -1,402 +1,19 @@
|
||||
from functools import partial
|
||||
|
||||
import networkx as nx
|
||||
import re
|
||||
import logging
|
||||
from typing import List, Dict, Any, Union
|
||||
|
||||
from .utils.vessel_parser import get_vessel
|
||||
from .utils.logger_util import action_log
|
||||
from .utils.logger_util import debug_print, action_log
|
||||
from .utils.unit_parser import parse_volume_input, parse_mass_input, parse_time_input, parse_temperature_input
|
||||
from .utils.vessel_parser import get_vessel, find_solvent_vessel, find_connected_heatchill, find_connected_stirrer, find_solid_dispenser
|
||||
from .pump_protocol import generate_pump_protocol_with_rinsing
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def debug_print(message):
|
||||
"""调试输出"""
|
||||
logger.info(f"[DISSOLVE] {message}")
|
||||
|
||||
# 🆕 创建进度日志动作
|
||||
# 创建进度日志动作
|
||||
create_action_log = partial(action_log, prefix="[DISSOLVE]")
|
||||
|
||||
def parse_volume_input(volume_input: Union[str, float]) -> float:
|
||||
"""
|
||||
解析体积输入,支持带单位的字符串
|
||||
|
||||
Args:
|
||||
volume_input: 体积输入(如 "10 mL", "?", 10.0)
|
||||
|
||||
Returns:
|
||||
float: 体积(毫升)
|
||||
"""
|
||||
if isinstance(volume_input, (int, float)):
|
||||
debug_print(f"📏 体积输入为数值: {volume_input}")
|
||||
return float(volume_input)
|
||||
|
||||
if not volume_input or not str(volume_input).strip():
|
||||
debug_print(f"⚠️ 体积输入为空,返回0.0mL")
|
||||
return 0.0
|
||||
|
||||
volume_str = str(volume_input).lower().strip()
|
||||
debug_print(f"🔍 解析体积输入: '{volume_str}'")
|
||||
|
||||
# 处理未知体积
|
||||
if volume_str in ['?', 'unknown', 'tbd', 'to be determined']:
|
||||
default_volume = 50.0 # 默认50mL
|
||||
debug_print(f"❓ 检测到未知体积,使用默认值: {default_volume}mL 🎯")
|
||||
return default_volume
|
||||
|
||||
# 移除空格并提取数字和单位
|
||||
volume_clean = re.sub(r'\s+', '', volume_str)
|
||||
|
||||
# 匹配数字和单位的正则表达式
|
||||
match = re.match(r'([0-9]*\.?[0-9]+)\s*(ml|l|μl|ul|microliter|milliliter|liter)?', volume_clean)
|
||||
|
||||
if not match:
|
||||
debug_print(f"❌ 无法解析体积: '{volume_str}',使用默认值50mL")
|
||||
return 50.0
|
||||
|
||||
value = float(match.group(1))
|
||||
unit = match.group(2) or 'ml' # 默认单位为毫升
|
||||
|
||||
# 转换为毫升
|
||||
if unit in ['l', 'liter']:
|
||||
volume = value * 1000.0 # L -> mL
|
||||
debug_print(f"🔄 体积转换: {value}L → {volume}mL")
|
||||
elif unit in ['μl', 'ul', 'microliter']:
|
||||
volume = value / 1000.0 # μL -> mL
|
||||
debug_print(f"🔄 体积转换: {value}μL → {volume}mL")
|
||||
else: # ml, milliliter 或默认
|
||||
volume = value # 已经是mL
|
||||
debug_print(f"✅ 体积已为mL: {volume}mL")
|
||||
|
||||
return volume
|
||||
|
||||
def parse_mass_input(mass_input: Union[str, float]) -> float:
|
||||
"""
|
||||
解析质量输入,支持带单位的字符串
|
||||
|
||||
Args:
|
||||
mass_input: 质量输入(如 "2.9 g", "?", 2.5)
|
||||
|
||||
Returns:
|
||||
float: 质量(克)
|
||||
"""
|
||||
if isinstance(mass_input, (int, float)):
|
||||
debug_print(f"⚖️ 质量输入为数值: {mass_input}g")
|
||||
return float(mass_input)
|
||||
|
||||
if not mass_input or not str(mass_input).strip():
|
||||
debug_print(f"⚠️ 质量输入为空,返回0.0g")
|
||||
return 0.0
|
||||
|
||||
mass_str = str(mass_input).lower().strip()
|
||||
debug_print(f"🔍 解析质量输入: '{mass_str}'")
|
||||
|
||||
# 处理未知质量
|
||||
if mass_str in ['?', 'unknown', 'tbd', 'to be determined']:
|
||||
default_mass = 1.0 # 默认1g
|
||||
debug_print(f"❓ 检测到未知质量,使用默认值: {default_mass}g 🎯")
|
||||
return default_mass
|
||||
|
||||
# 移除空格并提取数字和单位
|
||||
mass_clean = re.sub(r'\s+', '', mass_str)
|
||||
|
||||
# 匹配数字和单位的正则表达式
|
||||
match = re.match(r'([0-9]*\.?[0-9]+)\s*(g|mg|kg|gram|milligram|kilogram)?', mass_clean)
|
||||
|
||||
if not match:
|
||||
debug_print(f"❌ 无法解析质量: '{mass_str}',返回0.0g")
|
||||
return 0.0
|
||||
|
||||
value = float(match.group(1))
|
||||
unit = match.group(2) or 'g' # 默认单位为克
|
||||
|
||||
# 转换为克
|
||||
if unit in ['mg', 'milligram']:
|
||||
mass = value / 1000.0 # mg -> g
|
||||
debug_print(f"🔄 质量转换: {value}mg → {mass}g")
|
||||
elif unit in ['kg', 'kilogram']:
|
||||
mass = value * 1000.0 # kg -> g
|
||||
debug_print(f"🔄 质量转换: {value}kg → {mass}g")
|
||||
else: # g, gram 或默认
|
||||
mass = value # 已经是g
|
||||
debug_print(f"✅ 质量已为g: {mass}g")
|
||||
|
||||
return mass
|
||||
|
||||
def parse_time_input(time_input: Union[str, float]) -> float:
|
||||
"""
|
||||
解析时间输入,支持带单位的字符串
|
||||
|
||||
Args:
|
||||
time_input: 时间输入(如 "30 min", "1 h", "?", 60.0)
|
||||
|
||||
Returns:
|
||||
float: 时间(秒)
|
||||
"""
|
||||
if isinstance(time_input, (int, float)):
|
||||
debug_print(f"⏱️ 时间输入为数值: {time_input}秒")
|
||||
return float(time_input)
|
||||
|
||||
if not time_input or not str(time_input).strip():
|
||||
debug_print(f"⚠️ 时间输入为空,返回0秒")
|
||||
return 0.0
|
||||
|
||||
time_str = str(time_input).lower().strip()
|
||||
debug_print(f"🔍 解析时间输入: '{time_str}'")
|
||||
|
||||
# 处理未知时间
|
||||
if time_str in ['?', 'unknown', 'tbd']:
|
||||
default_time = 600.0 # 默认10分钟
|
||||
debug_print(f"❓ 检测到未知时间,使用默认值: {default_time}s (10分钟) ⏰")
|
||||
return default_time
|
||||
|
||||
# 移除空格并提取数字和单位
|
||||
time_clean = re.sub(r'\s+', '', time_str)
|
||||
|
||||
# 匹配数字和单位的正则表达式
|
||||
match = re.match(r'([0-9]*\.?[0-9]+)\s*(s|sec|second|min|minute|h|hr|hour|d|day)?', time_clean)
|
||||
|
||||
if not match:
|
||||
debug_print(f"❌ 无法解析时间: '{time_str}',返回0s")
|
||||
return 0.0
|
||||
|
||||
value = float(match.group(1))
|
||||
unit = match.group(2) or 's' # 默认单位为秒
|
||||
|
||||
# 转换为秒
|
||||
if unit in ['min', 'minute']:
|
||||
time_sec = value * 60.0 # min -> s
|
||||
debug_print(f"🔄 时间转换: {value}分钟 → {time_sec}秒")
|
||||
elif unit in ['h', 'hr', 'hour']:
|
||||
time_sec = value * 3600.0 # h -> s
|
||||
debug_print(f"🔄 时间转换: {value}小时 → {time_sec}秒")
|
||||
elif unit in ['d', 'day']:
|
||||
time_sec = value * 86400.0 # d -> s
|
||||
debug_print(f"🔄 时间转换: {value}天 → {time_sec}秒")
|
||||
else: # s, sec, second 或默认
|
||||
time_sec = value # 已经是s
|
||||
debug_print(f"✅ 时间已为秒: {time_sec}秒")
|
||||
|
||||
return time_sec
|
||||
|
||||
def parse_temperature_input(temp_input: Union[str, float]) -> float:
|
||||
"""
|
||||
解析温度输入,支持带单位的字符串
|
||||
|
||||
Args:
|
||||
temp_input: 温度输入(如 "60 °C", "room temperature", "?", 25.0)
|
||||
|
||||
Returns:
|
||||
float: 温度(摄氏度)
|
||||
"""
|
||||
if isinstance(temp_input, (int, float)):
|
||||
debug_print(f"🌡️ 温度输入为数值: {temp_input}°C")
|
||||
return float(temp_input)
|
||||
|
||||
if not temp_input or not str(temp_input).strip():
|
||||
debug_print(f"⚠️ 温度输入为空,使用默认室温25°C")
|
||||
return 25.0 # 默认室温
|
||||
|
||||
temp_str = str(temp_input).lower().strip()
|
||||
debug_print(f"🔍 解析温度输入: '{temp_str}'")
|
||||
|
||||
# 处理特殊温度描述
|
||||
temp_aliases = {
|
||||
'room temperature': 25.0,
|
||||
'rt': 25.0,
|
||||
'ambient': 25.0,
|
||||
'cold': 4.0,
|
||||
'ice': 0.0,
|
||||
'reflux': 80.0, # 默认回流温度
|
||||
'?': 25.0,
|
||||
'unknown': 25.0
|
||||
}
|
||||
|
||||
if temp_str in temp_aliases:
|
||||
result = temp_aliases[temp_str]
|
||||
debug_print(f"🏷️ 温度别名解析: '{temp_str}' → {result}°C")
|
||||
return result
|
||||
|
||||
# 移除空格并提取数字和单位
|
||||
temp_clean = re.sub(r'\s+', '', temp_str)
|
||||
|
||||
# 匹配数字和单位的正则表达式
|
||||
match = re.match(r'([0-9]*\.?[0-9]+)\s*(°c|c|celsius|°f|f|fahrenheit|k|kelvin)?', temp_clean)
|
||||
|
||||
if not match:
|
||||
debug_print(f"❌ 无法解析温度: '{temp_str}',使用默认值25°C")
|
||||
return 25.0
|
||||
|
||||
value = float(match.group(1))
|
||||
unit = match.group(2) or 'c' # 默认单位为摄氏度
|
||||
|
||||
# 转换为摄氏度
|
||||
if unit in ['°f', 'f', 'fahrenheit']:
|
||||
temp_c = (value - 32) * 5/9 # F -> C
|
||||
debug_print(f"🔄 温度转换: {value}°F → {temp_c:.1f}°C")
|
||||
elif unit in ['k', 'kelvin']:
|
||||
temp_c = value - 273.15 # K -> C
|
||||
debug_print(f"🔄 温度转换: {value}K → {temp_c:.1f}°C")
|
||||
else: # °c, c, celsius 或默认
|
||||
temp_c = value # 已经是C
|
||||
debug_print(f"✅ 温度已为°C: {temp_c}°C")
|
||||
|
||||
return temp_c
|
||||
|
||||
def find_solvent_vessel(G: nx.DiGraph, solvent: str) -> str:
|
||||
"""增强版溶剂容器查找,支持多种匹配模式"""
|
||||
debug_print(f"🔍 开始查找溶剂 '{solvent}' 的容器...")
|
||||
|
||||
# 🔧 方法1:直接搜索 data.reagent_name 和 config.reagent
|
||||
debug_print(f"📋 方法1: 搜索reagent字段...")
|
||||
for node in G.nodes():
|
||||
node_data = G.nodes[node].get('data', {})
|
||||
node_type = G.nodes[node].get('type', '')
|
||||
config_data = G.nodes[node].get('config', {})
|
||||
|
||||
# 只搜索容器类型的节点
|
||||
if node_type == 'container':
|
||||
reagent_name = node_data.get('reagent_name', '').lower()
|
||||
config_reagent = config_data.get('reagent', '').lower()
|
||||
|
||||
# 精确匹配
|
||||
if reagent_name == solvent.lower() or config_reagent == solvent.lower():
|
||||
debug_print(f"✅ 通过reagent字段精确匹配到容器: {node} 🎯")
|
||||
return node
|
||||
|
||||
# 模糊匹配
|
||||
if (solvent.lower() in reagent_name and reagent_name) or \
|
||||
(solvent.lower() in config_reagent and config_reagent):
|
||||
debug_print(f"✅ 通过reagent字段模糊匹配到容器: {node} 🔍")
|
||||
return node
|
||||
|
||||
# 🔧 方法2:常见的容器命名规则
|
||||
debug_print(f"📋 方法2: 使用命名规则查找...")
|
||||
solvent_clean = solvent.lower().replace(' ', '_').replace('-', '_')
|
||||
possible_names = [
|
||||
solvent_clean,
|
||||
f"flask_{solvent_clean}",
|
||||
f"bottle_{solvent_clean}",
|
||||
f"vessel_{solvent_clean}",
|
||||
f"{solvent_clean}_flask",
|
||||
f"{solvent_clean}_bottle",
|
||||
f"solvent_{solvent_clean}",
|
||||
f"reagent_{solvent_clean}",
|
||||
f"reagent_bottle_{solvent_clean}",
|
||||
f"reagent_bottle_1", # 通用试剂瓶
|
||||
f"reagent_bottle_2",
|
||||
f"reagent_bottle_3"
|
||||
]
|
||||
|
||||
debug_print(f"🔍 尝试的容器名称: {possible_names[:5]}... (共{len(possible_names)}个)")
|
||||
|
||||
for name in possible_names:
|
||||
if name in G.nodes():
|
||||
node_type = G.nodes[name].get('type', '')
|
||||
if node_type == 'container':
|
||||
debug_print(f"✅ 通过命名规则找到容器: {name} 📝")
|
||||
return name
|
||||
|
||||
# 🔧 方法3:节点名称模糊匹配
|
||||
debug_print(f"📋 方法3: 节点名称模糊匹配...")
|
||||
for node_id in G.nodes():
|
||||
node_data = G.nodes[node_id]
|
||||
if node_data.get('type') == 'container':
|
||||
# 检查节点名称是否包含溶剂名称
|
||||
if solvent_clean in node_id.lower():
|
||||
debug_print(f"✅ 通过节点名称模糊匹配到容器: {node_id} 🔍")
|
||||
return node_id
|
||||
|
||||
# 检查液体类型匹配
|
||||
vessel_data = node_data.get('data', {})
|
||||
liquids = vessel_data.get('liquid', [])
|
||||
for liquid in liquids:
|
||||
if isinstance(liquid, dict):
|
||||
liquid_type = liquid.get('liquid_type') or liquid.get('name', '')
|
||||
if liquid_type.lower() == solvent.lower():
|
||||
debug_print(f"✅ 通过液体类型匹配到容器: {node_id} 💧")
|
||||
return node_id
|
||||
|
||||
# 🔧 方法4:使用第一个试剂瓶作为备选
|
||||
debug_print(f"📋 方法4: 查找备选试剂瓶...")
|
||||
for node_id in G.nodes():
|
||||
node_data = G.nodes[node_id]
|
||||
if (node_data.get('type') == 'container' and
|
||||
('reagent' in node_id.lower() or 'bottle' in node_id.lower() or 'flask' in node_id.lower())):
|
||||
debug_print(f"⚠️ 未找到专用容器,使用备选试剂瓶: {node_id} 🔄")
|
||||
return node_id
|
||||
|
||||
debug_print(f"❌ 所有方法都失败了,无法找到容器!")
|
||||
raise ValueError(f"找不到溶剂 '{solvent}' 对应的容器")
|
||||
|
||||
def find_connected_heatchill(G: nx.DiGraph, vessel: str) -> str:
|
||||
"""查找连接到指定容器的加热搅拌器"""
|
||||
debug_print(f"🔍 查找连接到容器 '{vessel}' 的加热搅拌器...")
|
||||
|
||||
heatchill_nodes = []
|
||||
for node in G.nodes():
|
||||
node_class = G.nodes[node].get('class', '').lower()
|
||||
if 'heatchill' in node_class:
|
||||
heatchill_nodes.append(node)
|
||||
debug_print(f"📋 发现加热搅拌器: {node}")
|
||||
|
||||
debug_print(f"📊 共找到 {len(heatchill_nodes)} 个加热搅拌器")
|
||||
|
||||
# 查找连接到容器的加热器
|
||||
for heatchill in heatchill_nodes:
|
||||
if G.has_edge(heatchill, vessel) or G.has_edge(vessel, heatchill):
|
||||
debug_print(f"✅ 找到连接的加热搅拌器: {heatchill} 🔗")
|
||||
return heatchill
|
||||
|
||||
# 返回第一个加热器
|
||||
if heatchill_nodes:
|
||||
debug_print(f"⚠️ 未找到直接连接的加热搅拌器,使用第一个: {heatchill_nodes[0]} 🔄")
|
||||
return heatchill_nodes[0]
|
||||
|
||||
debug_print(f"❌ 未找到任何加热搅拌器")
|
||||
return ""
|
||||
|
||||
def find_connected_stirrer(G: nx.DiGraph, vessel: str) -> str:
|
||||
"""查找连接到指定容器的搅拌器"""
|
||||
debug_print(f"🔍 查找连接到容器 '{vessel}' 的搅拌器...")
|
||||
|
||||
stirrer_nodes = []
|
||||
for node in G.nodes():
|
||||
node_class = G.nodes[node].get('class', '').lower()
|
||||
if 'stirrer' in node_class:
|
||||
stirrer_nodes.append(node)
|
||||
debug_print(f"📋 发现搅拌器: {node}")
|
||||
|
||||
debug_print(f"📊 共找到 {len(stirrer_nodes)} 个搅拌器")
|
||||
|
||||
# 查找连接到容器的搅拌器
|
||||
for stirrer in stirrer_nodes:
|
||||
if G.has_edge(stirrer, vessel) or G.has_edge(vessel, stirrer):
|
||||
debug_print(f"✅ 找到连接的搅拌器: {stirrer} 🔗")
|
||||
return stirrer
|
||||
|
||||
# 返回第一个搅拌器
|
||||
if stirrer_nodes:
|
||||
debug_print(f"⚠️ 未找到直接连接的搅拌器,使用第一个: {stirrer_nodes[0]} 🔄")
|
||||
return stirrer_nodes[0]
|
||||
|
||||
debug_print(f"❌ 未找到任何搅拌器")
|
||||
return ""
|
||||
|
||||
def find_solid_dispenser(G: nx.DiGraph) -> str:
|
||||
"""查找固体加样器"""
|
||||
debug_print(f"🔍 查找固体加样器...")
|
||||
|
||||
for node in G.nodes():
|
||||
node_class = G.nodes[node].get('class', '').lower()
|
||||
if 'solid_dispenser' in node_class or 'dispenser' in node_class:
|
||||
debug_print(f"✅ 找到固体加样器: {node} 🥄")
|
||||
return node
|
||||
|
||||
debug_print(f"❌ 未找到固体加样器")
|
||||
return ""
|
||||
|
||||
def generate_dissolve_protocol(
|
||||
G: nx.DiGraph,
|
||||
vessel: dict, # 🔧 修改:从字符串改为字典类型
|
||||
@@ -436,43 +53,21 @@ def generate_dissolve_protocol(
|
||||
- mol: "0.12 mol", "16.2 mmol"
|
||||
"""
|
||||
|
||||
# 🔧 核心修改:从字典中提取容器ID
|
||||
# 从字典中提取容器ID
|
||||
vessel_id, vessel_data = get_vessel(vessel)
|
||||
|
||||
debug_print("=" * 60)
|
||||
debug_print("🧪 开始生成溶解协议")
|
||||
debug_print(f"📋 原始参数:")
|
||||
debug_print(f" 🥼 vessel: {vessel} (ID: {vessel_id})")
|
||||
debug_print(f" 💧 solvent: '{solvent}'")
|
||||
debug_print(f" 📏 volume: {volume} (类型: {type(volume)})")
|
||||
debug_print(f" ⚖️ mass: {mass} (类型: {type(mass)})")
|
||||
debug_print(f" 🌡️ temp: {temp} (类型: {type(temp)})")
|
||||
debug_print(f" ⏱️ time: {time} (类型: {type(time)})")
|
||||
debug_print(f" 🧪 reagent: '{reagent}'")
|
||||
debug_print(f" 🧬 mol: '{mol}'")
|
||||
debug_print(f" 🎯 event: '{event}'")
|
||||
debug_print(f" 📦 kwargs: {kwargs}") # 显示额外参数
|
||||
debug_print("=" * 60)
|
||||
|
||||
|
||||
debug_print(f"溶解协议: vessel={vessel_id}, solvent='{solvent}', volume={volume}, "
|
||||
f"mass={mass}, temp={temp}, time={time}")
|
||||
|
||||
action_sequence = []
|
||||
|
||||
# === 参数验证 ===
|
||||
debug_print("🔍 步骤1: 参数验证...")
|
||||
action_sequence.append(create_action_log(f"开始溶解操作 - 容器: {vessel_id}", "🎬"))
|
||||
|
||||
|
||||
if not vessel_id:
|
||||
debug_print("❌ vessel 参数不能为空")
|
||||
raise ValueError("vessel 参数不能为空")
|
||||
|
||||
if vessel_id not in G.nodes():
|
||||
debug_print(f"❌ 容器 '{vessel_id}' 不存在于系统中")
|
||||
raise ValueError(f"容器 '{vessel_id}' 不存在于系统中")
|
||||
|
||||
debug_print("✅ 基本参数验证通过")
|
||||
action_sequence.append(create_action_log("参数验证通过", "✅"))
|
||||
|
||||
# 🔧 新增:记录溶解前的容器状态
|
||||
debug_print("🔍 记录溶解前容器状态...")
|
||||
|
||||
# 记录溶解前的容器状态
|
||||
original_liquid_volume = 0.0
|
||||
if "data" in vessel and "liquid_volume" in vessel["data"]:
|
||||
current_volume = vessel["data"]["liquid_volume"]
|
||||
@@ -480,30 +75,16 @@ def generate_dissolve_protocol(
|
||||
original_liquid_volume = current_volume[0]
|
||||
elif isinstance(current_volume, (int, float)):
|
||||
original_liquid_volume = current_volume
|
||||
debug_print(f"📊 溶解前液体体积: {original_liquid_volume:.2f}mL")
|
||||
|
||||
# === 🔧 关键修复:参数解析 ===
|
||||
debug_print("🔍 步骤2: 参数解析...")
|
||||
action_sequence.append(create_action_log("正在解析溶解参数...", "🔍"))
|
||||
|
||||
# 解析各种参数为数值
|
||||
|
||||
# === 参数解析 ===
|
||||
final_volume = parse_volume_input(volume)
|
||||
final_mass = parse_mass_input(mass)
|
||||
final_temp = parse_temperature_input(temp)
|
||||
final_time = parse_time_input(time)
|
||||
|
||||
debug_print(f"📊 解析结果:")
|
||||
debug_print(f" 📏 体积: {final_volume}mL")
|
||||
debug_print(f" ⚖️ 质量: {final_mass}g")
|
||||
debug_print(f" 🌡️ 温度: {final_temp}°C")
|
||||
debug_print(f" ⏱️ 时间: {final_time}s")
|
||||
debug_print(f" 🧪 试剂: '{reagent}'")
|
||||
debug_print(f" 🧬 摩尔: '{mol}'")
|
||||
debug_print(f" 🎯 事件: '{event}'")
|
||||
|
||||
|
||||
debug_print(f"参数解析: vol={final_volume}mL, mass={final_mass}g, temp={final_temp}°C, time={final_time}s")
|
||||
|
||||
# === 判断溶解类型 ===
|
||||
debug_print("🔍 步骤3: 判断溶解类型...")
|
||||
action_sequence.append(create_action_log("正在判断溶解类型...", "🔍"))
|
||||
|
||||
# 判断是固体溶解还是液体溶解
|
||||
is_solid_dissolve = (final_mass > 0 or (mol and mol.strip() != "") or (reagent and reagent.strip() != ""))
|
||||
@@ -515,49 +96,31 @@ def generate_dissolve_protocol(
|
||||
final_volume = 50.0
|
||||
if not solvent:
|
||||
solvent = "water" # 默认溶剂
|
||||
debug_print("⚠️ 未明确指定溶解参数,默认为50mL水溶解")
|
||||
debug_print("未明确指定溶解参数,默认为50mL水溶解")
|
||||
|
||||
dissolve_type = "固体溶解" if is_solid_dissolve else "液体溶解"
|
||||
dissolve_emoji = "🧂" if is_solid_dissolve else "💧"
|
||||
debug_print(f"📋 溶解类型: {dissolve_type} {dissolve_emoji}")
|
||||
|
||||
action_sequence.append(create_action_log(f"确定溶解类型: {dissolve_type} {dissolve_emoji}", "📋"))
|
||||
|
||||
debug_print(f"溶解类型: {dissolve_type}")
|
||||
|
||||
action_sequence.append(create_action_log(f"溶解类型: {dissolve_type}", "📋"))
|
||||
|
||||
# === 查找设备 ===
|
||||
debug_print("🔍 步骤4: 查找设备...")
|
||||
action_sequence.append(create_action_log("正在查找相关设备...", "🔍"))
|
||||
|
||||
# 查找加热搅拌器
|
||||
heatchill_id = find_connected_heatchill(G, vessel_id)
|
||||
stirrer_id = find_connected_stirrer(G, vessel_id)
|
||||
|
||||
# 优先使用加热搅拌器,否则使用独立搅拌器
|
||||
stir_device_id = heatchill_id or stirrer_id
|
||||
|
||||
debug_print(f"📊 设备映射:")
|
||||
debug_print(f" 🔥 加热器: '{heatchill_id}'")
|
||||
debug_print(f" 🌪️ 搅拌器: '{stirrer_id}'")
|
||||
debug_print(f" 🎯 使用设备: '{stir_device_id}'")
|
||||
|
||||
if heatchill_id:
|
||||
action_sequence.append(create_action_log(f"找到加热搅拌器: {heatchill_id}", "🔥"))
|
||||
elif stirrer_id:
|
||||
action_sequence.append(create_action_log(f"找到搅拌器: {stirrer_id}", "🌪️"))
|
||||
else:
|
||||
debug_print(f"设备: heatchill='{heatchill_id}', stirrer='{stirrer_id}'")
|
||||
|
||||
if not stir_device_id:
|
||||
action_sequence.append(create_action_log("未找到搅拌设备,将跳过搅拌", "⚠️"))
|
||||
|
||||
# === 执行溶解流程 ===
|
||||
debug_print("🔍 步骤5: 执行溶解流程...")
|
||||
|
||||
try:
|
||||
# 步骤5.1: 启动加热搅拌(如果需要)
|
||||
# 启动加热搅拌(如果需要)
|
||||
if stir_device_id and (final_temp > 25.0 or final_time > 0 or stir_speed > 0):
|
||||
debug_print(f"🔍 5.1: 启动加热搅拌,温度: {final_temp}°C")
|
||||
action_sequence.append(create_action_log(f"准备加热搅拌 (目标温度: {final_temp}°C)", "🔥"))
|
||||
|
||||
|
||||
if heatchill_id and (final_temp > 25.0 or final_time > 0):
|
||||
# 使用加热搅拌器
|
||||
action_sequence.append(create_action_log(f"启动加热搅拌器 {heatchill_id}", "🔥"))
|
||||
|
||||
heatchill_action = {
|
||||
"device_id": heatchill_id,
|
||||
@@ -573,7 +136,6 @@ def generate_dissolve_protocol(
|
||||
# 等待温度稳定
|
||||
if final_temp > 25.0:
|
||||
wait_time = min(60, abs(final_temp - 25.0) * 1.5)
|
||||
action_sequence.append(create_action_log(f"等待温度稳定 ({wait_time:.0f}秒)", "⏳"))
|
||||
action_sequence.append({
|
||||
"action_name": "wait",
|
||||
"action_kwargs": {"time": wait_time}
|
||||
@@ -581,7 +143,6 @@ def generate_dissolve_protocol(
|
||||
|
||||
elif stirrer_id:
|
||||
# 使用独立搅拌器
|
||||
action_sequence.append(create_action_log(f"启动搅拌器 {stirrer_id} (速度: {stir_speed}rpm)", "🌪️"))
|
||||
|
||||
stir_action = {
|
||||
"device_id": stirrer_id,
|
||||
@@ -593,9 +154,8 @@ def generate_dissolve_protocol(
|
||||
}
|
||||
}
|
||||
action_sequence.append(stir_action)
|
||||
|
||||
|
||||
# 等待搅拌稳定
|
||||
action_sequence.append(create_action_log("等待搅拌稳定...", "⏳"))
|
||||
action_sequence.append({
|
||||
"action_name": "wait",
|
||||
"action_kwargs": {"time": 5}
|
||||
@@ -603,12 +163,8 @@ def generate_dissolve_protocol(
|
||||
|
||||
if is_solid_dissolve:
|
||||
# === 固体溶解路径 ===
|
||||
debug_print(f"🔍 5.2: 使用固体溶解路径")
|
||||
action_sequence.append(create_action_log("开始固体溶解流程", "🧂"))
|
||||
|
||||
solid_dispenser = find_solid_dispenser(G)
|
||||
if solid_dispenser:
|
||||
action_sequence.append(create_action_log(f"找到固体加样器: {solid_dispenser}", "🥄"))
|
||||
|
||||
# 固体加样
|
||||
add_kwargs = {
|
||||
@@ -620,42 +176,27 @@ def generate_dissolve_protocol(
|
||||
|
||||
if final_mass > 0:
|
||||
add_kwargs["mass"] = str(final_mass)
|
||||
action_sequence.append(create_action_log(f"准备添加固体: {final_mass}g", "⚖️"))
|
||||
if mol and mol.strip():
|
||||
add_kwargs["mol"] = mol
|
||||
action_sequence.append(create_action_log(f"按摩尔数添加: {mol}", "🧬"))
|
||||
|
||||
action_sequence.append(create_action_log("开始固体加样操作", "🥄"))
|
||||
|
||||
action_sequence.append({
|
||||
"device_id": solid_dispenser,
|
||||
"action_name": "add_solid",
|
||||
"action_kwargs": add_kwargs
|
||||
})
|
||||
|
||||
debug_print(f"✅ 固体加样完成")
|
||||
action_sequence.append(create_action_log("固体加样完成", "✅"))
|
||||
|
||||
# 🔧 新增:固体溶解体积运算 - 固体本身不会显著增加体积,但可能有少量变化
|
||||
debug_print(f"🔧 固体溶解 - 体积变化很小,主要是质量变化")
|
||||
# 固体通常不会显著改变液体体积,这里只记录日志
|
||||
action_sequence.append(create_action_log(f"固体已添加: {final_mass}g", "📊"))
|
||||
|
||||
# 固体溶解体积运算 - 固体本身不会显著增加体积
|
||||
|
||||
else:
|
||||
debug_print("⚠️ 未找到固体加样器,跳过固体添加")
|
||||
debug_print("未找到固体加样器,跳过固体添加")
|
||||
action_sequence.append(create_action_log("未找到固体加样器,无法添加固体", "❌"))
|
||||
|
||||
elif is_liquid_dissolve:
|
||||
# === 液体溶解路径 ===
|
||||
debug_print(f"🔍 5.3: 使用液体溶解路径")
|
||||
action_sequence.append(create_action_log("开始液体溶解流程", "💧"))
|
||||
|
||||
# 查找溶剂容器
|
||||
action_sequence.append(create_action_log("正在查找溶剂容器...", "🔍"))
|
||||
try:
|
||||
solvent_vessel = find_solvent_vessel(G, solvent)
|
||||
action_sequence.append(create_action_log(f"找到溶剂容器: {solvent_vessel}", "🧪"))
|
||||
except ValueError as e:
|
||||
debug_print(f"⚠️ {str(e)},跳过溶剂添加")
|
||||
debug_print(f"溶剂容器查找失败: {str(e)},跳过溶剂添加")
|
||||
action_sequence.append(create_action_log(f"溶剂容器查找失败: {str(e)}", "❌"))
|
||||
solvent_vessel = None
|
||||
|
||||
@@ -663,10 +204,7 @@ def generate_dissolve_protocol(
|
||||
# 计算流速 - 溶解时通常用较慢的速度,避免飞溅
|
||||
flowrate = 1.0 # 较慢的注入速度
|
||||
transfer_flowrate = 0.5 # 较慢的转移速度
|
||||
|
||||
action_sequence.append(create_action_log(f"设置流速: {flowrate}mL/min (缓慢注入)", "⚡"))
|
||||
action_sequence.append(create_action_log(f"开始转移 {final_volume}mL {solvent}", "🚰"))
|
||||
|
||||
|
||||
# 调用pump protocol
|
||||
pump_actions = generate_pump_protocol_with_rinsing(
|
||||
G=G,
|
||||
@@ -688,12 +226,9 @@ def generate_dissolve_protocol(
|
||||
**kwargs
|
||||
)
|
||||
action_sequence.extend(pump_actions)
|
||||
debug_print(f"✅ 溶剂转移完成,添加了 {len(pump_actions)} 个动作")
|
||||
action_sequence.append(create_action_log(f"溶剂转移完成 ({len(pump_actions)} 个操作)", "✅"))
|
||||
|
||||
# 🔧 新增:液体溶解体积运算 - 添加溶剂后更新容器体积
|
||||
debug_print(f"🔧 更新容器液体体积 - 添加溶剂 {final_volume:.2f}mL")
|
||||
|
||||
|
||||
# 液体溶解体积运算 - 添加溶剂后更新容器体积
|
||||
|
||||
# 确保vessel有data字段
|
||||
if "data" not in vessel:
|
||||
vessel["data"] = {}
|
||||
@@ -703,19 +238,14 @@ def generate_dissolve_protocol(
|
||||
if isinstance(current_volume, list):
|
||||
if len(current_volume) > 0:
|
||||
vessel["data"]["liquid_volume"][0] += final_volume
|
||||
debug_print(f"📊 添加溶剂后体积: {vessel['data']['liquid_volume'][0]:.2f}mL (+{final_volume:.2f}mL)")
|
||||
else:
|
||||
vessel["data"]["liquid_volume"] = [final_volume]
|
||||
debug_print(f"📊 初始化溶解体积: {final_volume:.2f}mL")
|
||||
elif isinstance(current_volume, (int, float)):
|
||||
vessel["data"]["liquid_volume"] += final_volume
|
||||
debug_print(f"📊 添加溶剂后体积: {vessel['data']['liquid_volume']:.2f}mL (+{final_volume:.2f}mL)")
|
||||
else:
|
||||
vessel["data"]["liquid_volume"] = final_volume
|
||||
debug_print(f"📊 重置体积为: {final_volume:.2f}mL")
|
||||
else:
|
||||
vessel["data"]["liquid_volume"] = final_volume
|
||||
debug_print(f"📊 创建新体积记录: {final_volume:.2f}mL")
|
||||
|
||||
# 🔧 同时更新图中的容器数据
|
||||
if vessel_id in G.nodes():
|
||||
@@ -732,27 +262,19 @@ def generate_dissolve_protocol(
|
||||
G.nodes[vessel_id]['data']['liquid_volume'] = [final_volume]
|
||||
else:
|
||||
G.nodes[vessel_id]['data']['liquid_volume'] = current_node_volume + final_volume
|
||||
|
||||
debug_print(f"✅ 图节点体积数据已更新")
|
||||
|
||||
action_sequence.append(create_action_log(f"容器体积已更新 (+{final_volume:.2f}mL)", "📊"))
|
||||
|
||||
|
||||
# 溶剂添加后等待
|
||||
action_sequence.append(create_action_log("溶剂添加后短暂等待...", "⏳"))
|
||||
action_sequence.append({
|
||||
"action_name": "wait",
|
||||
"action_kwargs": {"time": 5}
|
||||
})
|
||||
|
||||
# 步骤5.4: 等待溶解完成
|
||||
# 等待溶解完成
|
||||
if final_time > 0:
|
||||
debug_print(f"🔍 5.4: 等待溶解完成 - {final_time}s")
|
||||
wait_minutes = final_time / 60
|
||||
action_sequence.append(create_action_log(f"开始溶解等待 ({wait_minutes:.1f}分钟)", "⏰"))
|
||||
|
||||
|
||||
if heatchill_id:
|
||||
# 使用定时加热搅拌
|
||||
action_sequence.append(create_action_log(f"使用加热搅拌器进行定时溶解", "🔥"))
|
||||
|
||||
dissolve_action = {
|
||||
"device_id": heatchill_id,
|
||||
@@ -770,7 +292,6 @@ def generate_dissolve_protocol(
|
||||
|
||||
elif stirrer_id:
|
||||
# 使用定时搅拌
|
||||
action_sequence.append(create_action_log(f"使用搅拌器进行定时溶解", "🌪️"))
|
||||
|
||||
stir_action = {
|
||||
"device_id": stirrer_id,
|
||||
@@ -787,7 +308,6 @@ def generate_dissolve_protocol(
|
||||
|
||||
else:
|
||||
# 简单等待
|
||||
action_sequence.append(create_action_log(f"简单等待溶解完成", "⏳"))
|
||||
action_sequence.append({
|
||||
"action_name": "wait",
|
||||
"action_kwargs": {"time": final_time}
|
||||
@@ -795,9 +315,7 @@ def generate_dissolve_protocol(
|
||||
|
||||
# 步骤5.5: 停止加热搅拌(如果需要)
|
||||
if heatchill_id and final_time == 0 and final_temp > 25.0:
|
||||
debug_print(f"🔍 5.5: 停止加热器")
|
||||
action_sequence.append(create_action_log("停止加热搅拌器", "🛑"))
|
||||
|
||||
|
||||
stop_action = {
|
||||
"device_id": heatchill_id,
|
||||
"action_name": "heat_chill_stop",
|
||||
@@ -808,7 +326,7 @@ def generate_dissolve_protocol(
|
||||
action_sequence.append(stop_action)
|
||||
|
||||
except Exception as e:
|
||||
debug_print(f"❌ 溶解流程执行失败: {str(e)}")
|
||||
debug_print(f"溶解流程执行失败: {str(e)}")
|
||||
action_sequence.append(create_action_log(f"溶解流程失败: {str(e)}", "❌"))
|
||||
# 添加错误日志
|
||||
action_sequence.append({
|
||||
@@ -829,23 +347,8 @@ def generate_dissolve_protocol(
|
||||
final_liquid_volume = current_volume
|
||||
|
||||
# === 最终结果 ===
|
||||
debug_print("=" * 60)
|
||||
debug_print(f"🎉 溶解协议生成完成")
|
||||
debug_print(f"📊 协议统计:")
|
||||
debug_print(f" 📋 总动作数: {len(action_sequence)}")
|
||||
debug_print(f" 🥼 容器: {vessel_id}")
|
||||
debug_print(f" {dissolve_emoji} 溶解类型: {dissolve_type}")
|
||||
if is_liquid_dissolve:
|
||||
debug_print(f" 💧 溶剂: {solvent} ({final_volume}mL)")
|
||||
if is_solid_dissolve:
|
||||
debug_print(f" 🧪 试剂: {reagent}")
|
||||
debug_print(f" ⚖️ 质量: {final_mass}g")
|
||||
debug_print(f" 🧬 摩尔: {mol}")
|
||||
debug_print(f" 🌡️ 温度: {final_temp}°C")
|
||||
debug_print(f" ⏱️ 时间: {final_time}s")
|
||||
debug_print(f" 📊 溶解前体积: {original_liquid_volume:.2f}mL")
|
||||
debug_print(f" 📊 溶解后体积: {final_liquid_volume:.2f}mL")
|
||||
debug_print("=" * 60)
|
||||
debug_print(f"溶解协议完成: {vessel_id}, 类型={dissolve_type}, "
|
||||
f"动作数={len(action_sequence)}, 体积={original_liquid_volume:.2f}→{final_liquid_volume:.2f}mL")
|
||||
|
||||
# 添加完成日志
|
||||
summary_msg = f"溶解协议完成: {vessel_id}"
|
||||
@@ -854,7 +357,7 @@ def generate_dissolve_protocol(
|
||||
if is_solid_dissolve:
|
||||
summary_msg += f" (溶解 {final_mass}g {reagent})"
|
||||
|
||||
action_sequence.append(create_action_log(summary_msg, "🎉"))
|
||||
action_sequence.append(create_action_log(summary_msg, "✅"))
|
||||
|
||||
return action_sequence
|
||||
|
||||
@@ -866,7 +369,7 @@ def dissolve_solid_by_mass(G: nx.DiGraph, vessel: dict, reagent: str, mass: Unio
|
||||
temp: Union[str, float] = 25.0, time: Union[str, float] = "10 min") -> List[Dict[str, Any]]:
|
||||
"""按质量溶解固体"""
|
||||
vessel_id = vessel["id"]
|
||||
debug_print(f"🧂 快速固体溶解: {reagent} ({mass}) → {vessel_id}")
|
||||
debug_print(f"快速固体溶解: {reagent} ({mass}) → {vessel_id}")
|
||||
return generate_dissolve_protocol(
|
||||
G, vessel,
|
||||
mass=mass,
|
||||
@@ -879,7 +382,7 @@ def dissolve_solid_by_moles(G: nx.DiGraph, vessel: dict, reagent: str, mol: str,
|
||||
temp: Union[str, float] = 25.0, time: Union[str, float] = "10 min") -> List[Dict[str, Any]]:
|
||||
"""按摩尔数溶解固体"""
|
||||
vessel_id = vessel["id"]
|
||||
debug_print(f"🧬 按摩尔数溶解固体: {reagent} ({mol}) → {vessel_id}")
|
||||
debug_print(f"按摩尔数溶解固体: {reagent} ({mol}) → {vessel_id}")
|
||||
return generate_dissolve_protocol(
|
||||
G, vessel,
|
||||
mol=mol,
|
||||
@@ -892,7 +395,7 @@ def dissolve_with_solvent(G: nx.DiGraph, vessel: dict, solvent: str, volume: Uni
|
||||
temp: Union[str, float] = 25.0, time: Union[str, float] = "5 min") -> List[Dict[str, Any]]:
|
||||
"""用溶剂溶解"""
|
||||
vessel_id = vessel["id"]
|
||||
debug_print(f"💧 溶剂溶解: {solvent} ({volume}) → {vessel_id}")
|
||||
debug_print(f"溶剂溶解: {solvent} ({volume}) → {vessel_id}")
|
||||
return generate_dissolve_protocol(
|
||||
G, vessel,
|
||||
solvent=solvent,
|
||||
@@ -904,7 +407,7 @@ def dissolve_with_solvent(G: nx.DiGraph, vessel: dict, solvent: str, volume: Uni
|
||||
def dissolve_at_room_temp(G: nx.DiGraph, vessel: dict, solvent: str, volume: Union[str, float]) -> List[Dict[str, Any]]:
|
||||
"""室温溶解"""
|
||||
vessel_id = vessel["id"]
|
||||
debug_print(f"🌡️ 室温溶解: {solvent} ({volume}) → {vessel_id}")
|
||||
debug_print(f"室温溶解: {solvent} ({volume}) → {vessel_id}")
|
||||
return generate_dissolve_protocol(
|
||||
G, vessel,
|
||||
solvent=solvent,
|
||||
@@ -917,7 +420,7 @@ def dissolve_with_heating(G: nx.DiGraph, vessel: dict, solvent: str, volume: Uni
|
||||
temp: Union[str, float] = "60 °C", time: Union[str, float] = "15 min") -> List[Dict[str, Any]]:
|
||||
"""加热溶解"""
|
||||
vessel_id = vessel["id"]
|
||||
debug_print(f"🔥 加热溶解: {solvent} ({volume}) → {vessel_id} @ {temp}")
|
||||
debug_print(f"加热溶解: {solvent} ({volume}) → {vessel_id} @ {temp}")
|
||||
return generate_dissolve_protocol(
|
||||
G, vessel,
|
||||
solvent=solvent,
|
||||
@@ -929,37 +432,31 @@ def dissolve_with_heating(G: nx.DiGraph, vessel: dict, solvent: str, volume: Uni
|
||||
# 测试函数
|
||||
def test_dissolve_protocol():
|
||||
"""测试溶解协议的各种参数解析"""
|
||||
debug_print("=== DISSOLVE PROTOCOL 增强版测试 ===")
|
||||
|
||||
# 测试体积解析
|
||||
debug_print("💧 测试体积解析...")
|
||||
volumes = ["10 mL", "?", 10.0, "1 L", "500 μL"]
|
||||
for vol in volumes:
|
||||
result = parse_volume_input(vol)
|
||||
debug_print(f"📏 体积解析: {vol} → {result}mL")
|
||||
|
||||
debug_print(f"体积解析: {vol} → {result}mL")
|
||||
|
||||
# 测试质量解析
|
||||
debug_print("⚖️ 测试质量解析...")
|
||||
masses = ["2.9 g", "?", 2.5, "500 mg"]
|
||||
for mass in masses:
|
||||
result = parse_mass_input(mass)
|
||||
debug_print(f"⚖️ 质量解析: {mass} → {result}g")
|
||||
|
||||
debug_print(f"质量解析: {mass} → {result}g")
|
||||
|
||||
# 测试温度解析
|
||||
debug_print("🌡️ 测试温度解析...")
|
||||
temps = ["60 °C", "room temperature", "?", 25.0, "reflux"]
|
||||
for temp in temps:
|
||||
result = parse_temperature_input(temp)
|
||||
debug_print(f"🌡️ 温度解析: {temp} → {result}°C")
|
||||
|
||||
debug_print(f"温度解析: {temp} → {result}°C")
|
||||
|
||||
# 测试时间解析
|
||||
debug_print("⏱️ 测试时间解析...")
|
||||
times = ["30 min", "1 h", "?", 60.0]
|
||||
for time in times:
|
||||
result = parse_time_input(time)
|
||||
debug_print(f"⏱️ 时间解析: {time} → {result}s")
|
||||
|
||||
debug_print("✅ 测试完成")
|
||||
debug_print(f"时间解析: {time} → {result}s")
|
||||
|
||||
debug_print("测试完成")
|
||||
|
||||
if __name__ == "__main__":
|
||||
test_dissolve_protocol()
|
||||
@@ -1,87 +1,40 @@
|
||||
import networkx as nx
|
||||
from typing import List, Dict, Any
|
||||
|
||||
from unilabos.compile.utils.vessel_parser import get_vessel
|
||||
|
||||
|
||||
def find_connected_heater(G: nx.DiGraph, vessel: str) -> str:
|
||||
"""
|
||||
查找与容器相连的加热器
|
||||
|
||||
Args:
|
||||
G: 网络图
|
||||
vessel: 容器名称
|
||||
|
||||
Returns:
|
||||
str: 加热器ID,如果没有则返回None
|
||||
"""
|
||||
print(f"DRY: 正在查找与容器 '{vessel}' 相连的加热器...")
|
||||
|
||||
# 查找所有加热器节点
|
||||
heater_nodes = [node for node in G.nodes()
|
||||
if ('heater' in node.lower() or
|
||||
'heat' in node.lower() or
|
||||
G.nodes[node].get('class') == 'virtual_heatchill' or
|
||||
G.nodes[node].get('type') == 'heater')]
|
||||
|
||||
print(f"DRY: 找到的加热器节点: {heater_nodes}")
|
||||
|
||||
# 检查是否有加热器与目标容器相连
|
||||
for heater in heater_nodes:
|
||||
if G.has_edge(heater, vessel) or G.has_edge(vessel, heater):
|
||||
print(f"DRY: 找到与容器 '{vessel}' 相连的加热器: {heater}")
|
||||
return heater
|
||||
|
||||
# 如果没有直接连接,查找距离最近的加热器
|
||||
for heater in heater_nodes:
|
||||
try:
|
||||
path = nx.shortest_path(G, source=heater, target=vessel)
|
||||
if len(path) <= 3: # 最多2个中间节点
|
||||
print(f"DRY: 找到距离较近的加热器: {heater}, 路径: {' → '.join(path)}")
|
||||
return heater
|
||||
except nx.NetworkXNoPath:
|
||||
continue
|
||||
|
||||
print(f"DRY: 未找到与容器 '{vessel}' 相连的加热器")
|
||||
return None
|
||||
from .utils.vessel_parser import get_vessel, find_connected_heatchill
|
||||
from .utils.logger_util import debug_print
|
||||
|
||||
|
||||
def generate_dry_protocol(
|
||||
G: nx.DiGraph,
|
||||
vessel: dict, # 🔧 修改:从字符串改为字典类型
|
||||
compound: str = "", # 🔧 修改:参数顺序调整,并设置默认值
|
||||
**kwargs # 接收其他可能的参数但不使用
|
||||
vessel: dict,
|
||||
compound: str = "",
|
||||
**kwargs
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
生成干燥协议序列
|
||||
|
||||
|
||||
Args:
|
||||
G: 有向图,节点为容器和设备
|
||||
vessel: 目标容器字典(从XDL传入)
|
||||
compound: 化合物名称(从XDL传入,可选)
|
||||
**kwargs: 其他可选参数,但不使用
|
||||
|
||||
|
||||
Returns:
|
||||
List[Dict[str, Any]]: 动作序列
|
||||
"""
|
||||
# 🔧 核心修改:从字典中提取容器ID
|
||||
vessel_id, vessel_data = get_vessel(vessel)
|
||||
|
||||
|
||||
action_sequence = []
|
||||
|
||||
|
||||
# 默认参数
|
||||
dry_temp = 60.0 # 默认干燥温度 60°C
|
||||
dry_time = 3600.0 # 默认干燥时间 1小时(3600秒)
|
||||
simulation_time = 60.0 # 模拟时间 1分钟
|
||||
|
||||
print(f"🌡️ DRY: 开始生成干燥协议 ✨")
|
||||
print(f" 🥽 vessel: {vessel} (ID: {vessel_id})")
|
||||
print(f" 🧪 化合物: {compound or '未指定'}")
|
||||
print(f" 🔥 干燥温度: {dry_temp}°C")
|
||||
print(f" ⏰ 干燥时间: {dry_time/60:.0f} 分钟")
|
||||
|
||||
# 🔧 新增:记录干燥前的容器状态
|
||||
print(f"🔍 记录干燥前容器状态...")
|
||||
dry_temp = 60.0
|
||||
dry_time = 3600.0
|
||||
simulation_time = 60.0
|
||||
|
||||
debug_print(f"开始生成干燥协议: vessel={vessel_id}, compound={compound or '未指定'}, temp={dry_temp}°C")
|
||||
|
||||
# 记录干燥前的容器状态
|
||||
original_liquid_volume = 0.0
|
||||
if "data" in vessel and "liquid_volume" in vessel["data"]:
|
||||
current_volume = vessel["data"]["liquid_volume"]
|
||||
@@ -89,39 +42,30 @@ def generate_dry_protocol(
|
||||
original_liquid_volume = current_volume[0]
|
||||
elif isinstance(current_volume, (int, float)):
|
||||
original_liquid_volume = current_volume
|
||||
print(f"📊 干燥前液体体积: {original_liquid_volume:.2f}mL")
|
||||
|
||||
|
||||
# 1. 验证目标容器存在
|
||||
print(f"\n📋 步骤1: 验证目标容器 '{vessel_id}' 是否存在...")
|
||||
if vessel_id not in G.nodes():
|
||||
print(f"⚠️ DRY: 警告 - 容器 '{vessel_id}' 不存在于系统中,跳过干燥 😢")
|
||||
debug_print(f"容器 '{vessel_id}' 不存在于系统中,跳过干燥")
|
||||
return action_sequence
|
||||
print(f"✅ 容器 '{vessel_id}' 验证通过!")
|
||||
|
||||
|
||||
# 2. 查找相连的加热器
|
||||
print(f"\n🔍 步骤2: 查找与容器相连的加热器...")
|
||||
heater_id = find_connected_heater(G, vessel_id) # 🔧 使用 vessel_id
|
||||
|
||||
heater_id = find_connected_heatchill(G, vessel_id)
|
||||
|
||||
if heater_id is None:
|
||||
print(f"😭 DRY: 警告 - 未找到与容器 '{vessel_id}' 相连的加热器,跳过干燥")
|
||||
print(f"🎭 添加模拟干燥动作...")
|
||||
# 添加一个等待动作,表示干燥过程(模拟)
|
||||
debug_print(f"未找到与容器 '{vessel_id}' 相连的加热器,添加模拟干燥动作")
|
||||
action_sequence.append({
|
||||
"action_name": "wait",
|
||||
"action_kwargs": {
|
||||
"time": 10.0, # 模拟等待时间
|
||||
"time": 10.0,
|
||||
"description": f"模拟干燥 {compound or '化合物'} (无加热器可用)"
|
||||
}
|
||||
})
|
||||
|
||||
# 🔧 新增:模拟干燥的体积变化(溶剂蒸发)
|
||||
print(f"🔧 模拟干燥过程的体积减少...")
|
||||
|
||||
# 模拟干燥的体积变化
|
||||
if original_liquid_volume > 0:
|
||||
# 假设干燥过程中损失10%的体积(溶剂蒸发)
|
||||
volume_loss = original_liquid_volume * 0.1
|
||||
new_volume = max(0.0, original_liquid_volume - volume_loss)
|
||||
|
||||
# 更新vessel字典中的体积
|
||||
|
||||
if "data" in vessel and "liquid_volume" in vessel["data"]:
|
||||
current_volume = vessel["data"]["liquid_volume"]
|
||||
if isinstance(current_volume, list):
|
||||
@@ -133,15 +77,14 @@ def generate_dry_protocol(
|
||||
vessel["data"]["liquid_volume"] = new_volume
|
||||
else:
|
||||
vessel["data"]["liquid_volume"] = new_volume
|
||||
|
||||
# 🔧 同时更新图中的容器数据
|
||||
|
||||
if vessel_id in G.nodes():
|
||||
if 'data' not in G.nodes[vessel_id]:
|
||||
G.nodes[vessel_id]['data'] = {}
|
||||
|
||||
|
||||
vessel_node_data = G.nodes[vessel_id]['data']
|
||||
current_node_volume = vessel_node_data.get('liquid_volume', 0.0)
|
||||
|
||||
|
||||
if isinstance(current_node_volume, list):
|
||||
if len(current_node_volume) > 0:
|
||||
G.nodes[vessel_id]['data']['liquid_volume'][0] = new_volume
|
||||
@@ -149,33 +92,27 @@ def generate_dry_protocol(
|
||||
G.nodes[vessel_id]['data']['liquid_volume'] = [new_volume]
|
||||
else:
|
||||
G.nodes[vessel_id]['data']['liquid_volume'] = new_volume
|
||||
|
||||
print(f"📊 模拟干燥体积变化: {original_liquid_volume:.2f}mL → {new_volume:.2f}mL (-{volume_loss:.2f}mL)")
|
||||
|
||||
print(f"📄 DRY: 协议生成完成,共 {len(action_sequence)} 个动作 🎯")
|
||||
|
||||
debug_print(f"模拟干燥体积变化: {original_liquid_volume:.2f}mL -> {new_volume:.2f}mL")
|
||||
|
||||
debug_print(f"协议生成完成,共 {len(action_sequence)} 个动作")
|
||||
return action_sequence
|
||||
|
||||
print(f"🎉 找到加热器: {heater_id}!")
|
||||
|
||||
|
||||
debug_print(f"找到加热器: {heater_id}")
|
||||
|
||||
# 3. 启动加热器进行干燥
|
||||
print(f"\n🚀 步骤3: 开始执行干燥流程...")
|
||||
print(f"🔥 启动加热器 {heater_id} 进行干燥")
|
||||
|
||||
# 3.1 启动加热
|
||||
print(f" ⚡ 动作1: 启动加热到 {dry_temp}°C...")
|
||||
action_sequence.append({
|
||||
"device_id": heater_id,
|
||||
"action_name": "heat_chill_start",
|
||||
"action_kwargs": {
|
||||
"vessel": {"id": vessel_id}, # 🔧 使用 vessel_id
|
||||
"vessel": {"id": vessel_id},
|
||||
"temp": dry_temp,
|
||||
"purpose": f"干燥 {compound or '化合物'}"
|
||||
}
|
||||
})
|
||||
print(f" ✅ 加热器启动命令已添加 🔥")
|
||||
|
||||
|
||||
# 3.2 等待温度稳定
|
||||
print(f" ⏳ 动作2: 等待温度稳定...")
|
||||
action_sequence.append({
|
||||
"action_name": "wait",
|
||||
"action_kwargs": {
|
||||
@@ -183,34 +120,27 @@ def generate_dry_protocol(
|
||||
"description": f"等待温度稳定到 {dry_temp}°C"
|
||||
}
|
||||
})
|
||||
print(f" ✅ 温度稳定等待命令已添加 🌡️")
|
||||
|
||||
|
||||
# 3.3 保持干燥温度
|
||||
print(f" 🔄 动作3: 保持干燥温度 {simulation_time/60:.0f} 分钟...")
|
||||
action_sequence.append({
|
||||
"device_id": heater_id,
|
||||
"action_name": "heat_chill",
|
||||
"action_kwargs": {
|
||||
"vessel": {"id": vessel_id}, # 🔧 使用 vessel_id
|
||||
"vessel": {"id": vessel_id},
|
||||
"temp": dry_temp,
|
||||
"time": simulation_time,
|
||||
"purpose": f"干燥 {compound or '化合物'},保持温度 {dry_temp}°C"
|
||||
}
|
||||
})
|
||||
print(f" ✅ 温度保持命令已添加 🌡️⏰")
|
||||
|
||||
# 🔧 新增:干燥过程中的体积变化计算
|
||||
print(f"🔧 计算干燥过程中的体积变化...")
|
||||
|
||||
# 干燥过程中的体积变化计算
|
||||
if original_liquid_volume > 0:
|
||||
# 干燥过程中,溶剂会蒸发,固体保留
|
||||
# 根据温度和时间估算蒸发量
|
||||
evaporation_rate = 0.001 * dry_temp # 每秒每°C蒸发0.001mL
|
||||
total_evaporation = min(original_liquid_volume * 0.8,
|
||||
evaporation_rate * simulation_time) # 最多蒸发80%
|
||||
|
||||
evaporation_rate = 0.001 * dry_temp
|
||||
total_evaporation = min(original_liquid_volume * 0.8,
|
||||
evaporation_rate * simulation_time)
|
||||
|
||||
new_volume = max(0.0, original_liquid_volume - total_evaporation)
|
||||
|
||||
# 更新vessel字典中的体积
|
||||
|
||||
if "data" in vessel and "liquid_volume" in vessel["data"]:
|
||||
current_volume = vessel["data"]["liquid_volume"]
|
||||
if isinstance(current_volume, list):
|
||||
@@ -222,15 +152,14 @@ def generate_dry_protocol(
|
||||
vessel["data"]["liquid_volume"] = new_volume
|
||||
else:
|
||||
vessel["data"]["liquid_volume"] = new_volume
|
||||
|
||||
# 🔧 同时更新图中的容器数据
|
||||
|
||||
if vessel_id in G.nodes():
|
||||
if 'data' not in G.nodes[vessel_id]:
|
||||
G.nodes[vessel_id]['data'] = {}
|
||||
|
||||
|
||||
vessel_node_data = G.nodes[vessel_id]['data']
|
||||
current_node_volume = vessel_node_data.get('liquid_volume', 0.0)
|
||||
|
||||
|
||||
if isinstance(current_node_volume, list):
|
||||
if len(current_node_volume) > 0:
|
||||
G.nodes[vessel_id]['data']['liquid_volume'][0] = new_volume
|
||||
@@ -238,37 +167,29 @@ def generate_dry_protocol(
|
||||
G.nodes[vessel_id]['data']['liquid_volume'] = [new_volume]
|
||||
else:
|
||||
G.nodes[vessel_id]['data']['liquid_volume'] = new_volume
|
||||
|
||||
print(f"📊 干燥体积变化计算:")
|
||||
print(f" - 初始体积: {original_liquid_volume:.2f}mL")
|
||||
print(f" - 蒸发量: {total_evaporation:.2f}mL")
|
||||
print(f" - 剩余体积: {new_volume:.2f}mL")
|
||||
print(f" - 蒸发率: {(total_evaporation/original_liquid_volume*100):.1f}%")
|
||||
|
||||
|
||||
debug_print(f"干燥体积变化: {original_liquid_volume:.2f}mL -> {new_volume:.2f}mL (-{total_evaporation:.2f}mL)")
|
||||
|
||||
# 3.4 停止加热
|
||||
print(f" ⏹️ 动作4: 停止加热...")
|
||||
action_sequence.append({
|
||||
"device_id": heater_id,
|
||||
"action_name": "heat_chill_stop",
|
||||
"action_kwargs": {
|
||||
"vessel": {"id": vessel_id}, # 🔧 使用 vessel_id
|
||||
"vessel": {"id": vessel_id},
|
||||
"purpose": f"干燥完成,停止加热"
|
||||
}
|
||||
})
|
||||
print(f" ✅ 停止加热命令已添加 🛑")
|
||||
|
||||
|
||||
# 3.5 等待冷却
|
||||
print(f" ❄️ 动作5: 等待冷却...")
|
||||
action_sequence.append({
|
||||
"action_name": "wait",
|
||||
"action_kwargs": {
|
||||
"time": 10.0, # 等待10秒冷却
|
||||
"time": 10.0,
|
||||
"description": f"等待 {compound or '化合物'} 冷却"
|
||||
}
|
||||
})
|
||||
print(f" ✅ 冷却等待命令已添加 🧊")
|
||||
|
||||
# 🔧 新增:干燥完成后的状态报告
|
||||
|
||||
# 最终状态
|
||||
final_liquid_volume = 0.0
|
||||
if "data" in vessel and "liquid_volume" in vessel["data"]:
|
||||
current_volume = vessel["data"]["liquid_volume"]
|
||||
@@ -276,60 +197,37 @@ def generate_dry_protocol(
|
||||
final_liquid_volume = current_volume[0]
|
||||
elif isinstance(current_volume, (int, float)):
|
||||
final_liquid_volume = current_volume
|
||||
|
||||
print(f"\n🎊 DRY: 协议生成完成,共 {len(action_sequence)} 个动作 🎯")
|
||||
print(f"⏱️ DRY: 预计总时间: {(simulation_time + 30)/60:.0f} 分钟 ⌛")
|
||||
print(f"📊 干燥结果:")
|
||||
print(f" - 容器: {vessel_id}")
|
||||
print(f" - 化合物: {compound or '未指定'}")
|
||||
print(f" - 干燥前体积: {original_liquid_volume:.2f}mL")
|
||||
print(f" - 干燥后体积: {final_liquid_volume:.2f}mL")
|
||||
print(f" - 蒸发体积: {(original_liquid_volume - final_liquid_volume):.2f}mL")
|
||||
print(f"🏁 所有动作序列准备就绪! ✨")
|
||||
|
||||
|
||||
debug_print(f"干燥协议生成完成: {len(action_sequence)} 个动作, 体积 {original_liquid_volume:.2f} -> {final_liquid_volume:.2f}mL")
|
||||
|
||||
return action_sequence
|
||||
|
||||
|
||||
# 🔧 新增:便捷函数
|
||||
def generate_quick_dry_protocol(G: nx.DiGraph, vessel: dict, compound: str = "",
|
||||
# 便捷函数
|
||||
def generate_quick_dry_protocol(G: nx.DiGraph, vessel: dict, compound: str = "",
|
||||
temp: float = 40.0, time: float = 30.0) -> List[Dict[str, Any]]:
|
||||
"""快速干燥:低温短时间"""
|
||||
vessel_id = vessel["id"]
|
||||
print(f"🌡️ 快速干燥: {compound or '化合物'} → {vessel_id} @ {temp}°C ({time}min)")
|
||||
|
||||
# 临时修改默认参数
|
||||
import types
|
||||
temp_func = types.FunctionType(
|
||||
generate_dry_protocol.__code__,
|
||||
generate_dry_protocol.__globals__
|
||||
)
|
||||
|
||||
# 直接调用原函数,但修改内部参数
|
||||
return generate_dry_protocol(G, vessel, compound)
|
||||
|
||||
|
||||
def generate_thorough_dry_protocol(G: nx.DiGraph, vessel: dict, compound: str = "",
|
||||
def generate_thorough_dry_protocol(G: nx.DiGraph, vessel: dict, compound: str = "",
|
||||
temp: float = 80.0, time: float = 120.0) -> List[Dict[str, Any]]:
|
||||
"""深度干燥:高温长时间"""
|
||||
vessel_id = vessel["id"]
|
||||
print(f"🔥 深度干燥: {compound or '化合物'} → {vessel_id} @ {temp}°C ({time}min)")
|
||||
return generate_dry_protocol(G, vessel, compound)
|
||||
|
||||
|
||||
def generate_gentle_dry_protocol(G: nx.DiGraph, vessel: dict, compound: str = "",
|
||||
def generate_gentle_dry_protocol(G: nx.DiGraph, vessel: dict, compound: str = "",
|
||||
temp: float = 30.0, time: float = 180.0) -> List[Dict[str, Any]]:
|
||||
"""温和干燥:低温长时间"""
|
||||
vessel_id = vessel["id"]
|
||||
print(f"🌡️ 温和干燥: {compound or '化合物'} → {vessel_id} @ {temp}°C ({time}min)")
|
||||
return generate_dry_protocol(G, vessel, compound)
|
||||
|
||||
|
||||
# 测试函数
|
||||
def test_dry_protocol():
|
||||
"""测试干燥协议"""
|
||||
print("=== DRY PROTOCOL 测试 ===")
|
||||
print("测试完成")
|
||||
debug_print("=== DRY PROTOCOL 测试 ===")
|
||||
debug_print("测试完成")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
test_dry_protocol()
|
||||
test_dry_protocol()
|
||||
|
||||