Compare commits

..

1 Commits

Author SHA1 Message Date
Andy6M
78729ef86c feat(workstation): update bioyond config migration and coin cell material search logic
- Migrate bioyond_cell config to JSON structure and remove global variable dependencies
- Implement material search confirmation dialog auto-handling
- Add documentation: 20260113_物料搜寻确认弹窗自动处理功能.md and 20260113_配置迁移修改总结.md
2026-01-14 09:55:25 +08:00
105 changed files with 4007 additions and 11620 deletions

View File

@@ -1,60 +0,0 @@
# unilabos: Production package (depends on unilabos-env + pip unilabos)
# For production deployment
package:
name: unilabos
version: 0.10.17
source:
path: ../../unilabos
target_directory: unilabos
build:
python:
entry_points:
- unilab = unilabos.app.main:main
script:
- set PIP_NO_INDEX=
- if: win
then:
- copy %RECIPE_DIR%\..\..\MANIFEST.in %SRC_DIR%
- copy %RECIPE_DIR%\..\..\setup.cfg %SRC_DIR%
- copy %RECIPE_DIR%\..\..\setup.py %SRC_DIR%
- pip install %SRC_DIR%
- if: unix
then:
- cp $RECIPE_DIR/../../MANIFEST.in $SRC_DIR
- cp $RECIPE_DIR/../../setup.cfg $SRC_DIR
- cp $RECIPE_DIR/../../setup.py $SRC_DIR
- pip install $SRC_DIR
requirements:
host:
- python ==3.11.14
- pip
- setuptools
- zstd
- zstandard
run:
- zstd
- zstandard
- networkx
- typing_extensions
- websockets
- pint
- fastapi
- jinja2
- requests
- uvicorn
- opcua # [not osx]
- pyserial
- pandas
- pymodbus
- matplotlib
- pylibftdi
- uni-lab::unilabos-env ==0.10.17
about:
repository: https://github.com/deepmodeling/Uni-Lab-OS
license: GPL-3.0-only
description: "UniLabOS - Production package with minimal ROS2 dependencies"

View File

@@ -1,39 +0,0 @@
# unilabos-env: conda environment dependencies (ROS2 + conda packages)
package:
name: unilabos-env
version: 0.10.17
build:
noarch: generic
requirements:
run:
# Python
- zstd
- zstandard
- conda-forge::python ==3.11.14
- conda-forge::opencv
# ROS2 dependencies (from ci-check.yml)
- robostack-staging::ros-humble-ros-core
- robostack-staging::ros-humble-action-msgs
- robostack-staging::ros-humble-std-msgs
- robostack-staging::ros-humble-geometry-msgs
- robostack-staging::ros-humble-control-msgs
- robostack-staging::ros-humble-nav2-msgs
- robostack-staging::ros-humble-cv-bridge
- robostack-staging::ros-humble-vision-opencv
- robostack-staging::ros-humble-tf-transformations
- robostack-staging::ros-humble-moveit-msgs
- robostack-staging::ros-humble-tf2-ros
- robostack-staging::ros-humble-tf2-ros-py
- conda-forge::transforms3d
- conda-forge::uv
# UniLabOS custom messages
- uni-lab::ros-humble-unilabos-msgs
about:
repository: https://github.com/deepmodeling/Uni-Lab-OS
license: GPL-3.0-only
description: "UniLabOS Environment - ROS2 and conda dependencies"

View File

@@ -1,42 +0,0 @@
# unilabos-full: Full package with all features
# Depends on unilabos + complete ROS2 desktop + dev tools
package:
name: unilabos-full
version: 0.10.17
build:
noarch: generic
requirements:
run:
# Base unilabos package (includes unilabos-env)
- uni-lab::unilabos ==0.10.17
# Documentation tools
- sphinx
- sphinx_rtd_theme
# Web UI
- gradio
- flask
# Interactive development
- ipython
- jupyter
- jupyros
- colcon-common-extensions
# ROS2 full desktop (includes rviz2, gazebo, etc.)
- robostack-staging::ros-humble-desktop-full
# Navigation and motion control
- ros-humble-navigation2
- ros-humble-ros2-control
- ros-humble-robot-state-publisher
- ros-humble-joint-state-publisher
# MoveIt motion planning
- ros-humble-moveit
- ros-humble-moveit-servo
# Simulation
- ros-humble-simulation
about:
repository: https://github.com/deepmodeling/Uni-Lab-OS
license: GPL-3.0-only
description: "UniLabOS Full - Complete package with ROS2 Desktop, MoveIt, Navigation2, Gazebo, Jupyter"

91
.conda/recipe.yaml Normal file
View File

@@ -0,0 +1,91 @@
package:
name: unilabos
version: 0.10.15
source:
path: ../unilabos
target_directory: unilabos
build:
python:
entry_points:
- unilab = unilabos.app.main:main
script:
- set PIP_NO_INDEX=
- if: win
then:
- copy %RECIPE_DIR%\..\MANIFEST.in %SRC_DIR%
- copy %RECIPE_DIR%\..\setup.cfg %SRC_DIR%
- copy %RECIPE_DIR%\..\setup.py %SRC_DIR%
- call %PYTHON% -m pip install %SRC_DIR%
- if: unix
then:
- cp $RECIPE_DIR/../MANIFEST.in $SRC_DIR
- cp $RECIPE_DIR/../setup.cfg $SRC_DIR
- cp $RECIPE_DIR/../setup.py $SRC_DIR
- $PYTHON -m pip install $SRC_DIR
requirements:
host:
- python ==3.11.11
- pip
- setuptools
- zstd
- zstandard
run:
- conda-forge::python ==3.11.11
- compilers
- cmake
- zstd
- zstandard
- ninja
- if: unix
then:
- make
- sphinx
- sphinx_rtd_theme
- numpy
- scipy
- pandas
- networkx
- matplotlib
- pint
- pyserial
- pyusb
- pylibftdi
- pymodbus
- python-can
- pyvisa
- opencv
- pydantic
- fastapi
- uvicorn
- gradio
- flask
- websockets
- ipython
- jupyter
- jupyros
- colcon-common-extensions
- robostack-staging::ros-humble-desktop-full
- robostack-staging::ros-humble-control-msgs
- robostack-staging::ros-humble-sensor-msgs
- robostack-staging::ros-humble-trajectory-msgs
- ros-humble-navigation2
- ros-humble-ros2-control
- ros-humble-robot-state-publisher
- ros-humble-joint-state-publisher
- ros-humble-rosbridge-server
- ros-humble-cv-bridge
- ros-humble-tf2
- ros-humble-moveit
- ros-humble-moveit-servo
- ros-humble-simulation
- ros-humble-tf-transformations
- transforms3d
- uni-lab::ros-humble-unilabos-msgs
about:
repository: https://github.com/deepmodeling/Uni-Lab-OS
license: GPL-3.0-only
description: "Uni-Lab-OS"

View File

@@ -0,0 +1,9 @@
@echo off
setlocal enabledelayedexpansion
REM upgrade pip
"%PREFIX%\python.exe" -m pip install --upgrade pip
REM install extra deps
"%PREFIX%\python.exe" -m pip install paho-mqtt opentrons_shared_data
"%PREFIX%\python.exe" -m pip install git+https://github.com/Xuwznln/pylabrobot.git

View File

@@ -0,0 +1,9 @@
#!/usr/bin/env bash
set -euxo pipefail
# make sure pip is available
"$PREFIX/bin/python" -m pip install --upgrade pip
# install extra deps
"$PREFIX/bin/python" -m pip install paho-mqtt opentrons_shared_data
"$PREFIX/bin/python" -m pip install git+https://github.com/Xuwznln/pylabrobot.git

26
.cursorignore Normal file
View File

@@ -0,0 +1,26 @@
.conda
# .github
.idea
# .vscode
output
pylabrobot_repo
recipes
scripts
service
temp
# unilabos/test
# unilabos/app/web
unilabos/device_mesh
unilabos_data
unilabos_msgs
unilabos.egg-info
CONTRIBUTORS
# LICENSE
MANIFEST.in
pyrightconfig.json
# README.md
# README_zh.md
setup.py
setup.cfg
.gitattrubutes
**/__pycache__

View File

@@ -1,19 +0,0 @@
version: 2
updates:
# GitHub Actions
- package-ecosystem: "github-actions"
directory: "/"
target-branch: "dev"
schedule:
interval: "weekly"
day: "monday"
time: "06:00"
open-pull-requests-limit: 5
reviewers:
- "msgcenterpy-team"
labels:
- "dependencies"
- "github-actions"
commit-message:
prefix: "ci"
include: "scope"

View File

@@ -1,67 +0,0 @@
name: CI Check
on:
push:
branches: [main, dev]
pull_request:
branches: [main, dev]
jobs:
registry-check:
runs-on: windows-latest
env:
# Fix Unicode encoding issue on Windows runner (cp1252 -> utf-8)
PYTHONIOENCODING: utf-8
PYTHONUTF8: 1
defaults:
run:
shell: cmd
steps:
- uses: actions/checkout@v6
with:
fetch-depth: 0
- name: Setup Miniforge
uses: conda-incubator/setup-miniconda@v3
with:
miniforge-version: latest
use-mamba: true
channels: robostack-staging,conda-forge,uni-lab
channel-priority: flexible
activate-environment: check-env
auto-update-conda: false
show-channel-urls: true
- name: Install ROS dependencies, uv and unilabos-msgs
run: |
echo Installing ROS dependencies...
mamba install -n check-env conda-forge::uv conda-forge::opencv robostack-staging::ros-humble-ros-core robostack-staging::ros-humble-action-msgs robostack-staging::ros-humble-std-msgs robostack-staging::ros-humble-geometry-msgs robostack-staging::ros-humble-control-msgs robostack-staging::ros-humble-nav2-msgs uni-lab::ros-humble-unilabos-msgs robostack-staging::ros-humble-cv-bridge robostack-staging::ros-humble-vision-opencv robostack-staging::ros-humble-tf-transformations robostack-staging::ros-humble-moveit-msgs robostack-staging::ros-humble-tf2-ros robostack-staging::ros-humble-tf2-ros-py conda-forge::transforms3d -c robostack-staging -c conda-forge -c uni-lab -y
- name: Install pip dependencies and unilabos
run: |
call conda activate check-env
echo Installing pip dependencies...
uv pip install -r unilabos/utils/requirements.txt
uv pip install pywinauto git+https://github.com/Xuwznln/pylabrobot.git
uv pip uninstall enum34 || echo enum34 not installed, skipping
uv pip install .
- name: Run check mode (complete_registry)
run: |
call conda activate check-env
echo Running check mode...
python -m unilabos --check_mode --skip_env_check
- name: Check for uncommitted changes
shell: bash
run: |
if ! git diff --exit-code; then
echo "::error::检测到文件变化!请先在本地运行 'python -m unilabos --complete_registry' 并提交变更"
echo "变化的文件:"
git diff --name-only
exit 1
fi
echo "检查通过:无文件变化"

View File

@@ -13,11 +13,6 @@ on:
required: false required: false
default: 'win-64' default: 'win-64'
type: string type: string
build_full:
description: '是否构建完整版 unilabos-full (默认构建轻量版 unilabos)'
required: false
default: false
type: boolean
jobs: jobs:
build-conda-pack: build-conda-pack:
@@ -62,7 +57,7 @@ jobs:
echo "should_build=false" >> $GITHUB_OUTPUT echo "should_build=false" >> $GITHUB_OUTPUT
fi fi
- uses: actions/checkout@v6 - uses: actions/checkout@v4
if: steps.should_build.outputs.should_build == 'true' if: steps.should_build.outputs.should_build == 'true'
with: with:
ref: ${{ github.event.inputs.branch }} ref: ${{ github.event.inputs.branch }}
@@ -74,7 +69,7 @@ jobs:
with: with:
miniforge-version: latest miniforge-version: latest
use-mamba: true use-mamba: true
python-version: '3.11.14' python-version: '3.11.11'
channels: conda-forge,robostack-staging,uni-lab,defaults channels: conda-forge,robostack-staging,uni-lab,defaults
channel-priority: flexible channel-priority: flexible
activate-environment: unilab activate-environment: unilab
@@ -86,14 +81,7 @@ jobs:
run: | run: |
echo Installing unilabos and dependencies to unilab environment... echo Installing unilabos and dependencies to unilab environment...
echo Using mamba for faster and more reliable dependency resolution... echo Using mamba for faster and more reliable dependency resolution...
echo Build full: ${{ github.event.inputs.build_full }} mamba install -n unilab uni-lab::unilabos conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
if "${{ github.event.inputs.build_full }}"=="true" (
echo Installing unilabos-full ^(complete package^)...
mamba install -n unilab uni-lab::unilabos-full conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
) else (
echo Installing unilabos ^(minimal package^)...
mamba install -n unilab uni-lab::unilabos conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
)
- name: Install conda-pack, unilabos and dependencies (Unix) - name: Install conda-pack, unilabos and dependencies (Unix)
if: steps.should_build.outputs.should_build == 'true' && matrix.platform != 'win-64' if: steps.should_build.outputs.should_build == 'true' && matrix.platform != 'win-64'
@@ -101,14 +89,7 @@ jobs:
run: | run: |
echo "Installing unilabos and dependencies to unilab environment..." echo "Installing unilabos and dependencies to unilab environment..."
echo "Using mamba for faster and more reliable dependency resolution..." echo "Using mamba for faster and more reliable dependency resolution..."
echo "Build full: ${{ github.event.inputs.build_full }}" mamba install -n unilab uni-lab::unilabos conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
if [[ "${{ github.event.inputs.build_full }}" == "true" ]]; then
echo "Installing unilabos-full (complete package)..."
mamba install -n unilab uni-lab::unilabos-full conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
else
echo "Installing unilabos (minimal package)..."
mamba install -n unilab uni-lab::unilabos conda-pack -c uni-lab -c robostack-staging -c conda-forge -y
fi
- name: Get latest ros-humble-unilabos-msgs version (Windows) - name: Get latest ros-humble-unilabos-msgs version (Windows)
if: steps.should_build.outputs.should_build == 'true' && matrix.platform == 'win-64' if: steps.should_build.outputs.should_build == 'true' && matrix.platform == 'win-64'
@@ -312,7 +293,7 @@ jobs:
- name: Upload distribution package - name: Upload distribution package
if: steps.should_build.outputs.should_build == 'true' if: steps.should_build.outputs.should_build == 'true'
uses: actions/upload-artifact@v6 uses: actions/upload-artifact@v4
with: with:
name: unilab-pack-${{ matrix.platform }}-${{ github.event.inputs.branch }} name: unilab-pack-${{ matrix.platform }}-${{ github.event.inputs.branch }}
path: dist-package/ path: dist-package/
@@ -327,12 +308,7 @@ jobs:
echo ========================================== echo ==========================================
echo Platform: ${{ matrix.platform }} echo Platform: ${{ matrix.platform }}
echo Branch: ${{ github.event.inputs.branch }} echo Branch: ${{ github.event.inputs.branch }}
echo Python version: 3.11.14 echo Python version: 3.11.11
if "${{ github.event.inputs.build_full }}"=="true" (
echo Package: unilabos-full ^(complete^)
) else (
echo Package: unilabos ^(minimal^)
)
echo. echo.
echo Distribution package contents: echo Distribution package contents:
dir dist-package dir dist-package
@@ -352,12 +328,7 @@ jobs:
echo "==========================================" echo "=========================================="
echo "Platform: ${{ matrix.platform }}" echo "Platform: ${{ matrix.platform }}"
echo "Branch: ${{ github.event.inputs.branch }}" echo "Branch: ${{ github.event.inputs.branch }}"
echo "Python version: 3.11.14" echo "Python version: 3.11.11"
if [[ "${{ github.event.inputs.build_full }}" == "true" ]]; then
echo "Package: unilabos-full (complete)"
else
echo "Package: unilabos (minimal)"
fi
echo "" echo ""
echo "Distribution package contents:" echo "Distribution package contents:"
ls -lh dist-package/ ls -lh dist-package/

View File

@@ -1,12 +1,10 @@
name: Deploy Docs name: Deploy Docs
on: on:
# 在 CI Check 成功后自动触发(仅 main 分支) push:
workflow_run: branches: [main]
workflows: ["CI Check"] pull_request:
types: [completed]
branches: [main] branches: [main]
# 手动触发
workflow_dispatch: workflow_dispatch:
inputs: inputs:
branch: branch:
@@ -35,19 +33,12 @@ concurrency:
jobs: jobs:
# Build documentation # Build documentation
build: build:
# 只在以下情况运行:
# 1. workflow_run 触发且 CI Check 成功
# 2. 手动触发
if: |
github.event_name == 'workflow_dispatch' ||
(github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success')
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@v6 uses: actions/checkout@v4
with: with:
# workflow_run 时使用触发工作流的分支,手动触发时使用输入的分支 ref: ${{ github.event.inputs.branch || github.ref }}
ref: ${{ github.event.workflow_run.head_branch || github.event.inputs.branch || github.ref }}
fetch-depth: 0 fetch-depth: 0
- name: Setup Miniforge (with mamba) - name: Setup Miniforge (with mamba)
@@ -55,7 +46,7 @@ jobs:
with: with:
miniforge-version: latest miniforge-version: latest
use-mamba: true use-mamba: true
python-version: '3.11.14' python-version: '3.11.11'
channels: conda-forge,robostack-staging,uni-lab,defaults channels: conda-forge,robostack-staging,uni-lab,defaults
channel-priority: flexible channel-priority: flexible
activate-environment: unilab activate-environment: unilab
@@ -84,10 +75,8 @@ jobs:
- name: Setup Pages - name: Setup Pages
id: pages id: pages
uses: actions/configure-pages@v5 uses: actions/configure-pages@v4
if: | if: github.ref == 'refs/heads/main' || (github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
github.event.workflow_run.head_branch == 'main' ||
(github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
- name: Build Sphinx documentation - name: Build Sphinx documentation
run: | run: |
@@ -105,18 +94,14 @@ jobs:
test -f docs/_build/html/index.html && echo "✓ index.html exists" || echo "✗ index.html missing" test -f docs/_build/html/index.html && echo "✓ index.html exists" || echo "✗ index.html missing"
- name: Upload build artifacts - name: Upload build artifacts
uses: actions/upload-pages-artifact@v4 uses: actions/upload-pages-artifact@v3
if: | if: github.ref == 'refs/heads/main' || (github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
github.event.workflow_run.head_branch == 'main' ||
(github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
with: with:
path: docs/_build/html path: docs/_build/html
# Deploy to GitHub Pages # Deploy to GitHub Pages
deploy: deploy:
if: | if: github.ref == 'refs/heads/main' || (github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
github.event.workflow_run.head_branch == 'main' ||
(github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_to_pages == 'true')
environment: environment:
name: github-pages name: github-pages
url: ${{ steps.deployment.outputs.page_url }} url: ${{ steps.deployment.outputs.page_url }}

View File

@@ -1,16 +1,11 @@
name: Multi-Platform Conda Build name: Multi-Platform Conda Build
on: on:
# 在 CI Check 工作流完成后触发(仅限 main/dev 分支)
workflow_run:
workflows: ["CI Check"]
types:
- completed
branches: [main, dev]
# 支持 tag 推送(不依赖 CI Check
push: push:
branches: [main, dev]
tags: ['v*'] tags: ['v*']
# 手动触发 pull_request:
branches: [main, dev]
workflow_dispatch: workflow_dispatch:
inputs: inputs:
platforms: platforms:
@@ -22,37 +17,9 @@ on:
required: false required: false
default: false default: false
type: boolean type: boolean
skip_ci_check:
description: '跳过等待 CI Check (手动触发时可选)'
required: false
default: false
type: boolean
jobs: jobs:
# 等待 CI Check 完成的 job (仅用于 workflow_run 触发)
wait-for-ci:
runs-on: ubuntu-latest
if: github.event_name == 'workflow_run'
outputs:
should_continue: ${{ steps.check.outputs.should_continue }}
steps:
- name: Check CI status
id: check
run: |
if [[ "${{ github.event.workflow_run.conclusion }}" == "success" ]]; then
echo "should_continue=true" >> $GITHUB_OUTPUT
echo "CI Check passed, proceeding with build"
else
echo "should_continue=false" >> $GITHUB_OUTPUT
echo "CI Check did not succeed (status: ${{ github.event.workflow_run.conclusion }}), skipping build"
fi
build: build:
needs: [wait-for-ci]
# 运行条件workflow_run 触发且 CI 成功,或者其他触发方式
if: |
always() &&
(needs.wait-for-ci.result == 'skipped' || needs.wait-for-ci.outputs.should_continue == 'true')
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
@@ -77,10 +44,8 @@ jobs:
shell: bash -l {0} shell: bash -l {0}
steps: steps:
- uses: actions/checkout@v6 - uses: actions/checkout@v4
with: with:
# 如果是 workflow_run 触发,使用触发 CI Check 的 commit
ref: ${{ github.event.workflow_run.head_sha || github.ref }}
fetch-depth: 0 fetch-depth: 0
- name: Check if platform should be built - name: Check if platform should be built
@@ -104,6 +69,7 @@ jobs:
channels: conda-forge,robostack-staging,defaults channels: conda-forge,robostack-staging,defaults
channel-priority: strict channel-priority: strict
activate-environment: build-env activate-environment: build-env
auto-activate-base: false
auto-update-conda: false auto-update-conda: false
show-channel-urls: true show-channel-urls: true
@@ -149,7 +115,7 @@ jobs:
- name: Upload conda package artifacts - name: Upload conda package artifacts
if: steps.should_build.outputs.should_build == 'true' if: steps.should_build.outputs.should_build == 'true'
uses: actions/upload-artifact@v6 uses: actions/upload-artifact@v4
with: with:
name: conda-package-${{ matrix.platform }} name: conda-package-${{ matrix.platform }}
path: conda-packages-temp path: conda-packages-temp

View File

@@ -1,62 +1,25 @@
name: UniLabOS Conda Build name: UniLabOS Conda Build
on: on:
# 在 CI Check 成功后自动触发
workflow_run:
workflows: ["CI Check"]
types: [completed]
branches: [main, dev]
# 标签推送时直接触发(发布版本)
push: push:
branches: [main, dev]
tags: ['v*'] tags: ['v*']
# 手动触发 pull_request:
branches: [main, dev]
workflow_dispatch: workflow_dispatch:
inputs: inputs:
platforms: platforms:
description: '选择构建平台 (逗号分隔): linux-64, osx-64, osx-arm64, win-64' description: '选择构建平台 (逗号分隔): linux-64, osx-64, osx-arm64, win-64'
required: false required: false
default: 'linux-64' default: 'linux-64'
build_full:
description: '是否构建 unilabos-full 完整包 (默认只构建 unilabos 基础包)'
required: false
default: false
type: boolean
upload_to_anaconda: upload_to_anaconda:
description: '是否上传到Anaconda.org' description: '是否上传到Anaconda.org'
required: false required: false
default: false default: false
type: boolean type: boolean
skip_ci_check:
description: '跳过等待 CI Check (手动触发时可选)'
required: false
default: false
type: boolean
jobs: jobs:
# 等待 CI Check 完成的 job (仅用于 workflow_run 触发)
wait-for-ci:
runs-on: ubuntu-latest
if: github.event_name == 'workflow_run'
outputs:
should_continue: ${{ steps.check.outputs.should_continue }}
steps:
- name: Check CI status
id: check
run: |
if [[ "${{ github.event.workflow_run.conclusion }}" == "success" ]]; then
echo "should_continue=true" >> $GITHUB_OUTPUT
echo "CI Check passed, proceeding with build"
else
echo "should_continue=false" >> $GITHUB_OUTPUT
echo "CI Check did not succeed (status: ${{ github.event.workflow_run.conclusion }}), skipping build"
fi
build: build:
needs: [wait-for-ci]
# 运行条件workflow_run 触发且 CI 成功,或者其他触发方式
if: |
always() &&
(needs.wait-for-ci.result == 'skipped' || needs.wait-for-ci.outputs.should_continue == 'true')
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
@@ -77,10 +40,8 @@ jobs:
shell: bash -l {0} shell: bash -l {0}
steps: steps:
- uses: actions/checkout@v6 - uses: actions/checkout@v4
with: with:
# 如果是 workflow_run 触发,使用触发 CI Check 的 commit
ref: ${{ github.event.workflow_run.head_sha || github.ref }}
fetch-depth: 0 fetch-depth: 0
- name: Check if platform should be built - name: Check if platform should be built
@@ -104,6 +65,7 @@ jobs:
channels: conda-forge,robostack-staging,uni-lab,defaults channels: conda-forge,robostack-staging,uni-lab,defaults
channel-priority: strict channel-priority: strict
activate-environment: build-env activate-environment: build-env
auto-activate-base: false
auto-update-conda: false auto-update-conda: false
show-channel-urls: true show-channel-urls: true
@@ -119,61 +81,12 @@ jobs:
conda list | grep -E "(rattler-build|anaconda-client)" conda list | grep -E "(rattler-build|anaconda-client)"
echo "Platform: ${{ matrix.platform }}" echo "Platform: ${{ matrix.platform }}"
echo "OS: ${{ matrix.os }}" echo "OS: ${{ matrix.os }}"
echo "Build full package: ${{ github.event.inputs.build_full || 'false' }}" echo "Building UniLabOS package"
echo "Building packages:"
echo " - unilabos-env (environment dependencies)"
echo " - unilabos (with pip package)"
if [[ "${{ github.event.inputs.build_full }}" == "true" ]]; then
echo " - unilabos-full (complete package)"
fi
- name: Build unilabos-env (conda environment only, noarch) - name: Build conda package
if: steps.should_build.outputs.should_build == 'true' if: steps.should_build.outputs.should_build == 'true'
run: | run: |
echo "Building unilabos-env (conda environment dependencies)..." rattler-build build -r .conda/recipe.yaml -c uni-lab -c robostack-staging -c conda-forge
rattler-build build -r .conda/environment/recipe.yaml -c uni-lab -c robostack-staging -c conda-forge
- name: Upload unilabos-env to Anaconda.org (if enabled)
if: steps.should_build.outputs.should_build == 'true' && github.event.inputs.upload_to_anaconda == 'true'
run: |
echo "Uploading unilabos-env to uni-lab organization..."
for package in $(find ./output -name "unilabos-env*.conda"); do
anaconda -t ${{ secrets.ANACONDA_API_TOKEN }} upload --user uni-lab --force "$package"
done
- name: Build unilabos (with pip package)
if: steps.should_build.outputs.should_build == 'true'
run: |
echo "Building unilabos package..."
# 如果已上传到 Anaconda从 uni-lab channel 获取 unilabos-env否则从本地 output 获取
rattler-build build -r .conda/base/recipe.yaml -c uni-lab -c robostack-staging -c conda-forge --channel ./output
- name: Upload unilabos to Anaconda.org (if enabled)
if: steps.should_build.outputs.should_build == 'true' && github.event.inputs.upload_to_anaconda == 'true'
run: |
echo "Uploading unilabos to uni-lab organization..."
for package in $(find ./output -name "unilabos-0*.conda" -o -name "unilabos-[0-9]*.conda"); do
anaconda -t ${{ secrets.ANACONDA_API_TOKEN }} upload --user uni-lab --force "$package"
done
- name: Build unilabos-full - Only when explicitly requested
if: |
steps.should_build.outputs.should_build == 'true' &&
github.event.inputs.build_full == 'true'
run: |
echo "Building unilabos-full package on ${{ matrix.platform }}..."
rattler-build build -r .conda/full/recipe.yaml -c uni-lab -c robostack-staging -c conda-forge --channel ./output
- name: Upload unilabos-full to Anaconda.org (if enabled)
if: |
steps.should_build.outputs.should_build == 'true' &&
github.event.inputs.build_full == 'true' &&
github.event.inputs.upload_to_anaconda == 'true'
run: |
echo "Uploading unilabos-full to uni-lab organization..."
for package in $(find ./output -name "unilabos-full*.conda"); do
anaconda -t ${{ secrets.ANACONDA_API_TOKEN }} upload --user uni-lab --force "$package"
done
- name: List built packages - name: List built packages
if: steps.should_build.outputs.should_build == 'true' if: steps.should_build.outputs.should_build == 'true'
@@ -195,9 +108,17 @@ jobs:
- name: Upload conda package artifacts - name: Upload conda package artifacts
if: steps.should_build.outputs.should_build == 'true' if: steps.should_build.outputs.should_build == 'true'
uses: actions/upload-artifact@v6 uses: actions/upload-artifact@v4
with: with:
name: conda-package-unilabos-${{ matrix.platform }} name: conda-package-unilabos-${{ matrix.platform }}
path: conda-packages-temp path: conda-packages-temp
if-no-files-found: warn if-no-files-found: warn
retention-days: 30 retention-days: 30
- name: Upload to Anaconda.org (uni-lab organization)
if: github.event.inputs.upload_to_anaconda == 'true'
run: |
for package in $(find ./output -name "*.conda"); do
echo "Uploading $package to uni-lab organization..."
anaconda -t ${{ secrets.ANACONDA_API_TOKEN }} upload --user uni-lab --force "$package"
done

1
.gitignore vendored
View File

@@ -4,7 +4,6 @@ temp/
output/ output/
unilabos_data/ unilabos_data/
pyrightconfig.json pyrightconfig.json
.cursorignore
## Python ## Python
# Byte-compiled / optimized / DLL files # Byte-compiled / optimized / DLL files

View File

@@ -1,5 +1,4 @@
recursive-include unilabos/test * recursive-include unilabos/test *
recursive-include unilabos/utils *
recursive-include unilabos/registry *.yaml recursive-include unilabos/registry *.yaml
recursive-include unilabos/app/web/static * recursive-include unilabos/app/web/static *
recursive-include unilabos/app/web/templates * recursive-include unilabos/app/web/templates *

View File

@@ -31,46 +31,26 @@ Detailed documentation can be found at:
## Quick Start ## Quick Start
### 1. Setup Conda Environment 1. Setup Conda Environment
Uni-Lab-OS recommends using `mamba` for environment management. Choose the package that fits your needs: Uni-Lab-OS recommends using `mamba` for environment management:
| Package | Use Case | Contents |
|---------|----------|----------|
| `unilabos` | **Recommended for most users** | Complete package, ready to use |
| `unilabos-env` | Developers (editable install) | Environment only, install unilabos via pip |
| `unilabos-full` | Simulation/Visualization | unilabos + ROS2 Desktop + Gazebo + MoveIt |
```bash ```bash
# Create new environment # Create new environment
mamba create -n unilab python=3.11.14 mamba create -n unilab python=3.11.11
mamba activate unilab mamba activate unilab
mamba install -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
# Option A: Standard installation (recommended for most users)
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
# Option B: For developers (editable mode development)
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
# Then install unilabos and dependencies:
git clone https://github.com/deepmodeling/Uni-Lab-OS.git && cd Uni-Lab-OS
pip install -e .
uv pip install -r unilabos/utils/requirements.txt
# Option C: Full installation (simulation/visualization)
mamba install uni-lab::unilabos-full -c robostack-staging -c conda-forge
``` ```
**When to use which?** 2. Install Dev Uni-Lab-OS
- **unilabos**: Standard installation for production deployment and general usage (recommended)
- **unilabos-env**: For developers who need `pip install -e .` editable mode, modify source code
- **unilabos-full**: For simulation (Gazebo), visualization (rviz2), and Jupyter notebooks
### 2. Clone Repository (Optional, for developers)
```bash ```bash
# Clone the repository (only needed for development or examples) # Clone the repository
git clone https://github.com/deepmodeling/Uni-Lab-OS.git git clone https://github.com/deepmodeling/Uni-Lab-OS.git
cd Uni-Lab-OS cd Uni-Lab-OS
# Install Uni-Lab-OS
pip install .
``` ```
3. Start Uni-Lab System 3. Start Uni-Lab System

View File

@@ -31,46 +31,26 @@ Uni-Lab-OS 是一个用于实验室自动化的综合平台,旨在连接和控
## 快速开始 ## 快速开始
### 1. 配置 Conda 环境 1. 配置 Conda 环境
Uni-Lab-OS 建议使用 `mamba` 管理环境。根据您的需求选择合适的安装包: Uni-Lab-OS 建议使用 `mamba` 管理环境。根据您的操作系统选择适当的环境文件:
| 安装包 | 适用场景 | 包含内容 |
|--------|----------|----------|
| `unilabos` | **推荐大多数用户** | 完整安装包,开箱即用 |
| `unilabos-env` | 开发者(可编辑安装) | 仅环境依赖,通过 pip 安装 unilabos |
| `unilabos-full` | 仿真/可视化 | unilabos + ROS2 桌面版 + Gazebo + MoveIt |
```bash ```bash
# 创建新环境 # 创建新环境
mamba create -n unilab python=3.11.14 mamba create -n unilab python=3.11.11
mamba activate unilab mamba activate unilab
mamba install -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
# 方案 A标准安装推荐大多数用户
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
# 方案 B开发者环境可编辑模式开发
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
# 然后安装 unilabos 和依赖:
git clone https://github.com/deepmodeling/Uni-Lab-OS.git && cd Uni-Lab-OS
pip install -e .
uv pip install -r unilabos/utils/requirements.txt
# 方案 C完整安装仿真/可视化)
mamba install uni-lab::unilabos-full -c robostack-staging -c conda-forge
``` ```
**如何选择?** 2. 安装开发版 Uni-Lab-OS:
- **unilabos**:标准安装,适用于生产部署和日常使用(推荐)
- **unilabos-env**:开发者使用,支持 `pip install -e .` 可编辑模式,可修改源代码
- **unilabos-full**需要仿真Gazebo、可视化rviz2或 Jupyter Notebook
### 2. 克隆仓库(可选,供开发者使用)
```bash ```bash
# 克隆仓库(仅开发或查看示例时需要) # 克隆仓库
git clone https://github.com/deepmodeling/Uni-Lab-OS.git git clone https://github.com/deepmodeling/Uni-Lab-OS.git
cd Uni-Lab-OS cd Uni-Lab-OS
# 安装 Uni-Lab-OS
pip install .
``` ```
3. 启动 Uni-Lab 系统 3. 启动 Uni-Lab 系统

View File

@@ -31,14 +31,6 @@
详细的安装步骤请参考 [安装指南](installation.md)。 详细的安装步骤请参考 [安装指南](installation.md)。
**选择合适的安装包:**
| 安装包 | 适用场景 | 包含组件 |
|--------|----------|----------|
| `unilabos` | **推荐大多数用户**,生产部署 | 完整安装包,开箱即用 |
| `unilabos-env` | 开发者(可编辑安装) | 仅环境依赖,通过 pip 安装 unilabos |
| `unilabos-full` | 仿真/可视化 | unilabos + 完整 ROS2 桌面版 + Gazebo + MoveIt |
**关键步骤:** **关键步骤:**
```bash ```bash
@@ -46,30 +38,15 @@
# 下载 Miniforge: https://github.com/conda-forge/miniforge/releases # 下载 Miniforge: https://github.com/conda-forge/miniforge/releases
# 2. 创建 Conda 环境 # 2. 创建 Conda 环境
mamba create -n unilab python=3.11.14 mamba create -n unilab python=3.11.11
# 3. 激活环境 # 3. 激活环境
mamba activate unilab mamba activate unilab
# 4. 安装 Uni-Lab-OS(选择其一) # 4. 安装 Uni-Lab-OS
# 方案 A标准安装推荐大多数用户
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
# 方案 B开发者环境可编辑模式开发
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
pip install -e /path/to/Uni-Lab-OS # 可编辑安装
uv pip install -r unilabos/utils/requirements.txt # 安装 pip 依赖
# 方案 C完整版仿真/可视化)
mamba install uni-lab::unilabos-full -c robostack-staging -c conda-forge
``` ```
**选择建议:**
- **日常使用/生产部署**:使用 `unilabos`(推荐),完整功能,开箱即用
- **开发者**:使用 `unilabos-env` + `pip install -e .` + `uv pip install -r unilabos/utils/requirements.txt`,代码修改立即生效
- **仿真/可视化**:使用 `unilabos-full`,含 Gazebo、rviz2、MoveIt
#### 1.2 验证安装 #### 1.2 验证安装
```bash ```bash
@@ -439,9 +416,6 @@ unilab --ak your_ak --sk your_sk -g test/experiments/mock_devices/mock_all.json
1. 访问 Web 界面,进入"仪器耗材"模块 1. 访问 Web 界面,进入"仪器耗材"模块
2. 在"仪器设备"区域找到并添加上述设备 2. 在"仪器设备"区域找到并添加上述设备
3. 在"物料耗材"区域找到并添加容器 3. 在"物料耗材"区域找到并添加容器
4. 在workstation中配置protocol_type包含PumpTransferProtocol
![添加Protocol类型](image/add_protocol.png)
![物料列表](image/material.png) ![物料列表](image/material.png)
@@ -452,9 +426,8 @@ unilab --ak your_ak --sk your_sk -g test/experiments/mock_devices/mock_all.json
**操作步骤:** **操作步骤:**
1. 将两个 `container` 拖拽到 `workstation` 1. 将两个 `container` 拖拽到 `workstation`
2.`virtual_multiway_valve` 拖拽到 `workstation` 2.`virtual_transfer_pump` 拖拽到 `workstation`
3. `virtual_transfer_pump` 拖拽到 `workstation` 3. 在画布上连接它们(建立父子关系)
4. 在画布上连接它们(建立父子关系)
![设备连接](image/links.png) ![设备连接](image/links.png)
@@ -795,43 +768,7 @@ Waiting for host service...
详细的设备驱动编写指南请参考 [添加设备驱动](../developer_guide/add_device.md)。 详细的设备驱动编写指南请参考 [添加设备驱动](../developer_guide/add_device.md)。
#### 9.1 开发环境准备 #### 9.1 为什么需要自定义设备?
**推荐使用 `unilabos-env` + `pip install -e .` + `uv pip install`** 进行设备开发:
```bash
# 1. 创建环境并安装 unilabos-envROS2 + conda 依赖 + uv
mamba create -n unilab python=3.11.14
conda activate unilab
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
# 2. 克隆代码
git clone https://github.com/deepmodeling/Uni-Lab-OS.git
cd Uni-Lab-OS
# 3. 以可编辑模式安装(推荐使用脚本,自动检测中文环境)
python scripts/dev_install.py
# 或手动安装:
pip install -e .
uv pip install -r unilabos/utils/requirements.txt
```
**为什么使用这种方式?**
- `unilabos-env` 提供 ROS2 核心组件和 uv(通过 conda 安装,避免编译)
- `unilabos/utils/requirements.txt` 包含所有运行时需要的 pip 依赖
- `dev_install.py` 自动检测中文环境,中文系统自动使用清华镜像
- 使用 `uv` 替代 `pip`,安装速度更快
- 可编辑模式:代码修改**立即生效**,无需重新安装
**如果安装失败或速度太慢**,可以手动执行(使用清华镜像):
```bash
pip install -e . -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
uv pip install -r unilabos/utils/requirements.txt -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
```
#### 9.2 为什么需要自定义设备?
Uni-Lab-OS 内置了常见设备,但您的实验室可能有特殊设备需要集成: Uni-Lab-OS 内置了常见设备,但您的实验室可能有特殊设备需要集成:
@@ -840,7 +777,7 @@ Uni-Lab-OS 内置了常见设备,但您的实验室可能有特殊设备需要
- 特殊的实验流程 - 特殊的实验流程
- 第三方设备集成 - 第三方设备集成
#### 9.3 创建 Python 包 #### 9.2 创建 Python 包
为了方便开发和管理,建议为您的实验室创建独立的 Python 包。 为了方便开发和管理,建议为您的实验室创建独立的 Python 包。
@@ -877,7 +814,7 @@ touch my_lab_devices/my_lab_devices/__init__.py
touch my_lab_devices/my_lab_devices/devices/__init__.py touch my_lab_devices/my_lab_devices/devices/__init__.py
``` ```
#### 9.4 创建 setup.py #### 9.3 创建 setup.py
```python ```python
# my_lab_devices/setup.py # my_lab_devices/setup.py
@@ -908,7 +845,7 @@ setup(
) )
``` ```
#### 9.5 开发安装 #### 9.4 开发安装
使用 `-e` 参数进行可编辑安装,这样代码修改后立即生效: 使用 `-e` 参数进行可编辑安装,这样代码修改后立即生效:
@@ -923,7 +860,7 @@ pip install -e . -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
- 方便调试和测试 - 方便调试和测试
- 支持版本控制git - 支持版本控制git
#### 9.6 编写设备驱动 #### 9.5 编写设备驱动
创建设备驱动文件: 创建设备驱动文件:
@@ -1064,7 +1001,7 @@ class MyPump:
- **返回 Dict**:所有动作方法返回字典类型 - **返回 Dict**:所有动作方法返回字典类型
- **文档字符串**:详细说明参数和功能 - **文档字符串**:详细说明参数和功能
#### 9.7 测试设备驱动 #### 9.6 测试设备驱动
创建简单的测试脚本: 创建简单的测试脚本:

Binary file not shown.

Before

Width:  |  Height:  |  Size: 81 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 415 KiB

After

Width:  |  Height:  |  Size: 275 KiB

View File

@@ -13,26 +13,15 @@
- 开发者需要 Git 和基本的 Python 开发知识 - 开发者需要 Git 和基本的 Python 开发知识
- 自定义 msgs 需要 GitHub 账号 - 自定义 msgs 需要 GitHub 账号
## 安装包选择
Uni-Lab-OS 提供三个安装包版本,根据您的需求选择:
| 安装包 | 适用场景 | 包含组件 | 磁盘占用 |
|--------|----------|----------|----------|
| **unilabos** | **推荐大多数用户**,生产部署 | 完整安装包,开箱即用 | ~2-3 GB |
| **unilabos-env** | 开发者环境(可编辑安装) | 仅环境依赖,通过 pip 安装 unilabos | ~2 GB |
| **unilabos-full** | 仿真可视化、完整功能体验 | unilabos + 完整 ROS2 桌面版 + Gazebo + MoveIt | ~8-10 GB |
## 安装方式选择 ## 安装方式选择
根据您的使用场景,选择合适的安装方式: 根据您的使用场景,选择合适的安装方式:
| 安装方式 | 适用人群 | 推荐安装包 | 特点 | 安装时间 | | 安装方式 | 适用人群 | 特点 | 安装时间 |
| ---------------------- | -------------------- | ----------------- | ------------------------------ | ---------------------------- | | ---------------------- | -------------------- | ------------------------------ | ---------------------------- |
| **方式一:一键安装** | 快速体验、演示 | 预打包环境 | 离线可用,无需配置 | 5-10 分钟 (网络良好的情况下) | | **方式一:一键安装** | 实验室用户、快速体验 | 预打包环境,离线可用,无需配置 | 5-10 分钟 (网络良好的情况下) |
| **方式二:手动安装** | **大多数用户** | `unilabos` | 完整功能,开箱即用 | 10-20 分钟 | | **方式二:手动安装** | 标准用户、生产环境 | 灵活配置,版本可控 | 10-20 分钟 |
| **方式三:开发者安装** | 开发者、需要修改源码 | `unilabos-env` | 可编辑模式,支持自定义开发 | 20-30 分钟 | | **方式三:开发者安装** | 开发者、需要修改源码 | 可编辑模式,支持自定义 msgs | 20-30 分钟 |
| **仿真/可视化** | 仿真测试、可视化调试 | `unilabos-full` | 含 Gazebo、rviz2、MoveIt | 30-60 分钟 |
--- ---
@@ -155,38 +144,17 @@ bash Miniforge3-$(uname)-$(uname -m).sh
使用以下命令创建 Uni-Lab 专用环境: 使用以下命令创建 Uni-Lab 专用环境:
```bash ```bash
mamba create -n unilab python=3.11.14 # 目前ros2组件依赖版本大多为3.11.14 mamba create -n unilab python=3.11.11 # 目前ros2组件依赖版本大多为3.11.11
mamba activate unilab mamba activate unilab
mamba install -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
# 选择安装包(三选一):
# 方案 A标准安装推荐大多数用户
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
# 方案 B开发者环境可编辑模式开发
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
# 然后安装 unilabos 和 pip 依赖:
git clone https://github.com/deepmodeling/Uni-Lab-OS.git && cd Uni-Lab-OS
pip install -e .
uv pip install -r unilabos/utils/requirements.txt
# 方案 C完整版含仿真和可视化工具
mamba install uni-lab::unilabos-full -c robostack-staging -c conda-forge
``` ```
**参数说明**: **参数说明**:
- `-n unilab`: 创建名为 "unilab" 的环境 - `-n unilab`: 创建名为 "unilab" 的环境
- `uni-lab::unilabos`: 安装 unilabos 完整包,开箱即用(推荐) - `uni-lab::unilabos`: 从 uni-lab channel 安装 unilabos 包
- `uni-lab::unilabos-env`: 仅安装环境依赖,适合开发者使用 `pip install -e .`
- `uni-lab::unilabos-full`: 安装完整包(含 ROS2 Desktop、Gazebo、MoveIt 等)
- `-c robostack-staging -c conda-forge`: 添加额外的软件源 - `-c robostack-staging -c conda-forge`: 添加额外的软件源
**包选择建议**
- **日常使用/生产部署**:安装 `unilabos`(推荐,完整功能,开箱即用)
- **开发者**:安装 `unilabos-env`,然后使用 `uv pip install -r unilabos/utils/requirements.txt` 安装依赖,再 `pip install -e .` 进行可编辑安装
- **仿真/可视化**:安装 `unilabos-full`Gazebo、rviz2、MoveIt
**如果遇到网络问题**,可以使用清华镜像源加速下载: **如果遇到网络问题**,可以使用清华镜像源加速下载:
```bash ```bash
@@ -195,14 +163,8 @@ mamba config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/m
mamba config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/free/ mamba config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/free/
mamba config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/cloud/conda-forge/ mamba config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/cloud/conda-forge/
# 然后重新执行安装命令(推荐标准安装) # 然后重新执行安装命令
mamba create -n unilab uni-lab::unilabos -c robostack-staging mamba create -n unilab uni-lab::unilabos -c robostack-staging
# 或完整版(仿真/可视化)
mamba create -n unilab uni-lab::unilabos-full -c robostack-staging
# pip 安装时使用清华镜像(开发者安装时使用)
uv pip install -r unilabos/utils/requirements.txt -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
``` ```
### 第三步:激活环境 ### 第三步:激活环境
@@ -241,87 +203,58 @@ cd Uni-Lab-OS
cd Uni-Lab-OS cd Uni-Lab-OS
``` ```
### 第二步:安装开发环境unilabos-env ### 第二步:安装基础环境
**重要**:开发者请使用 `unilabos-env` 包,它专为开发者设计: **推荐方式**:先通过**方式一(一键安装)**或**方式二(手动安装)**完成基础环境的安装这将包含所有必需的依赖项ROS2、msgs 等)。
- 包含 ROS2 核心组件和消息包ros-humble-ros-core、std-msgs、geometry-msgs 等)
- 包含 transforms3d、cv-bridge、tf2 等 conda 依赖 #### 选项 A通过一键安装推荐
- 包含 `uv` 工具,用于快速安装 pip 依赖
- **不包含** pip 依赖和 unilabos 包(由 `pip install -e .` 和 `uv pip install` 安装) 参考上文"方式一:一键安装",完成基础环境的安装后,激活环境:
```bash ```bash
# 创建并激活环境
mamba create -n unilab python=3.11.14
conda activate unilab conda activate unilab
# 安装开发者环境包ROS2 + conda 依赖 + uv
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
``` ```
### 第三步:安装 pip 依赖和可编辑模式安装 #### 选项 B通过手动安装
克隆代码并安装依赖 参考上文"方式二:手动安装",创建并安装环境
```bash
mamba create -n unilab python=3.11.11
conda activate unilab
mamba install -n unilab uni-lab::unilabos -c robostack-staging -c conda-forge
```
**说明**:这会安装包括 Python 3.11.11、ROS2 Humble、ros-humble-unilabos-msgs 和所有必需依赖
### 第三步:切换到开发版本
现在你已经有了一个完整可用的 Uni-Lab 环境,接下来将 unilabos 包切换为开发版本:
```bash ```bash
# 确保环境已激活 # 确保环境已激活
conda activate unilab conda activate unilab
# 克隆仓库(如果还未克隆 # 卸载 pip 安装的 unilabos保留所有 conda 依赖
git clone https://github.com/deepmodeling/Uni-Lab-OS.git pip uninstall unilabos -y
cd Uni-Lab-OS
# 切换到 dev 分支(可选 # 克隆 dev 分支(如果还未克隆
cd /path/to/your/workspace
git clone -b dev https://github.com/deepmodeling/Uni-Lab-OS.git
# 或者如果已经克隆,切换到 dev 分支
cd Uni-Lab-OS
git checkout dev git checkout dev
git pull git pull
```
**推荐:使用安装脚本**(自动检测中文环境,使用 uv 加速): # 以可编辑模式安装开发版 unilabos
```bash
# 自动检测中文环境,如果是中文系统则使用清华镜像
python scripts/dev_install.py
# 或者手动指定:
python scripts/dev_install.py --china # 强制使用清华镜像
python scripts/dev_install.py --no-mirror # 强制使用 PyPI
python scripts/dev_install.py --skip-deps # 跳过 pip 依赖安装
python scripts/dev_install.py --use-pip # 使用 pip 而非 uv
```
**手动安装**(如果脚本安装失败或速度太慢):
```bash
# 1. 安装 unilabos可编辑模式
pip install -e .
# 2. 使用 uv 安装 pip 依赖(推荐,速度更快)
uv pip install -r unilabos/utils/requirements.txt
# 国内用户使用清华镜像:
pip install -e . -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple pip install -e . -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
uv pip install -r unilabos/utils/requirements.txt -i https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple
``` ```
**注意** **参数说明**
- `uv` 已包含在 `unilabos-env` 中,无需单独安装
- `unilabos/utils/requirements.txt` 包含运行 unilabos 所需的所有 pip 依赖
- 部分特殊包(如 pylabrobot会在运行时由 unilabos 自动检测并安装
**为什么使用可编辑模式?** - `-e`: editable mode可编辑模式代码修改立即生效无需重新安装
- `-i`: 使用清华镜像源加速下载
- `-e` (editable mode):代码修改**立即生效**,无需重新安装 - `pip uninstall unilabos`: 只卸载 pip 安装的 unilabos 包,不影响 conda 安装的其他依赖(如 ROS2、msgs 等)
- 适合开发调试:修改代码后直接运行测试
- 与 `unilabos-env` 配合:环境依赖由 conda 管理unilabos 代码由 pip 管理
**验证安装**
```bash
# 检查 unilabos 版本
python -c "import unilabos; print(unilabos.__version__)"
# 检查安装位置(应该指向你的代码目录)
pip show unilabos | grep Location
```
### 第四步:安装或自定义 ros-humble-unilabos-msgs可选 ### 第四步:安装或自定义 ros-humble-unilabos-msgs可选
@@ -531,45 +464,7 @@ cd $CONDA_PREFIX/envs/unilab
### 问题 8: 环境很大,有办法减小吗? ### 问题 8: 环境很大,有办法减小吗?
**解决方案**: **解决方案**: 预打包的环境包含所有依赖,通常较大(压缩后 2-5GB。这是为了确保离线安装和完整功能。如果空间有限考虑使用方式二手动安装只安装需要的组件。
1. **使用 `unilabos` 标准版**(推荐大多数用户):
```bash
mamba install uni-lab::unilabos -c robostack-staging -c conda-forge
```
标准版包含完整功能,环境大小约 2-3GB相比完整版的 8-10GB
2. **使用 `unilabos-env` 开发者版**(最小化):
```bash
mamba install uni-lab::unilabos-env -c robostack-staging -c conda-forge
# 然后手动安装依赖
pip install -e .
uv pip install -r unilabos/utils/requirements.txt
```
开发者版只包含环境依赖,体积最小约 2GB。
3. **按需安装额外组件**
如果后续需要特定功能,可以单独安装:
```bash
# 需要 Jupyter
mamba install jupyter jupyros
# 需要可视化
mamba install matplotlib opencv
# 需要仿真(注意:这会安装大量依赖)
mamba install ros-humble-gazebo-ros
```
4. **预打包环境问题**
预打包环境(方式一)包含所有依赖,通常较大(压缩后 2-5GB。这是为了确保离线安装和完整功能。
**包选择建议**
| 需求 | 推荐包 | 预估大小 |
|------|--------|----------|
| 日常使用/生产部署 | `unilabos` | ~2-3 GB |
| 开发调试(可编辑模式) | `unilabos-env` | ~2 GB |
| 仿真/可视化 | `unilabos-full` | ~8-10 GB |
### 问题 9: 如何更新到最新版本? ### 问题 9: 如何更新到最新版本?
@@ -616,7 +511,6 @@ mamba update ros-humble-unilabos-msgs -c uni-lab -c robostack-staging -c conda-f
**提示**: **提示**:
- **大多数用户**推荐使用方式二(手动安装)的 `unilabos` 标准版 - 生产环境推荐使用方式二(手动安装)的稳定版本
- **开发者**推荐使用方式三(开发者安装),安装 `unilabos-env` 后使用 `uv pip install -r unilabos/utils/requirements.txt` 安装依赖 - 开发和测试推荐使用方式三(开发者安装)
- **仿真/可视化**推荐安装 `unilabos-full` 完整版 - 快速体验和演示推荐使用方式一(一键安装)
- **快速体验和演示**推荐使用方式一(一键安装)

View File

@@ -1,6 +1,6 @@
package: package:
name: ros-humble-unilabos-msgs name: ros-humble-unilabos-msgs
version: 0.10.17 version: 0.10.15
source: source:
path: ../../unilabos_msgs path: ../../unilabos_msgs
target_directory: src target_directory: src
@@ -25,7 +25,7 @@ requirements:
build: build:
- ${{ compiler('cxx') }} - ${{ compiler('cxx') }}
- ${{ compiler('c') }} - ${{ compiler('c') }}
- python ==3.11.14 - python ==3.11.11
- numpy - numpy
- if: build_platform != target_platform - if: build_platform != target_platform
then: then:
@@ -63,14 +63,14 @@ requirements:
- robostack-staging::ros-humble-rosidl-default-generators - robostack-staging::ros-humble-rosidl-default-generators
- robostack-staging::ros-humble-std-msgs - robostack-staging::ros-humble-std-msgs
- robostack-staging::ros-humble-geometry-msgs - robostack-staging::ros-humble-geometry-msgs
- robostack-staging::ros2-distro-mutex=0.7 - robostack-staging::ros2-distro-mutex=0.6
run: run:
- robostack-staging::ros-humble-action-msgs - robostack-staging::ros-humble-action-msgs
- robostack-staging::ros-humble-ros-workspace - robostack-staging::ros-humble-ros-workspace
- robostack-staging::ros-humble-rosidl-default-runtime - robostack-staging::ros-humble-rosidl-default-runtime
- robostack-staging::ros-humble-std-msgs - robostack-staging::ros-humble-std-msgs
- robostack-staging::ros-humble-geometry-msgs - robostack-staging::ros-humble-geometry-msgs
- robostack-staging::ros2-distro-mutex=0.7 - robostack-staging::ros2-distro-mutex=0.6
- if: osx and x86_64 - if: osx and x86_64
then: then:
- __osx >=${{ MACOSX_DEPLOYMENT_TARGET|default('10.14') }} - __osx >=${{ MACOSX_DEPLOYMENT_TARGET|default('10.14') }}

View File

@@ -1,6 +1,6 @@
package: package:
name: unilabos name: unilabos
version: "0.10.17" version: "0.10.15"
source: source:
path: ../.. path: ../..

View File

@@ -85,7 +85,7 @@ Verification:
------------- -------------
The verify_installation.py script will check: The verify_installation.py script will check:
- Python version (3.11.14) - Python version (3.11.11)
- ROS2 rclpy installation - ROS2 rclpy installation
- UniLabOS installation and dependencies - UniLabOS installation and dependencies
@@ -104,7 +104,7 @@ Build Information:
Branch: {branch} Branch: {branch}
Platform: {platform} Platform: {platform}
Python: 3.11.14 Python: 3.11.11
Date: {build_date} Date: {build_date}
Troubleshooting: Troubleshooting:

View File

@@ -1,214 +0,0 @@
#!/usr/bin/env python3
"""
Development installation script for UniLabOS.
Auto-detects Chinese locale and uses appropriate mirror.
Usage:
python scripts/dev_install.py
python scripts/dev_install.py --no-mirror # Force no mirror
python scripts/dev_install.py --china # Force China mirror
python scripts/dev_install.py --skip-deps # Skip pip dependencies installation
Flow:
1. pip install -e . (install unilabos in editable mode)
2. Detect Chinese locale
3. Use uv to install pip dependencies from requirements.txt
4. Special packages (like pylabrobot) are handled by environment_check.py at runtime
"""
import argparse
import locale
import os
import subprocess
import sys
from pathlib import Path
# PyPI mirror hosted by Tsinghua University; used when a Chinese locale is
# detected or the user passes --china.
TSINGHUA_MIRROR = "https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple"
def is_chinese_locale() -> bool:
    """Return True if the system locale looks Chinese.

    Checks the LC_ALL/LANG environment variables first, then falls back to
    ``locale.getlocale()``. ``locale.getdefaultlocale()`` is deliberately
    avoided: it is deprecated since Python 3.11 and slated for removal.

    Mirrors the logic of EnvironmentChecker._is_chinese_locale()
    (assumption carried over from the original comment — confirm there).
    """
    try:
        candidates = [
            os.environ.get("LC_ALL"),
            os.environ.get("LANG"),
            locale.getlocale()[0],
        ]
        for lang in candidates:
            if lang and ("zh" in lang.lower() or "chinese" in lang.lower()):
                return True
    except Exception:
        # Locale inspection is best-effort; any failure means "not Chinese".
        pass
    return False
def run_command(cmd: list, description: str, retry: int = 2) -> bool:
    """Run *cmd*, retrying on non-zero exit status.

    Args:
        cmd: Command and arguments to execute (list form, no shell).
        description: Human-readable label used in log output.
        retry: Number of additional attempts after the first failure.

    Returns:
        True if the command exited successfully, False otherwise.
    """
    print(f"[INFO] {description}")
    print(f"[CMD] {' '.join(cmd)}")
    for attempt in range(retry + 1):
        try:
            # 10-minute cap per attempt; network installs can be slow.
            # (Original bound the result to an unused variable; dropped.)
            subprocess.run(cmd, check=True, timeout=600)
            print(f"[OK] {description}")
            return True
        except subprocess.CalledProcessError as e:
            if attempt < retry:
                print(f"[WARN] Attempt {attempt + 1} failed, retrying...")
            else:
                print(f"[ERROR] {description} failed: {e}")
                return False
        except subprocess.TimeoutExpired:
            # A timeout is fatal: retrying a 10-minute hang is not useful.
            print(f"[ERROR] {description} timed out")
            return False
    return False
def install_editable(project_root: Path, use_mirror: bool) -> bool:
    """Install unilabos from *project_root* in editable (-e) mode via pip."""
    pip_args = [sys.executable, "-m", "pip", "install", "-e", str(project_root)]
    mirror_args = ["-i", TSINGHUA_MIRROR] if use_mirror else []
    return run_command(pip_args + mirror_args, "Installing unilabos in editable mode")
def install_requirements_uv(requirements_file: Path, use_mirror: bool) -> bool:
    """Install pip dependencies from *requirements_file* using uv (conda-forge::uv)."""
    extra = ["-i", TSINGHUA_MIRROR] if use_mirror else []
    uv_cmd = ["uv", "pip", "install", "-r", str(requirements_file), *extra]
    return run_command(uv_cmd, "Installing pip dependencies with uv", retry=2)
def install_requirements_pip(requirements_file: Path, use_mirror: bool) -> bool:
    """Fallback installer: resolve *requirements_file* with plain pip."""
    extra = ["-i", TSINGHUA_MIRROR] if use_mirror else []
    pip_cmd = [sys.executable, "-m", "pip", "install", "-r", str(requirements_file), *extra]
    return run_command(pip_cmd, "Installing pip dependencies with pip", retry=2)
def check_uv_available() -> bool:
    """Return True when the ``uv`` executable can be invoked on this system."""
    try:
        subprocess.run(["uv", "--version"], capture_output=True, check=True)
    except (subprocess.CalledProcessError, FileNotFoundError):
        return False
    return True
def main():
    """Entry point: install unilabos in editable mode, then its pip deps.

    Flow:
        1. Parse CLI flags (--china / --no-mirror / --skip-deps / --use-pip).
        2. Decide mirror usage (flags override locale auto-detection).
        3. ``pip install -e <repo>`` for unilabos itself.
        4. Install requirements.txt via uv (preferred) or pip (fallback).
    Exits with status 1 if the repo layout is wrong or step 3 fails.
    """
    parser = argparse.ArgumentParser(description="Development installation script for UniLabOS")
    parser.add_argument("--china", action="store_true", help="Force use China mirror (Tsinghua)")
    parser.add_argument("--no-mirror", action="store_true", help="Force use default PyPI (no mirror)")
    parser.add_argument(
        "--skip-deps", action="store_true", help="Skip pip dependencies installation (only install unilabos)"
    )
    parser.add_argument("--use-pip", action="store_true", help="Use pip instead of uv for dependencies")
    args = parser.parse_args()
    # Determine project root: this script lives in <repo>/scripts/.
    script_dir = Path(__file__).parent
    project_root = script_dir.parent
    requirements_file = project_root / "unilabos" / "utils" / "requirements.txt"
    # setup.py is the sanity marker that we are inside the repo checkout.
    if not (project_root / "setup.py").exists():
        print(f"[ERROR] setup.py not found in {project_root}")
        sys.exit(1)
    print("=" * 60)
    print("UniLabOS Development Installation")
    print("=" * 60)
    print(f"Project root: {project_root}")
    print()
    # Determine mirror usage: explicit flags win over locale auto-detection.
    if args.no_mirror:
        use_mirror = False
        print("[INFO] Mirror disabled by --no-mirror flag")
    elif args.china:
        use_mirror = True
        print("[INFO] China mirror enabled by --china flag")
    else:
        use_mirror = is_chinese_locale()
        if use_mirror:
            print("[INFO] Chinese locale detected, using Tsinghua mirror")
        else:
            print("[INFO] Non-Chinese locale detected, using default PyPI")
    print()
    # Step 1: Install unilabos in editable mode (hard failure aborts the run).
    print("[STEP 1] Installing unilabos in editable mode...")
    if not install_editable(project_root, use_mirror):
        print("[ERROR] Failed to install unilabos")
        print()
        print("Manual fallback:")
        if use_mirror:
            print(f" pip install -e {project_root} -i {TSINGHUA_MIRROR}")
        else:
            print(f" pip install -e {project_root}")
        sys.exit(1)
    print()
    # Step 2: Install pip dependencies (best-effort; failures only warn).
    if args.skip_deps:
        print("[INFO] Skipping pip dependencies installation (--skip-deps)")
    else:
        print("[STEP 2] Installing pip dependencies...")
        if not requirements_file.exists():
            print(f"[WARN] Requirements file not found: {requirements_file}")
            print("[INFO] Skipping dependencies installation")
        else:
            # Try uv first (faster), fallback to pip.
            if args.use_pip:
                print("[INFO] Using pip (--use-pip flag)")
                success = install_requirements_pip(requirements_file, use_mirror)
            elif check_uv_available():
                print("[INFO] Using uv (installed via conda-forge::uv)")
                success = install_requirements_uv(requirements_file, use_mirror)
                if not success:
                    print("[WARN] uv failed, falling back to pip...")
                    success = install_requirements_pip(requirements_file, use_mirror)
            else:
                print("[WARN] uv not available (should be installed via: mamba install conda-forge::uv)")
                print("[INFO] Falling back to pip...")
                success = install_requirements_pip(requirements_file, use_mirror)
            if not success:
                print()
                print("[WARN] Failed to install some dependencies automatically.")
                print("You can manually install them:")
                if use_mirror:
                    print(f" uv pip install -r {requirements_file} -i {TSINGHUA_MIRROR}")
                    print(" or:")
                    print(f" pip install -r {requirements_file} -i {TSINGHUA_MIRROR}")
                else:
                    print(f" uv pip install -r {requirements_file}")
                    print(" or:")
                    print(f" pip install -r {requirements_file}")
    print()
    print("=" * 60)
    print("Installation complete!")
    print("=" * 60)
    print()
    print("Note: Some special packages (like pylabrobot) are installed")
    print("automatically at runtime by unilabos if needed.")
    print()
    print("Verify installation:")
    print(' python -c "import unilabos; print(unilabos.__version__)"')
    print()
    print("If you encounter issues, you can manually install dependencies:")
    if use_mirror:
        print(f" uv pip install -r unilabos/utils/requirements.txt -i {TSINGHUA_MIRROR}")
    else:
        print(" uv pip install -r unilabos/utils/requirements.txt")
    print()
# Script entry point: only run when executed directly, not when imported.
if __name__ == "__main__":
    main()

View File

@@ -4,7 +4,7 @@ package_name = 'unilabos'
setup( setup(
name=package_name, name=package_name,
version='0.10.17', version='0.10.15',
packages=find_packages(), packages=find_packages(),
include_package_data=True, include_package_data=True,
install_requires=['setuptools'], install_requires=['setuptools'],

View File

@@ -1,213 +0,0 @@
{
"workflow": [
{
"action": "transfer_liquid",
"action_args": {
"sources": "cell_lines",
"targets": "Liquid_1",
"asp_vol": 100.0,
"dis_vol": 74.75,
"asp_flow_rate": 94.0,
"dis_flow_rate": 95.5
}
},
{
"action": "transfer_liquid",
"action_args": {
"sources": "cell_lines",
"targets": "Liquid_2",
"asp_vol": 100.0,
"dis_vol": 74.75,
"asp_flow_rate": 94.0,
"dis_flow_rate": 95.5
}
},
{
"action": "transfer_liquid",
"action_args": {
"sources": "cell_lines",
"targets": "Liquid_3",
"asp_vol": 100.0,
"dis_vol": 74.75,
"asp_flow_rate": 94.0,
"dis_flow_rate": 95.5
}
},
{
"action": "transfer_liquid",
"action_args": {
"sources": "cell_lines_2",
"targets": "Liquid_4",
"asp_vol": 100.0,
"dis_vol": 74.75,
"asp_flow_rate": 94.0,
"dis_flow_rate": 95.5
}
},
{
"action": "transfer_liquid",
"action_args": {
"sources": "cell_lines_2",
"targets": "Liquid_5",
"asp_vol": 100.0,
"dis_vol": 74.75,
"asp_flow_rate": 94.0,
"dis_flow_rate": 95.5
}
},
{
"action": "transfer_liquid",
"action_args": {
"sources": "cell_lines_2",
"targets": "Liquid_6",
"asp_vol": 100.0,
"dis_vol": 74.75,
"asp_flow_rate": 94.0,
"dis_flow_rate": 95.5
}
},
{
"action": "transfer_liquid",
"action_args": {
"sources": "cell_lines_3",
"targets": "dest_set",
"asp_vol": 100.0,
"dis_vol": 74.75,
"asp_flow_rate": 94.0,
"dis_flow_rate": 95.5
}
},
{
"action": "transfer_liquid",
"action_args": {
"sources": "cell_lines_3",
"targets": "dest_set_2",
"asp_vol": 100.0,
"dis_vol": 74.75,
"asp_flow_rate": 94.0,
"dis_flow_rate": 95.5
}
},
{
"action": "transfer_liquid",
"action_args": {
"sources": "cell_lines_3",
"targets": "dest_set_3",
"asp_vol": 100.0,
"dis_vol": 74.75,
"asp_flow_rate": 94.0,
"dis_flow_rate": 95.5
}
}
],
"reagent": {
"Liquid_1": {
"slot": 1,
"well": [
"A4",
"A7",
"A10"
],
"labware": "rep 1"
},
"Liquid_4": {
"slot": 1,
"well": [
"A4",
"A7",
"A10"
],
"labware": "rep 1"
},
"dest_set": {
"slot": 1,
"well": [
"A4",
"A7",
"A10"
],
"labware": "rep 1"
},
"Liquid_2": {
"slot": 2,
"well": [
"A3",
"A5",
"A8"
],
"labware": "rep 2"
},
"Liquid_5": {
"slot": 2,
"well": [
"A3",
"A5",
"A8"
],
"labware": "rep 2"
},
"dest_set_2": {
"slot": 2,
"well": [
"A3",
"A5",
"A8"
],
"labware": "rep 2"
},
"Liquid_3": {
"slot": 3,
"well": [
"A4",
"A6",
"A10"
],
"labware": "rep 3"
},
"Liquid_6": {
"slot": 3,
"well": [
"A4",
"A6",
"A10"
],
"labware": "rep 3"
},
"dest_set_3": {
"slot": 3,
"well": [
"A4",
"A6",
"A10"
],
"labware": "rep 3"
},
"cell_lines": {
"slot": 4,
"well": [
"A1",
"A3",
"A5"
],
"labware": "DRUG + YOYO-MEDIA"
},
"cell_lines_2": {
"slot": 4,
"well": [
"A1",
"A3",
"A5"
],
"labware": "DRUG + YOYO-MEDIA"
},
"cell_lines_3": {
"slot": 4,
"well": [
"A1",
"A3",
"A5"
],
"labware": "DRUG + YOYO-MEDIA"
}
}
}

View File

@@ -1 +1 @@
__version__ = "0.10.17" __version__ = "0.10.15"

View File

@@ -1,6 +0,0 @@
"""Entry point for `python -m unilabos`."""
from unilabos.app.main import main
if __name__ == "__main__":
main()

View File

@@ -7,6 +7,7 @@ import sys
import threading import threading
import time import time
from typing import Dict, Any, List from typing import Dict, Any, List
import networkx as nx import networkx as nx
import yaml import yaml
@@ -16,9 +17,9 @@ unilabos_dir = os.path.dirname(os.path.dirname(current_dir))
if unilabos_dir not in sys.path: if unilabos_dir not in sys.path:
sys.path.append(unilabos_dir) sys.path.append(unilabos_dir)
from unilabos.app.utils import cleanup_for_restart
from unilabos.utils.banner_print import print_status, print_unilab_banner from unilabos.utils.banner_print import print_status, print_unilab_banner
from unilabos.config.config import load_config, BasicConfig, HTTPConfig from unilabos.config.config import load_config, BasicConfig, HTTPConfig
from unilabos.app.utils import cleanup_for_restart
# Global restart flags (used by ws_client and web/server) # Global restart flags (used by ws_client and web/server)
_restart_requested: bool = False _restart_requested: bool = False
@@ -160,12 +161,6 @@ def parse_args():
default=False, default=False,
help="Complete registry information", help="Complete registry information",
) )
parser.add_argument(
"--check_mode",
action="store_true",
default=False,
help="Run in check mode for CI: validates registry imports and ensures no file changes",
)
parser.add_argument( parser.add_argument(
"--no_update_feedback", "--no_update_feedback",
action="store_true", action="store_true",
@@ -216,10 +211,7 @@ def main():
args_dict = vars(args) args_dict = vars(args)
# 环境检查 - 检查并自动安装必需的包 (可选) # 环境检查 - 检查并自动安装必需的包 (可选)
skip_env_check = args_dict.get("skip_env_check", False) if not args_dict.get("skip_env_check", False):
check_mode = args_dict.get("check_mode", False)
if not skip_env_check:
from unilabos.utils.environment_check import check_environment from unilabos.utils.environment_check import check_environment
if not check_environment(auto_install=True): if not check_environment(auto_install=True):
@@ -230,21 +222,7 @@ def main():
# 加载配置文件优先加载config然后从env读取 # 加载配置文件优先加载config然后从env读取
config_path = args_dict.get("config") config_path = args_dict.get("config")
if os.getcwd().endswith("unilabos_data"):
if check_mode:
args_dict["working_dir"] = os.path.abspath(os.getcwd())
# 当 skip_env_check 时,默认使用当前目录作为 working_dir
if skip_env_check and not args_dict.get("working_dir") and not config_path:
working_dir = os.path.abspath(os.getcwd())
print_status(f"跳过环境检查模式:使用当前目录作为工作目录 {working_dir}", "info")
# 检查当前目录是否有 local_config.py
local_config_in_cwd = os.path.join(working_dir, "local_config.py")
if os.path.exists(local_config_in_cwd):
config_path = local_config_in_cwd
print_status(f"发现本地配置文件: {config_path}", "info")
else:
print_status(f"未指定config路径可通过 --config 传入 local_config.py 文件路径", "info")
elif os.getcwd().endswith("unilabos_data"):
working_dir = os.path.abspath(os.getcwd()) working_dir = os.path.abspath(os.getcwd())
else: else:
working_dir = os.path.abspath(os.path.join(os.getcwd(), "unilabos_data")) working_dir = os.path.abspath(os.path.join(os.getcwd(), "unilabos_data"))
@@ -263,7 +241,7 @@ def main():
working_dir = os.path.dirname(config_path) working_dir = os.path.dirname(config_path)
elif os.path.exists(working_dir) and os.path.exists(os.path.join(working_dir, "local_config.py")): elif os.path.exists(working_dir) and os.path.exists(os.path.join(working_dir, "local_config.py")):
config_path = os.path.join(working_dir, "local_config.py") config_path = os.path.join(working_dir, "local_config.py")
elif not skip_env_check and not config_path and ( elif not config_path and (
not os.path.exists(working_dir) or not os.path.exists(os.path.join(working_dir, "local_config.py")) not os.path.exists(working_dir) or not os.path.exists(os.path.join(working_dir, "local_config.py"))
): ):
print_status(f"未指定config路径可通过 --config 传入 local_config.py 文件路径", "info") print_status(f"未指定config路径可通过 --config 传入 local_config.py 文件路径", "info")
@@ -277,11 +255,9 @@ def main():
print_status(f"已创建 local_config.py 路径: {config_path}", "info") print_status(f"已创建 local_config.py 路径: {config_path}", "info")
else: else:
os._exit(1) os._exit(1)
# 加载配置文件
# 加载配置文件 (check_mode 跳过)
print_status(f"当前工作目录为 {working_dir}", "info") print_status(f"当前工作目录为 {working_dir}", "info")
if not check_mode: load_config_from_file(config_path)
load_config_from_file(config_path)
# 根据配置重新设置日志级别 # 根据配置重新设置日志级别
from unilabos.utils.log import configure_logger, logger from unilabos.utils.log import configure_logger, logger
@@ -337,7 +313,6 @@ def main():
machine_name = "".join([c if c.isalnum() or c == "_" else "_" for c in machine_name]) machine_name = "".join([c if c.isalnum() or c == "_" else "_" for c in machine_name])
BasicConfig.machine_name = machine_name BasicConfig.machine_name = machine_name
BasicConfig.vis_2d_enable = args_dict["2d_vis"] BasicConfig.vis_2d_enable = args_dict["2d_vis"]
BasicConfig.check_mode = check_mode
from unilabos.resources.graphio import ( from unilabos.resources.graphio import (
read_node_link_json, read_node_link_json,
@@ -356,14 +331,10 @@ def main():
# 显示启动横幅 # 显示启动横幅
print_unilab_banner(args_dict) print_unilab_banner(args_dict)
# 注册表 - check_mode 时强制启用 complete_registry # 注册表
complete_registry = args_dict.get("complete_registry", False) or check_mode lab_registry = build_registry(
lab_registry = build_registry(args_dict["registry_path"], complete_registry, BasicConfig.upload_registry) args_dict["registry_path"], args_dict.get("complete_registry", False), BasicConfig.upload_registry
)
# Check mode: complete_registry 完成后直接退出git diff 检测由 CI workflow 执行
if check_mode:
print_status("Check mode: complete_registry 完成,退出", "info")
os._exit(0)
if BasicConfig.upload_registry: if BasicConfig.upload_registry:
# 设备注册到服务端 - 需要 ak 和 sk # 设备注册到服务端 - 需要 ak 和 sk

View File

@@ -54,7 +54,6 @@ class JobAddReq(BaseModel):
action_type: str = Field( action_type: str = Field(
examples=["unilabos_msgs.action._str_single_input.StrSingleInput"], description="action type", default="" examples=["unilabos_msgs.action._str_single_input.StrSingleInput"], description="action type", default=""
) )
sample_material: dict = Field(examples=[{"string": "string"}], description="sample uuid to material uuid")
action_args: dict = Field(examples=[{"string": "string"}], description="action arguments", default_factory=dict) action_args: dict = Field(examples=[{"string": "string"}], description="action arguments", default_factory=dict)
task_id: str = Field(examples=["task_id"], description="task uuid (auto-generated if empty)", default="") task_id: str = Field(examples=["task_id"], description="task uuid (auto-generated if empty)", default="")
job_id: str = Field(examples=["job_id"], description="goal uuid (auto-generated if empty)", default="") job_id: str = Field(examples=["job_id"], description="goal uuid (auto-generated if empty)", default="")

View File

@@ -4,40 +4,8 @@ UniLabOS 应用工具函数
提供清理、重启等工具函数 提供清理、重启等工具函数
""" """
import glob
import os
import shutil
import sys
def patch_rclpy_dll_windows():
"""在 Windows + conda 环境下为 rclpy 打 DLL 加载补丁"""
if sys.platform != "win32" or not os.environ.get("CONDA_PREFIX"):
return
try:
import rclpy
return
except ImportError as e:
if not str(e).startswith("DLL load failed"):
return
cp = os.environ["CONDA_PREFIX"]
impl = os.path.join(cp, "Lib", "site-packages", "rclpy", "impl", "implementation_singleton.py")
pyd = glob.glob(os.path.join(cp, "Lib", "site-packages", "rclpy", "_rclpy_pybind11*.pyd"))
if not os.path.exists(impl) or not pyd:
return
with open(impl, "r", encoding="utf-8") as f:
content = f.read()
lib_bin = os.path.join(cp, "Library", "bin").replace("\\", "/")
patch = f'# UniLabOS DLL Patch\nimport os,ctypes\nos.add_dll_directory("{lib_bin}") if hasattr(os,"add_dll_directory") else None\ntry: ctypes.CDLL("{pyd[0].replace(chr(92),"/")}")\nexcept: pass\n# End Patch\n'
shutil.copy2(impl, impl + ".bak")
with open(impl, "w", encoding="utf-8") as f:
f.write(patch + content)
patch_rclpy_dll_windows()
import gc import gc
import os
import threading import threading
import time import time

View File

@@ -359,7 +359,9 @@ class HTTPClient:
Returns: Returns:
Dict: API响应数据包含 code 和 data (uuid, name) Dict: API响应数据包含 code 和 data (uuid, name)
""" """
# target_lab_uuid 暂时使用默认值,后续由后端根据 ak/sk 获取
payload = { payload = {
"target_lab_uuid": "28c38bb0-63f6-4352-b0d8-b5b8eb1766d5",
"name": name, "name": name,
"data": { "data": {
"workflow_uuid": workflow_uuid, "workflow_uuid": workflow_uuid,

View File

@@ -58,14 +58,14 @@ class JobResultStore:
feedback=feedback or {}, feedback=feedback or {},
timestamp=time.time(), timestamp=time.time(),
) )
logger.trace(f"[JobResultStore] Stored result for job {job_id[:8]}, status={status}") logger.debug(f"[JobResultStore] Stored result for job {job_id[:8]}, status={status}")
def get_and_remove(self, job_id: str) -> Optional[JobResult]: def get_and_remove(self, job_id: str) -> Optional[JobResult]:
"""获取并删除任务结果""" """获取并删除任务结果"""
with self._results_lock: with self._results_lock:
result = self._results.pop(job_id, None) result = self._results.pop(job_id, None)
if result: if result:
logger.trace(f"[JobResultStore] Retrieved and removed result for job {job_id[:8]}") logger.debug(f"[JobResultStore] Retrieved and removed result for job {job_id[:8]}")
return result return result
def get_result(self, job_id: str) -> Optional[JobResult]: def get_result(self, job_id: str) -> Optional[JobResult]:
@@ -327,7 +327,6 @@ def job_add(req: JobAddReq) -> JobData:
queue_item, queue_item,
action_type=action_type, action_type=action_type,
action_kwargs=action_args, action_kwargs=action_args,
sample_material=req.sample_material,
server_info=server_info, server_info=server_info,
) )

View File

@@ -23,7 +23,7 @@ from typing import Optional, Dict, Any, List
from urllib.parse import urlparse from urllib.parse import urlparse
from enum import Enum from enum import Enum
from typing_extensions import TypedDict from jedi.inference.gradual.typing import TypedDict
from unilabos.app.model import JobAddReq from unilabos.app.model import JobAddReq
from unilabos.ros.nodes.presets.host_node import HostNode from unilabos.ros.nodes.presets.host_node import HostNode
@@ -154,7 +154,7 @@ class DeviceActionManager:
job_info.set_ready_timeout(10) # 设置10秒超时 job_info.set_ready_timeout(10) # 设置10秒超时
self.active_jobs[device_key] = job_info self.active_jobs[device_key] = job_info
job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name) job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
logger.trace(f"[DeviceActionManager] Job {job_log} can start immediately for {device_key}") logger.info(f"[DeviceActionManager] Job {job_log} can start immediately for {device_key}")
return True return True
def start_job(self, job_id: str) -> bool: def start_job(self, job_id: str) -> bool:
@@ -210,9 +210,8 @@ class DeviceActionManager:
job_info.update_timestamp() job_info.update_timestamp()
# 从all_jobs中移除已结束的job # 从all_jobs中移除已结束的job
del self.all_jobs[job_id] del self.all_jobs[job_id]
# job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name) job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
# logger.debug(f"[DeviceActionManager] Job {job_log} ended for {device_key}") logger.info(f"[DeviceActionManager] Job {job_log} ended for {device_key}")
pass
else: else:
job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name) job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
logger.warning(f"[DeviceActionManager] Job {job_log} was not active for {device_key}") logger.warning(f"[DeviceActionManager] Job {job_log} was not active for {device_key}")
@@ -228,7 +227,7 @@ class DeviceActionManager:
next_job_log = format_job_log( next_job_log = format_job_log(
next_job.job_id, next_job.task_id, next_job.device_id, next_job.action_name next_job.job_id, next_job.task_id, next_job.device_id, next_job.action_name
) )
logger.trace(f"[DeviceActionManager] Next job {next_job_log} can start for {device_key}") logger.info(f"[DeviceActionManager] Next job {next_job_log} can start for {device_key}")
return next_job return next_job
return None return None
@@ -269,7 +268,7 @@ class DeviceActionManager:
# 从all_jobs中移除 # 从all_jobs中移除
del self.all_jobs[job_id] del self.all_jobs[job_id]
job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name) job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
logger.trace(f"[DeviceActionManager] Active job {job_log} cancelled for {device_key}") logger.info(f"[DeviceActionManager] Active job {job_log} cancelled for {device_key}")
# 启动下一个任务 # 启动下一个任务
if device_key in self.device_queues and self.device_queues[device_key]: if device_key in self.device_queues and self.device_queues[device_key]:
@@ -282,7 +281,7 @@ class DeviceActionManager:
next_job_log = format_job_log( next_job_log = format_job_log(
next_job.job_id, next_job.task_id, next_job.device_id, next_job.action_name next_job.job_id, next_job.task_id, next_job.device_id, next_job.action_name
) )
logger.trace(f"[DeviceActionManager] Next job {next_job_log} can start after cancel") logger.info(f"[DeviceActionManager] Next job {next_job_log} can start after cancel")
return True return True
# 如果是排队中的任务 # 如果是排队中的任务
@@ -296,7 +295,7 @@ class DeviceActionManager:
job_log = format_job_log( job_log = format_job_log(
job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name
) )
logger.trace(f"[DeviceActionManager] Queued job {job_log} cancelled for {device_key}") logger.info(f"[DeviceActionManager] Queued job {job_log} cancelled for {device_key}")
return True return True
job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name) job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
@@ -495,12 +494,8 @@ class MessageProcessor:
await self._process_message(message_type, message_data) await self._process_message(message_type, message_data)
else: else:
if message_type.endswith("_material"): if message_type.endswith("_material"):
logger.trace( logger.trace(f"[MessageProcessor] 收到一条归属 {data.get('edge_session')} 的旧消息:{data}")
f"[MessageProcessor] 收到一条归属 {data.get('edge_session')} 的旧消息{data}" logger.debug(f"[MessageProcessor] 跳过了一条归属 {data.get('edge_session')} 的旧消息: {data.get('action')}")
)
logger.debug(
f"[MessageProcessor] 跳过了一条归属 {data.get('edge_session')} 的旧消息: {data.get('action')}"
)
else: else:
await self._process_message(message_type, message_data) await self._process_message(message_type, message_data)
except json.JSONDecodeError: except json.JSONDecodeError:
@@ -545,7 +540,7 @@ class MessageProcessor:
try: try:
message_str = json.dumps(msg, ensure_ascii=False) message_str = json.dumps(msg, ensure_ascii=False)
await self.websocket.send(message_str) await self.websocket.send(message_str)
# logger.trace(f"[MessageProcessor] Message sent: {msg.get('action', 'unknown')}") # type: ignore # noqa: E501 logger.trace(f"[MessageProcessor] Message sent: {msg.get('action', 'unknown')}") # type: ignore # noqa: E501
except Exception as e: except Exception as e:
logger.error(f"[MessageProcessor] Failed to send message: {str(e)}") logger.error(f"[MessageProcessor] Failed to send message: {str(e)}")
logger.error(traceback.format_exc()) logger.error(traceback.format_exc())
@@ -570,7 +565,7 @@ class MessageProcessor:
async def _process_message(self, message_type: str, message_data: Dict[str, Any]): async def _process_message(self, message_type: str, message_data: Dict[str, Any]):
"""处理收到的消息""" """处理收到的消息"""
logger.trace(f"[MessageProcessor] Processing message: {message_type}") logger.debug(f"[MessageProcessor] Processing message: {message_type}")
try: try:
if message_type == "pong": if message_type == "pong":
@@ -642,13 +637,13 @@ class MessageProcessor:
await self._send_action_state_response( await self._send_action_state_response(
device_id, action_name, task_id, job_id, "query_action_status", True, 0 device_id, action_name, task_id, job_id, "query_action_status", True, 0
) )
logger.trace(f"[MessageProcessor] Job {job_log} can start immediately") logger.info(f"[MessageProcessor] Job {job_log} can start immediately")
else: else:
# 需要排队 # 需要排队
await self._send_action_state_response( await self._send_action_state_response(
device_id, action_name, task_id, job_id, "query_action_status", False, 10 device_id, action_name, task_id, job_id, "query_action_status", False, 10
) )
logger.trace(f"[MessageProcessor] Job {job_log} queued") logger.info(f"[MessageProcessor] Job {job_log} queued")
# 通知QueueProcessor有新的队列更新 # 通知QueueProcessor有新的队列更新
if self.queue_processor: if self.queue_processor:
@@ -657,8 +652,6 @@ class MessageProcessor:
async def _handle_job_start(self, data: Dict[str, Any]): async def _handle_job_start(self, data: Dict[str, Any]):
"""处理job_start消息""" """处理job_start消息"""
try: try:
if not data.get("sample_material"):
data["sample_material"] = {}
req = JobAddReq(**data) req = JobAddReq(**data)
job_log = format_job_log(req.job_id, req.task_id, req.device_id, req.action) job_log = format_job_log(req.job_id, req.task_id, req.device_id, req.action)
@@ -690,7 +683,6 @@ class MessageProcessor:
queue_item, queue_item,
action_type=req.action_type, action_type=req.action_type,
action_kwargs=req.action_args, action_kwargs=req.action_args,
sample_material=req.sample_material,
server_info=req.server_info, server_info=req.server_info,
) )
@@ -855,7 +847,9 @@ class MessageProcessor:
device_action_groups[key_add] = [] device_action_groups[key_add] = []
device_action_groups[key_add].append(item["uuid"]) device_action_groups[key_add].append(item["uuid"])
logger.info(f"[资源同步] 跨站Transfer: {item['uuid'][:8]} from {device_old_id} to {device_id}") logger.info(
f"[MessageProcessor] Resource migrated: {item['uuid'][:8]} from {device_old_id} to {device_id}"
)
else: else:
# 正常update # 正常update
key = (device_id, "update") key = (device_id, "update")
@@ -869,13 +863,11 @@ class MessageProcessor:
device_action_groups[key] = [] device_action_groups[key] = []
device_action_groups[key].append(item["uuid"]) device_action_groups[key].append(item["uuid"])
logger.trace( logger.info(f"触发物料更新 {action} 分组数量: {len(device_action_groups)}, 总数量: {len(resource_uuid_list)}")
f"[资源同步] 动作 {action} 分组数量: {len(device_action_groups)}, 总数量: {len(resource_uuid_list)}"
)
# 为每个(device_id, action)创建独立的更新线程 # 为每个(device_id, action)创建独立的更新线程
for (device_id, actual_action), items in device_action_groups.items(): for (device_id, actual_action), items in device_action_groups.items():
logger.trace(f"[资源同步] {device_id} 物料动作 {actual_action} 数量: {len(items)}") logger.info(f"设备 {device_id} 物料更新 {actual_action} 数量: {len(items)}")
def _notify_resource_tree(dev_id, act, item_list): def _notify_resource_tree(dev_id, act, item_list):
try: try:
@@ -919,13 +911,13 @@ class MessageProcessor:
# 发送确认消息 # 发送确认消息
if self.websocket_client: if self.websocket_client:
await self.websocket_client.send_message( await self.websocket_client.send_message({
{"action": "restart_acknowledged", "data": {"reason": reason, "delay": delay}} "action": "restart_acknowledged",
) "data": {"reason": reason, "delay": delay}
})
# 设置全局重启标志 # 设置全局重启标志
import unilabos.app.main as main_module import unilabos.app.main as main_module
main_module._restart_requested = True main_module._restart_requested = True
main_module._restart_reason = reason main_module._restart_reason = reason
@@ -935,12 +927,10 @@ class MessageProcessor:
# 在新线程中执行清理,避免阻塞当前事件循环 # 在新线程中执行清理,避免阻塞当前事件循环
def do_cleanup(): def do_cleanup():
import time import time
time.sleep(0.5) # 给当前消息处理完成的时间 time.sleep(0.5) # 给当前消息处理完成的时间
logger.info(f"[MessageProcessor] Starting cleanup for restart, reason: {reason}") logger.info(f"[MessageProcessor] Starting cleanup for restart, reason: {reason}")
try: try:
from unilabos.app.utils import cleanup_for_restart from unilabos.app.utils import cleanup_for_restart
if cleanup_for_restart(): if cleanup_for_restart():
logger.info("[MessageProcessor] Cleanup successful, main() will restart") logger.info("[MessageProcessor] Cleanup successful, main() will restart")
else: else:
@@ -1138,7 +1128,7 @@ class QueueProcessor:
success = self.message_processor.send_message(message) success = self.message_processor.send_message(message)
job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name) job_log = format_job_log(job_info.job_id, job_info.task_id, job_info.device_id, job_info.action_name)
if success: if success:
logger.trace(f"[QueueProcessor] Sent busy/need_more for queued job {job_log}") logger.debug(f"[QueueProcessor] Sent busy/need_more for queued job {job_log}")
else: else:
logger.warning(f"[QueueProcessor] Failed to send busy status for job {job_log}") logger.warning(f"[QueueProcessor] Failed to send busy status for job {job_log}")
@@ -1161,7 +1151,7 @@ class QueueProcessor:
job_info.action_name, job_info.action_name,
) )
logger.trace(f"[QueueProcessor] Job {job_log} completed with status: {status}") logger.info(f"[QueueProcessor] Job {job_log} completed with status: {status}")
# 结束任务,获取下一个可执行的任务 # 结束任务,获取下一个可执行的任务
next_job = self.device_manager.end_job(job_id) next_job = self.device_manager.end_job(job_id)
@@ -1181,8 +1171,8 @@ class QueueProcessor:
}, },
} }
self.message_processor.send_message(message) self.message_processor.send_message(message)
# next_job_log = format_job_log(next_job.job_id, next_job.task_id, next_job.device_id, next_job.action_name) next_job_log = format_job_log(next_job.job_id, next_job.task_id, next_job.device_id, next_job.action_name)
# logger.debug(f"[QueueProcessor] Notified next job {next_job_log} can start") logger.info(f"[QueueProcessor] Notified next job {next_job_log} can start")
# 立即触发下一轮状态检查 # 立即触发下一轮状态检查
self.notify_queue_update() self.notify_queue_update()
@@ -1304,7 +1294,7 @@ class WebSocketClient(BaseCommunicationClient):
}, },
} }
self.message_processor.send_message(message) self.message_processor.send_message(message)
# logger.trace(f"[WebSocketClient] Device status published: {device_id}.{property_name}") logger.trace(f"[WebSocketClient] Device status published: {device_id}.{property_name}")
def publish_job_status( def publish_job_status(
self, feedback_data: dict, item: QueueItem, status: str, return_info: Optional[dict] = None self, feedback_data: dict, item: QueueItem, status: str, return_info: Optional[dict] = None
@@ -1324,7 +1314,7 @@ class WebSocketClient(BaseCommunicationClient):
except (KeyError, AttributeError): except (KeyError, AttributeError):
logger.warning(f"[WebSocketClient] Failed to remove job {item.job_id} from HostNode status") logger.warning(f"[WebSocketClient] Failed to remove job {item.job_id} from HostNode status")
# logger.debug(f"[WebSocketClient] Intercepting final status for job_id: {item.job_id} - {status}") logger.info(f"[WebSocketClient] Intercepting final status for job_id: {item.job_id} - {status}")
# 通知队列处理器job完成包括timeout的job # 通知队列处理器job完成包括timeout的job
self.queue_processor.handle_job_completed(item.job_id, status) self.queue_processor.handle_job_completed(item.job_id, status)
@@ -1391,9 +1381,7 @@ class WebSocketClient(BaseCommunicationClient):
if host_node: if host_node:
# 获取设备信息 # 获取设备信息
for device_id, namespace in host_node.devices_names.items(): for device_id, namespace in host_node.devices_names.items():
device_key = ( device_key = f"{namespace}/{device_id}" if namespace.startswith("/") else f"/{namespace}/{device_id}"
f"{namespace}/{device_id}" if namespace.startswith("/") else f"/{namespace}/{device_id}"
)
is_online = device_key in host_node._online_devices is_online = device_key in host_node._online_devices
# 获取设备的动作信息 # 获取设备的动作信息
@@ -1407,16 +1395,14 @@ class WebSocketClient(BaseCommunicationClient):
"action_type": str(type(client).__name__), "action_type": str(type(client).__name__),
} }
devices.append( devices.append({
{ "device_id": device_id,
"device_id": device_id, "namespace": namespace,
"namespace": namespace, "device_key": device_key,
"device_key": device_key, "is_online": is_online,
"is_online": is_online, "machine_name": host_node.device_machine_names.get(device_id, machine_name),
"machine_name": host_node.device_machine_names.get(device_id, machine_name), "actions": actions,
"actions": actions, })
}
)
logger.info(f"[WebSocketClient] Collected {len(devices)} devices for host_ready") logger.info(f"[WebSocketClient] Collected {len(devices)} devices for host_ready")
except Exception as e: except Exception as e:

View File

@@ -95,29 +95,8 @@ def get_vessel_liquid_volume(G: nx.DiGraph, vessel: str) -> float:
return total_volume return total_volume
def is_integrated_pump(node_class: str, node_name: str = "") -> bool: def is_integrated_pump(node_name):
""" return "pump" in node_name and "valve" in node_name
判断是否为泵阀一体设备
"""
class_lower = (node_class or "").lower()
name_lower = (node_name or "").lower()
if "pump" not in class_lower and "pump" not in name_lower:
return False
integrated_markers = [
"valve",
"pump_valve",
"pumpvalve",
"integrated",
"transfer_pump",
]
for marker in integrated_markers:
if marker in class_lower or marker in name_lower:
return True
return False
def find_connected_pump(G, valve_node): def find_connected_pump(G, valve_node):
@@ -207,9 +186,7 @@ def build_pump_valve_maps(G, pump_backbone):
debug_print(f"🔧 过滤后的骨架: {filtered_backbone}") debug_print(f"🔧 过滤后的骨架: {filtered_backbone}")
for node in filtered_backbone: for node in filtered_backbone:
node_data = G.nodes.get(node, {}) if is_integrated_pump(G.nodes[node]["class"]):
node_class = node_data.get("class", "") or ""
if is_integrated_pump(node_class, node):
pumps_from_node[node] = node pumps_from_node[node] = node
valve_from_node[node] = node valve_from_node[node] = node
debug_print(f" - 集成泵-阀: {node}") debug_print(f" - 集成泵-阀: {node}")

View File

@@ -22,7 +22,6 @@ class BasicConfig:
startup_json_path = None # 填写绝对路径 startup_json_path = None # 填写绝对路径
disable_browser = False # 禁止浏览器自动打开 disable_browser = False # 禁止浏览器自动打开
port = 8002 # 本地HTTP服务 port = 8002 # 本地HTTP服务
check_mode = False # CI 检查模式,用于验证 registry 导入和文件一致性
# 'TRACE', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL' # 'TRACE', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'
log_level: Literal["TRACE", "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] = "DEBUG" log_level: Literal["TRACE", "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] = "DEBUG"

File diff suppressed because it is too large Load Diff

View File

@@ -1,11 +1,15 @@
from __future__ import annotations from __future__ import annotations
import asyncio
import time import time
import traceback import traceback
from collections import Counter from collections import Counter
from typing import List, Sequence, Optional, Literal, Union, Iterator, Dict, Any, Callable, Set, cast from typing import List, Sequence, Optional, Literal, Union, Iterator, Dict, Any, Callable, Set, cast
from typing_extensions import TypedDict
from pylabrobot.liquid_handling import LiquidHandler, LiquidHandlerBackend, LiquidHandlerChatterboxBackend, Strictness from pylabrobot.liquid_handling import LiquidHandler, LiquidHandlerBackend, LiquidHandlerChatterboxBackend, Strictness
from unilabos.devices.liquid_handling.rviz_backend import UniLiquidHandlerRvizBackend
from unilabos.devices.liquid_handling.laiyu.backend.laiyu_v_backend import UniLiquidHandlerLaiyuBackend
from pylabrobot.liquid_handling.liquid_handler import TipPresenceProbingMethod from pylabrobot.liquid_handling.liquid_handler import TipPresenceProbingMethod
from pylabrobot.liquid_handling.standard import GripDirection from pylabrobot.liquid_handling.standard import GripDirection
from pylabrobot.resources import ( from pylabrobot.resources import (
@@ -23,53 +27,22 @@ from pylabrobot.resources import (
Trash, Trash,
Tip, Tip,
) )
from typing_extensions import TypedDict
from unilabos.devices.liquid_handling.rviz_backend import UniLiquidHandlerRvizBackend
from unilabos.registry.placeholder_type import ResourceSlot
from unilabos.resources.resource_tracker import (
ResourceTreeSet,
ResourceDict,
EXTRA_SAMPLE_UUID,
EXTRA_UNILABOS_SAMPLE_UUID,
)
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode, ROS2DeviceNode
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode
class SimpleReturn(TypedDict): class SimpleReturn(TypedDict):
samples: List[List[ResourceDict]] samples: list
volumes: List[float] volumes: list
class SetLiquidReturn(TypedDict):
wells: List[List[ResourceDict]]
volumes: List[float]
class SetLiquidFromPlateReturn(TypedDict):
plate: List[List[ResourceDict]]
wells: List[List[ResourceDict]]
volumes: List[float]
class TransferLiquidReturn(TypedDict):
sources: List[List[ResourceDict]]
targets: List[List[ResourceDict]]
class LiquidHandlerMiddleware(LiquidHandler): class LiquidHandlerMiddleware(LiquidHandler):
def __init__( def __init__(self, backend: LiquidHandlerBackend, deck: Deck, simulator: bool = False, channel_num: int = 8, **kwargs):
self, backend: LiquidHandlerBackend, deck: Deck, simulator: bool = False, channel_num: int = 8, **kwargs
):
self._simulator = simulator self._simulator = simulator
self.channel_num = channel_num self.channel_num = channel_num
self.pending_liquids_dict = {} self.pending_liquids_dict = {}
joint_config = kwargs.get("joint_config", None) joint_config = kwargs.get("joint_config", None)
if simulator: if simulator:
if joint_config: if joint_config:
self._simulate_backend = UniLiquidHandlerRvizBackend( self._simulate_backend = UniLiquidHandlerRvizBackend(channel_num, kwargs["total_height"],
channel_num, kwargs["total_height"], joint_config=joint_config, lh_device_id=deck.name joint_config=joint_config, lh_device_id=deck.name)
)
else: else:
self._simulate_backend = LiquidHandlerChatterboxBackend(channel_num) self._simulate_backend = LiquidHandlerChatterboxBackend(channel_num)
self._simulate_handler = LiquidHandlerAbstract(self._simulate_backend, deck, False) self._simulate_handler = LiquidHandlerAbstract(self._simulate_backend, deck, False)
@@ -186,9 +159,7 @@ class LiquidHandlerMiddleware(LiquidHandler):
if not offsets or (isinstance(offsets, list) and len(offsets) != len(use_channels)): if not offsets or (isinstance(offsets, list) and len(offsets) != len(use_channels)):
offsets = [Coordinate.zero()] * len(use_channels) offsets = [Coordinate.zero()] * len(use_channels)
if self._simulator: if self._simulator:
return await self._simulate_handler.discard_tips( return await self._simulate_handler.discard_tips(use_channels, allow_nonzero_volume, offsets, **backend_kwargs)
use_channels, allow_nonzero_volume, offsets, **backend_kwargs
)
await super().discard_tips(use_channels, allow_nonzero_volume, offsets, **backend_kwargs) await super().discard_tips(use_channels, allow_nonzero_volume, offsets, **backend_kwargs)
self.pending_liquids_dict = {} self.pending_liquids_dict = {}
return return
@@ -209,6 +180,7 @@ class LiquidHandlerMiddleware(LiquidHandler):
**backend_kwargs, **backend_kwargs,
): ):
if self._simulator: if self._simulator:
return await self._simulate_handler.aspirate( return await self._simulate_handler.aspirate(
resources, resources,
@@ -236,15 +208,15 @@ class LiquidHandlerMiddleware(LiquidHandler):
res_samples = [] res_samples = []
res_volumes = [] res_volumes = []
for resource, volume, channel in zip(resources, vols, use_channels): for resource, volume, channel in zip(resources, vols, use_channels):
sample_uuid_value = resource.unilabos_extra.get(EXTRA_SAMPLE_UUID, None) res_samples.append({"name": resource.name, "sample_uuid": resource.unilabos_extra.get("sample_uuid", None)})
res_samples.append({"name": resource.name, EXTRA_SAMPLE_UUID: sample_uuid_value})
res_volumes.append(volume) res_volumes.append(volume)
self.pending_liquids_dict[channel] = { self.pending_liquids_dict[channel] = {
EXTRA_SAMPLE_UUID: sample_uuid_value, "sample_uuid": resource.unilabos_extra.get("sample_uuid", None),
"volume": volume, "volume": volume
} }
return SimpleReturn(samples=res_samples, volumes=res_volumes) return SimpleReturn(samples=res_samples, volumes=res_volumes)
async def dispense( async def dispense(
self, self,
resources: Sequence[Container], resources: Sequence[Container],
@@ -282,10 +254,10 @@ class LiquidHandlerMiddleware(LiquidHandler):
res_samples = [] res_samples = []
res_volumes = [] res_volumes = []
for resource, volume, channel in zip(resources, vols, use_channels): for resource, volume, channel in zip(resources, vols, use_channels):
res_uuid = self.pending_liquids_dict[channel][EXTRA_SAMPLE_UUID] res_uuid = self.pending_liquids_dict[channel]["sample_uuid"]
self.pending_liquids_dict[channel]["volume"] -= volume self.pending_liquids_dict[channel]["volume"] -= volume
resource.unilabos_extra[EXTRA_SAMPLE_UUID] = res_uuid resource.unilabos_extra["sample_uuid"] = res_uuid
res_samples.append({"name": resource.name, EXTRA_SAMPLE_UUID: res_uuid}) res_samples.append({"name": resource.name, "sample_uuid": res_uuid})
res_volumes.append(volume) res_volumes.append(volume)
return SimpleReturn(samples=res_samples, volumes=res_volumes) return SimpleReturn(samples=res_samples, volumes=res_volumes)
@@ -606,18 +578,10 @@ class LiquidHandlerMiddleware(LiquidHandler):
class LiquidHandlerAbstract(LiquidHandlerMiddleware): class LiquidHandlerAbstract(LiquidHandlerMiddleware):
"""Extended LiquidHandler with additional operations.""" """Extended LiquidHandler with additional operations."""
support_touch_tip = True support_touch_tip = True
_ros_node: BaseROS2DeviceNode _ros_node: BaseROS2DeviceNode
def __init__( def __init__(self, backend: LiquidHandlerBackend, deck: Deck, simulator: bool=False, channel_num:int = 8, total_height:float = 310):
self,
backend: LiquidHandlerBackend,
deck: Deck,
simulator: bool = False,
channel_num: int = 8,
total_height: float = 310,
):
"""Initialize a LiquidHandler. """Initialize a LiquidHandler.
Args: Args:
@@ -641,7 +605,6 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
module_name = ".".join(components[:-1]) module_name = ".".join(components[:-1])
try: try:
import importlib import importlib
mod = importlib.import_module(module_name) mod = importlib.import_module(module_name)
except ImportError: except ImportError:
mod = None mod = None
@@ -651,7 +614,6 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
# Try pylabrobot style import (if available) # Try pylabrobot style import (if available)
try: try:
import pylabrobot import pylabrobot
backend_cls = getattr(pylabrobot, type_str, None) backend_cls = getattr(pylabrobot, type_str, None)
except Exception: except Exception:
backend_cls = None backend_cls = None
@@ -669,65 +631,16 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
self._ros_node = ros_node self._ros_node = ros_node
@classmethod @classmethod
def set_liquid(cls, wells: list[Well], liquid_names: list[str], volumes: list[float]) -> SetLiquidReturn: def set_liquid(cls, wells: list[Well], liquid_names: list[str], volumes: list[float]) -> SimpleReturn:
"""Set the liquid in a well. """Set the liquid in a well."""
res_samples = []
如果 liquid_names 和 volumes 为空,但 wells 不为空,直接返回 wells。
"""
res_volumes = [] res_volumes = []
# 如果 liquid_names 和 volumes 都为空,直接返回 wells
if not liquid_names and not volumes:
return SetLiquidReturn(
wells=ResourceTreeSet.from_plr_resources(wells, known_newly_created=False).dump(), volumes=res_volumes # type: ignore
)
for well, liquid_name, volume in zip(wells, liquid_names, volumes): for well, liquid_name, volume in zip(wells, liquid_names, volumes):
well.set_liquids([(liquid_name, volume)]) # type: ignore well.set_liquids([(liquid_name, volume)]) # type: ignore
res_samples.append({"name": well.name, "sample_uuid": well.unilabos_extra.get("sample_uuid", None)})
res_volumes.append(volume) res_volumes.append(volume)
return SetLiquidReturn( return SimpleReturn(samples=res_samples, volumes=res_volumes)
wells=ResourceTreeSet.from_plr_resources(wells, known_newly_created=False).dump(), volumes=res_volumes # type: ignore
)
def set_liquid_from_plate(
self, plate: ResourceSlot, well_names: list[str], liquid_names: list[str], volumes: list[float]
) -> SetLiquidFromPlateReturn:
"""Set the liquid in wells of a plate by well names (e.g., A1, A2, B3).
如果 liquid_names 和 volumes 为空,但 plate 和 well_names 不为空,直接返回 plate 和 wells。
"""
assert issubclass(plate.__class__, Plate), "plate must be a Plate"
plate: Plate = cast(Plate, cast(Resource, plate))
# 根据 well_names 获取对应的 Well 对象
wells = [plate.get_well(name) for name in well_names]
res_volumes = []
# 如果 liquid_names 和 volumes 都为空,直接返回
if not liquid_names and not volumes:
return SetLiquidFromPlateReturn(
plate=ResourceTreeSet.from_plr_resources([plate], known_newly_created=False).dump(), # type: ignore
wells=ResourceTreeSet.from_plr_resources(wells, known_newly_created=False).dump(), # type: ignore
volumes=res_volumes,
)
for well, liquid_name, volume in zip(wells, liquid_names, volumes):
well.set_liquids([(liquid_name, volume)]) # type: ignore
res_volumes.append(volume)
task = ROS2DeviceNode.run_async_func(self._ros_node.update_resource, True, **{"resources": wells})
submit_time = time.time()
while not task.done():
if time.time() - submit_time > 10:
self._ros_node.lab_logger().info(f"set_liquid_from_plate {plate} 超时")
break
time.sleep(0.01)
return SetLiquidFromPlateReturn(
plate=ResourceTreeSet.from_plr_resources([plate], known_newly_created=False).dump(), # type: ignore
wells=ResourceTreeSet.from_plr_resources(wells, known_newly_created=False).dump(), # type: ignore
volumes=res_volumes,
)
# --------------------------------------------------------------- # ---------------------------------------------------------------
# REMOVE LIQUID -------------------------------------------------- # REMOVE LIQUID --------------------------------------------------
# --------------------------------------------------------------- # ---------------------------------------------------------------
@@ -763,7 +676,7 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
target_rack = child target_rack = child
target_rack = cast(TipRack, target_rack) target_rack = cast(TipRack, target_rack)
available_tips = {} available_tips = {}
for idx, tipSpot in enumerate(target_rack.get_all_items()): for (idx, tipSpot) in enumerate(target_rack.get_all_items()):
if tipSpot.has_tip(): if tipSpot.has_tip():
available_tips[idx] = tipSpot available_tips[idx] = tipSpot
continue continue
@@ -771,8 +684,8 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
print("channel_num", self.channel_num) print("channel_num", self.channel_num)
if self.channel_num == 8: if self.channel_num == 8:
tip_prefix = list(available_tips.values())[0].name.split("_")[0] tip_prefix = list(available_tips.values())[0].name.split('_')[0]
colnum_list = [int(tip.name.split("_")[-1][1:]) for tip in available_tips.values()] colnum_list = [int(tip.name.split('_')[-1][1:]) for tip in available_tips.values()]
available_cols = [colnum for colnum, count in dict(Counter(colnum_list)).items() if count == 8] available_cols = [colnum for colnum, count in dict(Counter(colnum_list)).items() if count == 8]
available_cols.sort() available_cols.sort()
available_tips_dict = {tip.name: tip for tip in available_tips.values()} available_tips_dict = {tip.name: tip for tip in available_tips.values()}
@@ -816,6 +729,7 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
"""Create a new protocol with the given metadata.""" """Create a new protocol with the given metadata."""
pass pass
async def remove_liquid( async def remove_liquid(
self, self,
vols: List[float], vols: List[float],
@@ -874,11 +788,10 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
elif len(use_channels) == 8 and self.backend.num_channels == 8: elif len(use_channels) == 8 and self.backend.num_channels == 8:
# 对于8个的情况需要判断此时任务是不是能被8通道移液站来成功处理 # 对于8个的情况需要判断此时任务是不是能被8通道移液站来成功处理
if len(sources) % 8 != 0: if len(sources) % 8 != 0:
raise ValueError( raise ValueError(f"Length of `sources` {len(sources)} must be a multiple of 8 for 8-channel mode.")
f"Length of `sources` {len(sources)} must be a multiple of 8 for 8-channel mode."
)
# 8个8个来取任务序列 # 8个8个来取任务序列
@@ -887,28 +800,18 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
for _ in range(len(use_channels)): for _ in range(len(use_channels)):
tip.extend(next(self.current_tip)) tip.extend(next(self.current_tip))
await self.pick_up_tips(tip) await self.pick_up_tips(tip)
current_targets = waste_liquid[i : i + 8] current_targets = waste_liquid[i:i + 8]
current_reagent_sources = sources[i : i + 8] current_reagent_sources = sources[i:i + 8]
current_asp_vols = vols[i : i + 8] current_asp_vols = vols[i:i + 8]
current_dis_vols = vols[i : i + 8] current_dis_vols = vols[i:i + 8]
current_asp_flow_rates = flow_rates[i : i + 8] if flow_rates else [None] * 8 current_asp_flow_rates = flow_rates[i:i + 8] if flow_rates else [None] * 8
current_dis_flow_rates = ( current_dis_flow_rates = flow_rates[-i*8-8:len(flow_rates)-i*8] if flow_rates else [None] * 8
flow_rates[-i * 8 - 8 : len(flow_rates) - i * 8] if flow_rates else [None] * 8 current_asp_offset = offsets[i:i + 8] if offsets else [None] * 8
) current_dis_offset = offsets[-i*8-8:len(offsets)-i*8] if offsets else [None] * 8
current_asp_offset = offsets[i : i + 8] if offsets else [None] * 8 current_asp_liquid_height = liquid_height[i:i + 8] if liquid_height else [None] * 8
current_dis_offset = offsets[-i * 8 - 8 : len(offsets) - i * 8] if offsets else [None] * 8 current_dis_liquid_height = liquid_height[-i*8-8:len(liquid_height)-i*8] if liquid_height else [None] * 8
current_asp_liquid_height = liquid_height[i : i + 8] if liquid_height else [None] * 8 current_asp_blow_out_air_volume = blow_out_air_volume[i:i + 8] if blow_out_air_volume else [None] * 8
current_dis_liquid_height = ( current_dis_blow_out_air_volume = blow_out_air_volume[-i*8-8:len(blow_out_air_volume)-i*8] if blow_out_air_volume else [None] * 8
liquid_height[-i * 8 - 8 : len(liquid_height) - i * 8] if liquid_height else [None] * 8
)
current_asp_blow_out_air_volume = (
blow_out_air_volume[i : i + 8] if blow_out_air_volume else [None] * 8
)
current_dis_blow_out_air_volume = (
blow_out_air_volume[-i * 8 - 8 : len(blow_out_air_volume) - i * 8]
if blow_out_air_volume
else [None] * 8
)
await self.aspirate( await self.aspirate(
resources=current_reagent_sources, resources=current_reagent_sources,
@@ -969,136 +872,127 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
# """A complete *add* (aspirate reagent → dispense into targets) operation.""" # """A complete *add* (aspirate reagent → dispense into targets) operation."""
# # try: # # try:
if is_96_well: if is_96_well:
pass # This mode is not verified. pass # This mode is not verified.
else: else:
if len(asp_vols) != len(targets): if len(asp_vols) != len(targets):
raise ValueError(f"Length of `asp_vols` {len(asp_vols)} must match `targets` {len(targets)}.") raise ValueError(f"Length of `asp_vols` {len(asp_vols)} must match `targets` {len(targets)}.")
# 首先应该对任务分组然后每次1个/8个进行操作处理 # 首先应该对任务分组然后每次1个/8个进行操作处理
if len(use_channels) == 1: if len(use_channels) == 1:
for _ in range(len(targets)): for _ in range(len(targets)):
tip = [] tip = []
for x in range(len(use_channels)): for x in range(len(use_channels)):
tip.extend(next(self.current_tip)) tip.extend(next(self.current_tip))
await self.pick_up_tips(tip) await self.pick_up_tips(tip)
await self.aspirate( await self.aspirate(
resources=[reagent_sources[_]], resources=[reagent_sources[_]],
vols=[asp_vols[_]], vols=[asp_vols[_]],
use_channels=use_channels, use_channels=use_channels,
flow_rates=[flow_rates[0]] if flow_rates else None, flow_rates=[flow_rates[0]] if flow_rates else None,
offsets=[offsets[0]] if offsets else None, offsets=[offsets[0]] if offsets else None,
liquid_height=[liquid_height[0]] if liquid_height else None, liquid_height=[liquid_height[0]] if liquid_height else None,
blow_out_air_volume=[blow_out_air_volume[0]] if blow_out_air_volume else None, blow_out_air_volume=[blow_out_air_volume[0]] if blow_out_air_volume else None,
spread=spread, spread=spread,
)
if delays is not None:
await self.custom_delay(seconds=delays[0])
await self.dispense(
resources=[targets[_]],
vols=[dis_vols[_]],
use_channels=use_channels,
flow_rates=[flow_rates[1]] if flow_rates else None,
offsets=[offsets[1]] if offsets else None,
blow_out_air_volume=[blow_out_air_volume[1]] if blow_out_air_volume else None,
liquid_height=[liquid_height[1]] if liquid_height else None,
spread=spread,
)
if delays is not None and len(delays) > 1:
await self.custom_delay(seconds=delays[1])
# 只有在 mix_time 有效时才调用 mix
if mix_time is not None and mix_time > 0:
await self.mix(
targets=[targets[_]],
mix_time=mix_time,
mix_vol=mix_vol,
offsets=offsets if offsets else None,
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
mix_rate=mix_rate if mix_rate else None,
) )
if delays is not None and len(delays) > 1:
await self.custom_delay(seconds=delays[1])
await self.touch_tip(targets[_])
await self.discard_tips()
elif len(use_channels) == 8: if delays is not None:
# 对于8个的情况需要判断此时任务是不是能被8通道移液站来成功处理 await self.custom_delay(seconds=delays[0])
if len(targets) % 8 != 0: await self.dispense(
raise ValueError(f"Length of `targets` {len(targets)} must be a multiple of 8 for 8-channel mode.") resources=[targets[_]],
vols=[dis_vols[_]],
for i in range(0, len(targets), 8): use_channels=use_channels,
tip = [] flow_rates=[flow_rates[1]] if flow_rates else None,
for _ in range(len(use_channels)): offsets=[offsets[1]] if offsets else None,
tip.extend(next(self.current_tip)) blow_out_air_volume=[blow_out_air_volume[1]] if blow_out_air_volume else None,
await self.pick_up_tips(tip) liquid_height=[liquid_height[1]] if liquid_height else None,
current_targets = targets[i : i + 8] spread=spread,
current_reagent_sources = reagent_sources[i : i + 8]
current_asp_vols = asp_vols[i : i + 8]
current_dis_vols = dis_vols[i : i + 8]
current_asp_flow_rates = flow_rates[i : i + 8] if flow_rates else [None] * 8
current_dis_flow_rates = (
flow_rates[-i * 8 - 8 : len(flow_rates) - i * 8] if flow_rates else [None] * 8
)
current_asp_offset = offsets[i : i + 8] if offsets else [None] * 8
current_dis_offset = offsets[-i * 8 - 8 : len(offsets) - i * 8] if offsets else [None] * 8
current_asp_liquid_height = liquid_height[i : i + 8] if liquid_height else [None] * 8
current_dis_liquid_height = (
liquid_height[-i * 8 - 8 : len(liquid_height) - i * 8] if liquid_height else [None] * 8
)
current_asp_blow_out_air_volume = (
blow_out_air_volume[i : i + 8] if blow_out_air_volume else [None] * 8
)
current_dis_blow_out_air_volume = (
blow_out_air_volume[-i * 8 - 8 : len(blow_out_air_volume) - i * 8]
if blow_out_air_volume
else [None] * 8
)
await self.aspirate(
resources=current_reagent_sources,
vols=current_asp_vols,
use_channels=use_channels,
flow_rates=current_asp_flow_rates,
offsets=current_asp_offset,
liquid_height=current_asp_liquid_height,
blow_out_air_volume=current_asp_blow_out_air_volume,
spread=spread,
)
if delays is not None:
await self.custom_delay(seconds=delays[0])
await self.dispense(
resources=current_targets,
vols=current_dis_vols,
use_channels=use_channels,
flow_rates=current_dis_flow_rates,
offsets=current_dis_offset,
liquid_height=current_dis_liquid_height,
blow_out_air_volume=current_dis_blow_out_air_volume,
spread=spread,
)
if delays is not None and len(delays) > 1:
await self.custom_delay(seconds=delays[1])
# 只有在 mix_time 有效时才调用 mix
if mix_time is not None and mix_time > 0:
await self.mix(
targets=current_targets,
mix_time=mix_time,
mix_vol=mix_vol,
offsets=offsets if offsets else None,
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
mix_rate=mix_rate if mix_rate else None,
) )
if delays is not None and len(delays) > 1:
await self.custom_delay(seconds=delays[1])
await self.touch_tip(current_targets)
await self.discard_tips()
# except Exception as e: if delays is not None and len(delays) > 1:
# traceback.print_exc() await self.custom_delay(seconds=delays[1])
# raise RuntimeError(f"Liquid addition failed: {e}") from e # 只有在 mix_time 有效时才调用 mix
if mix_time is not None and mix_time > 0:
await self.mix(
targets=[targets[_]],
mix_time=mix_time,
mix_vol=mix_vol,
offsets=offsets if offsets else None,
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
mix_rate=mix_rate if mix_rate else None,
)
if delays is not None and len(delays) > 1:
await self.custom_delay(seconds=delays[1])
await self.touch_tip(targets[_])
await self.discard_tips()
elif len(use_channels) == 8:
# 对于8个的情况需要判断此时任务是不是能被8通道移液站来成功处理
if len(targets) % 8 != 0:
raise ValueError(f"Length of `targets` {len(targets)} must be a multiple of 8 for 8-channel mode.")
for i in range(0, len(targets), 8):
tip = []
for _ in range(len(use_channels)):
tip.extend(next(self.current_tip))
await self.pick_up_tips(tip)
current_targets = targets[i:i + 8]
current_reagent_sources = reagent_sources[i:i + 8]
current_asp_vols = asp_vols[i:i + 8]
current_dis_vols = dis_vols[i:i + 8]
current_asp_flow_rates = flow_rates[i:i + 8] if flow_rates else [None] * 8
current_dis_flow_rates = flow_rates[-i*8-8:len(flow_rates)-i*8] if flow_rates else [None] * 8
current_asp_offset = offsets[i:i + 8] if offsets else [None] * 8
current_dis_offset = offsets[-i*8-8:len(offsets)-i*8] if offsets else [None] * 8
current_asp_liquid_height = liquid_height[i:i + 8] if liquid_height else [None] * 8
current_dis_liquid_height = liquid_height[-i*8-8:len(liquid_height)-i*8] if liquid_height else [None] * 8
current_asp_blow_out_air_volume = blow_out_air_volume[i:i + 8] if blow_out_air_volume else [None] * 8
current_dis_blow_out_air_volume = blow_out_air_volume[-i*8-8:len(blow_out_air_volume)-i*8] if blow_out_air_volume else [None] * 8
await self.aspirate(
resources=current_reagent_sources,
vols=current_asp_vols,
use_channels=use_channels,
flow_rates=current_asp_flow_rates,
offsets=current_asp_offset,
liquid_height=current_asp_liquid_height,
blow_out_air_volume=current_asp_blow_out_air_volume,
spread=spread,
)
if delays is not None:
await self.custom_delay(seconds=delays[0])
await self.dispense(
resources=current_targets,
vols=current_dis_vols,
use_channels=use_channels,
flow_rates=current_dis_flow_rates,
offsets=current_dis_offset,
liquid_height=current_dis_liquid_height,
blow_out_air_volume=current_dis_blow_out_air_volume,
spread=spread,
)
if delays is not None and len(delays) > 1:
await self.custom_delay(seconds=delays[1])
# 只有在 mix_time 有效时才调用 mix
if mix_time is not None and mix_time > 0:
await self.mix(
targets=current_targets,
mix_time=mix_time,
mix_vol=mix_vol,
offsets=offsets if offsets else None,
height_to_bottom=mix_liquid_height if mix_liquid_height else None,
mix_rate=mix_rate if mix_rate else None,
)
if delays is not None and len(delays) > 1:
await self.custom_delay(seconds=delays[1])
await self.touch_tip(current_targets)
await self.discard_tips()
# except Exception as e:
# traceback.print_exc()
# raise RuntimeError(f"Liquid addition failed: {e}") from e
# --------------------------------------------------------------- # ---------------------------------------------------------------
# TRANSFER LIQUID ------------------------------------------------ # TRANSFER LIQUID ------------------------------------------------
@@ -1127,7 +1021,7 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
mix_liquid_height: Optional[float] = None, mix_liquid_height: Optional[float] = None,
delays: Optional[List[int]] = None, delays: Optional[List[int]] = None,
none_keys: List[str] = [], none_keys: List[str] = [],
) -> TransferLiquidReturn: ):
"""Transfer liquid with automatic mode detection. """Transfer liquid with automatic mode detection.
Supports three transfer modes: Supports three transfer modes:
@@ -1195,71 +1089,29 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
if num_sources == 1 and num_targets > 1: if num_sources == 1 and num_targets > 1:
# 模式1: 一对多 (1 source -> N targets) # 模式1: 一对多 (1 source -> N targets)
await self._transfer_one_to_many( await self._transfer_one_to_many(
sources[0], sources[0], targets, tip_racks, use_channels,
targets, asp_vols, dis_vols, asp_flow_rates, dis_flow_rates,
tip_racks, offsets, touch_tip, liquid_height, blow_out_air_volume,
use_channels, spread, mix_stage, mix_times, mix_vol, mix_rate,
asp_vols, mix_liquid_height, delays
dis_vols,
asp_flow_rates,
dis_flow_rates,
offsets,
touch_tip,
liquid_height,
blow_out_air_volume,
spread,
mix_stage,
mix_times,
mix_vol,
mix_rate,
mix_liquid_height,
delays,
) )
elif num_sources > 1 and num_targets == 1: elif num_sources > 1 and num_targets == 1:
# 模式2: 多对一 (N sources -> 1 target) # 模式2: 多对一 (N sources -> 1 target)
await self._transfer_many_to_one( await self._transfer_many_to_one(
sources, sources, targets[0], tip_racks, use_channels,
targets[0], asp_vols, dis_vols, asp_flow_rates, dis_flow_rates,
tip_racks, offsets, touch_tip, liquid_height, blow_out_air_volume,
use_channels, spread, mix_stage, mix_times, mix_vol, mix_rate,
asp_vols, mix_liquid_height, delays
dis_vols,
asp_flow_rates,
dis_flow_rates,
offsets,
touch_tip,
liquid_height,
blow_out_air_volume,
spread,
mix_stage,
mix_times,
mix_vol,
mix_rate,
mix_liquid_height,
delays,
) )
elif num_sources == num_targets: elif num_sources == num_targets:
# 模式3: 一对一 (N sources -> N targets) # 模式3: 一对一 (N sources -> N targets)
await self._transfer_one_to_one( await self._transfer_one_to_one(
sources, sources, targets, tip_racks, use_channels,
targets, asp_vols, dis_vols, asp_flow_rates, dis_flow_rates,
tip_racks, offsets, touch_tip, liquid_height, blow_out_air_volume,
use_channels, spread, mix_stage, mix_times, mix_vol, mix_rate,
asp_vols, mix_liquid_height, delays
dis_vols,
asp_flow_rates,
dis_flow_rates,
offsets,
touch_tip,
liquid_height,
blow_out_air_volume,
spread,
mix_stage,
mix_times,
mix_vol,
mix_rate,
mix_liquid_height,
delays,
) )
else: else:
raise ValueError( raise ValueError(
@@ -1267,11 +1119,6 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
"Supported modes: 1->N, N->1, or N->N." "Supported modes: 1->N, N->1, or N->N."
) )
return TransferLiquidReturn(
sources=ResourceTreeSet.from_plr_resources(list(sources), known_newly_created=False).dump(), # type: ignore
targets=ResourceTreeSet.from_plr_resources(list(targets), known_newly_created=False).dump(), # type: ignore
)
async def _transfer_one_to_one( async def _transfer_one_to_one(
self, self,
sources: Sequence[Container], sources: Sequence[Container],
@@ -1327,9 +1174,7 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
flow_rates=[asp_flow_rates[_]] if asp_flow_rates and len(asp_flow_rates) > _ else None, flow_rates=[asp_flow_rates[_]] if asp_flow_rates and len(asp_flow_rates) > _ else None,
offsets=[offsets[_]] if offsets and len(offsets) > _ else None, offsets=[offsets[_]] if offsets and len(offsets) > _ else None,
liquid_height=[liquid_height[_]] if liquid_height and len(liquid_height) > _ else None, liquid_height=[liquid_height[_]] if liquid_height and len(liquid_height) > _ else None,
blow_out_air_volume=( blow_out_air_volume=[blow_out_air_volume[_]] if blow_out_air_volume and len(blow_out_air_volume) > _ else None,
[blow_out_air_volume[_]] if blow_out_air_volume and len(blow_out_air_volume) > _ else None
),
spread=spread, spread=spread,
) )
if delays is not None: if delays is not None:
@@ -1340,9 +1185,7 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
use_channels=use_channels, use_channels=use_channels,
flow_rates=[dis_flow_rates[_]] if dis_flow_rates and len(dis_flow_rates) > _ else None, flow_rates=[dis_flow_rates[_]] if dis_flow_rates and len(dis_flow_rates) > _ else None,
offsets=[offsets[_]] if offsets and len(offsets) > _ else None, offsets=[offsets[_]] if offsets and len(offsets) > _ else None,
blow_out_air_volume=( blow_out_air_volume=[blow_out_air_volume[_]] if blow_out_air_volume and len(blow_out_air_volume) > _ else None,
[blow_out_air_volume[_]] if blow_out_air_volume and len(blow_out_air_volume) > _ else None
),
liquid_height=[liquid_height[_]] if liquid_height and len(liquid_height) > _ else None, liquid_height=[liquid_height[_]] if liquid_height and len(liquid_height) > _ else None,
spread=spread, spread=spread,
) )
@@ -1371,18 +1214,18 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
for _ in range(len(use_channels)): for _ in range(len(use_channels)):
tip.extend(next(self.current_tip)) tip.extend(next(self.current_tip))
await self.pick_up_tips(tip) await self.pick_up_tips(tip)
current_targets = targets[i : i + 8] current_targets = targets[i:i + 8]
current_reagent_sources = sources[i : i + 8] current_reagent_sources = sources[i:i + 8]
current_asp_vols = asp_vols[i : i + 8] current_asp_vols = asp_vols[i:i + 8]
current_dis_vols = dis_vols[i : i + 8] current_dis_vols = dis_vols[i:i + 8]
current_asp_flow_rates = asp_flow_rates[i : i + 8] if asp_flow_rates else None current_asp_flow_rates = asp_flow_rates[i:i + 8] if asp_flow_rates else None
current_asp_offset = offsets[i : i + 8] if offsets else [None] * 8 current_asp_offset = offsets[i:i + 8] if offsets else [None] * 8
current_dis_offset = offsets[i : i + 8] if offsets else [None] * 8 current_dis_offset = offsets[i:i + 8] if offsets else [None] * 8
current_asp_liquid_height = liquid_height[i : i + 8] if liquid_height else [None] * 8 current_asp_liquid_height = liquid_height[i:i + 8] if liquid_height else [None] * 8
current_dis_liquid_height = liquid_height[i : i + 8] if liquid_height else [None] * 8 current_dis_liquid_height = liquid_height[i:i + 8] if liquid_height else [None] * 8
current_asp_blow_out_air_volume = blow_out_air_volume[i : i + 8] if blow_out_air_volume else [None] * 8 current_asp_blow_out_air_volume = blow_out_air_volume[i:i + 8] if blow_out_air_volume else [None] * 8
current_dis_blow_out_air_volume = blow_out_air_volume[i : i + 8] if blow_out_air_volume else [None] * 8 current_dis_blow_out_air_volume = blow_out_air_volume[i:i + 8] if blow_out_air_volume else [None] * 8
current_dis_flow_rates = dis_flow_rates[i : i + 8] if dis_flow_rates else None current_dis_flow_rates = dis_flow_rates[i:i + 8] if dis_flow_rates else None
if mix_stage in ["before", "both"] and mix_times is not None and mix_times > 0: if mix_stage in ["before", "both"] and mix_times is not None and mix_times > 0:
await self.mix( await self.mix(
@@ -1432,7 +1275,7 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
if delays is not None and len(delays) > 1: if delays is not None and len(delays) > 1:
await self.custom_delay(seconds=delays[1]) await self.custom_delay(seconds=delays[1])
await self.touch_tip(current_targets) await self.touch_tip(current_targets)
await self.discard_tips([0, 1, 2, 3, 4, 5, 6, 7]) await self.discard_tips([0,1,2,3,4,5,6,7])
async def _transfer_one_to_many( async def _transfer_one_to_many(
self, self,
@@ -1481,7 +1324,7 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
targets=[target], targets=[target],
mix_time=mix_times, mix_time=mix_times,
mix_vol=mix_vol, mix_vol=mix_vol,
offsets=offsets[idx : idx + 1] if offsets and len(offsets) > idx else None, offsets=offsets[idx:idx + 1] if offsets and len(offsets) > idx else None,
height_to_bottom=mix_liquid_height if mix_liquid_height else None, height_to_bottom=mix_liquid_height if mix_liquid_height else None,
mix_rate=mix_rate if mix_rate else None, mix_rate=mix_rate if mix_rate else None,
) )
@@ -1494,9 +1337,7 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
flow_rates=[asp_flow_rates[0]] if asp_flow_rates and len(asp_flow_rates) > 0 else None, flow_rates=[asp_flow_rates[0]] if asp_flow_rates and len(asp_flow_rates) > 0 else None,
offsets=[offsets[0]] if offsets and len(offsets) > 0 else None, offsets=[offsets[0]] if offsets and len(offsets) > 0 else None,
liquid_height=[liquid_height[0]] if liquid_height and len(liquid_height) > 0 else None, liquid_height=[liquid_height[0]] if liquid_height and len(liquid_height) > 0 else None,
blow_out_air_volume=( blow_out_air_volume=[blow_out_air_volume[0]] if blow_out_air_volume and len(blow_out_air_volume) > 0 else None,
[blow_out_air_volume[0]] if blow_out_air_volume and len(blow_out_air_volume) > 0 else None
),
spread=spread, spread=spread,
) )
@@ -1511,9 +1352,7 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
use_channels=use_channels, use_channels=use_channels,
flow_rates=[dis_flow_rates[idx]] if dis_flow_rates and len(dis_flow_rates) > idx else None, flow_rates=[dis_flow_rates[idx]] if dis_flow_rates and len(dis_flow_rates) > idx else None,
offsets=[offsets[idx]] if offsets and len(offsets) > idx else None, offsets=[offsets[idx]] if offsets and len(offsets) > idx else None,
blow_out_air_volume=( blow_out_air_volume=[blow_out_air_volume[idx]] if blow_out_air_volume and len(blow_out_air_volume) > idx else None,
[blow_out_air_volume[idx]] if blow_out_air_volume and len(blow_out_air_volume) > idx else None
),
liquid_height=[liquid_height[idx]] if liquid_height and len(liquid_height) > idx else None, liquid_height=[liquid_height[idx]] if liquid_height and len(liquid_height) > idx else None,
spread=spread, spread=spread,
) )
@@ -1524,7 +1363,7 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
targets=[target], targets=[target],
mix_time=mix_times, mix_time=mix_times,
mix_vol=mix_vol, mix_vol=mix_vol,
offsets=offsets[idx : idx + 1] if offsets else None, offsets=offsets[idx:idx+1] if offsets else None,
height_to_bottom=mix_liquid_height if mix_liquid_height else None, height_to_bottom=mix_liquid_height if mix_liquid_height else None,
mix_rate=mix_rate if mix_rate else None, mix_rate=mix_rate if mix_rate else None,
) )
@@ -1545,29 +1384,21 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
tip.extend(next(self.current_tip)) tip.extend(next(self.current_tip))
await self.pick_up_tips(tip) await self.pick_up_tips(tip)
current_targets = targets[i : i + 8] current_targets = targets[i:i + 8]
current_dis_vols = dis_vols[i : i + 8] current_dis_vols = dis_vols[i:i + 8]
# 8个通道都从同一个源容器吸液每个通道的吸液体积等于对应的分液体积 # 8个通道都从同一个源容器吸液每个通道的吸液体积等于对应的分液体积
current_asp_flow_rates = ( current_asp_flow_rates = asp_flow_rates[0:1] * 8 if asp_flow_rates and len(asp_flow_rates) > 0 else None
asp_flow_rates[0:1] * 8 if asp_flow_rates and len(asp_flow_rates) > 0 else None
)
current_asp_offset = offsets[0:1] * 8 if offsets and len(offsets) > 0 else [None] * 8 current_asp_offset = offsets[0:1] * 8 if offsets and len(offsets) > 0 else [None] * 8
current_asp_liquid_height = ( current_asp_liquid_height = liquid_height[0:1] * 8 if liquid_height and len(liquid_height) > 0 else [None] * 8
liquid_height[0:1] * 8 if liquid_height and len(liquid_height) > 0 else [None] * 8 current_asp_blow_out_air_volume = blow_out_air_volume[0:1] * 8 if blow_out_air_volume and len(blow_out_air_volume) > 0 else [None] * 8
)
current_asp_blow_out_air_volume = (
blow_out_air_volume[0:1] * 8
if blow_out_air_volume and len(blow_out_air_volume) > 0
else [None] * 8
)
if mix_stage in ["before", "both"] and mix_times is not None and mix_times > 0: if mix_stage in ["before", "both"] and mix_times is not None and mix_times > 0:
await self.mix( await self.mix(
targets=current_targets, targets=current_targets,
mix_time=mix_times, mix_time=mix_times,
mix_vol=mix_vol, mix_vol=mix_vol,
offsets=offsets[i : i + 8] if offsets else None, offsets=offsets[i:i + 8] if offsets else None,
height_to_bottom=mix_liquid_height if mix_liquid_height else None, height_to_bottom=mix_liquid_height if mix_liquid_height else None,
mix_rate=mix_rate if mix_rate else None, mix_rate=mix_rate if mix_rate else None,
) )
@@ -1588,10 +1419,10 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
await self.custom_delay(seconds=delays[0]) await self.custom_delay(seconds=delays[0])
# 分液到8个目标 # 分液到8个目标
current_dis_flow_rates = dis_flow_rates[i : i + 8] if dis_flow_rates else None current_dis_flow_rates = dis_flow_rates[i:i + 8] if dis_flow_rates else None
current_dis_offset = offsets[i : i + 8] if offsets else [None] * 8 current_dis_offset = offsets[i:i + 8] if offsets else [None] * 8
current_dis_liquid_height = liquid_height[i : i + 8] if liquid_height else [None] * 8 current_dis_liquid_height = liquid_height[i:i + 8] if liquid_height else [None] * 8
current_dis_blow_out_air_volume = blow_out_air_volume[i : i + 8] if blow_out_air_volume else [None] * 8 current_dis_blow_out_air_volume = blow_out_air_volume[i:i + 8] if blow_out_air_volume else [None] * 8
await self.dispense( await self.dispense(
resources=current_targets, resources=current_targets,
@@ -1620,7 +1451,7 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
if touch_tip: if touch_tip:
await self.touch_tip(current_targets) await self.touch_tip(current_targets)
await self.discard_tips([0, 1, 2, 3, 4, 5, 6, 7]) await self.discard_tips([0,1,2,3,4,5,6,7])
async def _transfer_many_to_one( async def _transfer_many_to_one(
self, self,
@@ -1693,9 +1524,7 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
flow_rates=[asp_flow_rates[idx]] if asp_flow_rates and len(asp_flow_rates) > idx else None, flow_rates=[asp_flow_rates[idx]] if asp_flow_rates and len(asp_flow_rates) > idx else None,
offsets=[offsets[idx]] if offsets and len(offsets) > idx else None, offsets=[offsets[idx]] if offsets and len(offsets) > idx else None,
liquid_height=[liquid_height[idx]] if liquid_height and len(liquid_height) > idx else None, liquid_height=[liquid_height[idx]] if liquid_height and len(liquid_height) > idx else None,
blow_out_air_volume=( blow_out_air_volume=[blow_out_air_volume[idx]] if blow_out_air_volume and len(blow_out_air_volume) > idx else None,
[blow_out_air_volume[idx]] if blow_out_air_volume and len(blow_out_air_volume) > idx else None
),
spread=spread, spread=spread,
) )
@@ -1709,18 +1538,14 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
dis_flow_rate = dis_flow_rates[idx] if dis_flow_rates and len(dis_flow_rates) > idx else None dis_flow_rate = dis_flow_rates[idx] if dis_flow_rates and len(dis_flow_rates) > idx else None
dis_offset = offsets[idx] if offsets and len(offsets) > idx else None dis_offset = offsets[idx] if offsets and len(offsets) > idx else None
dis_liquid_height = liquid_height[idx] if liquid_height and len(liquid_height) > idx else None dis_liquid_height = liquid_height[idx] if liquid_height and len(liquid_height) > idx else None
dis_blow_out = ( dis_blow_out = blow_out_air_volume[idx] if blow_out_air_volume and len(blow_out_air_volume) > idx else None
blow_out_air_volume[idx] if blow_out_air_volume and len(blow_out_air_volume) > idx else None
)
else: else:
# 标准模式:分液体积等于吸液体积 # 标准模式:分液体积等于吸液体积
dis_vol = asp_vols[idx] dis_vol = asp_vols[idx]
dis_flow_rate = dis_flow_rates[0] if dis_flow_rates and len(dis_flow_rates) > 0 else None dis_flow_rate = dis_flow_rates[0] if dis_flow_rates and len(dis_flow_rates) > 0 else None
dis_offset = offsets[0] if offsets and len(offsets) > 0 else None dis_offset = offsets[0] if offsets and len(offsets) > 0 else None
dis_liquid_height = liquid_height[0] if liquid_height and len(liquid_height) > 0 else None dis_liquid_height = liquid_height[0] if liquid_height and len(liquid_height) > 0 else None
dis_blow_out = ( dis_blow_out = blow_out_air_volume[0] if blow_out_air_volume and len(blow_out_air_volume) > 0 else None
blow_out_air_volume[0] if blow_out_air_volume and len(blow_out_air_volume) > 0 else None
)
await self.dispense( await self.dispense(
resources=[target], resources=[target],
@@ -1774,12 +1599,12 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
tip.extend(next(self.current_tip)) tip.extend(next(self.current_tip))
await self.pick_up_tips(tip) await self.pick_up_tips(tip)
current_sources = sources[i : i + 8] current_sources = sources[i:i + 8]
current_asp_vols = asp_vols[i : i + 8] current_asp_vols = asp_vols[i:i + 8]
current_asp_flow_rates = asp_flow_rates[i : i + 8] if asp_flow_rates else None current_asp_flow_rates = asp_flow_rates[i:i + 8] if asp_flow_rates else None
current_asp_offset = offsets[i : i + 8] if offsets else [None] * 8 current_asp_offset = offsets[i:i + 8] if offsets else [None] * 8
current_asp_liquid_height = liquid_height[i : i + 8] if liquid_height else [None] * 8 current_asp_liquid_height = liquid_height[i:i + 8] if liquid_height else [None] * 8
current_asp_blow_out_air_volume = blow_out_air_volume[i : i + 8] if blow_out_air_volume else [None] * 8 current_asp_blow_out_air_volume = blow_out_air_volume[i:i + 8] if blow_out_air_volume else [None] * 8
# 从8个源容器吸液 # 从8个源容器吸液
await self.aspirate( await self.aspirate(
@@ -1799,22 +1624,18 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
# 分液到目标容器(每个通道分液到同一个目标) # 分液到目标容器(每个通道分液到同一个目标)
if use_proportional_mixing: if use_proportional_mixing:
# 按比例混合:使用对应的 dis_vols # 按比例混合:使用对应的 dis_vols
current_dis_vols = dis_vols[i : i + 8] current_dis_vols = dis_vols[i:i + 8]
current_dis_flow_rates = dis_flow_rates[i : i + 8] if dis_flow_rates else None current_dis_flow_rates = dis_flow_rates[i:i + 8] if dis_flow_rates else None
current_dis_offset = offsets[i : i + 8] if offsets else [None] * 8 current_dis_offset = offsets[i:i + 8] if offsets else [None] * 8
current_dis_liquid_height = liquid_height[i : i + 8] if liquid_height else [None] * 8 current_dis_liquid_height = liquid_height[i:i + 8] if liquid_height else [None] * 8
current_dis_blow_out_air_volume = ( current_dis_blow_out_air_volume = blow_out_air_volume[i:i + 8] if blow_out_air_volume else [None] * 8
blow_out_air_volume[i : i + 8] if blow_out_air_volume else [None] * 8
)
else: else:
# 标准模式:每个通道分液体积等于其吸液体积 # 标准模式:每个通道分液体积等于其吸液体积
current_dis_vols = current_asp_vols current_dis_vols = current_asp_vols
current_dis_flow_rates = dis_flow_rates[0:1] * 8 if dis_flow_rates else None current_dis_flow_rates = dis_flow_rates[0:1] * 8 if dis_flow_rates else None
current_dis_offset = offsets[0:1] * 8 if offsets else [None] * 8 current_dis_offset = offsets[0:1] * 8 if offsets else [None] * 8
current_dis_liquid_height = liquid_height[0:1] * 8 if liquid_height else [None] * 8 current_dis_liquid_height = liquid_height[0:1] * 8 if liquid_height else [None] * 8
current_dis_blow_out_air_volume = ( current_dis_blow_out_air_volume = blow_out_air_volume[0:1] * 8 if blow_out_air_volume else [None] * 8
blow_out_air_volume[0:1] * 8 if blow_out_air_volume else [None] * 8
)
await self.dispense( await self.dispense(
resources=[target] * 8, # 8个通道都分到同一个目标 resources=[target] * 8, # 8个通道都分到同一个目标
@@ -1830,7 +1651,7 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
if delays is not None and len(delays) > 1: if delays is not None and len(delays) > 1:
await self.custom_delay(seconds=delays[1]) await self.custom_delay(seconds=delays[1])
await self.discard_tips([0, 1, 2, 3, 4, 5, 6, 7]) await self.discard_tips([0,1,2,3,4,5,6,7])
# 最后在目标容器中混合(如果需要) # 最后在目标容器中混合(如果需要)
if mix_stage in ["after", "both"] and mix_times is not None and mix_times > 0: if mix_stage in ["after", "both"] and mix_times is not None and mix_times > 0:
@@ -1850,6 +1671,7 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
# traceback.print_exc() # traceback.print_exc()
# raise RuntimeError(f"Liquid addition failed: {e}") from e # raise RuntimeError(f"Liquid addition failed: {e}") from e
# --------------------------------------------------------------- # ---------------------------------------------------------------
# Helper utilities # Helper utilities
# --------------------------------------------------------------- # ---------------------------------------------------------------
@@ -1870,6 +1692,7 @@ class LiquidHandlerAbstract(LiquidHandlerMiddleware):
print(f"Current time: {time.strftime('%H:%M:%S')}") print(f"Current time: {time.strftime('%H:%M:%S')}")
async def touch_tip(self, targets: Sequence[Container]): async def touch_tip(self, targets: Sequence[Container]):
"""Touch the tip to the side of the well.""" """Touch the tip to the side of the well."""
if not self.support_touch_tip: if not self.support_touch_tip:

View File

@@ -30,31 +30,9 @@ from pylabrobot.liquid_handling.standard import (
ResourceMove, ResourceMove,
ResourceDrop, ResourceDrop,
) )
from pylabrobot.resources import ( from pylabrobot.resources import ResourceHolder, ResourceStack, Tip, Deck, Plate, Well, TipRack, Resource, Container, Coordinate, TipSpot, Trash, PlateAdapter, TubeRack
ResourceHolder,
ResourceStack,
Tip,
Deck,
Plate,
Well,
TipRack,
Resource,
Container,
Coordinate,
TipSpot,
Trash,
PlateAdapter,
TubeRack,
)
from unilabos.devices.liquid_handling.liquid_handler_abstract import ( from unilabos.devices.liquid_handling.liquid_handler_abstract import LiquidHandlerAbstract, SimpleReturn
LiquidHandlerAbstract,
SimpleReturn,
SetLiquidReturn,
SetLiquidFromPlateReturn,
TransferLiquidReturn,
)
from unilabos.registry.placeholder_type import ResourceSlot
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode
@@ -102,7 +80,6 @@ class PRCXI9300Deck(Deck):
self.slots[slot - 1] = resource self.slots[slot - 1] = resource
super().assign_child_resource(resource, location=self.slot_locations[slot - 1]) super().assign_child_resource(resource, location=self.slot_locations[slot - 1])
class PRCXI9300Container(Plate): class PRCXI9300Container(Plate):
"""PRCXI 9300 的专用 Container 类,继承自 Plate用于槽位定位和未知模块。 """PRCXI 9300 的专用 Container 类,继承自 Plate用于槽位定位和未知模块。
@@ -132,80 +109,73 @@ class PRCXI9300Container(Plate):
data = super().serialize_state() data = super().serialize_state()
data.update(self._unilabos_state) data.update(self._unilabos_state)
return data return data
class PRCXI9300Plate(Plate): class PRCXI9300Plate(Plate):
""" """
专用孔板类: 专用孔板类:
1. 继承自 PLR 原生 Plate保留所有物理特性。 1. 继承自 PLR 原生 Plate保留所有物理特性。
2. 增加 material_info 参数,用于在初始化时直接绑定 Unilab UUID。 2. 增加 material_info 参数,用于在初始化时直接绑定 Unilab UUID。
""" """
def __init__(self, name: str, size_x: float, size_y: float, size_z: float,
def __init__( category: str = "plate",
self, ordered_items: collections.OrderedDict = None,
name: str, ordering: Optional[collections.OrderedDict] = None,
size_x: float, model: Optional[str] = None,
size_y: float, material_info: Optional[Dict[str, Any]] = None,
size_z: float, **kwargs):
category: str = "plate",
ordered_items: collections.OrderedDict = None,
ordering: Optional[collections.OrderedDict] = None,
model: Optional[str] = None,
material_info: Optional[Dict[str, Any]] = None,
**kwargs,
):
# 如果 ordered_items 不为 None直接使用 # 如果 ordered_items 不为 None直接使用
items = None
ordering_param = None
if ordered_items is not None: if ordered_items is not None:
items = ordered_items items = ordered_items
elif ordering is not None: elif ordering is not None:
# 检查 ordering 中的值是否是字符串(从 JSON 反序列化时的情况) # 检查 ordering 中的值是否是字符串(从 JSON 反序列化时的情况)
# 如果是字符串,说明这是位置名称,需要让 Plate 自己创建 Well 对象 # 如果是字符串,说明这是位置名称,需要让 Plate 自己创建 Well 对象
# 我们只传递位置信息(键),不传递值,使用 ordering 参数 # 我们只传递位置信息(键),不传递值,使用 ordering 参数
if ordering: if ordering and isinstance(next(iter(ordering.values()), None), str):
values = list(ordering.values()) # ordering 的值是字符串,只使用键(位置信息)创建新的 OrderedDict
value = values[0] # 传递 ordering 参数而不是 ordered_items让 Plate 自己创建 Well 对象
if isinstance(value, str): items = None
# ordering 的值是字符串,只使用键(位置信息)创建新的 OrderedDict # 使用 ordering 参数,只包含位置信息(键)
# 传递 ordering 参数而不是 ordered_items让 Plate 自己创建 Well 对象 ordering_param = collections.OrderedDict((k, None) for k in ordering.keys())
items = None
# 使用 ordering 参数,只包含位置信息(键)
ordering_param = collections.OrderedDict((k, None) for k in ordering.keys())
elif value is None:
ordering_param = ordering
else: else:
# ordering 的值已经是对象,可以直接使用 # ordering 的值已经是对象,可以直接使用
items = ordering items = ordering
ordering_param = None ordering_param = None
else:
items = None
ordering_param = None
# 根据情况传递不同的参数 # 根据情况传递不同的参数
if items is not None: if items is not None:
super().__init__( super().__init__(name, size_x, size_y, size_z,
name, size_x, size_y, size_z, ordered_items=items, category=category, model=model, **kwargs ordered_items=items,
) category=category,
model=model, **kwargs)
elif ordering_param is not None: elif ordering_param is not None:
# 传递 ordering 参数,让 Plate 自己创建 Well 对象 # 传递 ordering 参数,让 Plate 自己创建 Well 对象
super().__init__( super().__init__(name, size_x, size_y, size_z,
name, size_x, size_y, size_z, ordering=ordering_param, category=category, model=model, **kwargs ordering=ordering_param,
) category=category,
model=model, **kwargs)
else: else:
super().__init__(name, size_x, size_y, size_z, category=category, model=model, **kwargs) super().__init__(name, size_x, size_y, size_z,
category=category,
model=model, **kwargs)
self._unilabos_state = {} self._unilabos_state = {}
if material_info: if material_info:
self._unilabos_state["Material"] = material_info self._unilabos_state["Material"] = material_info
def load_state(self, state: Dict[str, Any]) -> None: def load_state(self, state: Dict[str, Any]) -> None:
super().load_state(state) super().load_state(state)
self._unilabos_state = state self._unilabos_state = state
def serialize_state(self) -> Dict[str, Dict[str, Any]]: def serialize_state(self) -> Dict[str, Dict[str, Any]]:
try: try:
data = super().serialize_state() data = super().serialize_state()
except AttributeError: except AttributeError:
data = {} data = {}
if hasattr(self, "_unilabos_state") and self._unilabos_state: if hasattr(self, '_unilabos_state') and self._unilabos_state:
safe_state = {} safe_state = {}
for k, v in self._unilabos_state.items(): for k, v in self._unilabos_state.items():
# 如果是 Material 字典,深入检查 # 如果是 Material 字典,深入检查
@@ -225,25 +195,16 @@ class PRCXI9300Plate(Plate):
safe_state[k] = v safe_state[k] = v
data.update(safe_state) data.update(safe_state)
return data # 其他顶层属性也进行类型检查 return data # 其他顶层属性也进行类型检查
class PRCXI9300TipRack(TipRack): class PRCXI9300TipRack(TipRack):
"""专用吸头盒类""" """ 专用吸头盒类 """
def __init__(self, name: str, size_x: float, size_y: float, size_z: float,
def __init__( category: str = "tip_rack",
self, ordered_items: collections.OrderedDict = None,
name: str, ordering: Optional[collections.OrderedDict] = None,
size_x: float, model: Optional[str] = None,
size_y: float, material_info: Optional[Dict[str, Any]] = None,
size_z: float, **kwargs):
category: str = "tip_rack",
ordered_items: collections.OrderedDict = None,
ordering: Optional[collections.OrderedDict] = None,
model: Optional[str] = None,
material_info: Optional[Dict[str, Any]] = None,
**kwargs,
):
# 如果 ordered_items 不为 None直接使用 # 如果 ordered_items 不为 None直接使用
if ordered_items is not None: if ordered_items is not None:
items = ordered_items items = ordered_items
@@ -267,16 +228,20 @@ class PRCXI9300TipRack(TipRack):
# 根据情况传递不同的参数 # 根据情况传递不同的参数
if items is not None: if items is not None:
super().__init__( super().__init__(name, size_x, size_y, size_z,
name, size_x, size_y, size_z, ordered_items=items, category=category, model=model, **kwargs ordered_items=items,
) category=category,
model=model, **kwargs)
elif ordering_param is not None: elif ordering_param is not None:
# 传递 ordering 参数,让 TipRack 自己创建 Tip 对象 # 传递 ordering 参数,让 TipRack 自己创建 Tip 对象
super().__init__( super().__init__(name, size_x, size_y, size_z,
name, size_x, size_y, size_z, ordering=ordering_param, category=category, model=model, **kwargs ordering=ordering_param,
) category=category,
model=model, **kwargs)
else: else:
super().__init__(name, size_x, size_y, size_z, category=category, model=model, **kwargs) super().__init__(name, size_x, size_y, size_z,
category=category,
model=model, **kwargs)
self._unilabos_state = {} self._unilabos_state = {}
if material_info: if material_info:
self._unilabos_state["Material"] = material_info self._unilabos_state["Material"] = material_info
@@ -290,7 +255,7 @@ class PRCXI9300TipRack(TipRack):
data = super().serialize_state() data = super().serialize_state()
except AttributeError: except AttributeError:
data = {} data = {}
if hasattr(self, "_unilabos_state") and self._unilabos_state: if hasattr(self, '_unilabos_state') and self._unilabos_state:
safe_state = {} safe_state = {}
for k, v in self._unilabos_state.items(): for k, v in self._unilabos_state.items():
# 如果是 Material 字典,深入检查 # 如果是 Material 字典,深入检查
@@ -312,23 +277,16 @@ class PRCXI9300TipRack(TipRack):
data.update(safe_state) data.update(safe_state)
return data return data
class PRCXI9300Trash(Trash): class PRCXI9300Trash(Trash):
"""PRCXI 9300 的专用 Trash 类,继承自 Trash。 """PRCXI 9300 的专用 Trash 类,继承自 Trash。
该类定义了 PRCXI 9300 的工作台布局和槽位信息。 该类定义了 PRCXI 9300 的工作台布局和槽位信息。
""" """
def __init__( def __init__(self, name: str, size_x: float, size_y: float, size_z: float,
self, category: str = "trash",
name: str, material_info: Optional[Dict[str, Any]] = None,
size_x: float, **kwargs):
size_y: float,
size_z: float,
category: str = "trash",
material_info: Optional[Dict[str, Any]] = None,
**kwargs,
):
if name != "trash": if name != "trash":
print(f"Warning: PRCXI9300Trash usually expects name='trash' for backend logic, but got '{name}'.") print(f"Warning: PRCXI9300Trash usually expects name='trash' for backend logic, but got '{name}'.")
@@ -348,7 +306,7 @@ class PRCXI9300Trash(Trash):
data = super().serialize_state() data = super().serialize_state()
except AttributeError: except AttributeError:
data = {} data = {}
if hasattr(self, "_unilabos_state") and self._unilabos_state: if hasattr(self, '_unilabos_state') and self._unilabos_state:
safe_state = {} safe_state = {}
for k, v in self._unilabos_state.items(): for k, v in self._unilabos_state.items():
# 如果是 Material 字典,深入检查 # 如果是 Material 字典,深入检查
@@ -370,27 +328,19 @@ class PRCXI9300Trash(Trash):
data.update(safe_state) data.update(safe_state)
return data return data
class PRCXI9300TubeRack(TubeRack): class PRCXI9300TubeRack(TubeRack):
""" """
专用管架类:用于 EP 管架、试管架等。 专用管架类:用于 EP 管架、试管架等。
继承自 PLR 的 TubeRack并支持注入 material_info (UUID)。 继承自 PLR 的 TubeRack并支持注入 material_info (UUID)。
""" """
def __init__(self, name: str, size_x: float, size_y: float, size_z: float,
def __init__( category: str = "tube_rack",
self, items: Optional[Dict[str, Any]] = None,
name: str, ordered_items: Optional[OrderedDict] = None,
size_x: float, ordering: Optional[OrderedDict] = None,
size_y: float, model: Optional[str] = None,
size_z: float, material_info: Optional[Dict[str, Any]] = None,
category: str = "tube_rack", **kwargs):
items: Optional[Dict[str, Any]] = None,
ordered_items: Optional[OrderedDict] = None,
ordering: Optional[OrderedDict] = None,
model: Optional[str] = None,
material_info: Optional[Dict[str, Any]] = None,
**kwargs,
):
# 如果 ordered_items 不为 None直接使用 # 如果 ordered_items 不为 None直接使用
if ordered_items is not None: if ordered_items is not None:
@@ -420,12 +370,20 @@ class PRCXI9300TubeRack(TubeRack):
# 根据情况传递不同的参数 # 根据情况传递不同的参数
if items_to_pass is not None: if items_to_pass is not None:
super().__init__(name, size_x, size_y, size_z, ordered_items=items_to_pass, model=model, **kwargs) super().__init__(name, size_x, size_y, size_z,
ordered_items=items_to_pass,
model=model,
**kwargs)
elif ordering_param is not None: elif ordering_param is not None:
# 传递 ordering 参数,让 TubeRack 自己创建 Tube 对象 # 传递 ordering 参数,让 TubeRack 自己创建 Tube 对象
super().__init__(name, size_x, size_y, size_z, ordering=ordering_param, model=model, **kwargs) super().__init__(name, size_x, size_y, size_z,
ordering=ordering_param,
model=model,
**kwargs)
else: else:
super().__init__(name, size_x, size_y, size_z, model=model, **kwargs) super().__init__(name, size_x, size_y, size_z,
model=model,
**kwargs)
self._unilabos_state = {} self._unilabos_state = {}
if material_info: if material_info:
@@ -436,7 +394,7 @@ class PRCXI9300TubeRack(TubeRack):
data = super().serialize_state() data = super().serialize_state()
except AttributeError: except AttributeError:
data = {} data = {}
if hasattr(self, "_unilabos_state") and self._unilabos_state: if hasattr(self, '_unilabos_state') and self._unilabos_state:
safe_state = {} safe_state = {}
for k, v in self._unilabos_state.items(): for k, v in self._unilabos_state.items():
# 如果是 Material 字典,深入检查 # 如果是 Material 字典,深入检查
@@ -458,31 +416,23 @@ class PRCXI9300TubeRack(TubeRack):
data.update(safe_state) data.update(safe_state)
return data return data
class PRCXI9300PlateAdapter(PlateAdapter): class PRCXI9300PlateAdapter(PlateAdapter):
""" """
专用板式适配器类:用于承载 Plate 的底座(如 PCR 适配器、磁吸架等)。 专用板式适配器类:用于承载 Plate 的底座(如 PCR 适配器、磁吸架等)。
支持注入 material_info (UUID)。 支持注入 material_info (UUID)。
""" """
def __init__(self, name: str, size_x: float, size_y: float, size_z: float,
def __init__( category: str = "plate_adapter",
self, model: Optional[str] = None,
name: str, material_info: Optional[Dict[str, Any]] = None,
size_x: float, # 参数给予默认值 (标准96孔板尺寸)
size_y: float, adapter_hole_size_x: float = 127.76,
size_z: float, adapter_hole_size_y: float = 85.48,
category: str = "plate_adapter", adapter_hole_size_z: float = 10.0, # 假设凹槽深度或板子放置高度
model: Optional[str] = None, dx: Optional[float] = None,
material_info: Optional[Dict[str, Any]] = None, dy: Optional[float] = None,
# 参数给予默认值 (标准96孔板尺寸) dz: float = 0.0, # 默认Z轴偏移
adapter_hole_size_x: float = 127.76, **kwargs):
adapter_hole_size_y: float = 85.48,
adapter_hole_size_z: float = 10.0, # 假设凹槽深度或板子放置高度
dx: Optional[float] = None,
dy: Optional[float] = None,
dz: float = 0.0, # 默认Z轴偏移
**kwargs,
):
# 自动居中计算:如果未指定 dx/dy则根据适配器尺寸和孔尺寸计算居中位置 # 自动居中计算:如果未指定 dx/dy则根据适配器尺寸和孔尺寸计算居中位置
if dx is None: if dx is None:
@@ -502,7 +452,7 @@ class PRCXI9300PlateAdapter(PlateAdapter):
adapter_hole_size_y=adapter_hole_size_y, adapter_hole_size_y=adapter_hole_size_y,
adapter_hole_size_z=adapter_hole_size_z, adapter_hole_size_z=adapter_hole_size_z,
model=model, model=model,
**kwargs, **kwargs
) )
self._unilabos_state = {} self._unilabos_state = {}
@@ -514,7 +464,7 @@ class PRCXI9300PlateAdapter(PlateAdapter):
data = super().serialize_state() data = super().serialize_state()
except AttributeError: except AttributeError:
data = {} data = {}
if hasattr(self, "_unilabos_state") and self._unilabos_state: if hasattr(self, '_unilabos_state') and self._unilabos_state:
safe_state = {} safe_state = {}
for k, v in self._unilabos_state.items(): for k, v in self._unilabos_state.items():
# 如果是 Material 字典,深入检查 # 如果是 Material 字典,深入检查
@@ -536,7 +486,6 @@ class PRCXI9300PlateAdapter(PlateAdapter):
data.update(safe_state) data.update(safe_state)
return data return data
class PRCXI9300Handler(LiquidHandlerAbstract): class PRCXI9300Handler(LiquidHandlerAbstract):
support_touch_tip = False support_touch_tip = False
@@ -569,9 +518,7 @@ class PRCXI9300Handler(LiquidHandlerAbstract):
if "Material" in child.children[0]._unilabos_state: if "Material" in child.children[0]._unilabos_state:
number = int(child.name.replace("T", "")) number = int(child.name.replace("T", ""))
tablets_info.append( tablets_info.append(
WorkTablets( WorkTablets(Number=number, Code=f"T{number}", Material=child.children[0]._unilabos_state["Material"])
Number=number, Code=f"T{number}", Material=child.children[0]._unilabos_state["Material"]
)
) )
if is_9320: if is_9320:
print("当前设备是9320") print("当前设备是9320")
@@ -591,14 +538,9 @@ class PRCXI9300Handler(LiquidHandlerAbstract):
super().post_init(ros_node) super().post_init(ros_node)
self._unilabos_backend.post_init(ros_node) self._unilabos_backend.post_init(ros_node)
def set_liquid(self, wells: list[Well], liquid_names: list[str], volumes: list[float]) -> SetLiquidReturn: def set_liquid(self, wells: list[Well], liquid_names: list[str], volumes: list[float]) -> SimpleReturn:
return super().set_liquid(wells, liquid_names, volumes) return super().set_liquid(wells, liquid_names, volumes)
def set_liquid_from_plate(
self, plate: ResourceSlot, well_names: list[str], liquid_names: list[str], volumes: list[float]
) -> SetLiquidFromPlateReturn:
return super().set_liquid_from_plate(plate, well_names, liquid_names, volumes)
def set_group(self, group_name: str, wells: List[Well], volumes: List[float]): def set_group(self, group_name: str, wells: List[Well], volumes: List[float]):
return super().set_group(group_name, wells, volumes) return super().set_group(group_name, wells, volumes)
@@ -718,7 +660,7 @@ class PRCXI9300Handler(LiquidHandlerAbstract):
mix_liquid_height: Optional[float] = None, mix_liquid_height: Optional[float] = None,
delays: Optional[List[int]] = None, delays: Optional[List[int]] = None,
none_keys: List[str] = [], none_keys: List[str] = [],
) -> TransferLiquidReturn: ):
return await super().transfer_liquid( return await super().transfer_liquid(
sources, sources,
targets, targets,
@@ -858,7 +800,6 @@ class PRCXI9300Handler(LiquidHandlerAbstract):
async def heater_action(self, temperature: float, time: int): async def heater_action(self, temperature: float, time: int):
return await self._unilabos_backend.heater_action(temperature, time) return await self._unilabos_backend.heater_action(temperature, time)
async def move_plate( async def move_plate(
self, self,
plate: Plate, plate: Plate,
@@ -881,11 +822,10 @@ class PRCXI9300Handler(LiquidHandlerAbstract):
drop_direction, drop_direction,
pickup_direction, pickup_direction,
pickup_distance_from_top, pickup_distance_from_top,
target_plate_number=to, target_plate_number = to,
**backend_kwargs, **backend_kwargs,
) )
class PRCXI9300Backend(LiquidHandlerBackend): class PRCXI9300Backend(LiquidHandlerBackend):
"""PRCXI 9300 的后端实现,继承自 LiquidHandlerBackend。 """PRCXI 9300 的后端实现,继承自 LiquidHandlerBackend。
@@ -938,12 +878,13 @@ class PRCXI9300Backend(LiquidHandlerBackend):
self.steps_todo_list.append(step) self.steps_todo_list.append(step)
return step return step
async def pick_up_resource(self, pickup: ResourcePickup, **backend_kwargs): async def pick_up_resource(self, pickup: ResourcePickup, **backend_kwargs):
resource = pickup.resource resource=pickup.resource
offset = pickup.offset offset=pickup.offset
pickup_distance_from_top = pickup.pickup_distance_from_top pickup_distance_from_top=pickup.pickup_distance_from_top
direction = pickup.direction direction=pickup.direction
plate_number = int(resource.parent.name.replace("T", "")) plate_number = int(resource.parent.name.replace("T", ""))
is_whole_plate = True is_whole_plate = True
@@ -955,11 +896,13 @@ class PRCXI9300Backend(LiquidHandlerBackend):
async def drop_resource(self, drop: ResourceDrop, **backend_kwargs): async def drop_resource(self, drop: ResourceDrop, **backend_kwargs):
plate_number = None plate_number = None
target_plate_number = backend_kwargs.get("target_plate_number", None) target_plate_number = backend_kwargs.get("target_plate_number", None)
if target_plate_number is not None: if target_plate_number is not None:
plate_number = int(target_plate_number.name.replace("T", "")) plate_number = int(target_plate_number.name.replace("T", ""))
is_whole_plate = True is_whole_plate = True
balance_height = 0 balance_height = 0
if plate_number is None: if plate_number is None:
@@ -968,6 +911,7 @@ class PRCXI9300Backend(LiquidHandlerBackend):
self.steps_todo_list.append(step) self.steps_todo_list.append(step)
return step return step
async def heater_action(self, temperature: float, time: int): async def heater_action(self, temperature: float, time: int):
print(f"\n\nHeater action: temperature={temperature}, time={time}\n\n") print(f"\n\nHeater action: temperature={temperature}, time={time}\n\n")
# return await self.api_client.heater_action(temperature, time) # return await self.api_client.heater_action(temperature, time)
@@ -1036,7 +980,7 @@ class PRCXI9300Backend(LiquidHandlerBackend):
# 检查重置状态并等待完成 # 检查重置状态并等待完成
while not self.is_reset_ok: while not self.is_reset_ok:
print("Waiting for PRCXI9300 to reset...") print("Waiting for PRCXI9300 to reset...")
if hasattr(self, "_ros_node") and self._ros_node is not None: if hasattr(self, '_ros_node') and self._ros_node is not None:
await self._ros_node.sleep(1) await self._ros_node.sleep(1)
else: else:
await asyncio.sleep(1) await asyncio.sleep(1)
@@ -1054,7 +998,7 @@ class PRCXI9300Backend(LiquidHandlerBackend):
"""Pick up tips from the specified resource.""" """Pick up tips from the specified resource."""
# INSERT_YOUR_CODE # INSERT_YOUR_CODE
# Ensure use_channels is converted to a list of ints if it's an array # Ensure use_channels is converted to a list of ints if it's an array
if hasattr(use_channels, "tolist"): if hasattr(use_channels, 'tolist'):
_use_channels = use_channels.tolist() _use_channels = use_channels.tolist()
else: else:
_use_channels = list(use_channels) if use_channels is not None else None _use_channels = list(use_channels) if use_channels is not None else None
@@ -1108,7 +1052,7 @@ class PRCXI9300Backend(LiquidHandlerBackend):
async def drop_tips(self, ops: List[Drop], use_channels: List[int] = None): async def drop_tips(self, ops: List[Drop], use_channels: List[int] = None):
"""Pick up tips from the specified resource.""" """Pick up tips from the specified resource."""
if hasattr(use_channels, "tolist"): if hasattr(use_channels, 'tolist'):
_use_channels = use_channels.tolist() _use_channels = use_channels.tolist()
else: else:
_use_channels = list(use_channels) if use_channels is not None else None _use_channels = list(use_channels) if use_channels is not None else None
@@ -1234,7 +1178,7 @@ class PRCXI9300Backend(LiquidHandlerBackend):
async def aspirate(self, ops: List[SingleChannelAspiration], use_channels: List[int] = None): async def aspirate(self, ops: List[SingleChannelAspiration], use_channels: List[int] = None):
"""Aspirate liquid from the specified resources.""" """Aspirate liquid from the specified resources."""
if hasattr(use_channels, "tolist"): if hasattr(use_channels, 'tolist'):
_use_channels = use_channels.tolist() _use_channels = use_channels.tolist()
else: else:
_use_channels = list(use_channels) if use_channels is not None else None _use_channels = list(use_channels) if use_channels is not None else None
@@ -1291,7 +1235,7 @@ class PRCXI9300Backend(LiquidHandlerBackend):
async def dispense(self, ops: List[SingleChannelDispense], use_channels: List[int] = None): async def dispense(self, ops: List[SingleChannelDispense], use_channels: List[int] = None):
"""Dispense liquid into the specified resources.""" """Dispense liquid into the specified resources."""
if hasattr(use_channels, "tolist"): if hasattr(use_channels, 'tolist'):
_use_channels = use_channels.tolist() _use_channels = use_channels.tolist()
else: else:
_use_channels = list(use_channels) if use_channels is not None else None _use_channels = list(use_channels) if use_channels is not None else None
@@ -1472,6 +1416,7 @@ class PRCXI9300Api:
time.sleep(1) time.sleep(1)
return success return success
def call(self, service: str, method: str, params: Optional[list] = None) -> Any: def call(self, service: str, method: str, params: Optional[list] = None) -> Any:
payload = json.dumps( payload = json.dumps(
{"ServiceName": service, "MethodName": method, "Paramters": params or []}, separators=(",", ":") {"ServiceName": service, "MethodName": method, "Paramters": params or []}, separators=(",", ":")
@@ -1598,7 +1543,7 @@ class PRCXI9300Api:
assist_fun5: str = "", assist_fun5: str = "",
liquid_method: str = "NormalDispense", liquid_method: str = "NormalDispense",
axis: str = "Left", axis: str = "Left",
) -> Dict[str, Any]: ) -> Dict[str, Any]:
return { return {
"StepAxis": axis, "StepAxis": axis,
"Function": "Imbibing", "Function": "Imbibing",
@@ -1676,7 +1621,7 @@ class PRCXI9300Api:
assist_fun5: str = "", assist_fun5: str = "",
liquid_method: str = "NormalDispense", liquid_method: str = "NormalDispense",
axis: str = "Left", axis: str = "Left",
) -> Dict[str, Any]: ) -> Dict[str, Any]:
return { return {
"StepAxis": axis, "StepAxis": axis,
"Function": "Blending", "Function": "Blending",
@@ -1736,11 +1681,11 @@ class PRCXI9300Api:
"LiquidDispensingMethod": liquid_method, "LiquidDispensingMethod": liquid_method,
} }
def clamp_jaw_pick_up( def clamp_jaw_pick_up(self,
self,
plate_no: int, plate_no: int,
is_whole_plate: bool, is_whole_plate: bool,
balance_height: int, balance_height: int,
) -> Dict[str, Any]: ) -> Dict[str, Any]:
return { return {
"StepAxis": "ClampingJaw", "StepAxis": "ClampingJaw",
@@ -1750,7 +1695,7 @@ class PRCXI9300Api:
"HoleRow": 1, "HoleRow": 1,
"HoleCol": 1, "HoleCol": 1,
"BalanceHeight": balance_height, "BalanceHeight": balance_height,
"PlateOrHoleNum": f"T{plate_no}", "PlateOrHoleNum": f"T{plate_no}"
} }
def clamp_jaw_drop( def clamp_jaw_drop(
@@ -1758,6 +1703,7 @@ class PRCXI9300Api:
plate_no: int, plate_no: int,
is_whole_plate: bool, is_whole_plate: bool,
balance_height: int, balance_height: int,
) -> Dict[str, Any]: ) -> Dict[str, Any]:
return { return {
"StepAxis": "ClampingJaw", "StepAxis": "ClampingJaw",
@@ -1767,7 +1713,7 @@ class PRCXI9300Api:
"HoleRow": 1, "HoleRow": 1,
"HoleCol": 1, "HoleCol": 1,
"BalanceHeight": balance_height, "BalanceHeight": balance_height,
"PlateOrHoleNum": f"T{plate_no}", "PlateOrHoleNum": f"T{plate_no}"
} }
def shaker_action(self, time: int, module_no: int, amplitude: int, is_wait: bool): def shaker_action(self, time: int, module_no: int, amplitude: int, is_wait: bool):
@@ -1780,7 +1726,6 @@ class PRCXI9300Api:
"AssistFun4": is_wait, "AssistFun4": is_wait,
} }
class DefaultLayout: class DefaultLayout:
def __init__(self, product_name: str = "PRCXI9300"): def __init__(self, product_name: str = "PRCXI9300"):
@@ -2159,9 +2104,7 @@ if __name__ == "__main__":
size_y=50, size_y=50,
size_z=10, size_z=10,
category="tip_rack", category="tip_rack",
ordered_items=collections.OrderedDict( ordered_items=collections.OrderedDict({k: f"{child_prefix}_{k}" for k, v in tip_racks["ordering"].items()}),
{k: f"{child_prefix}_{k}" for k, v in tip_racks["ordering"].items()}
),
) )
tip_rack_serialized = tip_rack.serialize() tip_rack_serialized = tip_rack.serialize()
tip_rack_serialized["parent_name"] = deck.name tip_rack_serialized["parent_name"] = deck.name
@@ -2356,37 +2299,43 @@ if __name__ == "__main__":
A = tree_to_list([resource_plr_to_ulab(deck)]) A = tree_to_list([resource_plr_to_ulab(deck)])
with open("deck.json", "w", encoding="utf-8") as f: with open("deck.json", "w", encoding="utf-8") as f:
A.insert( A.insert(0, {
0, "id": "PRCXI",
{ "name": "PRCXI",
"id": "PRCXI", "parent": None,
"name": "PRCXI", "type": "device",
"parent": None, "class": "liquid_handler.prcxi",
"type": "device", "position": {
"class": "liquid_handler.prcxi", "x": 0,
"position": {"x": 0, "y": 0, "z": 0}, "y": 0,
"config": { "z": 0
"deck": {
"_resource_child_name": "PRCXI_Deck",
"_resource_type": "unilabos.devices.liquid_handling.prcxi.prcxi:PRCXI9300Deck",
},
"host": "192.168.0.121",
"port": 9999,
"timeout": 10.0,
"axis": "Right",
"channel_num": 1,
"setup": False,
"debug": True,
"simulator": True,
"matrix_id": "5de524d0-3f95-406c-86dd-f83626ebc7cb",
"is_9320": True,
},
"data": {},
"children": ["PRCXI_Deck"],
}, },
) "config": {
"deck": {
"_resource_child_name": "PRCXI_Deck",
"_resource_type": "unilabos.devices.liquid_handling.prcxi.prcxi:PRCXI9300Deck"
},
"host": "192.168.0.121",
"port": 9999,
"timeout": 10.0,
"axis": "Right",
"channel_num": 1,
"setup": False,
"debug": True,
"simulator": True,
"matrix_id": "5de524d0-3f95-406c-86dd-f83626ebc7cb",
"is_9320": True
},
"data": {},
"children": [
"PRCXI_Deck"
]
})
A[1]["parent"] = "PRCXI" A[1]["parent"] = "PRCXI"
json.dump({"nodes": A, "links": []}, f, indent=4, ensure_ascii=False) json.dump({
"nodes": A,
"links": []
}, f, indent=4, ensure_ascii=False)
handler = PRCXI9300Handler( handler = PRCXI9300Handler(
deck=deck, deck=deck,
@@ -2428,6 +2377,7 @@ if __name__ == "__main__":
time.sleep(5) time.sleep(5)
os._exit(0) os._exit(0)
prcxi_api = PRCXI9300Api(host="192.168.0.121", port=9999) prcxi_api = PRCXI9300Api(host="192.168.0.121", port=9999)
prcxi_api.list_matrices() prcxi_api.list_matrices()
prcxi_api.get_all_materials() prcxi_api.get_all_materials()

View File

@@ -31,14 +31,14 @@ class VirtualTransferPump:
# 从config或kwargs中获取参数确保类型正确 # 从config或kwargs中获取参数确保类型正确
if config: if config:
self.max_volume = float(config.get("max_volume", 25.0)) self.max_volume = float(config.get('max_volume', 25.0))
self.port = config.get("port", "VIRTUAL") self.port = config.get('port', 'VIRTUAL')
else: else:
self.max_volume = float(kwargs.get("max_volume", 25.0)) self.max_volume = float(kwargs.get('max_volume', 25.0))
self.port = kwargs.get("port", "VIRTUAL") self.port = kwargs.get('port', 'VIRTUAL')
self._transfer_rate = float(kwargs.get("transfer_rate", 0)) self._transfer_rate = float(kwargs.get('transfer_rate', 0))
self.mode = kwargs.get("mode", VirtualPumpMode.Normal) self.mode = kwargs.get('mode', VirtualPumpMode.Normal)
# 状态变量 - 确保都是正确类型 # 状态变量 - 确保都是正确类型
self._status = "Idle" self._status = "Idle"
@@ -54,9 +54,7 @@ class VirtualTransferPump:
self.logger = logging.getLogger(f"VirtualTransferPump.{self.device_id}") self.logger = logging.getLogger(f"VirtualTransferPump.{self.device_id}")
print(f"🚰 === 虚拟转移泵 {self.device_id} 已创建 === ✨") print(f"🚰 === 虚拟转移泵 {self.device_id} 已创建 === ✨")
print( print(f"💨 快速模式: {'启用' if self._fast_mode else '禁用'} | 移动时间: {self._fast_move_time}s | 喷射时间: {self._fast_dispense_time}s")
f"💨 快速模式: {'启用' if self._fast_mode else '禁用'} | 移动时间: {self._fast_move_time}s | 喷射时间: {self._fast_dispense_time}s"
)
print(f"📊 最大容量: {self.max_volume}mL | 端口: {self.port}") print(f"📊 最大容量: {self.max_volume}mL | 端口: {self.port}")
def post_init(self, ros_node: BaseROS2DeviceNode): def post_init(self, ros_node: BaseROS2DeviceNode):
@@ -191,9 +189,7 @@ class VirtualTransferPump:
operation_emoji = "📍" operation_emoji = "📍"
self.logger.info(f"🎯 SET_POSITION: {operation_type} {operation_emoji}") self.logger.info(f"🎯 SET_POSITION: {operation_type} {operation_emoji}")
self.logger.info( self.logger.info(f" 📍 位置: {self._position:.2f}mL → {target_position:.2f}mL (移动 {volume_to_move:.2f}mL)")
f" 📍 位置: {self._position:.2f}mL → {target_position:.2f}mL (移动 {volume_to_move:.2f}mL)"
)
self.logger.info(f" 🌊 速度: {velocity:.2f} mL/s") self.logger.info(f" 🌊 速度: {velocity:.2f} mL/s")
self.logger.info(f" ⏰ 预计时间: {display_duration:.2f}s") self.logger.info(f" ⏰ 预计时间: {display_duration:.2f}s")
@@ -211,11 +207,7 @@ class VirtualTransferPump:
for i in range(steps + 1): for i in range(steps + 1):
# 计算当前位置和进度 # 计算当前位置和进度
progress = (i / steps) * 100 if steps > 0 else 100 progress = (i / steps) * 100 if steps > 0 else 100
current_pos = ( current_pos = start_position + (target_position - start_position) * (i / steps) if steps > 0 else target_position
start_position + (target_position - start_position) * (i / steps)
if steps > 0
else target_position
)
# 更新状态 # 更新状态
if i < steps: if i < steps:
@@ -252,9 +244,7 @@ class VirtualTransferPump:
# 📊 最终状态日志 # 📊 最终状态日志
if volume_to_move > 0.01: if volume_to_move > 0.01:
self.logger.info( self.logger.info(f"🎉 SET_POSITION 完成! 📍 最终位置: {self._position:.2f}mL | 💧 当前体积: {self._current_volume:.2f}mL")
f"🎉 SET_POSITION 完成! 📍 最终位置: {self._position:.2f}mL | 💧 当前体积: {self._current_volume:.2f}mL"
)
# 返回符合action定义的结果 # 返回符合action定义的结果
return { return {
@@ -262,7 +252,7 @@ class VirtualTransferPump:
"message": f"✅ 成功移动到位置 {self._position:.2f}mL ({operation_type})", "message": f"✅ 成功移动到位置 {self._position:.2f}mL ({operation_type})",
"final_position": self._position, "final_position": self._position,
"final_volume": self._current_volume, "final_volume": self._current_volume,
"operation_type": operation_type, "operation_type": operation_type
} }
except Exception as e: except Exception as e:
@@ -272,7 +262,7 @@ class VirtualTransferPump:
"success": False, "success": False,
"message": error_msg, "message": error_msg,
"final_position": self._position, "final_position": self._position,
"final_volume": self._current_volume, "final_volume": self._current_volume
} }
# 其他泵操作方法 # 其他泵操作方法
@@ -398,9 +388,7 @@ class VirtualTransferPump:
return self._current_volume >= (self.max_volume - 0.01) # 允许小量误差 return self._current_volume >= (self.max_volume - 0.01) # 允许小量误差
def __str__(self): def __str__(self):
return ( return f"VirtualTransferPump({self.device_id}: {self._current_volume:.2f}/{self.max_volume} ml, {self._status})"
f"VirtualTransferPump({self.device_id}: {self._current_volume:.2f}/{self.max_volume} ml, {self._status})"
)
def __repr__(self): def __repr__(self):
return self.__str__() return self.__str__()

View File

@@ -1,742 +0,0 @@
"""
Virtual Workbench Device - 模拟工作台设备
包含:
- 1个机械臂 (每次操作3s, 独占锁)
- 3个加热台 (每次加热10s, 可并行)
工作流程:
1. A1-A5 物料同时启动,竞争机械臂
2. 机械臂将物料移动到空闲加热台
3. 加热完成后机械臂将物料移动到C1-C5
注意:调用来自线程池,使用 threading.Lock 进行同步
"""
import logging
import time
from typing import Dict, Any, Optional, List
from dataclasses import dataclass
from enum import Enum
from threading import Lock, RLock
from typing_extensions import TypedDict
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode
from unilabos.utils.decorator import not_action
from unilabos.resources.resource_tracker import SampleUUIDsType, LabSample, RETURN_UNILABOS_SAMPLES
# ============ TypedDict return-type definitions ============
class MoveToHeatingStationResult(TypedDict):
    """Return type of move_to_heating_station."""
    success: bool  # whether the move succeeded
    station_id: int  # heating station the material was placed on (-1 on failure)
    material_id: str  # material identifier, e.g. "A1"
    material_number: int  # material number (1-5)
    message: str  # human-readable result message
    unilabos_samples: List[LabSample]  # sample records propagated through the workflow
class StartHeatingResult(TypedDict):
    """Return type of start_heating."""
    success: bool  # whether heating completed successfully
    station_id: int  # heating station that ran the heating program
    material_id: str  # material identifier, e.g. "A1" ("" on validation failure)
    material_number: int  # material number (1-5)
    message: str  # human-readable result message
    unilabos_samples: List[LabSample]  # sample records propagated through the workflow
class MoveToOutputResult(TypedDict):
    """Return type of move_to_output.

    Every return site of move_to_output also includes the keys
    ``output_position`` and ``message``; they are declared here so the
    TypedDict matches the dicts the method actually returns.
    """
    success: bool  # whether the move succeeded
    station_id: int  # heating station the material was taken from
    material_id: str  # material identifier, e.g. "A1" ("" on validation failure)
    output_position: str  # output slot the material was moved to, e.g. "C1"
    message: str  # human-readable result message
    unilabos_samples: List[LabSample]  # sample records propagated through the workflow
class PrepareMaterialsResult(TypedDict):
    """Return type of prepare_materials - batch material preparation."""
    success: bool  # whether preparation succeeded
    count: int  # number of materials generated
    material_1: int  # material number 1 (0 when fewer than 1 material was generated)
    material_2: int  # material number 2 (0 when fewer than 2 materials were generated)
    material_3: int  # material number 3 (0 when fewer than 3 materials were generated)
    material_4: int  # material number 4 (0 when fewer than 4 materials were generated)
    material_5: int  # material number 5 (0 when fewer than 5 materials were generated)
    message: str  # human-readable result message
    unilabos_samples: List[LabSample]  # sample records propagated through the workflow
# ============ State enums ============
class HeatingStationState(Enum):
    """Heating-station state machine."""
    IDLE = "idle"  # idle, free for a new material
    OCCUPIED = "occupied"  # material placed, waiting for heating to start
    HEATING = "heating"  # heating in progress
    COMPLETED = "completed"  # heating finished, waiting for pickup
class ArmState(Enum):
    """Robotic-arm state."""
    IDLE = "idle"  # idle
    BUSY = "busy"  # executing an operation
@dataclass
class HeatingStation:
    """Mutable record describing one heating station."""
    station_id: int  # station number (1..NUM_HEATING_STATIONS)
    state: HeatingStationState = HeatingStationState.IDLE  # current state-machine state
    current_material: Optional[str] = None  # material on the station (e.g. "A1", "A2")
    material_number: Optional[int] = None  # material number (1-5)
    heating_start_time: Optional[float] = None  # time.time() when heating started
    heating_progress: float = 0.0  # heating progress in percent (0-100)
class VirtualWorkbench:
    """
    Virtual Workbench Device - simulated workstation.

    Simulates a workstation containing 1 robotic arm and 3 heating stations:
    - an arm operation takes ARM_OPERATION_TIME seconds; the arm is exclusive
      (guarded by a threading.Lock, one operation at a time)
    - heating takes HEATING_TIME seconds; the heating stations run in parallel

    Workflow:
    1. Materials A1-A5 start concurrently (thread pool) and compete for the arm
    2. After acquiring the arm, an idle heating station is looked up
    3. The arm places the material on the station and heating is started
    4. When heating completes, the arm moves the material to its output slot Cn

    Note: calls arrive from a thread pool, so threading.Lock/RLock are used
    for synchronization.
    """
    _ros_node: BaseROS2DeviceNode
    # Configuration constants (class-level defaults; per-instance values come from config)
    ARM_OPERATION_TIME: float = 3.0  # duration of one arm operation (seconds)
    HEATING_TIME: float = 10.0  # heating duration (seconds)
    NUM_HEATING_STATIONS: int = 3  # number of heating stations

    def __init__(self, device_id: Optional[str] = None, config: Optional[Dict[str, Any]] = None, **kwargs):
        """Create the workbench.

        Args:
            device_id: device identifier (falls back to kwargs["id"], then "virtual_workbench").
            config: optional dict with "arm_operation_time", "heating_time",
                "num_heating_stations" overrides.
            **kwargs: any remaining keys are set as instance attributes.
        """
        # Tolerate alternative calling conventions ("id"/"config" passed via kwargs)
        if device_id is None and "id" in kwargs:
            device_id = kwargs.pop("id")
        if config is None and "config" in kwargs:
            config = kwargs.pop("config")
        self.device_id = device_id or "virtual_workbench"
        self.config = config or {}
        self.logger = logging.getLogger(f"VirtualWorkbench.{self.device_id}")
        self.data: Dict[str, Any] = {}
        # Configurable parameters taken from config
        self.ARM_OPERATION_TIME = float(self.config.get("arm_operation_time", 3.0))
        self.HEATING_TIME = float(self.config.get("heating_time", 10.0))
        self.NUM_HEATING_STATIONS = int(self.config.get("num_heating_stations", 3))
        # Arm state and lock (threading.Lock; callers run in a thread pool)
        self._arm_lock = Lock()
        self._arm_state = ArmState.IDLE
        self._arm_current_task: Optional[str] = None
        # Heating-station states (station_id -> HeatingStation) - created eagerly, does not depend on initialize()
        self._heating_stations: Dict[int, HeatingStation] = {
            i: HeatingStation(station_id=i) for i in range(1, self.NUM_HEATING_STATIONS + 1)
        }
        self._stations_lock = RLock()  # reentrant lock guarding heating-station state
        # Task tracking
        self._active_tasks: Dict[str, Dict[str, Any]] = {}  # material_id -> task_info
        self._tasks_lock = Lock()
        # Absorb remaining kwargs as instance attributes
        skip_keys = {"arm_operation_time", "heating_time", "num_heating_stations"}
        for key, value in kwargs.items():
            if key not in skip_keys and not hasattr(self, key):
                setattr(self, key, value)
        self.logger.info(f"=== 虚拟工作台 {self.device_id} 已创建 ===")
        self.logger.info(
            f"机械臂操作时间: {self.ARM_OPERATION_TIME}s | "
            f"加热时间: {self.HEATING_TIME}s | "
            f"加热台数量: {self.NUM_HEATING_STATIONS}"
        )

    @not_action
    def post_init(self, ros_node: BaseROS2DeviceNode):
        """Callback invoked after the ROS node has been initialized."""
        self._ros_node = ros_node

    @not_action
    def initialize(self) -> bool:
        """Initialize the virtual workbench; resets all stations and publishes status."""
        self.logger.info(f"初始化虚拟工作台 {self.device_id}")
        # Reset heating-station state (stations were created in __init__; reset to initial values here)
        with self._stations_lock:
            for station in self._heating_stations.values():
                station.state = HeatingStationState.IDLE
                station.current_material = None
                station.material_number = None
                station.heating_progress = 0.0
        # Publish initial status
        self.data.update(
            {
                "status": "Ready",
                "arm_state": ArmState.IDLE.value,
                "arm_current_task": None,
                "heating_stations": self._get_stations_status(),
                "active_tasks_count": 0,
                "message": "工作台就绪",
            }
        )
        self.logger.info(f"工作台初始化完成: {self.NUM_HEATING_STATIONS}个加热台就绪")
        return True

    @not_action
    def cleanup(self) -> bool:
        """Shut the virtual workbench down and clear all state."""
        self.logger.info(f"清理虚拟工作台 {self.device_id}")
        self._arm_state = ArmState.IDLE
        self._arm_current_task = None
        with self._stations_lock:
            self._heating_stations.clear()
        with self._tasks_lock:
            self._active_tasks.clear()
        self.data.update(
            {
                "status": "Offline",
                "arm_state": ArmState.IDLE.value,
                "heating_stations": {},
                "message": "工作台已关闭",
            }
        )
        return True

    def _get_stations_status(self) -> Dict[int, Dict[str, Any]]:
        """Return a snapshot dict of every heating station's state."""
        with self._stations_lock:
            return {
                station_id: {
                    "state": station.state.value,
                    "current_material": station.current_material,
                    "material_number": station.material_number,
                    "heating_progress": station.heating_progress,
                }
                for station_id, station in self._heating_stations.items()
            }

    def _update_data_status(self, message: Optional[str] = None):
        """Refresh self.data with the current arm/station/task state."""
        self.data.update(
            {
                "arm_state": self._arm_state.value,
                "arm_current_task": self._arm_current_task,
                "heating_stations": self._get_stations_status(),
                "active_tasks_count": len(self._active_tasks),
            }
        )
        if message:
            self.data["message"] = message

    def _find_available_heating_station(self) -> Optional[int]:
        """Find an idle heating station.

        Returns:
            The id of an idle station, or None if no station is idle.
        """
        with self._stations_lock:
            for station_id, station in self._heating_stations.items():
                if station.state == HeatingStationState.IDLE:
                    return station_id
            return None

    def _acquire_arm(self, task_description: str) -> bool:
        """Acquire exclusive use of the arm (blocks until acquired).

        Args:
            task_description: task description, used for logging.

        Returns:
            True once the arm has been acquired.
        """
        self.logger.info(f"[{task_description}] 等待获取机械臂...")
        # Block until the lock is acquired
        self._arm_lock.acquire()
        self._arm_state = ArmState.BUSY
        self._arm_current_task = task_description
        self._update_data_status(f"机械臂执行: {task_description}")
        self.logger.info(f"[{task_description}] 成功获取机械臂使用权")
        return True

    def _release_arm(self):
        """Release the arm lock and mark the arm idle."""
        task = self._arm_current_task
        self._arm_state = ArmState.IDLE
        self._arm_current_task = None
        self._arm_lock.release()
        self._update_data_status(f"机械臂已释放 (完成: {task})")
        self.logger.info(f"机械臂已释放 (完成: {task})")

    def prepare_materials(
        self,
        sample_uuids: SampleUUIDsType,
        count: int = 5,
    ) -> PrepareMaterialsResult:
        """
        Batch-prepare materials - the virtual start node of the workflow.

        Generates `count` material numbers for downstream nodes and exposes
        five handles (material_1 ~ material_5), one per experiment 1-5.

        Args:
            sample_uuids: sample-uuid mapping propagated through the workflow.
            count: number of materials to generate, default 5 (A1-A5).

        Returns:
            PrepareMaterialsResult: material_1 ~ material_5 handles to pass
            on to move_to_heating_station.
        """
        # Material numbers 1..count (materials A1-A{count})
        materials = [i for i in range(1, count + 1)]
        self.logger.info(f"[准备物料] 生成 {count} 个物料: " f"A1-A{count} -> material_1~material_{count}")
        return {
            "success": True,
            "count": count,
            "material_1": materials[0] if len(materials) > 0 else 0,
            "material_2": materials[1] if len(materials) > 1 else 0,
            "material_3": materials[2] if len(materials) > 2 else 0,
            "material_4": materials[3] if len(materials) > 3 else 0,
            "material_5": materials[4] if len(materials) > 4 else 0,
            "message": f"已准备 {count} 个物料: A1-A{count}",
            "unilabos_samples": [LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for sample_uuid, content in sample_uuids.items()]
        }

    def move_to_heating_station(
        self,
        sample_uuids: SampleUUIDsType,
        material_number: int,
    ) -> MoveToHeatingStationResult:
        """
        Move material An onto a heating station.

        Concurrent callers compete for the arm; an idle heating station is
        located automatically (retrying while none is free).

        Args:
            sample_uuids: sample-uuid mapping propagated through the workflow.
            material_number: material number (1-5).

        Returns:
            MoveToHeatingStationResult: carries station_id / material_number
            etc. for the next workflow node.
        """
        # Derive the material id from its number
        material_id = f"A{material_number}"
        task_desc = f"移动{material_id}到加热台"
        self.logger.info(f"[任务] {task_desc} - 开始执行")
        # Register the task
        with self._tasks_lock:
            self._active_tasks[material_id] = {
                "status": "waiting_for_arm",
                "start_time": time.time(),
            }
        try:
            # Step 1: wait for exclusive use of the arm (competition among threads)
            with self._tasks_lock:
                self._active_tasks[material_id]["status"] = "waiting_for_arm"
            self._acquire_arm(task_desc)
            # Step 2: look for an idle heating station
            with self._tasks_lock:
                self._active_tasks[material_id]["status"] = "finding_station"
            station_id = None
            # Loop until an idle heating station is found
            while station_id is None:
                station_id = self._find_available_heating_station()
                if station_id is None:
                    self.logger.info(f"[{material_id}] 没有空闲加热台,等待中...")
                    # Release the arm while waiting, then retry
                    self._release_arm()
                    time.sleep(0.5)
                    self._acquire_arm(task_desc)
            # Step 3: claim the station - mark OCCUPIED immediately so no other task picks the same one
            with self._stations_lock:
                self._heating_stations[station_id].state = HeatingStationState.OCCUPIED
                self._heating_stations[station_id].current_material = material_id
                self._heating_stations[station_id].material_number = material_number
            # Step 4: simulate the arm movement (ARM_OPERATION_TIME seconds)
            with self._tasks_lock:
                self._active_tasks[material_id]["status"] = "arm_moving"
                self._active_tasks[material_id]["assigned_station"] = station_id
            self.logger.info(f"[{material_id}] 机械臂正在移动到加热台{station_id}...")
            time.sleep(self.ARM_OPERATION_TIME)
            # Step 5: material has been placed on the station
            self._update_data_status(f"{material_id}已放入加热台{station_id}")
            self.logger.info(f"[{material_id}] 已放入加热台{station_id} (用时{self.ARM_OPERATION_TIME}s)")
            # Release the arm
            self._release_arm()
            with self._tasks_lock:
                self._active_tasks[material_id]["status"] = "placed_on_station"
            return {
                "success": True,
                "station_id": station_id,
                "material_id": material_id,
                "material_number": material_number,
                "message": f"{material_id}已成功移动到加热台{station_id}",
                "unilabos_samples": [
                    LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
                    sample_uuid, content in sample_uuids.items()]
            }
        except Exception as e:
            self.logger.error(f"[{material_id}] 移动失败: {str(e)}")
            # NOTE(review): Lock.locked() is True when *any* thread holds the lock,
            # so this may release an arm lock held by another task — confirm intended.
            if self._arm_lock.locked():
                self._release_arm()
            return {
                "success": False,
                "station_id": -1,
                "material_id": material_id,
                "material_number": material_number,
                "message": f"移动失败: {str(e)}",
                "unilabos_samples": [
                    LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
                    sample_uuid, content in sample_uuids.items()]
            }

    def start_heating(
        self,
        sample_uuids: SampleUUIDsType,
        station_id: int,
        material_number: int,
    ) -> StartHeatingResult:
        """
        Start the heating program on the given heating station.

        Args:
            sample_uuids: sample-uuid mapping propagated through the workflow.
            station_id: heating-station id (1-3), from the move_to_heating_station handle.
            material_number: material number, from the move_to_heating_station handle.

        Returns:
            StartHeatingResult: carries station_id / material_number etc. for
            the next workflow node.
        """
        self.logger.info(f"[加热台{station_id}] 开始加热")
        if station_id not in self._heating_stations:
            return {
                "success": False,
                "station_id": station_id,
                "material_id": "",
                "material_number": material_number,
                "message": f"无效的加热台ID: {station_id}",
                "unilabos_samples": [
                    LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
                    sample_uuid, content in sample_uuids.items()]
            }
        with self._stations_lock:
            station = self._heating_stations[station_id]
            if station.current_material is None:
                return {
                    "success": False,
                    "station_id": station_id,
                    "material_id": "",
                    "material_number": material_number,
                    "message": f"加热台{station_id}上没有物料",
                    "unilabos_samples": [
                        LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
                        sample_uuid, content in sample_uuids.items()]
                }
            if station.state == HeatingStationState.HEATING:
                return {
                    "success": False,
                    "station_id": station_id,
                    "material_id": station.current_material,
                    "material_number": material_number,
                    "message": f"加热台{station_id}已经在加热中",
                    "unilabos_samples": [
                        LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
                        sample_uuid, content in sample_uuids.items()]
                }
            material_id = station.current_material
            # Begin heating
            station.state = HeatingStationState.HEATING
            station.heating_start_time = time.time()
            station.heating_progress = 0.0
        with self._tasks_lock:
            if material_id in self._active_tasks:
                self._active_tasks[material_id]["status"] = "heating"
        self._update_data_status(f"加热台{station_id}开始加热{material_id}")
        # Simulate the heating process (HEATING_TIME seconds), updating progress once per second
        start_time = time.time()
        while True:
            elapsed = time.time() - start_time
            progress = min(100.0, (elapsed / self.HEATING_TIME) * 100)
            with self._stations_lock:
                self._heating_stations[station_id].heating_progress = progress
            self._update_data_status(f"加热台{station_id}加热中: {progress:.1f}%")
            if elapsed >= self.HEATING_TIME:
                break
            time.sleep(1.0)
        # Heating finished
        with self._stations_lock:
            self._heating_stations[station_id].state = HeatingStationState.COMPLETED
            self._heating_stations[station_id].heating_progress = 100.0
        with self._tasks_lock:
            if material_id in self._active_tasks:
                self._active_tasks[material_id]["status"] = "heating_completed"
        self._update_data_status(f"加热台{station_id}加热完成")
        self.logger.info(f"[加热台{station_id}] {material_id}加热完成 (用时{self.HEATING_TIME}s)")
        return {
            "success": True,
            "station_id": station_id,
            "material_id": material_id,
            "material_number": material_number,
            "message": f"加热台{station_id}加热完成",
            "unilabos_samples": [
                LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
                sample_uuid, content in sample_uuids.items()]
        }

    def move_to_output(
        self,
        sample_uuids: SampleUUIDsType,
        station_id: int,
        material_number: int,
    ) -> MoveToOutputResult:
        """
        Move the material from a heating station to its output slot Cn.

        Args:
            sample_uuids: sample-uuid mapping propagated through the workflow.
            station_id: heating-station id (1-3), from the start_heating handle.
            material_number: material number, from the start_heating handle;
                determines the output slot Cn.

        Returns:
            MoveToOutputResult: result of the move.
        """
        output_number = material_number  # the material number determines the output slot
        if station_id not in self._heating_stations:
            return {
                "success": False,
                "station_id": station_id,
                "material_id": "",
                "output_position": f"C{output_number}",
                "message": f"无效的加热台ID: {station_id}",
                "unilabos_samples": [
                    LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
                    sample_uuid, content in sample_uuids.items()]
            }
        with self._stations_lock:
            station = self._heating_stations[station_id]
            material_id = station.current_material
            if material_id is None:
                return {
                    "success": False,
                    "station_id": station_id,
                    "material_id": "",
                    "output_position": f"C{output_number}",
                    "message": f"加热台{station_id}上没有物料",
                    "unilabos_samples": [
                        LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
                        sample_uuid, content in sample_uuids.items()]
                }
            if station.state != HeatingStationState.COMPLETED:
                return {
                    "success": False,
                    "station_id": station_id,
                    "material_id": material_id,
                    "output_position": f"C{output_number}",
                    "message": f"加热台{station_id}尚未完成加热 (当前状态: {station.state.value})",
                    "unilabos_samples": [
                        LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
                        sample_uuid, content in sample_uuids.items()]
                }
        output_position = f"C{output_number}"
        task_desc = f"从加热台{station_id}移动{material_id}到{output_position}"
        self.logger.info(f"[任务] {task_desc}")
        try:
            with self._tasks_lock:
                if material_id in self._active_tasks:
                    self._active_tasks[material_id]["status"] = "waiting_for_arm_output"
            # Acquire the arm
            self._acquire_arm(task_desc)
            with self._tasks_lock:
                if material_id in self._active_tasks:
                    self._active_tasks[material_id]["status"] = "arm_moving_to_output"
            # Simulate the arm operation (ARM_OPERATION_TIME seconds)
            self.logger.info(f"[{material_id}] 机械臂正在从加热台{station_id}取出并移动到{output_position}...")
            time.sleep(self.ARM_OPERATION_TIME)
            # Clear the heating station
            with self._stations_lock:
                self._heating_stations[station_id].state = HeatingStationState.IDLE
                self._heating_stations[station_id].current_material = None
                self._heating_stations[station_id].material_number = None
                self._heating_stations[station_id].heating_progress = 0.0
                self._heating_stations[station_id].heating_start_time = None
            # Release the arm
            self._release_arm()
            # Task finished
            with self._tasks_lock:
                if material_id in self._active_tasks:
                    self._active_tasks[material_id]["status"] = "completed"
                    self._active_tasks[material_id]["end_time"] = time.time()
            self._update_data_status(f"{material_id}已移动到{output_position}")
            self.logger.info(f"[{material_id}] 已成功移动到{output_position} (用时{self.ARM_OPERATION_TIME}s)")
            return {
                "success": True,
                "station_id": station_id,
                "material_id": material_id,
                "output_position": output_position,
                "message": f"{material_id}已成功移动到{output_position}",
                "unilabos_samples": [
                    LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
                    sample_uuid, content in sample_uuids.items()]
            }
        except Exception as e:
            self.logger.error(f"移动到输出位置失败: {str(e)}")
            # NOTE(review): Lock.locked() is True when *any* thread holds the lock,
            # so this may release an arm lock held by another task — confirm intended.
            if self._arm_lock.locked():
                self._release_arm()
            return {
                "success": False,
                "station_id": station_id,
                "material_id": "",
                "output_position": output_position,
                "message": f"移动失败: {str(e)}",
                "unilabos_samples": [
                    LabSample(sample_uuid=sample_uuid, oss_path="", extra={"material_uuid": content} if isinstance(content, str) else content.serialize()) for
                    sample_uuid, content in sample_uuids.items()]
            }

    # ============ Status properties ============
    @property
    def status(self) -> str:
        """Overall device status string from self.data ("Ready"/"Offline"/"Unknown")."""
        return self.data.get("status", "Unknown")

    @property
    def arm_state(self) -> str:
        """Current arm state ("idle"/"busy")."""
        return self._arm_state.value

    @property
    def arm_current_task(self) -> str:
        """Description of the task currently using the arm ("" when idle)."""
        return self._arm_current_task or ""

    @property
    def heating_station_1_state(self) -> str:
        """State of heating station 1 ("unknown" after cleanup())."""
        with self._stations_lock:
            station = self._heating_stations.get(1)
            return station.state.value if station else "unknown"

    @property
    def heating_station_1_material(self) -> str:
        """Material id on heating station 1 ("" when empty)."""
        with self._stations_lock:
            station = self._heating_stations.get(1)
            return station.current_material or "" if station else ""

    @property
    def heating_station_1_progress(self) -> float:
        """Heating progress of station 1 in percent."""
        with self._stations_lock:
            station = self._heating_stations.get(1)
            return station.heating_progress if station else 0.0

    @property
    def heating_station_2_state(self) -> str:
        """State of heating station 2 ("unknown" after cleanup())."""
        with self._stations_lock:
            station = self._heating_stations.get(2)
            return station.state.value if station else "unknown"

    @property
    def heating_station_2_material(self) -> str:
        """Material id on heating station 2 ("" when empty)."""
        with self._stations_lock:
            station = self._heating_stations.get(2)
            return station.current_material or "" if station else ""

    @property
    def heating_station_2_progress(self) -> float:
        """Heating progress of station 2 in percent."""
        with self._stations_lock:
            station = self._heating_stations.get(2)
            return station.heating_progress if station else 0.0

    @property
    def heating_station_3_state(self) -> str:
        """State of heating station 3 ("unknown" after cleanup())."""
        with self._stations_lock:
            station = self._heating_stations.get(3)
            return station.state.value if station else "unknown"

    @property
    def heating_station_3_material(self) -> str:
        """Material id on heating station 3 ("" when empty)."""
        with self._stations_lock:
            station = self._heating_stations.get(3)
            return station.current_material or "" if station else ""

    @property
    def heating_station_3_progress(self) -> float:
        """Heating progress of station 3 in percent."""
        with self._stations_lock:
            station = self._heating_stations.get(3)
            return station.heating_progress if station else 0.0

    @property
    def active_tasks_count(self) -> int:
        """Number of tracked tasks (never removed until cleanup())."""
        with self._tasks_lock:
            return len(self._active_tasks)

    @property
    def message(self) -> str:
        """Last status message published into self.data."""
        return self.data.get("message", "")

View File

@@ -1,5 +1,9 @@
# 工作站抽象基类物料系统架构说明 # 工作站抽象基类物料系统架构说明
## 设计理念
基于用户需求"请你帮我系统思考一下,工作站抽象基类的物料系统基类该如何构建",我们最终确定了一个**PyLabRobot Deck为中心**的简化架构。
### 核心原则 ### 核心原则
1. **PyLabRobot为物料管理核心**使用PyLabRobot的Deck系统作为物料管理的基础利用其成熟的Resource体系 1. **PyLabRobot为物料管理核心**使用PyLabRobot的Deck系统作为物料管理的基础利用其成熟的Resource体系

View File

@@ -0,0 +1,12 @@
import pubchempy as pcp

# Resolve a CAS registry number via PubChem and print basic descriptors.
cas = "21324-40-3"  # example CAS number
matches = pcp.get_compounds(cas, namespace="name")
if not matches:
    raise ValueError("No hit")
compound = matches[0]
print("Canonical SMILES:", compound.canonical_smiles)
print("Isomeric SMILES:", compound.isomeric_smiles)
print("MW:", compound.molecular_weight)

View File

@@ -0,0 +1,7 @@
material_name
LiPF6
LiDFOB
DTD
LiFSI
LiPO2F2
1 material_name
2 LiPF6
3 LiDFOB
4 DTD
5 LiFSI
6 LiPO2F2

View File

@@ -1,157 +0,0 @@
# 批量出库 Excel 模板使用说明
**文件**: `outbound_template.xlsx`
**用途**: 配合 `auto_batch_outbound_from_xlsx()` 方法进行批量出库操作
**API 端点**: `/api/lims/storage/auto-batch-out-bound`
---
## 📋 Excel 列说明
| 列名 | 说明 | 示例 | 必填 |
|------|------|------|------|
| `locationId` | **库位 IDUUID** | `3a19da43-57b5-294f-d663-154a1cc32270` | ✅ 是 |
| `warehouseId` | **仓库 ID 或名称** | `配液站内试剂仓库` | ✅ 是 |
| `quantity` | **出库数量** | `1.0`, `2.0` | ✅ 是 |
| `x` | **X 坐标(库位横向位置)** | `1`, `2`, `3` | ✅ 是 |
| `y` | **Y 坐标(库位纵向位置)** | `1`, `2`, `3` | ✅ 是 |
| `z` | **Z 坐标(库位层数/高度)** | `1`, `2`, `3` | ✅ 是 |
| `备注说明` | 可选备注信息 | `配液站内试剂仓库-A01` | ❌ 否 |
### 📐 坐标说明
**x, y, z** 是库位在仓库内的**三维坐标**
```
仓库(例如 WH4
├── Z=1第1层/加样头面)
│ ├── X=1, Y=1位置 A
│ ├── X=2, Y=1位置 B
│ ├── X=3, Y=1位置 C
│ └── ...
└── Z=2第2层/原液瓶面)
├── X=1, Y=1位置 A
├── X=2, Y=1位置 B
└── ...
```
- **warehouseId**: 指定哪个仓库WH3, WH4, 配液站等)
- **x, y, z**: 在该仓库内的三维坐标
- **locationId**: 该坐标位置的唯一 UUID
### 🎯 起点与终点
**重要说明**:批量出库模板**只规定了出库的"起点"**(从哪里取物料),**没有指定"终点"**(放到哪里)。
```
出库流程:
起点Excel 指定) → 终点LIMS/工作流决定)
locationId, x, y, z → 由 LIMS 系统或当前工作流自动分配
```
**终点由以下方式确定:**
- **LIMS 系统自动分配**:根据当前任务自动规划目标位置
- **工作流预定义**:在创建出库任务时已绑定目标位置
- **暂存区**:默认放到出库暂存区,等待下一步操作
💡 **对比**:上料操作(`auto_feeding4to3`)则有 `targetWH` 参数可以指定目标仓库
---
## 🔍 如何获取 UUID
### 方法 1从配置文件获取
参考 `yibin_electrolyte_config.json` 中的 `warehouse_mapping`
```json
{
"warehouse_mapping": {
"配液站内试剂仓库": {
"site_uuids": {
"A01": "3a19da43-57b5-294f-d663-154a1cc32270",
"B01": "3a19da43-57b5-7394-5f49-54efe2c9bef2",
"C01": "3a19da43-57b5-5e75-552f-8dbd0ad1075f"
}
},
"手动堆栈": {
"site_uuids": {
"A01": "3a19deae-2c7a-36f5-5e41-02c5b66feaea",
"A02": "3a19deae-2c7a-dc6d-c41e-ef285d946cfe"
}
}
}
}
```
### 方法 2通过 API 查询
```python
material_info = hardware_interface.material_id_query(workflow_id)
locations = material_info.get("locations", [])
```
---
## 📝 填写示例
### 示例 1从配液站内试剂仓库出库
| locationId | warehouseId | quantity | x | y | z | 备注说明 |
|------------|-------------|----------|---|---|---|----------|
| `3a19da43-57b5-294f-d663-154a1cc32270` | 配液站内试剂仓库 | 1 | 1 | 1 | 1 | A01 位置 |
| `3a19da43-57b5-7394-5f49-54efe2c9bef2` | 配液站内试剂仓库 | 2 | 2 | 1 | 1 | B01 位置 |
### 示例 2从手动堆栈出库
| locationId | warehouseId | quantity | x | y | z | 备注说明 |
|------------|-------------|----------|---|---|---|----------|
| `3a19deae-2c7a-36f5-5e41-02c5b66feaea` | 手动堆栈 | 1 | 1 | 1 | 1 | A01 |
| `3a19deae-2c7a-dc6d-c41e-ef285d946cfe` | 手动堆栈 | 1 | 1 | 2 | 1 | A02 |
---
## 💻 使用方法
```python
from bioyond_cell_workstation import BioyondCellWorkstation
# 初始化工作站
workstation = BioyondCellWorkstation(config=config, deck=deck)
# 调用批量出库方法
result = workstation.auto_batch_outbound_from_xlsx(
xlsx_path="outbound_template.xlsx"
)
```
---
## ⚠️ 注意事项
1. **locationId 必须是有效的 UUID**,不能使用库位名称
2. **x, y, z 坐标必须与 locationId 对应**,表示该库位在仓库内的位置
3. **quantity 必须是数字**,可以是整数或浮点数
4. Excel 文件必须包含表头行
5. 空行会被自动跳过
6. 确保 UUID 与实际库位对应,否则 API 会报错
---
## 📚 相关文件
- **配置文件**: `yibin_electrolyte_config.json`
- **Python 代码**: `bioyond_cell_workstation.py` (L630-695)
- **生成脚本**: `create_outbound_template.py`
- **上料模板**: `material_template.xlsx`
---
## 🔄 重新生成模板
```bash
conda activate newunilab
python create_outbound_template.py
```

View File

@@ -49,14 +49,6 @@ class BioyondV1RPC(BaseRequest):
self.config = config self.config = config
self.api_key = config["api_key"] self.api_key = config["api_key"]
self.host = config["api_host"] self.host = config["api_host"]
# 初始化 location_mapping
# 直接从 warehouse_mapping 构建,确保数据源所谓的单一和结构化
self.location_mapping = {}
warehouse_mapping = self.config.get("warehouse_mapping", {})
for warehouse_name, warehouse_config in warehouse_mapping.items():
if "site_uuids" in warehouse_config:
self.location_mapping.update(warehouse_config["site_uuids"])
self._logger = SimpleLogger() self._logger = SimpleLogger()
self.material_cache = {} self.material_cache = {}
self._load_material_cache() self._load_material_cache()
@@ -184,40 +176,7 @@ class BioyondV1RPC(BaseRequest):
return {} return {}
print(f"add material data: {response['data']}") print(f"add material data: {response['data']}")
return response.get("data", {})
# 自动更新缓存
data = response.get("data", {})
if data:
if isinstance(data, str):
# 如果返回的是字符串通常是ID
mat_id = data
name = params.get("name")
else:
# 如果返回的是字典尝试获取name和id
name = data.get("name") or params.get("name")
mat_id = data.get("id")
if name and mat_id:
self.material_cache[name] = mat_id
print(f"已自动更新缓存: {name} -> {mat_id}")
# 处理返回数据中的 details (如果有)
# 有些 API 返回结构可能直接包含 details或者在 data 字段中
details = data.get("details", []) if isinstance(data, dict) else []
if not details and isinstance(data, dict):
details = data.get("detail", [])
if details:
for detail in details:
d_name = detail.get("name")
# 尝试从不同字段获取 ID
d_id = detail.get("id") or detail.get("detailMaterialId")
if d_name and d_id:
self.material_cache[d_name] = d_id
print(f"已自动更新 detail 缓存: {d_name} -> {d_id}")
return data
def query_matial_type_id(self, data) -> list: def query_matial_type_id(self, data) -> list:
"""查找物料typeid""" """查找物料typeid"""
@@ -244,7 +203,7 @@ class BioyondV1RPC(BaseRequest):
params={ params={
"apiKey": self.api_key, "apiKey": self.api_key,
"requestTime": self.get_current_time_iso8601(), "requestTime": self.get_current_time_iso8601(),
"data": 0, "data": {},
}) })
if not response or response['code'] != 1: if not response or response['code'] != 1:
return [] return []
@@ -314,19 +273,12 @@ class BioyondV1RPC(BaseRequest):
if not response or response['code'] != 1: if not response or response['code'] != 1:
return {} return {}
# 自动更新缓存 - 移除被删除的物料
for name, mid in list(self.material_cache.items()):
if mid == material_id:
del self.material_cache[name]
print(f"已从缓存移除物料: {name}")
break
return response.get("data", {}) return response.get("data", {})
def material_outbound(self, material_id: str, location_name: str, quantity: int) -> dict: def material_outbound(self, material_id: str, location_name: str, quantity: int) -> dict:
"""指定库位出库物料(通过库位名称)""" """指定库位出库物料(通过库位名称)"""
location_id = self.location_mapping.get(location_name, location_name) # location_name 参数实际上应该直接是 location_id (UUID)
location_id = location_name
params = { params = {
"materialId": material_id, "materialId": material_id,
@@ -1152,10 +1104,6 @@ class BioyondV1RPC(BaseRequest):
for detail_material in detail_materials: for detail_material in detail_materials:
detail_name = detail_material.get("name") detail_name = detail_material.get("name")
detail_id = detail_material.get("detailMaterialId") detail_id = detail_material.get("detailMaterialId")
if not detail_id:
# 尝试其他可能的字段
detail_id = detail_material.get("id")
if detail_name and detail_id: if detail_name and detail_id:
self.material_cache[detail_name] = detail_id self.material_cache[detail_name] = detail_id
print(f"加载detail材料: {detail_name} -> ID: {detail_id}") print(f"加载detail材料: {detail_name} -> ID: {detail_id}")
@@ -1176,14 +1124,6 @@ class BioyondV1RPC(BaseRequest):
print(f"从缓存找到材料: {material_name_or_id} -> ID: {material_id}") print(f"从缓存找到材料: {material_name_or_id} -> ID: {material_id}")
return material_id return material_id
# 如果缓存中没有,尝试刷新缓存
print(f"缓存中未找到材料 '{material_name_or_id}',尝试刷新缓存...")
self.refresh_material_cache()
if material_name_or_id in self.material_cache:
material_id = self.material_cache[material_name_or_id]
print(f"刷新缓存后找到材料: {material_name_or_id} -> ID: {material_id}")
return material_id
print(f"警告: 未在缓存中找到材料名称 '{material_name_or_id}',将使用原值") print(f"警告: 未在缓存中找到材料名称 '{material_name_or_id}',将使用原值")
return material_name_or_id return material_name_or_id

View File

@@ -1,329 +0,0 @@
# config.py
"""
Bioyond workstation configuration.

Contains the API configuration, workflow mappings, material-type mappings,
warehouse/location mappings and all other configuration data.
"""
from unilabos.resources.bioyond.decks import BIOYOND_PolymerReactionStation_Deck

# ============================================================================
# Basic configuration
# ============================================================================
# API configuration (key + host of the Bioyond endpoint)
API_CONFIG = {
    "api_key": "DE9BDDA0",
    "api_host": "http://192.168.1.200:44402"
}

# HTTP reporting-service configuration
HTTP_SERVICE_CONFIG = {
    "http_service_host": "127.0.0.1",  # listen address
    "http_service_port": 8080,  # listen port
}

# Deck configuration - reaction-station deck layout
DECK_CONFIG = BIOYOND_PolymerReactionStation_Deck(setup=True)

# ============================================================================
# Workflow configuration
# ============================================================================
# Workflow name -> workflow id
WORKFLOW_MAPPINGS = {
    "reactor_taken_out": "3a16081e-4788-ca37-eff4-ceed8d7019d1",
    "reactor_taken_in": "3a160df6-76b3-0957-9eb0-cb496d5721c6",
    "Solid_feeding_vials": "3a160877-87e7-7699-7bc6-ec72b05eb5e6",
    "Liquid_feeding_vials(non-titration)": "3a167d99-6158-c6f0-15b5-eb030f7d8e47",
    "Liquid_feeding_solvents": "3a160824-0665-01ed-285a-51ef817a9046",
    "Liquid_feeding(titration)": "3a16082a-96ac-0449-446a-4ed39f3365b6",
    "liquid_feeding_beaker": "3a16087e-124f-8ddb-8ec1-c2dff09ca784",
    "Drip_back": "3a162cf9-6aac-565a-ddd7-682ba1796a4a",
}

# Workflow name -> display name (Chinese UI labels; runtime values, do not translate)
WORKFLOW_TO_SECTION_MAP = {
    'reactor_taken_in': '反应器放入',
    'reactor_taken_out': '反应器取出',
    'Solid_feeding_vials': '固体投料-小瓶',
    'Liquid_feeding_vials(non-titration)': '液体投料-小瓶(非滴定)',
    'Liquid_feeding_solvents': '液体投料-溶剂',
    'Liquid_feeding(titration)': '液体投料-滴定',
    'liquid_feeding_beaker': '液体投料-烧杯',
    'Drip_back': '液体回滴'
}
# 工作流步骤ID配置
WORKFLOW_STEP_IDS = {
"reactor_taken_in": {
"config": "60a06f85-c5b3-29eb-180f-4f62dd7e2154"
},
"liquid_feeding_beaker": {
"liquid": "6808cda7-fee7-4092-97f0-5f9c2ffa60e3",
"observe": "1753c0de-dffc-4ee6-8458-805a2e227362"
},
"liquid_feeding_vials_non_titration": {
"liquid": "62ea6e95-3d5d-43db-bc1e-9a1802673861",
"observe": "3a167d99-6172-b67b-5f22-a7892197142e"
},
"liquid_feeding_solvents": {
"liquid": "1fcea355-2545-462b-b727-350b69a313bf",
"observe": "0553dfb3-9ac5-4ace-8e00-2f11029919a8"
},
"solid_feeding_vials": {
"feeding": "f7ae7448-4f20-4c1d-8096-df6fbadd787a",
"observe": "263c7ed5-7277-426b-bdff-d6fbf77bcc05"
},
"liquid_feeding_titration": {
"liquid": "a00ec41b-e666-4422-9c20-bfcd3cd15c54",
"observe": "ac738ff6-4c58-4155-87b1-d6f65a2c9ab5"
},
"drip_back": {
"liquid": "371be86a-ab77-4769-83e5-54580547c48a",
"observe": "ce024b9d-bd20-47b8-9f78-ca5ce7f44cf1"
}
}
# 工作流动作名称配置
ACTION_NAMES = {
"reactor_taken_in": {
"config": "通量-配置",
"stirring": "反应模块-开始搅拌"
},
"solid_feeding_vials": {
"feeding": "粉末加样模块-投料",
"observe": "反应模块-观察搅拌结果"
},
"liquid_feeding_vials_non_titration": {
"liquid": "稀释液瓶加液位-液体投料",
"observe": "反应模块-滴定结果观察"
},
"liquid_feeding_solvents": {
"liquid": "试剂AB放置位-试剂吸液分液",
"observe": "反应模块-观察搅拌结果"
},
"liquid_feeding_titration": {
"liquid": "稀释液瓶加液位-稀释液吸液分液",
"observe": "反应模块-滴定结果观察"
},
"liquid_feeding_beaker": {
"liquid": "烧杯溶液放置位-烧杯吸液分液",
"observe": "反应模块-观察搅拌结果"
},
"drip_back": {
"liquid": "试剂AB放置位-试剂吸液分液",
"observe": "反应模块-向下滴定结果观察"
}
}
# ============================================================================
# 仓库配置
# ============================================================================
# 说明:
# - 出库和入库操作都需要UUID
WAREHOUSE_MAPPING = {
# ========== 反应站仓库 ==========
# 堆栈1左 - 反应站左侧堆栈 (4行×4列=16个库位, A01D04)
"堆栈1左": {
"uuid": "3a14aa17-0d49-dce4-486e-4b5c85c8b366",
"site_uuids": {
"A01": "3a14aa17-0d49-11d7-a6e1-f236b3e5e5a3",
"A02": "3a14aa17-0d49-4bc5-8836-517b75473f5f",
"A03": "3a14aa17-0d49-c2bc-6222-5cee8d2d94f8",
"A04": "3a14aa17-0d49-3ce2-8e9a-008c38d116fb",
"B01": "3a14aa17-0d49-f49c-6b66-b27f185a3b32",
"B02": "3a14aa17-0d49-cf46-df85-a979c9c9920c",
"B03": "3a14aa17-0d49-7698-4a23-f7ffb7d48ba3",
"B04": "3a14aa17-0d49-1231-99be-d5870e6478e9",
"C01": "3a14aa17-0d49-be34-6fae-4aed9d48b70b",
"C02": "3a14aa17-0d49-11d7-0897-34921dcf6b7c",
"C03": "3a14aa17-0d49-9840-0bd5-9c63c1bb2c29",
"C04": "3a14aa17-0d49-8335-3bff-01da69ea4911",
"D01": "3a14aa17-0d49-2bea-c8e5-2b32094935d5",
"D02": "3a14aa17-0d49-cff4-e9e8-5f5f0bc1ef32",
"D03": "3a14aa17-0d49-4948-cb0a-78f30d1ca9b8",
"D04": "3a14aa17-0d49-fd2f-9dfb-a29b11e84099",
},
},
# 堆栈1右 - 反应站右侧堆栈 (4行×4列=16个库位, A05D08)
"堆栈1右": {
"uuid": "3a14aa17-0d49-dce4-486e-4b5c85c8b366",
"site_uuids": {
"A05": "3a14aa17-0d49-2c61-edc8-72a8ca7192dd",
"A06": "3a14aa17-0d49-60c8-2b00-40b17198f397",
"A07": "3a14aa17-0d49-ec5b-0b75-634dce8eed25",
"A08": "3a14aa17-0d49-3ec9-55b3-f3189c4ec53d",
"B05": "3a14aa17-0d49-6a4e-abcf-4c113eaaeaad",
"B06": "3a14aa17-0d49-e3f6-2dd6-28c2e8194fbe",
"B07": "3a14aa17-0d49-11a6-b861-ee895121bf52",
"B08": "3a14aa17-0d49-9c7d-1145-d554a6e482f0",
"C05": "3a14aa17-0d49-45c4-7a34-5105bc3e2368",
"C06": "3a14aa17-0d49-867e-39ab-31b3fe9014be",
"C07": "3a14aa17-0d49-ec56-c4b4-39fd9b2131e7",
"C08": "3a14aa17-0d49-1128-d7d9-ffb1231c98c0",
"D05": "3a14aa17-0d49-e843-f961-ea173326a14b",
"D06": "3a14aa17-0d49-4d26-a985-f188359c4f8b",
"D07": "3a14aa17-0d49-223a-b520-bc092bb42fe0",
"D08": "3a14aa17-0d49-4fa3-401a-6a444e1cca22",
},
},
# 站内试剂存放堆栈
"站内试剂存放堆栈": {
"uuid": "3a14aa3b-9fab-9d8e-d1a7-828f01f51f0c",
"site_uuids": {
"A01": "3a14aa3b-9fab-adac-7b9c-e1ee446b51d5",
"A02": "3a14aa3b-9fab-ca72-febc-b7c304476c78"
}
},
# 测量小瓶仓库(测密度)
"测量小瓶仓库": {
"uuid": "3a15012f-705b-c0de-3f9e-950c205f9921",
"site_uuids": {
"A01": "3a15012f-705e-0524-3161-c523b5aebc97",
"A02": "3a15012f-705e-7cd1-32ab-ad4fd1ab75c8",
"A03": "3a15012f-705e-a5d6-edac-bdbfec236260",
"B01": "3a15012f-705e-e0ee-80e0-10a6b3fc500d",
"B02": "3a15012f-705e-e499-180d-de06d60d0b21",
"B03": "3a15012f-705e-eff6-63f1-09f742096b26"
}
},
# 站内Tip盒堆栈 - 用于存放枪头盒 (耗材)
"站内Tip盒堆栈": {
"uuid": "3a14aa3a-2d3c-b5c1-9ddf-7c4a957d459a",
"site_uuids": {
"A01": "3a14aa3a-2d3d-e700-411a-0ddf85e1f18a",
"A02": "3a14aa3a-2d3d-a7ce-099a-d5632fdafa24",
"A03": "3a14aa3a-2d3d-bdf6-a702-c60b38b08501",
"B01": "3a14aa3a-2d3d-d704-f076-2a8d5bc72cb8",
"B02": "3a14aa3a-2d3d-c350-2526-0778d173a5ac",
"B03": "3a14aa3a-2d3d-bc38-b356-f0de2e44e0c7"
}
},
# ========== 配液站仓库 ==========
"粉末堆栈": {
"uuid": "3a14198e-6928-121f-7ca6-88ad3ae7e6a0",
"site_uuids": {
"A01": "3a14198e-6929-31f0-8a22-0f98f72260df",
"A02": "3a14198e-6929-4379-affa-9a2935c17f99",
"A03": "3a14198e-6929-56da-9a1c-7f5fbd4ae8af",
"A04": "3a14198e-6929-5e99-2b79-80720f7cfb54",
"B01": "3a14198e-6929-f525-9a1b-1857552b28ee",
"B02": "3a14198e-6929-bf98-0fd5-26e1d68bf62d",
"B03": "3a14198e-6929-2d86-a468-602175a2b5aa",
"B04": "3a14198e-6929-1a98-ae57-e97660c489ad",
"C01": "3a14198e-6929-46fe-841e-03dd753f1e4a",
"C02": "3a14198e-6929-72ac-32ce-9b50245682b8",
"C03": "3a14198e-6929-8a0b-b686-6f4a2955c4e2",
"C04": "3a14198e-6929-a0ec-5f15-c0f9f339f963",
"D01": "3a14198e-6929-1bc9-a9bd-3b7ca66e7f95",
"D02": "3a14198e-6929-3bd8-e6c7-4a9fd93be118",
"D03": "3a14198e-6929-dde1-fc78-34a84b71afdf",
"D04": "3a14198e-6929-7ac8-915a-fea51cb2e884"
}
},
"溶液堆栈": {
"uuid": "3a14198e-d723-2c13-7d12-50143e190a23",
"site_uuids": {
"A01": "3a14198e-d724-e036-afdc-2ae39a7f3383",
"A02": "3a14198e-d724-d818-6d4f-5725191a24b5",
"A03": "3a14198e-d724-b5bb-adf3-4c5a0da6fb31",
"A04": "3a14198e-d724-d378-d266-2508a224a19f",
"B01": "3a14198e-d724-afa4-fc82-0ac8a9016791",
"B02": "3a14198e-d724-be8a-5e0b-012675e195c6",
"B03": "3a14198e-d724-ab4e-48cb-817c3c146707",
"B04": "3a14198e-d724-f56e-468b-0110a8feb36a",
"C01": "3a14198e-d724-ca48-bb9e-7e85751e55b6",
"C02": "3a14198e-d724-cc1e-5c2c-228a130f40a8",
"C03": "3a14198e-d724-7f18-1853-39d0c62e1d33",
"C04": "3a14198e-d724-0cf1-dea9-a1f40fe7e13c",
"D01": "3a14198e-d724-df6d-5e32-5483b3cab583",
"D02": "3a14198e-d724-1e28-c885-574c3df468d0",
"D03": "3a14198e-d724-28a2-a760-baa896f46b66",
"D04": "3a14198e-d724-0ddd-9654-f9352a421de9"
}
},
"试剂堆栈": {
"uuid": "3a14198c-c2cc-0290-e086-44a428fba248",
"site_uuids": {
"A01": "3a14198c-c2cf-8b40-af28-b467808f1c36", # x=1, y=1, code=0001-0001
"A02": "3a14198c-c2d0-dc7d-b8d0-e1d88cee3094", # x=1, y=2, code=0001-0002
"A03": "3a14198c-c2d0-354f-39ad-642e1a72fcb8", # x=1, y=3, code=0001-0003
"A04": "3a14198c-c2d0-725e-523d-34c037ac2440", # x=1, y=4, code=0001-0004
"B01": "3a14198c-c2d0-f3e7-871a-e470d144296f", # x=2, y=1, code=0001-0005
"B02": "3a14198c-c2d0-2070-efc8-44e245f10c6f", # x=2, y=2, code=0001-0006
"B03": "3a14198c-c2d0-1559-105d-0ea30682cab4", # x=2, y=3, code=0001-0007
"B04": "3a14198c-c2d0-efce-0939-69ca5a7dfd39" # x=2, y=4, code=0001-0008
}
}
}
# ============================================================================
# 物料类型配置
# ============================================================================
# 说明:
# - 格式: PyLabRobot资源类型名称 → Bioyond系统typeId的UUID
# - 这个映射基于 resource.model 属性 (不是显示名称!)
# - UUID为空表示该类型暂未在Bioyond系统中定义
MATERIAL_TYPE_MAPPINGS = {
# ================================================配液站资源============================================================
# ==================================================样品===============================================================
"BIOYOND_PolymerStation_1FlaskCarrier": ("烧杯", "3a14196b-24f2-ca49-9081-0cab8021bf1a"), # 配液站-样品-烧杯
"BIOYOND_PolymerStation_1BottleCarrier": ("试剂瓶", "3a14196b-8bcf-a460-4f74-23f21ca79e72"), # 配液站-样品-试剂瓶
"BIOYOND_PolymerStation_6StockCarrier": ("分装板", "3a14196e-5dfe-6e21-0c79-fe2036d052c4"), # 配液站-样品-分装板
"BIOYOND_PolymerStation_Liquid_Vial": ("10%分装小瓶", "3a14196c-76be-2279-4e22-7310d69aed68"), # 配液站-样品-分装板-第一排小瓶
"BIOYOND_PolymerStation_Solid_Vial": ("90%分装小瓶", "3a14196c-cdcf-088d-dc7d-5cf38f0ad9ea"), # 配液站-样品-分装板-第二排小瓶
# ==================================================试剂===============================================================
"BIOYOND_PolymerStation_8StockCarrier": ("样品板", "3a14196e-b7a0-a5da-1931-35f3000281e9"), # 配液站-试剂-样品板8孔
"BIOYOND_PolymerStation_Solid_Stock": ("样品瓶", "3a14196a-cf7d-8aea-48d8-b9662c7dba94"), # 配液站-试剂-样品板-样品瓶
}
# ============================================================================
# 动态生成的库位UUID映射从WAREHOUSE_MAPPING中提取
# ============================================================================
LOCATION_MAPPING = {}
for warehouse_name, warehouse_config in WAREHOUSE_MAPPING.items():
if "site_uuids" in warehouse_config:
LOCATION_MAPPING.update(warehouse_config["site_uuids"])
# ============================================================================
# 物料默认参数配置
# ============================================================================
# 说明:
# - 为特定物料名称自动添加默认参数(如密度、分子量、单位等)
# - 格式: 物料名称 → {参数字典}
# - 在创建或更新物料时,会自动合并这些参数到 Parameters 字段
# - unit: 物料的计量单位(会用于 unit 字段)
# - density/densityUnit: 密度信息(会添加到 Parameters 中)
MATERIAL_DEFAULT_PARAMETERS = {
# 溶剂类
"NMP": {
"unit": "毫升",
"density": "1.03",
"densityUnit": "g/mL",
"description": "N-甲基吡咯烷酮 (N-Methyl-2-pyrrolidone)"
},
# 可以继续添加其他物料...
}
# ============================================================================
# 物料类型默认参数配置
# ============================================================================
# 说明:
# - 为特定物料类型UUID自动添加默认参数
# - 格式: Bioyond类型UUID → {参数字典}
# - 优先级低于按名称匹配的配置
MATERIAL_TYPE_PARAMETERS = {
# 示例:
# "3a14196b-24f2-ca49-9081-0cab8021bf1a": { # 烧杯
# "unit": "个"
# }
}

View File

@@ -4,8 +4,7 @@ import time
from typing import Optional, Dict, Any, List from typing import Optional, Dict, Any, List
from typing_extensions import TypedDict from typing_extensions import TypedDict
import requests import requests
import pint from unilabos.devices.workstation.bioyond_studio.config import API_CONFIG
from unilabos.devices.workstation.bioyond_studio.bioyond_rpc import BioyondException from unilabos.devices.workstation.bioyond_studio.bioyond_rpc import BioyondException
from unilabos.devices.workstation.bioyond_studio.station import BioyondWorkstation from unilabos.devices.workstation.bioyond_studio.station import BioyondWorkstation
@@ -26,89 +25,13 @@ class ComputeExperimentDesignReturn(TypedDict):
class BioyondDispensingStation(BioyondWorkstation): class BioyondDispensingStation(BioyondWorkstation):
def __init__( def __init__(
self, self,
config: dict = None, config,
deck=None, # 桌子
protocol_type=None, deck,
*args,
**kwargs, **kwargs,
): ):
"""初始化配液站 super().__init__(config, deck, *args, **kwargs)
Args:
config: 配置字典,应包含material_type_mappings等配置
deck: Deck对象
protocol_type: 协议类型(由ROS系统传递,此处忽略)
**kwargs: 其他可能的参数
"""
if config is None:
config = {}
# 将 kwargs 合并到 config 中 (处理扁平化配置如 api_key)
config.update(kwargs)
if deck is None and config:
deck = config.get('deck')
# 🔧 修复: 确保 Deck 上的 warehouses 具有正确的 UUID (必须在 super().__init__ 之前执行,因为父类会触发同步)
# 从配置中读取 warehouse_mapping并应用到实际的 deck 资源上
if config and "warehouse_mapping" in config and deck:
warehouse_mapping = config["warehouse_mapping"]
print(f"正在根据配置更新 Deck warehouse UUIDs... (共有 {len(warehouse_mapping)} 个配置)")
user_deck = deck
# 初始化 warehouses 字典
if not hasattr(user_deck, "warehouses") or user_deck.warehouses is None:
user_deck.warehouses = {}
# 1. 尝试从 children 中查找匹配的资源
for child in user_deck.children:
# 简单判断: 如果名字在 mapping 中,就认为是 warehouse
if child.name in warehouse_mapping:
user_deck.warehouses[child.name] = child
print(f" - 从子资源中找到 warehouse: {child.name}")
# 2. 如果还是没找到,且 Deck 类有 setup 方法,尝试调用 setup (针对 Deck 对象正确但未初始化的情况)
if not user_deck.warehouses and hasattr(user_deck, "setup"):
print(" - 尝试调用 deck.setup() 初始化仓库...")
try:
user_deck.setup()
# setup 后重新检查
if hasattr(user_deck, "warehouses") and user_deck.warehouses:
print(f" - setup() 成功,找到 {len(user_deck.warehouses)} 个仓库")
except Exception as e:
print(f" - 调用 setup() 失败: {e}")
# 3. 如果仍然为空,可能需要手动创建 (仅针对特定已知的 Deck 类型进行补救,这里暂时只打印警告)
if not user_deck.warehouses:
print(" - ⚠️ 仍然无法找到任何 warehouse 资源!")
for wh_name, wh_config in warehouse_mapping.items():
target_uuid = wh_config.get("uuid")
# 尝试在 deck.warehouses 中查找
wh_resource = None
if hasattr(user_deck, "warehouses") and wh_name in user_deck.warehouses:
wh_resource = user_deck.warehouses[wh_name]
# 如果没找到,尝试在所有子资源中查找
if not wh_resource:
wh_resource = user_deck.get_resource(wh_name)
if wh_resource:
if target_uuid:
current_uuid = getattr(wh_resource, "uuid", None)
print(f"✅ 更新仓库 '{wh_name}' UUID: {current_uuid} -> {target_uuid}")
# 动态添加 uuid 属性
wh_resource.uuid = target_uuid
# 同时也确保 category 正确,避免 graphio 识别错误
# wh_resource.category = "warehouse"
else:
print(f"⚠️ 仓库 '{wh_name}' 在配置中没有 UUID")
else:
print(f"❌ 在 Deck 中未找到配置的仓库: '{wh_name}'")
super().__init__(bioyond_config=config, deck=deck)
# self.config = config # self.config = config
# self.api_key = config["api_key"] # self.api_key = config["api_key"]
# self.host = config["api_host"] # self.host = config["api_host"]
@@ -120,41 +43,6 @@ class BioyondDispensingStation(BioyondWorkstation):
# 用于跟踪任务完成状态的字典: {orderCode: {status, order_id, timestamp}} # 用于跟踪任务完成状态的字典: {orderCode: {status, order_id, timestamp}}
self.order_completion_status = {} self.order_completion_status = {}
# 初始化 pint 单位注册表
self.ureg = pint.UnitRegistry()
# 化合物信息
self.compound_info = {
"MolWt": {
"MDA": 108.14 * self.ureg.g / self.ureg.mol,
"TDA": 122.16 * self.ureg.g / self.ureg.mol,
"PAPP": 521.62 * self.ureg.g / self.ureg.mol,
"BTDA": 322.23 * self.ureg.g / self.ureg.mol,
"BPDA": 294.22 * self.ureg.g / self.ureg.mol,
"6FAP": 366.26 * self.ureg.g / self.ureg.mol,
"PMDA": 218.12 * self.ureg.g / self.ureg.mol,
"MPDA": 108.14 * self.ureg.g / self.ureg.mol,
"SIDA": 248.51 * self.ureg.g / self.ureg.mol,
"ODA": 200.236 * self.ureg.g / self.ureg.mol,
"4,4'-ODA": 200.236 * self.ureg.g / self.ureg.mol,
"134": 292.34 * self.ureg.g / self.ureg.mol,
},
"FuncGroup": {
"MDA": "Amine",
"TDA": "Amine",
"PAPP": "Amine",
"BTDA": "Anhydride",
"BPDA": "Anhydride",
"6FAP": "Amine",
"MPDA": "Amine",
"SIDA": "Amine",
"PMDA": "Anhydride",
"ODA": "Amine",
"4,4'-ODA": "Amine",
"134": "Amine",
}
}
def _post_project_api(self, endpoint: str, data: Any) -> Dict[str, Any]: def _post_project_api(self, endpoint: str, data: Any) -> Dict[str, Any]:
"""项目接口通用POST调用 """项目接口通用POST调用
@@ -166,7 +54,7 @@ class BioyondDispensingStation(BioyondWorkstation):
dict: 服务端响应失败时返回 {code:0,message,...} dict: 服务端响应失败时返回 {code:0,message,...}
""" """
request_data = { request_data = {
"apiKey": self.bioyond_config["api_key"], "apiKey": API_CONFIG["api_key"],
"requestTime": self.hardware_interface.get_current_time_iso8601(), "requestTime": self.hardware_interface.get_current_time_iso8601(),
"data": data "data": data
} }
@@ -197,7 +85,7 @@ class BioyondDispensingStation(BioyondWorkstation):
dict: 服务端响应失败时返回 {code:0,message,...} dict: 服务端响应失败时返回 {code:0,message,...}
""" """
request_data = { request_data = {
"apiKey": self.bioyond_config["api_key"], "apiKey": API_CONFIG["api_key"],
"requestTime": self.hardware_interface.get_current_time_iso8601(), "requestTime": self.hardware_interface.get_current_time_iso8601(),
"data": data "data": data
} }
@@ -230,22 +118,20 @@ class BioyondDispensingStation(BioyondWorkstation):
ratio = json.loads(ratio) ratio = json.loads(ratio)
except Exception: except Exception:
ratio = {} ratio = {}
root = str(Path(__file__).resolve().parents[3])
if root not in sys.path:
sys.path.append(root)
try:
mod = importlib.import_module("tem.compute")
except Exception as e:
raise BioyondException(f"无法导入计算模块: {e}")
try: try:
wp = float(wt_percent) if isinstance(wt_percent, str) else wt_percent wp = float(wt_percent) if isinstance(wt_percent, str) else wt_percent
mt = float(m_tot) if isinstance(m_tot, str) else m_tot mt = float(m_tot) if isinstance(m_tot, str) else m_tot
tp = float(titration_percent) if isinstance(titration_percent, str) else titration_percent tp = float(titration_percent) if isinstance(titration_percent, str) else titration_percent
except Exception as e: except Exception as e:
raise BioyondException(f"参数解析失败: {e}") raise BioyondException(f"参数解析失败: {e}")
res = mod.generate_experiment_design(ratio=ratio, wt_percent=wp, m_tot=mt, titration_percent=tp)
# 2. 调用内部计算方法
res = self._generate_experiment_design(
ratio=ratio,
wt_percent=wp,
m_tot=mt,
titration_percent=tp
)
# 3. 构造返回结果
out = { out = {
"solutions": res.get("solutions", []), "solutions": res.get("solutions", []),
"titration": res.get("titration", {}), "titration": res.get("titration", {}),
@@ -254,248 +140,11 @@ class BioyondDispensingStation(BioyondWorkstation):
"return_info": json.dumps(res, ensure_ascii=False) "return_info": json.dumps(res, ensure_ascii=False)
} }
return out return out
except BioyondException: except BioyondException:
raise raise
except Exception as e: except Exception as e:
raise BioyondException(str(e)) raise BioyondException(str(e))
def _generate_experiment_design(
self,
ratio: dict,
wt_percent: float = 0.25,
m_tot: float = 70,
titration_percent: float = 0.03,
) -> dict:
"""内部方法:生成实验设计
根据FuncGroup自动区分二胺和二酐每种二胺单独配溶液严格按照ratio顺序投料
参数:
ratio: 化合物配比字典格式: {"compound_name": ratio_value}
wt_percent: 固体重量百分比
m_tot: 反应混合物总质量(g)
titration_percent: 滴定溶液百分比
返回:
包含实验设计详细参数的字典
"""
# 溶剂密度
ρ_solvent = 1.03 * self.ureg.g / self.ureg.ml
# 二酐溶解度
solubility = 0.02 * self.ureg.g / self.ureg.ml
# 投入固体时最小溶剂体积
V_min = 30 * self.ureg.ml
m_tot = m_tot * self.ureg.g
# 保持ratio中的顺序
compound_names = list(ratio.keys())
compound_ratios = list(ratio.values())
# 验证所有化合物是否在 compound_info 中定义
undefined_compounds = [name for name in compound_names if name not in self.compound_info["MolWt"]]
if undefined_compounds:
available = list(self.compound_info["MolWt"].keys())
raise ValueError(
f"以下化合物未在 compound_info 中定义: {undefined_compounds}"
f"可用的化合物: {available}"
)
# 获取各化合物的分子量和官能团类型
molecular_weights = [self.compound_info["MolWt"][name] for name in compound_names]
func_groups = [self.compound_info["FuncGroup"][name] for name in compound_names]
# 记录化合物信息用于调试
self.hardware_interface._logger.info(f"化合物名称: {compound_names}")
self.hardware_interface._logger.info(f"官能团类型: {func_groups}")
# 按原始顺序分离二胺和二酐
ordered_compounds = list(zip(compound_names, compound_ratios, molecular_weights, func_groups))
diamine_compounds = [(name, ratio_val, mw, i) for i, (name, ratio_val, mw, fg) in enumerate(ordered_compounds) if fg == "Amine"]
anhydride_compounds = [(name, ratio_val, mw, i) for i, (name, ratio_val, mw, fg) in enumerate(ordered_compounds) if fg == "Anhydride"]
if not diamine_compounds or not anhydride_compounds:
raise ValueError(
f"需要同时包含二胺(Amine)和二酐(Anhydride)化合物。"
f"当前二胺: {[c[0] for c in diamine_compounds]}, "
f"当前二酐: {[c[0] for c in anhydride_compounds]}"
)
# 计算加权平均分子量 (基于摩尔比)
total_molar_ratio = sum(compound_ratios)
weighted_molecular_weight = sum(ratio_val * mw for ratio_val, mw in zip(compound_ratios, molecular_weights))
# 取最后一个二酐用于滴定
titration_anhydride = anhydride_compounds[-1]
solid_anhydrides = anhydride_compounds[:-1] if len(anhydride_compounds) > 1 else []
# 二胺溶液配制参数 - 每种二胺单独配制
diamine_solutions = []
total_diamine_volume = 0 * self.ureg.ml
# 计算反应物的总摩尔量
n_reactant = m_tot * wt_percent / weighted_molecular_weight
for name, ratio_val, mw, order_index in diamine_compounds:
# 跳过 SIDA
if name == "SIDA":
continue
# 计算该二胺需要的摩尔数
n_diamine_needed = n_reactant * ratio_val
# 二胺溶液配制参数 (每种二胺固定配制参数)
m_diamine_solid = 5.0 * self.ureg.g # 每种二胺固体质量
V_solvent_for_this = 20 * self.ureg.ml # 每种二胺溶剂体积
m_solvent_for_this = ρ_solvent * V_solvent_for_this
# 计算该二胺溶液的浓度
c_diamine = (m_diamine_solid / mw) / V_solvent_for_this
# 计算需要移取的溶液体积
V_diamine_needed = n_diamine_needed / c_diamine
diamine_solutions.append({
"name": name,
"order": order_index,
"solid_mass": m_diamine_solid.magnitude,
"solvent_volume": V_solvent_for_this.magnitude,
"concentration": c_diamine.magnitude,
"volume_needed": V_diamine_needed.magnitude,
"molar_ratio": ratio_val
})
total_diamine_volume += V_diamine_needed
# 按原始顺序排序
diamine_solutions.sort(key=lambda x: x["order"])
# 计算滴定二酐的质量
titration_name, titration_ratio, titration_mw, _ = titration_anhydride
m_titration_anhydride = n_reactant * titration_ratio * titration_mw
m_titration_90 = m_titration_anhydride * (1 - titration_percent)
m_titration_10 = m_titration_anhydride * titration_percent
# 计算其他固体二酐的质量 (按顺序)
solid_anhydride_masses = []
for name, ratio_val, mw, order_index in solid_anhydrides:
mass = n_reactant * ratio_val * mw
solid_anhydride_masses.append({
"name": name,
"order": order_index,
"mass": mass.magnitude,
"molar_ratio": ratio_val
})
# 按原始顺序排序
solid_anhydride_masses.sort(key=lambda x: x["order"])
# 计算溶剂用量
total_diamine_solution_mass = sum(
sol["volume_needed"] * ρ_solvent for sol in diamine_solutions
) * self.ureg.ml
# 预估滴定溶剂量、计算补加溶剂量
m_solvent_titration = m_titration_10 / solubility * ρ_solvent
m_solvent_add = m_tot * (1 - wt_percent) - total_diamine_solution_mass - m_solvent_titration
# 检查最小溶剂体积要求
total_liquid_volume = (total_diamine_solution_mass + m_solvent_add) / ρ_solvent
m_tot_min = V_min / total_liquid_volume * m_tot
# 如果需要,按比例放大
scale_factor = 1.0
if m_tot_min > m_tot:
scale_factor = (m_tot_min / m_tot).magnitude
m_titration_90 *= scale_factor
m_titration_10 *= scale_factor
m_solvent_add *= scale_factor
m_solvent_titration *= scale_factor
# 更新二胺溶液用量
for sol in diamine_solutions:
sol["volume_needed"] *= scale_factor
# 更新固体二酐用量
for anhydride in solid_anhydride_masses:
anhydride["mass"] *= scale_factor
m_tot = m_tot_min
# 生成投料顺序
feeding_order = []
# 1. 固体二酐 (按顺序)
for anhydride in solid_anhydride_masses:
feeding_order.append({
"step": len(feeding_order) + 1,
"type": "solid_anhydride",
"name": anhydride["name"],
"amount": anhydride["mass"],
"order": anhydride["order"]
})
# 2. 二胺溶液 (按顺序)
for sol in diamine_solutions:
feeding_order.append({
"step": len(feeding_order) + 1,
"type": "diamine_solution",
"name": sol["name"],
"amount": sol["volume_needed"],
"order": sol["order"]
})
# 3. 主要二酐粉末
feeding_order.append({
"step": len(feeding_order) + 1,
"type": "main_anhydride",
"name": titration_name,
"amount": m_titration_90.magnitude,
"order": titration_anhydride[3]
})
# 4. 补加溶剂
if m_solvent_add > 0:
feeding_order.append({
"step": len(feeding_order) + 1,
"type": "additional_solvent",
"name": "溶剂",
"amount": m_solvent_add.magnitude,
"order": 999
})
# 5. 滴定二酐溶液
feeding_order.append({
"step": len(feeding_order) + 1,
"type": "titration_anhydride",
"name": f"{titration_name} 滴定液",
"amount": m_titration_10.magnitude,
"titration_solvent": m_solvent_titration.magnitude,
"order": titration_anhydride[3]
})
# 返回实验设计结果
results = {
"total_mass": m_tot.magnitude,
"scale_factor": scale_factor,
"solutions": diamine_solutions,
"solids": solid_anhydride_masses,
"titration": {
"name": titration_name,
"main_portion": m_titration_90.magnitude,
"titration_portion": m_titration_10.magnitude,
"titration_solvent": m_solvent_titration.magnitude,
},
"solvents": {
"additional_solvent": m_solvent_add.magnitude,
"total_liquid_volume": total_liquid_volume.magnitude
},
"feeding_order": feeding_order,
"minimum_required_mass": m_tot_min.magnitude
}
return results
# 90%10%小瓶投料任务创建方法 # 90%10%小瓶投料任务创建方法
def create_90_10_vial_feeding_task(self, def create_90_10_vial_feeding_task(self,
order_name: str = None, order_name: str = None,
@@ -1312,108 +961,6 @@ class BioyondDispensingStation(BioyondWorkstation):
'actualVolume': actual_volume 'actualVolume': actual_volume
} }
def _simplify_report(self, report) -> Dict[str, Any]:
"""简化实验报告,只保留关键信息,去除冗余的工作流参数"""
if not isinstance(report, dict):
return report
data = report.get('data', {})
if not isinstance(data, dict):
return report
# 提取关键信息
simplified = {
'name': data.get('name'),
'code': data.get('code'),
'requester': data.get('requester'),
'workflowName': data.get('workflowName'),
'workflowStep': data.get('workflowStep'),
'requestTime': data.get('requestTime'),
'startPreparationTime': data.get('startPreparationTime'),
'completeTime': data.get('completeTime'),
'useTime': data.get('useTime'),
'status': data.get('status'),
'statusName': data.get('statusName'),
}
# 提取物料信息(简化版)
pre_intakes = data.get('preIntakes', [])
if pre_intakes and isinstance(pre_intakes, list):
first_intake = pre_intakes[0]
sample_materials = first_intake.get('sampleMaterials', [])
# 简化物料信息
simplified_materials = []
for material in sample_materials:
if isinstance(material, dict):
mat_info = {
'materialName': material.get('materialName'),
'materialTypeName': material.get('materialTypeName'),
'materialCode': material.get('materialCode'),
'materialLocation': material.get('materialLocation'),
}
# 解析parameters中的关键信息如密度、加料历史等
params_str = material.get('parameters', '{}')
try:
params = json.loads(params_str) if isinstance(params_str, str) else params_str
if isinstance(params, dict):
# 只保留关键参数
if 'density' in params:
mat_info['density'] = params['density']
if 'feedingHistory' in params:
mat_info['feedingHistory'] = params['feedingHistory']
if 'liquidVolume' in params:
mat_info['liquidVolume'] = params['liquidVolume']
if 'm_diamine_tot' in params:
mat_info['m_diamine_tot'] = params['m_diamine_tot']
if 'wt_diamine' in params:
mat_info['wt_diamine'] = params['wt_diamine']
except:
pass
simplified_materials.append(mat_info)
simplified['sampleMaterials'] = simplified_materials
# 提取extraProperties中的实际值
extra_props = first_intake.get('extraProperties', {})
if isinstance(extra_props, dict):
simplified_extra = {}
for key, value in extra_props.items():
try:
parsed_value = json.loads(value) if isinstance(value, str) else value
simplified_extra[key] = parsed_value
except:
simplified_extra[key] = value
simplified['extraProperties'] = simplified_extra
return {
'data': simplified,
'code': report.get('code'),
'message': report.get('message'),
'timestamp': report.get('timestamp')
}
def scheduler_start(self) -> dict:
"""启动调度器 - 启动Bioyond工作站的任务调度器开始执行队列中的任务
Returns:
dict: 包含return_info的字典return_info为整型(1=成功)
Raises:
BioyondException: 调度器启动失败时抛出异常
"""
result = self.hardware_interface.scheduler_start()
self.hardware_interface._logger.info(f"调度器启动结果: {result}")
if result != 1:
error_msg = "启动调度器失败: 有未处理错误调度无法启动。请检查Bioyond系统状态。"
self.hardware_interface._logger.error(error_msg)
raise BioyondException(error_msg)
return {"return_info": result}
# 等待多个任务完成并获取实验报告 # 等待多个任务完成并获取实验报告
def wait_for_multiple_orders_and_get_reports(self, def wait_for_multiple_orders_and_get_reports(self,
batch_create_result: str = None, batch_create_result: str = None,
@@ -1455,12 +1002,7 @@ class BioyondDispensingStation(BioyondWorkstation):
# 验证batch_create_result参数 # 验证batch_create_result参数
if not batch_create_result or batch_create_result == "": if not batch_create_result or batch_create_result == "":
raise BioyondException( raise BioyondException("batch_create_result参数为空请确保从batch_create节点正确连接handle")
"batch_create_result参数为空请确保:\n"
"1. batch_create节点与wait节点之间正确连接了handle\n"
"2. batch_create节点成功执行并返回了结果\n"
"3. 检查上游batch_create任务是否成功创建了订单"
)
# 解析batch_create_result JSON对象 # 解析batch_create_result JSON对象
try: try:
@@ -1489,17 +1031,7 @@ class BioyondDispensingStation(BioyondWorkstation):
# 验证提取的数据 # 验证提取的数据
if not order_codes: if not order_codes:
self.hardware_interface._logger.error( raise BioyondException("batch_create_result中未找到order_codes字段或为空")
f"batch_create任务未生成任何订单。batch_create_result内容: {batch_create_result}"
)
raise BioyondException(
"batch_create_result中未找到order_codes或为空。\n"
"可能的原因:\n"
"1. batch_create任务执行失败检查任务是否报错\n"
"2. 物料配置问题(如'物料样品板分配失败'\n"
"3. Bioyond系统状态异常\n"
f"请检查batch_create任务的执行结果"
)
if not order_ids: if not order_ids:
raise BioyondException("batch_create_result中未找到order_ids字段或为空") raise BioyondException("batch_create_result中未找到order_ids字段或为空")
@@ -1582,8 +1114,6 @@ class BioyondDispensingStation(BioyondWorkstation):
self.hardware_interface._logger.info( self.hardware_interface._logger.info(
f"成功获取任务 {order_code} 的实验报告" f"成功获取任务 {order_code} 的实验报告"
) )
# 简化报告,去除冗余信息
report = self._simplify_report(report)
reports.append({ reports.append({
"order_code": order_code, "order_code": order_code,
@@ -1758,7 +1288,7 @@ class BioyondDispensingStation(BioyondWorkstation):
f"开始执行批量物料转移: {len(transfer_groups)}组任务 -> {target_device_id}" f"开始执行批量物料转移: {len(transfer_groups)}组任务 -> {target_device_id}"
) )
warehouse_mapping = self.bioyond_config.get("warehouse_mapping", {}) from .config import WAREHOUSE_MAPPING
results = [] results = []
successful_count = 0 successful_count = 0
failed_count = 0 failed_count = 0

View File

@@ -6,7 +6,6 @@ Bioyond Workstation Implementation
""" """
import time import time
import traceback import traceback
import threading
from datetime import datetime from datetime import datetime
from typing import Dict, Any, List, Optional, Union from typing import Dict, Any, List, Optional, Union
import json import json
@@ -28,90 +27,6 @@ from pylabrobot.resources.resource import Resource as ResourcePLR
from unilabos.devices.workstation.workstation_http_service import WorkstationHTTPService from unilabos.devices.workstation.workstation_http_service import WorkstationHTTPService
class ConnectionMonitor:
"""Bioyond连接监控器"""
def __init__(self, workstation, check_interval=30):
self.workstation = workstation
self.check_interval = check_interval
self._running = False
self._thread = None
self._last_status = "unknown"
def start(self):
if self._running:
return
self._running = True
self._thread = threading.Thread(target=self._monitor_loop, daemon=True, name="BioyondConnectionMonitor")
self._thread.start()
logger.info("Bioyond连接监控器已启动")
def stop(self):
self._running = False
if self._thread:
self._thread.join(timeout=2)
logger.info("Bioyond连接监控器已停止")
def _monitor_loop(self):
while self._running:
try:
# 使用 lightweight API 检查连接
# query_matial_type_list 是比较快的查询
start_time = time.time()
result = self.workstation.hardware_interface.material_type_list()
status = "online" if result else "offline"
msg = "Connection established" if status == "online" else "Failed to get material type list"
if status != self._last_status:
logger.info(f"Bioyond连接状态变更: {self._last_status} -> {status}")
self._publish_event(status, msg)
self._last_status = status
# 发布心跳 (可选,或者只在状态变更时发布)
# self._publish_event(status, msg)
except Exception as e:
logger.error(f"Bioyond连接检查异常: {e}")
if self._last_status != "error":
self._publish_event("error", str(e))
self._last_status = "error"
time.sleep(self.check_interval)
def _publish_event(self, status, message):
try:
if hasattr(self.workstation, "_ros_node") and self.workstation._ros_node:
event_data = {
"status": status,
"message": message,
"timestamp": datetime.now().isoformat()
}
# 动态发布消息,需要在 ROS2DeviceNode 中有对应支持
# 这里假设通用事件发布机制,使用 String 类型的 topic
# 话题: /<namespace>/events/device_status
ns = self.workstation._ros_node.namespace
topic = f"{ns}/events/device_status"
# 使用 ROS2DeviceNode 的发布功能
# 如果没有预定义的 publisher需要动态创建
# 注意workstation base node 可能没有自动创建 arbitrary publishers 的机制
# 这里我们先尝试用 String json 发布
# 在 ROS2DeviceNode 中通常需要先 create_publisher
# 为了简单起见,我们检查是否已有 publisher没有则创建
if not hasattr(self.workstation, "_device_status_pub"):
self.workstation._device_status_pub = self.workstation._ros_node.create_publisher(
String, topic, 10
)
self.workstation._device_status_pub.publish(
convert_to_ros_msg(String, json.dumps(event_data, ensure_ascii=False))
)
except Exception as e:
logger.error(f"发布设备状态事件失败: {e}")
class BioyondResourceSynchronizer(ResourceSynchronizer): class BioyondResourceSynchronizer(ResourceSynchronizer):
"""Bioyond资源同步器 """Bioyond资源同步器
@@ -257,8 +172,9 @@ class BioyondResourceSynchronizer(ResourceSynchronizer):
else: else:
logger.info(f"[同步→Bioyond] 物料不存在于 Bioyond将创建新物料并入库") logger.info(f"[同步→Bioyond] 物料不存在于 Bioyond将创建新物料并入库")
# 第1步从配置中获取仓库配置 # 第1步获取仓库配置
warehouse_mapping = self.bioyond_config.get("warehouse_mapping", {}) from .config import WAREHOUSE_MAPPING
warehouse_mapping = WAREHOUSE_MAPPING
# 确定目标仓库名称 # 确定目标仓库名称
parent_name = None parent_name = None
@@ -320,20 +236,14 @@ class BioyondResourceSynchronizer(ResourceSynchronizer):
# 第2步转换为 Bioyond 格式 # 第2步转换为 Bioyond 格式
logger.info(f"[同步→Bioyond] 🔄 转换物料为 Bioyond 格式...") logger.info(f"[同步→Bioyond] 🔄 转换物料为 Bioyond 格式...")
# 从配置中获取物料默认参数 # 导入物料默认参数配置
material_default_params = self.workstation.bioyond_config.get("material_default_parameters", {}) from .config import MATERIAL_DEFAULT_PARAMETERS
material_type_params = self.workstation.bioyond_config.get("material_type_parameters", {})
# 合并参数配置:物料名称参数 + typeId参数转换为 type:<uuid> 格式)
merged_params = material_default_params.copy()
for type_id, params in material_type_params.items():
merged_params[f"type:{type_id}"] = params
bioyond_material = resource_plr_to_bioyond( bioyond_material = resource_plr_to_bioyond(
[resource], [resource],
type_mapping=self.workstation.bioyond_config["material_type_mappings"], type_mapping=self.workstation.bioyond_config["material_type_mappings"],
warehouse_mapping=self.workstation.bioyond_config["warehouse_mapping"], warehouse_mapping=self.workstation.bioyond_config["warehouse_mapping"],
material_params=merged_params material_params=MATERIAL_DEFAULT_PARAMETERS
)[0] )[0]
logger.info(f"[同步→Bioyond] 🔧 准备覆盖locations字段目标仓库: {parent_name}, 库位: {update_site}, UUID: {target_location_uuid[:8]}...") logger.info(f"[同步→Bioyond] 🔧 准备覆盖locations字段目标仓库: {parent_name}, 库位: {update_site}, UUID: {target_location_uuid[:8]}...")
@@ -556,20 +466,13 @@ class BioyondResourceSynchronizer(ResourceSynchronizer):
return material_bioyond_id return material_bioyond_id
# 转换为 Bioyond 格式 # 转换为 Bioyond 格式
# 从配置中获取物料默认参数 from .config import MATERIAL_DEFAULT_PARAMETERS
material_default_params = self.workstation.bioyond_config.get("material_default_parameters", {})
material_type_params = self.workstation.bioyond_config.get("material_type_parameters", {})
# 合并参数配置:物料名称参数 + typeId参数转换为 type:<uuid> 格式)
merged_params = material_default_params.copy()
for type_id, params in material_type_params.items():
merged_params[f"type:{type_id}"] = params
bioyond_material = resource_plr_to_bioyond( bioyond_material = resource_plr_to_bioyond(
[resource], [resource],
type_mapping=self.workstation.bioyond_config["material_type_mappings"], type_mapping=self.workstation.bioyond_config["material_type_mappings"],
warehouse_mapping=self.workstation.bioyond_config["warehouse_mapping"], warehouse_mapping=self.workstation.bioyond_config["warehouse_mapping"],
material_params=merged_params material_params=MATERIAL_DEFAULT_PARAMETERS
)[0] )[0]
# ⚠️ 关键:创建物料时不设置 locations让 Bioyond 系统暂不分配库位 # ⚠️ 关键:创建物料时不设置 locations让 Bioyond 系统暂不分配库位
@@ -623,7 +526,8 @@ class BioyondResourceSynchronizer(ResourceSynchronizer):
logger.info(f"[物料入库] 目标库位: {update_site}") logger.info(f"[物料入库] 目标库位: {update_site}")
# 获取仓库配置和目标库位 UUID # 获取仓库配置和目标库位 UUID
warehouse_mapping = self.workstation.bioyond_config.get("warehouse_mapping", {}) from .config import WAREHOUSE_MAPPING
warehouse_mapping = WAREHOUSE_MAPPING
parent_name = None parent_name = None
target_location_uuid = None target_location_uuid = None
@@ -678,44 +582,6 @@ class BioyondWorkstation(WorkstationBase):
集成Bioyond物料管理的工作站实现 集成Bioyond物料管理的工作站实现
""" """
def _publish_task_status(
self,
task_id: str,
task_type: str,
status: str,
result: dict = None,
progress: float = 0.0,
task_code: str = None
):
"""发布任务状态事件"""
try:
if not getattr(self, "_ros_node", None):
return
event_data = {
"task_id": task_id,
"task_code": task_code,
"task_type": task_type,
"status": status,
"progress": progress,
"timestamp": datetime.now().isoformat()
}
if result:
event_data["result"] = result
topic = f"{self._ros_node.namespace}/events/task_status"
if not hasattr(self, "_task_status_pub"):
self._task_status_pub = self._ros_node.create_publisher(
String, topic, 10
)
self._task_status_pub.publish(
convert_to_ros_msg(String, json.dumps(event_data, ensure_ascii=False))
)
except Exception as e:
logger.error(f"发布任务状态事件失败: {e}")
def __init__( def __init__(
self, self,
bioyond_config: Optional[Dict[str, Any]] = None, bioyond_config: Optional[Dict[str, Any]] = None,
@@ -737,28 +603,10 @@ class BioyondWorkstation(WorkstationBase):
raise ValueError("Deck 配置不能为空,请在配置文件中添加正确的 deck 配置") raise ValueError("Deck 配置不能为空,请在配置文件中添加正确的 deck 配置")
# 初始化 warehouses 属性 # 初始化 warehouses 属性
if not hasattr(self.deck, "warehouses") or self.deck.warehouses is None: self.deck.warehouses = {}
self.deck.warehouses = {} for resource in self.deck.children:
if isinstance(resource, WareHouse):
# 仅当 warehouses 为空时尝试重新扫描(避免覆盖子类的修复) self.deck.warehouses[resource.name] = resource
if not self.deck.warehouses:
for resource in self.deck.children:
# 兼容性增强: 只要是仓库类别或者是 WareHouse 实例均可
is_warehouse = isinstance(resource, WareHouse) or getattr(resource, "category", "") == "warehouse"
# 如果配置中有定义,也可以认定为 warehouse
if not is_warehouse and "warehouse_mapping" in bioyond_config:
if resource.name in bioyond_config["warehouse_mapping"]:
is_warehouse = True
if is_warehouse:
self.deck.warehouses[resource.name] = resource
# 确保 category 被正确设置,方便后续使用
if getattr(resource, "category", "") != "warehouse":
try:
resource.category = "warehouse"
except:
pass
# 创建通信模块 # 创建通信模块
self._create_communication_module(bioyond_config) self._create_communication_module(bioyond_config)
@@ -777,22 +625,18 @@ class BioyondWorkstation(WorkstationBase):
self._set_workflow_mappings(bioyond_config["workflow_mappings"]) self._set_workflow_mappings(bioyond_config["workflow_mappings"])
# 准备 HTTP 报送接收服务配置(延迟到 post_init 启动) # 准备 HTTP 报送接收服务配置(延迟到 post_init 启动)
# 从 bioyond_config 中的 http_service_config 获取 # 从 bioyond_config 中获取,如果没有则使用默认值
http_service_cfg = bioyond_config.get("http_service_config", {})
self._http_service_config = { self._http_service_config = {
"host": http_service_cfg.get("http_service_host", "127.0.0.1"), "host": bioyond_config.get("http_service_host", bioyond_config.get("HTTP_host", "")),
"port": http_service_cfg.get("http_service_port", 8080) "port": bioyond_config.get("http_service_port", bioyond_config.get("HTTP_port", 0))
} }
self.http_service = None # 将在 post_init 启动 self.http_service = None # 将在 post_init 启动
self.connection_monitor = None # 将在 post_init 启动
logger.info(f"Bioyond工作站初始化完成") logger.info(f"Bioyond工作站初始化完成")
def __del__(self): def __del__(self):
"""析构函数:清理资源,停止 HTTP 服务""" """析构函数:清理资源,停止 HTTP 服务"""
try: try:
if hasattr(self, 'connection_monitor') and self.connection_monitor:
self.connection_monitor.stop()
if hasattr(self, 'http_service') and self.http_service is not None: if hasattr(self, 'http_service') and self.http_service is not None:
logger.info("正在停止 HTTP 报送服务...") logger.info("正在停止 HTTP 报送服务...")
self.http_service.stop() self.http_service.stop()
@@ -802,13 +646,6 @@ class BioyondWorkstation(WorkstationBase):
def post_init(self, ros_node: ROS2WorkstationNode): def post_init(self, ros_node: ROS2WorkstationNode):
self._ros_node = ros_node self._ros_node = ros_node
# 启动连接监控
try:
self.connection_monitor = ConnectionMonitor(self)
self.connection_monitor.start()
except Exception as e:
logger.error(f"启动连接监控失败: {e}")
# 启动 HTTP 报送接收服务(现在 device_id 已可用) # 启动 HTTP 报送接收服务(现在 device_id 已可用)
# ⚠️ 检查子类是否已经自己管理 HTTP 服务 # ⚠️ 检查子类是否已经自己管理 HTTP 服务
if self.bioyond_config.get("_disable_auto_http_service"): if self.bioyond_config.get("_disable_auto_http_service"):
@@ -853,14 +690,14 @@ class BioyondWorkstation(WorkstationBase):
def _create_communication_module(self, config: Optional[Dict[str, Any]] = None) -> None: def _create_communication_module(self, config: Optional[Dict[str, Any]] = None) -> None:
"""创建Bioyond通信模块""" """创建Bioyond通信模块"""
# 直接使用传入的配置,不再使用默认值 # 使用传入的 config 参数(来自 bioyond_config
# 所有配置必须从 JSON 文件中提供 # 不再依赖全局变量 API_CONFIG 等
if config: if config:
self.bioyond_config = config self.bioyond_config = config
else: else:
# 如果没有配置,使用空字典(会导致后续错误,但这是预期的 # 如果没有传入配置,创建空配置(用于测试或兼容性
self.bioyond_config = {} self.bioyond_config = {}
print("警告: 未提供 bioyond_config请确保在 JSON 配置文件中提供完整配置")
self.hardware_interface = BioyondV1RPC(self.bioyond_config) self.hardware_interface = BioyondV1RPC(self.bioyond_config)
@@ -1174,15 +1011,7 @@ class BioyondWorkstation(WorkstationBase):
workflow_id = self._get_workflow(actual_workflow_name) workflow_id = self._get_workflow(actual_workflow_name)
if workflow_id: if workflow_id:
# 兼容 BioyondReactionStation 中 workflow_sequence 被重写为 property 的情况 self.workflow_sequence.append(workflow_id)
if isinstance(self.workflow_sequence, list):
self.workflow_sequence.append(workflow_id)
elif hasattr(self, "_cached_workflow_sequence") and isinstance(self._cached_workflow_sequence, list):
self._cached_workflow_sequence.append(workflow_id)
else:
print(f"❌ 无法添加工作流: workflow_sequence 类型错误 {type(self.workflow_sequence)}")
return False
print(f"添加工作流到执行顺序: {actual_workflow_name} -> {workflow_id}") print(f"添加工作流到执行顺序: {actual_workflow_name} -> {workflow_id}")
return True return True
return False return False
@@ -1383,22 +1212,6 @@ class BioyondWorkstation(WorkstationBase):
# TODO: 根据实际业务需求处理步骤完成逻辑 # TODO: 根据实际业务需求处理步骤完成逻辑
# 例如:更新数据库、触发后续流程等 # 例如:更新数据库、触发后续流程等
# 发布任务状态事件 (running/progress update)
self._publish_task_status(
task_id=data.get('orderCode'), # 使用 OrderCode 作为关联 ID
task_code=data.get('orderCode'),
task_type="bioyond_step",
status="running",
progress=0.5, # 步骤完成视为任务进行中
result={"step_name": data.get('stepName'), "step_id": data.get('stepId')}
)
# 更新物料信息
# 步骤完成后,物料状态可能发生变化(如位置、用量等),触发同步
logger.info(f"[步骤完成报送] 触发物料同步...")
self.resource_synchronizer.sync_from_external()
return { return {
"processed": True, "processed": True,
"step_id": data.get('stepId'), "step_id": data.get('stepId'),
@@ -1433,17 +1246,6 @@ class BioyondWorkstation(WorkstationBase):
# TODO: 根据实际业务需求处理通量完成逻辑 # TODO: 根据实际业务需求处理通量完成逻辑
# 发布任务状态事件
self._publish_task_status(
task_id=data.get('orderCode'),
task_code=data.get('orderCode'),
task_type="bioyond_sample",
status="running",
progress=0.7,
result={"sample_id": data.get('sampleId'), "status": status_desc}
)
return { return {
"processed": True, "processed": True,
"sample_id": data.get('sampleId'), "sample_id": data.get('sampleId'),
@@ -1483,32 +1285,6 @@ class BioyondWorkstation(WorkstationBase):
# TODO: 根据实际业务需求处理任务完成逻辑 # TODO: 根据实际业务需求处理任务完成逻辑
# 例如:更新物料库存、生成报表等 # 例如:更新物料库存、生成报表等
# 映射状态到事件状态
event_status = "completed"
if str(data.get('status')) in ["-11", "-12"]:
event_status = "error"
elif str(data.get('status')) == "30":
event_status = "completed"
else:
event_status = "running" # 其他状态视为运行中(或根据实际定义)
# 发布任务状态事件
self._publish_task_status(
task_id=data.get('orderCode'),
task_code=data.get('orderCode'),
task_type="bioyond_order",
status=event_status,
progress=1.0 if event_status in ["completed", "error"] else 0.9,
result={"order_name": data.get('orderName'), "status": status_desc, "materials_count": len(used_materials)}
)
# 更新物料信息
# 任务完成后,且状态为完成时,触发同步以更新最终物料状态
if event_status == "completed":
logger.info(f"[任务完成报送] 触发物料同步...")
self.resource_synchronizer.sync_from_external()
return { return {
"processed": True, "processed": True,
"order_code": data.get('orderCode'), "order_code": data.get('orderCode'),

View File

@@ -459,12 +459,12 @@ class WorkstationHTTPHandler(BaseHTTPRequestHandler):
# 验证必需字段 # 验证必需字段
if 'brand' in request_data: if 'brand' in request_data:
if request_data['brand'] == "bioyond": # 奔曜 if request_data['brand'] == "bioyond": # 奔曜
material_data = request_data["text"] error_msg = request_data["text"]
logger.info(f"收到奔曜物料变更报送: {material_data}") logger.info(f"收到奔曜错误处理报送: {error_msg}")
return HttpResponse( return HttpResponse(
success=True, success=True,
message=f"物料变更报送已收到: {material_data}", message=f"错误处理报送已收到: {error_msg}",
acknowledgment_id=f"MATERIAL_{int(time.time() * 1000)}_{material_data.get('id', 'unknown')}", acknowledgment_id=f"ERROR_{int(time.time() * 1000)}_{error_msg.get('action_id', 'unknown')}",
data=None data=None
) )
else: else:

View File

@@ -0,0 +1,589 @@
workstation.bioyond_dispensing_station:
category:
- workstation
- bioyond
class:
action_value_mappings:
auto-batch_create_90_10_vial_feeding_tasks:
feedback: {}
goal: {}
goal_default:
delay_time: null
hold_m_name: null
liquid_material_name: NMP
speed: null
temperature: null
titration: null
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties:
delay_time:
type: string
hold_m_name:
type: string
liquid_material_name:
default: NMP
type: string
speed:
type: string
temperature:
type: string
titration:
type: string
required:
- titration
type: object
result: {}
required:
- goal
title: batch_create_90_10_vial_feeding_tasks参数
type: object
type: UniLabJsonCommand
auto-batch_create_diamine_solution_tasks:
feedback: {}
goal: {}
goal_default:
delay_time: null
liquid_material_name: NMP
solutions: null
speed: null
temperature: null
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties:
delay_time:
type: string
liquid_material_name:
default: NMP
type: string
solutions:
type: string
speed:
type: string
temperature:
type: string
required:
- solutions
type: object
result: {}
required:
- goal
title: batch_create_diamine_solution_tasks参数
type: object
type: UniLabJsonCommand
auto-brief_step_parameters:
feedback: {}
goal: {}
goal_default:
data: null
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties:
data:
type: object
required:
- data
type: object
result: {}
required:
- goal
title: brief_step_parameters参数
type: object
type: UniLabJsonCommand
auto-compute_experiment_design:
feedback: {}
goal: {}
goal_default:
m_tot: '70'
ratio: null
titration_percent: '0.03'
wt_percent: '0.25'
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties:
m_tot:
default: '70'
type: string
ratio:
type: object
titration_percent:
default: '0.03'
type: string
wt_percent:
default: '0.25'
type: string
required:
- ratio
type: object
result:
properties:
feeding_order:
items: {}
title: Feeding Order
type: array
return_info:
title: Return Info
type: string
solutions:
items: {}
title: Solutions
type: array
solvents:
additionalProperties: true
title: Solvents
type: object
titration:
additionalProperties: true
title: Titration
type: object
required:
- solutions
- titration
- solvents
- feeding_order
- return_info
title: ComputeExperimentDesignReturn
type: object
required:
- goal
title: compute_experiment_design参数
type: object
type: UniLabJsonCommand
auto-process_order_finish_report:
feedback: {}
goal: {}
goal_default:
report_request: null
used_materials: null
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties:
report_request:
type: string
used_materials:
type: string
required:
- report_request
- used_materials
type: object
result: {}
required:
- goal
title: process_order_finish_report参数
type: object
type: UniLabJsonCommand
auto-project_order_report:
feedback: {}
goal: {}
goal_default:
order_id: null
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties:
order_id:
type: string
required:
- order_id
type: object
result: {}
required:
- goal
title: project_order_report参数
type: object
type: UniLabJsonCommand
auto-query_resource_by_name:
feedback: {}
goal: {}
goal_default:
material_name: null
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties:
material_name:
type: string
required:
- material_name
type: object
result: {}
required:
- goal
title: query_resource_by_name参数
type: object
type: UniLabJsonCommand
auto-transfer_materials_to_reaction_station:
feedback: {}
goal: {}
goal_default:
target_device_id: null
transfer_groups: null
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties:
target_device_id:
type: string
transfer_groups:
type: array
required:
- target_device_id
- transfer_groups
type: object
result: {}
required:
- goal
title: transfer_materials_to_reaction_station参数
type: object
type: UniLabJsonCommand
auto-wait_for_multiple_orders_and_get_reports:
feedback: {}
goal: {}
goal_default:
batch_create_result: null
check_interval: 10
timeout: 7200
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties:
batch_create_result:
type: string
check_interval:
default: 10
type: integer
timeout:
default: 7200
type: integer
required: []
type: object
result: {}
required:
- goal
title: wait_for_multiple_orders_and_get_reports参数
type: object
type: UniLabJsonCommand
auto-workflow_sample_locations:
feedback: {}
goal: {}
goal_default:
workflow_id: null
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties:
workflow_id:
type: string
required:
- workflow_id
type: object
result: {}
required:
- goal
title: workflow_sample_locations参数
type: object
type: UniLabJsonCommand
create_90_10_vial_feeding_task:
feedback: {}
goal:
delay_time: delay_time
hold_m_name: hold_m_name
order_name: order_name
percent_10_1_assign_material_name: percent_10_1_assign_material_name
percent_10_1_liquid_material_name: percent_10_1_liquid_material_name
percent_10_1_target_weigh: percent_10_1_target_weigh
percent_10_1_volume: percent_10_1_volume
percent_10_2_assign_material_name: percent_10_2_assign_material_name
percent_10_2_liquid_material_name: percent_10_2_liquid_material_name
percent_10_2_target_weigh: percent_10_2_target_weigh
percent_10_2_volume: percent_10_2_volume
percent_10_3_assign_material_name: percent_10_3_assign_material_name
percent_10_3_liquid_material_name: percent_10_3_liquid_material_name
percent_10_3_target_weigh: percent_10_3_target_weigh
percent_10_3_volume: percent_10_3_volume
percent_90_1_assign_material_name: percent_90_1_assign_material_name
percent_90_1_target_weigh: percent_90_1_target_weigh
percent_90_2_assign_material_name: percent_90_2_assign_material_name
percent_90_2_target_weigh: percent_90_2_target_weigh
percent_90_3_assign_material_name: percent_90_3_assign_material_name
percent_90_3_target_weigh: percent_90_3_target_weigh
speed: speed
temperature: temperature
goal_default:
delay_time: ''
hold_m_name: ''
order_name: ''
percent_10_1_assign_material_name: ''
percent_10_1_liquid_material_name: ''
percent_10_1_target_weigh: ''
percent_10_1_volume: ''
percent_10_2_assign_material_name: ''
percent_10_2_liquid_material_name: ''
percent_10_2_target_weigh: ''
percent_10_2_volume: ''
percent_10_3_assign_material_name: ''
percent_10_3_liquid_material_name: ''
percent_10_3_target_weigh: ''
percent_10_3_volume: ''
percent_90_1_assign_material_name: ''
percent_90_1_target_weigh: ''
percent_90_2_assign_material_name: ''
percent_90_2_target_weigh: ''
percent_90_3_assign_material_name: ''
percent_90_3_target_weigh: ''
speed: ''
temperature: ''
handles: {}
result:
return_info: return_info
schema:
description: ''
properties:
feedback:
properties: {}
required: []
title: DispenStationVialFeed_Feedback
type: object
goal:
properties:
delay_time:
type: string
hold_m_name:
type: string
order_name:
type: string
percent_10_1_assign_material_name:
type: string
percent_10_1_liquid_material_name:
type: string
percent_10_1_target_weigh:
type: string
percent_10_1_volume:
type: string
percent_10_2_assign_material_name:
type: string
percent_10_2_liquid_material_name:
type: string
percent_10_2_target_weigh:
type: string
percent_10_2_volume:
type: string
percent_10_3_assign_material_name:
type: string
percent_10_3_liquid_material_name:
type: string
percent_10_3_target_weigh:
type: string
percent_10_3_volume:
type: string
percent_90_1_assign_material_name:
type: string
percent_90_1_target_weigh:
type: string
percent_90_2_assign_material_name:
type: string
percent_90_2_target_weigh:
type: string
percent_90_3_assign_material_name:
type: string
percent_90_3_target_weigh:
type: string
speed:
type: string
temperature:
type: string
required:
- order_name
- percent_90_1_assign_material_name
- percent_90_1_target_weigh
- percent_90_2_assign_material_name
- percent_90_2_target_weigh
- percent_90_3_assign_material_name
- percent_90_3_target_weigh
- percent_10_1_assign_material_name
- percent_10_1_target_weigh
- percent_10_1_volume
- percent_10_1_liquid_material_name
- percent_10_2_assign_material_name
- percent_10_2_target_weigh
- percent_10_2_volume
- percent_10_2_liquid_material_name
- percent_10_3_assign_material_name
- percent_10_3_target_weigh
- percent_10_3_volume
- percent_10_3_liquid_material_name
- speed
- temperature
- delay_time
- hold_m_name
title: DispenStationVialFeed_Goal
type: object
result:
properties:
return_info:
type: string
required:
- return_info
title: DispenStationVialFeed_Result
type: object
required:
- goal
title: DispenStationVialFeed
type: object
type: DispenStationVialFeed
create_diamine_solution_task:
feedback: {}
goal:
delay_time: delay_time
hold_m_name: hold_m_name
liquid_material_name: liquid_material_name
material_name: material_name
order_name: order_name
speed: speed
target_weigh: target_weigh
temperature: temperature
volume: volume
goal_default:
delay_time: ''
hold_m_name: ''
liquid_material_name: ''
material_name: ''
order_name: ''
speed: ''
target_weigh: ''
temperature: ''
volume: ''
handles: {}
result:
return_info: return_info
schema:
description: ''
properties:
feedback:
properties: {}
required: []
title: DispenStationSolnPrep_Feedback
type: object
goal:
properties:
delay_time:
type: string
hold_m_name:
type: string
liquid_material_name:
type: string
material_name:
type: string
order_name:
type: string
speed:
type: string
target_weigh:
type: string
temperature:
type: string
volume:
type: string
required:
- order_name
- material_name
- target_weigh
- volume
- liquid_material_name
- speed
- temperature
- delay_time
- hold_m_name
title: DispenStationSolnPrep_Goal
type: object
result:
properties:
return_info:
type: string
required:
- return_info
title: DispenStationSolnPrep_Result
type: object
required:
- goal
title: DispenStationSolnPrep
type: object
type: DispenStationSolnPrep
module: unilabos.devices.workstation.bioyond_studio.dispensing_station:BioyondDispensingStation
status_types: {}
type: python
config_info: []
description: ''
handles: []
icon: ''
init_param_schema:
config:
properties:
config:
type: string
deck:
type: string
required:
- config
- deck
type: object
data:
properties: {}
required: []
type: object
version: 1.0.0

File diff suppressed because it is too large Load Diff

View File

@@ -30,6 +30,71 @@ bioyond_dispensing_station:
title: brief_step_parameters参数 title: brief_step_parameters参数
type: object type: object
type: UniLabJsonCommand type: UniLabJsonCommand
auto-compute_experiment_design:
feedback: {}
goal: {}
goal_default:
m_tot: '70'
ratio: null
titration_percent: '0.03'
wt_percent: '0.25'
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties:
m_tot:
default: '70'
type: string
ratio:
type: object
titration_percent:
default: '0.03'
type: string
wt_percent:
default: '0.25'
type: string
required:
- ratio
type: object
result:
properties:
feeding_order:
items: {}
title: Feeding Order
type: array
return_info:
title: Return Info
type: string
solutions:
items: {}
title: Solutions
type: array
solvents:
additionalProperties: true
title: Solvents
type: object
titration:
additionalProperties: true
title: Titration
type: object
required:
- solutions
- titration
- solvents
- feeding_order
- return_info
title: ComputeExperimentDesignReturn
type: object
required:
- goal
title: compute_experiment_design参数
type: object
type: UniLabJsonCommand
auto-process_order_finish_report: auto-process_order_finish_report:
feedback: {} feedback: {}
goal: {} goal: {}
@@ -109,6 +174,35 @@ bioyond_dispensing_station:
title: query_resource_by_name参数 title: query_resource_by_name参数
type: object type: object
type: UniLabJsonCommand type: UniLabJsonCommand
auto-transfer_materials_to_reaction_station:
feedback: {}
goal: {}
goal_default:
target_device_id: null
transfer_groups: null
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties:
target_device_id:
type: string
transfer_groups:
type: array
required:
- target_device_id
- transfer_groups
type: object
result: {}
required:
- goal
title: transfer_materials_to_reaction_station参数
type: object
type: UniLabJsonCommand
auto-workflow_sample_locations: auto-workflow_sample_locations:
feedback: {} feedback: {}
goal: {} goal: {}
@@ -300,99 +394,6 @@ bioyond_dispensing_station:
title: BatchCreateDiamineSolutionTasks title: BatchCreateDiamineSolutionTasks
type: object type: object
type: UniLabJsonCommand type: UniLabJsonCommand
compute_experiment_design:
feedback: {}
goal:
m_tot: m_tot
ratio: ratio
titration_percent: titration_percent
wt_percent: wt_percent
goal_default:
m_tot: '70'
ratio: ''
titration_percent: '0.03'
wt_percent: '0.25'
handles:
output:
- data_key: solutions
data_source: executor
data_type: array
handler_key: solutions
io_type: sink
label: Solution Data From Python
- data_key: titration
data_source: executor
data_type: object
handler_key: titration
io_type: sink
label: Titration Data From Calculation Node
- data_key: solvents
data_source: executor
data_type: object
handler_key: solvents
io_type: sink
label: Solvents Data From Calculation Node
- data_key: feeding_order
data_source: executor
data_type: array
handler_key: feeding_order
io_type: sink
label: Feeding Order Data From Calculation Node
result:
feeding_order: feeding_order
return_info: return_info
solutions: solutions
solvents: solvents
titration: titration
schema:
description: 计算实验设计输出solutions/titration/solvents/feeding_order用于后续节点。
properties:
feedback: {}
goal:
properties:
m_tot:
default: '70'
description: 总质量(g)
type: string
ratio:
description: 组分摩尔比的对象,保持输入顺序,如{"MDA":1,"BTDA":1}
type: string
titration_percent:
default: '0.03'
description: 滴定比例(10%部分)
type: string
wt_percent:
default: '0.25'
description: 目标固含质量分数
type: string
required:
- ratio
type: object
result:
properties:
feeding_order:
type: array
return_info:
type: string
solutions:
type: array
solvents:
type: object
titration:
type: object
required:
- solutions
- titration
- solvents
- feeding_order
- return_info
title: ComputeExperimentDesign_Result
type: object
required:
- goal
title: ComputeExperimentDesign
type: object
type: UniLabJsonCommand
create_90_10_vial_feeding_task: create_90_10_vial_feeding_task:
feedback: {} feedback: {}
goal: goal:
@@ -619,89 +620,6 @@ bioyond_dispensing_station:
title: DispenStationSolnPrep title: DispenStationSolnPrep
type: object type: object
type: DispenStationSolnPrep type: DispenStationSolnPrep
scheduler_start:
feedback: {}
goal: {}
goal_default: {}
handles: {}
result:
return_info: return_info
schema:
description: 启动调度器 - 启动Bioyond配液站的任务调度器开始执行队列中的任务
properties:
feedback: {}
goal:
properties: {}
required: []
type: object
result:
properties:
return_info:
description: 调度器启动结果成功返回1失败返回0
type: integer
required:
- return_info
title: scheduler_start结果
type: object
required:
- goal
title: scheduler_start参数
type: object
type: UniLabJsonCommand
transfer_materials_to_reaction_station:
feedback: {}
goal:
target_device_id: target_device_id
transfer_groups: transfer_groups
goal_default:
target_device_id: ''
transfer_groups: ''
handles: {}
placeholder_keys:
target_device_id: unilabos_devices
result: {}
schema:
description: 将配液站完成的物料(溶液、样品等)转移到指定反应站的堆栈库位。支持配置多组转移任务,每组包含物料名称、目标堆栈和目标库位。
properties:
feedback: {}
goal:
properties:
target_device_id:
description: 目标反应站设备ID从设备列表中选择所有转移组都使用同一个目标设备
type: string
transfer_groups:
description: 转移任务组列表,每组包含物料名称、目标堆栈和目标库位,可以添加多组
items:
properties:
materials:
description: 物料名称手动输入系统将通过RPC查询验证
type: string
target_sites:
description: 目标库位(手动输入,如"A01"
type: string
target_stack:
description: 目标堆栈名称(从列表选择)
enum:
- 堆栈1左
- 堆栈1右
- 站内试剂存放堆栈
type: string
required:
- materials
- target_stack
- target_sites
type: object
type: array
required:
- target_device_id
- transfer_groups
type: object
result: {}
required:
- goal
title: transfer_materials_to_reaction_station参数
type: object
type: UniLabJsonCommand
wait_for_multiple_orders_and_get_reports: wait_for_multiple_orders_and_get_reports:
feedback: {} feedback: {}
goal: goal:
@@ -770,7 +688,7 @@ bioyond_dispensing_station:
title: WaitForMultipleOrdersAndGetReports title: WaitForMultipleOrdersAndGetReports
type: object type: object
type: UniLabJsonCommand type: UniLabJsonCommand
module: unilabos.devices.workstation.bioyond_studio.dispensing_station.dispensing_station:BioyondDispensingStation module: unilabos.devices.workstation.bioyond_studio.dispensing_station:BioyondDispensingStation
status_types: {} status_types: {}
type: python type: python
config_info: [] config_info: []
@@ -781,16 +699,15 @@ bioyond_dispensing_station:
config: config:
properties: properties:
config: config:
type: object type: string
deck: deck:
type: string type: string
protocol_type: required:
type: string - config
required: [] - deck
type: object type: object
data: data:
properties: {} properties: {}
required: [] required: []
type: object type: object
model: {}
version: 1.0.0 version: 1.0.0

View File

@@ -405,7 +405,7 @@ coincellassemblyworkstation_device:
goal: goal:
properties: properties:
bottle_num: bottle_num:
type: string type: integer
required: required:
- bottle_num - bottle_num
type: object type: object

View File

@@ -638,7 +638,7 @@ liquid_handler:
placeholder_keys: {} placeholder_keys: {}
result: {} result: {}
schema: schema:
description: 吸头迭代函数。用于自动管理和切换枪头盒中的吸头,实现批量实验中的吸头自动分配和追踪。该函数监控吸头使用状态,自动切换到下一个可用吸头位置,确保实验流程的连续性。适用于高通量实验、批量处理、自动化流水线等需要大量吸头管理的应用场景。 description: 吸头迭代函数。用于自动管理和切换吸头架中的吸头,实现批量实验中的吸头自动分配和追踪。该函数监控吸头使用状态,自动切换到下一个可用吸头位置,确保实验流程的连续性。适用于高通量实验、批量处理、自动化流水线等需要大量吸头管理的应用场景。
properties: properties:
feedback: {} feedback: {}
goal: goal:
@@ -712,43 +712,6 @@ liquid_handler:
title: set_group参数 title: set_group参数
type: object type: object
type: UniLabJsonCommand type: UniLabJsonCommand
auto-set_liquid_from_plate:
feedback: {}
goal: {}
goal_default:
liquid_names: null
plate: null
volumes: null
well_names: null
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties:
liquid_names:
type: string
plate:
type: string
volumes:
type: string
well_names:
type: string
required:
- plate
- well_names
- liquid_names
- volumes
type: object
result: {}
required:
- goal
title: set_liquid_from_plate参数
type: object
type: UniLabJsonCommand
auto-set_tiprack: auto-set_tiprack:
feedback: {} feedback: {}
goal: {} goal: {}
@@ -758,7 +721,7 @@ liquid_handler:
placeholder_keys: {} placeholder_keys: {}
result: {} result: {}
schema: schema:
description: 枪头盒设置函数。用于配置和初始化液体处理系统的枪头盒信息,包括枪头盒位置、类型、容量等参数。该函数建立吸头资源管理系统,为后续的吸头选择和使用提供基础配置。适用于系统初始化、枪头盒更换、实验配置等需要吸头资源管理的操作场景。 description: 吸头架设置函数。用于配置和初始化液体处理系统的吸头架信息,包括吸头架位置、类型、容量等参数。该函数建立吸头资源管理系统,为后续的吸头选择和使用提供基础配置。适用于系统初始化、吸头架更换、实验配置等需要吸头资源管理的操作场景。
properties: properties:
feedback: {} feedback: {}
goal: goal:
@@ -4130,32 +4093,32 @@ liquid_handler:
- 0 - 0
handles: handles:
input: input:
- data_key: sources - data_key: liquid
data_source: handle data_source: handle
data_type: resource data_type: resource
handler_key: sources handler_key: sources
label: 待移动液体 label: sources
- data_key: targets - data_key: liquid
data_source: handle
data_type: resource
handler_key: targets
label: 转移目标
- data_key: tip_racks
data_source: handle
data_type: resource
handler_key: tip_rack
label: 枪头盒
output:
- data_key: sources.@flatten
data_source: executor data_source: executor
data_type: resource data_type: resource
handler_key: targets
label: targets
- data_key: liquid
data_source: executor
data_type: resource
handler_key: tip_rack
label: tip_rack
output:
- data_key: liquid
data_source: handle
data_type: resource
handler_key: sources_out handler_key: sources_out
label: 移液后源孔 label: sources
- data_key: targets.@flatten - data_key: liquid
data_source: executor data_source: executor
data_type: resource data_type: resource
handler_key: targets_out handler_key: targets_out
label: 移液后目标孔 label: targets
placeholder_keys: placeholder_keys:
sources: unilabos_resources sources: unilabos_resources
targets: unilabos_resources targets: unilabos_resources
@@ -5151,34 +5114,19 @@ liquid_handler.biomek:
- 0 - 0
handles: handles:
input: input:
- data_key: sources - data_key: liquid
data_source: handle data_source: handle
data_type: resource data_type: resource
handler_key: sources handler_key: liquid-input
io_type: target io_type: target
label: 待移动液体 label: Liquid Input
- data_key: targets
data_source: handle
data_type: resource
handler_key: targets
label: 转移目标
- data_key: tip_racks
data_source: handle
data_type: resource
handler_key: tip_rack
label: 枪头盒
output: output:
- data_key: sources.@flatten - data_key: liquid
data_source: executor data_source: executor
data_type: resource data_type: resource
handler_key: sources_out handler_key: liquid-output
io_type: source io_type: source
label: 移液后源孔 label: Liquid Output
- data_key: targets.@flatten
data_source: executor
data_type: resource
handler_key: targets_out
label: 移液后目标孔
placeholder_keys: placeholder_keys:
sources: unilabos_resources sources: unilabos_resources
targets: unilabos_resources targets: unilabos_resources
@@ -9330,19 +9278,7 @@ liquid_handler.prcxi:
z: 0.0 z: 0.0
sample_id: '' sample_id: ''
type: '' type: ''
handles: handles: {}
input:
- data_key: wells
data_source: handle
data_type: resource
handler_key: input_wells
label: 待设定液体孔
output:
- data_key: wells.@flatten
data_source: executor
data_type: resource
handler_key: output_wells
label: 已设定液体孔
placeholder_keys: placeholder_keys:
wells: unilabos_resources wells: unilabos_resources
result: {} result: {}
@@ -9458,352 +9394,6 @@ liquid_handler.prcxi:
title: LiquidHandlerSetLiquid title: LiquidHandlerSetLiquid
type: object type: object
type: LiquidHandlerSetLiquid type: LiquidHandlerSetLiquid
set_liquid_from_plate:
feedback: {}
goal: {}
goal_default:
liquid_names: null
plate: null
volumes: null
well_names: null
handles:
input:
- data_key: '@this.0@@@plate'
data_source: handle
data_type: resource
handler_key: input_plate
label: 待设定液体板
output:
- data_key: plate.@flatten
data_source: executor
data_type: resource
handler_key: output_plate
label: 已设定液体板
- data_key: wells.@flatten
data_source: executor
data_type: resource
handler_key: output_wells
label: 已设定液体孔
- data_key: volumes
data_source: executor
data_type: number_array
handler_key: output_volumes
label: 各孔设定体积
placeholder_keys:
plate: unilabos_resources
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties:
liquid_names:
items:
type: string
type: array
plate:
properties:
category:
type: string
children:
items:
type: string
type: array
config:
type: string
data:
type: string
id:
type: string
name:
type: string
parent:
type: string
pose:
properties:
orientation:
properties:
w:
type: number
x:
type: number
y:
type: number
z:
type: number
required:
- x
- y
- z
- w
title: orientation
type: object
position:
properties:
x:
type: number
y:
type: number
z:
type: number
required:
- x
- y
- z
title: position
type: object
required:
- position
- orientation
title: pose
type: object
sample_id:
type: string
type:
type: string
required:
- id
- name
- sample_id
- children
- parent
- type
- category
- pose
- config
- data
title: plate
type: object
volumes:
items:
type: number
type: array
well_names:
items:
type: string
type: array
required:
- plate
- well_names
- liquid_names
- volumes
type: object
result:
$defs:
ResourceDict:
properties:
class:
description: Resource class name
title: Class
type: string
config:
additionalProperties: true
description: Resource configuration
title: Config
type: object
data:
additionalProperties: true
description: 'Resource data, eg: container liquid data'
title: Data
type: object
description:
default: ''
description: Resource description
title: Description
type: string
extra:
additionalProperties: true
description: 'Extra data, eg: slot index'
title: Extra
type: object
icon:
default: ''
description: Resource icon
title: Icon
type: string
id:
description: Resource ID
title: Id
type: string
model:
additionalProperties: true
description: Resource model
title: Model
type: object
name:
description: Resource name
title: Name
type: string
parent:
anyOf:
- $ref: '#/$defs/ResourceDict'
- type: 'null'
default: null
description: Parent resource object
parent_uuid:
anyOf:
- type: string
- type: 'null'
default: null
description: Parent resource uuid
title: Parent Uuid
pose:
$ref: '#/$defs/ResourceDictPosition'
description: Resource position
schema:
additionalProperties: true
description: Resource schema
title: Schema
type: object
type:
anyOf:
- const: device
type: string
- type: string
description: Resource type
title: Type
uuid:
description: Resource UUID
title: Uuid
type: string
required:
- id
- uuid
- name
- type
- class
- config
- data
- extra
title: ResourceDict
type: object
ResourceDictPosition:
properties:
cross_section_type:
default: rectangle
description: Cross section type
enum:
- rectangle
- circle
- rounded_rectangle
title: Cross Section Type
type: string
layout:
default: x-y
description: Resource layout
enum:
- 2d
- x-y
- z-y
- x-z
title: Layout
type: string
position:
$ref: '#/$defs/ResourceDictPositionObject'
description: Resource position
position3d:
$ref: '#/$defs/ResourceDictPositionObject'
description: Resource position in 3D space
rotation:
$ref: '#/$defs/ResourceDictPositionObject'
description: Resource rotation
scale:
$ref: '#/$defs/ResourceDictPositionScale'
description: Resource scale
size:
$ref: '#/$defs/ResourceDictPositionSize'
description: Resource size
title: ResourceDictPosition
type: object
ResourceDictPositionObject:
properties:
x:
default: 0.0
description: X coordinate
title: X
type: number
y:
default: 0.0
description: Y coordinate
title: Y
type: number
z:
default: 0.0
description: Z coordinate
title: Z
type: number
title: ResourceDictPositionObject
type: object
ResourceDictPositionScale:
properties:
x:
default: 0.0
description: x scale
title: X
type: number
y:
default: 0.0
description: y scale
title: Y
type: number
z:
default: 0.0
description: z scale
title: Z
type: number
title: ResourceDictPositionScale
type: object
ResourceDictPositionSize:
properties:
depth:
default: 0.0
description: Depth
title: Depth
type: number
height:
default: 0.0
description: Height
title: Height
type: number
width:
default: 0.0
description: Width
title: Width
type: number
title: ResourceDictPositionSize
type: object
properties:
plate:
items:
items:
$ref: '#/$defs/ResourceDict'
type: array
title: Plate
type: array
volumes:
items:
type: number
title: Volumes
type: array
wells:
items:
items:
$ref: '#/$defs/ResourceDict'
type: array
title: Wells
type: array
required:
- plate
- wells
- volumes
title: SetLiquidFromPlateReturn
type: object
required:
- goal
title: set_liquid_from_plate参数
type: object
type: UniLabJsonCommand
set_tiprack: set_tiprack:
feedback: {} feedback: {}
goal: goal:
@@ -10149,32 +9739,32 @@ liquid_handler.prcxi:
- 0 - 0
handles: handles:
input: input:
- data_key: sources - data_key: liquid
data_source: handle data_source: handle
data_type: resource data_type: resource
handler_key: sources_identifier handler_key: sources
label: 待移动液体 label: sources
- data_key: targets - data_key: liquid
data_source: handle
data_type: resource
handler_key: targets_identifier
label: 转移目标
- data_key: tip_rack
data_source: handle
data_type: resource
handler_key: tip_rack_identifier
label: 枪头盒
output:
- data_key: sources.@flatten
data_source: executor data_source: executor
data_type: resource data_type: resource
handler_key: targets
label: targets
- data_key: liquid
data_source: executor
data_type: resource
handler_key: tip_rack
label: tip_rack
output:
- data_key: liquid
data_source: handle
data_type: resource
handler_key: sources_out handler_key: sources_out
label: 移液后源孔 label: sources
- data_key: targets.@flatten - data_key: liquid
data_source: executor data_source: executor
data_type: resource data_type: resource
handler_key: targets_out handler_key: targets_out
label: 移液后目标孔 label: targets
placeholder_keys: placeholder_keys:
sources: unilabos_resources sources: unilabos_resources
targets: unilabos_resources targets: unilabos_resources

View File

@@ -49,7 +49,32 @@ opcua_example:
title: load_config参数 title: load_config参数
type: object type: object
type: UniLabJsonCommand type: UniLabJsonCommand
auto-refresh_node_values: auto-post_init:
feedback: {}
goal: {}
goal_default:
ros_node: null
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties:
ros_node:
type: string
required:
- ros_node
type: object
result: {}
required:
- goal
title: post_init参数
type: object
type: UniLabJsonCommand
auto-print_cache_stats:
feedback: {} feedback: {}
goal: {} goal: {}
goal_default: {} goal_default: {}
@@ -67,7 +92,32 @@ opcua_example:
result: {} result: {}
required: required:
- goal - goal
title: refresh_node_values参数 title: print_cache_stats参数
type: object
type: UniLabJsonCommand
auto-read_node:
feedback: {}
goal: {}
goal_default:
node_name: null
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties:
node_name:
type: string
required:
- node_name
type: object
result: {}
required:
- goal
title: read_node参数
type: object type: object
type: UniLabJsonCommand type: UniLabJsonCommand
auto-set_node_value: auto-set_node_value:
@@ -99,50 +149,9 @@ opcua_example:
title: set_node_value参数 title: set_node_value参数
type: object type: object
type: UniLabJsonCommand type: UniLabJsonCommand
auto-start_node_refresh:
feedback: {}
goal: {}
goal_default: {}
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties: {}
required: []
type: object
result: {}
required:
- goal
title: start_node_refresh参数
type: object
type: UniLabJsonCommand
auto-stop_node_refresh:
feedback: {}
goal: {}
goal_default: {}
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties: {}
required: []
type: object
result: {}
required:
- goal
title: stop_node_refresh参数
type: object
type: UniLabJsonCommand
module: unilabos.device_comms.opcua_client.client:OpcUaClient module: unilabos.device_comms.opcua_client.client:OpcUaClient
status_types: status_types:
cache_stats: dict
node_value: String node_value: String
type: python type: python
config_info: [] config_info: []
@@ -152,15 +161,23 @@ opcua_example:
init_param_schema: init_param_schema:
config: config:
properties: properties:
cache_timeout:
default: 5.0
type: number
config_path: config_path:
type: string type: string
deck:
type: string
password: password:
type: string type: string
refresh_interval: subscription_interval:
default: 1.0 default: 500
type: number type: integer
url: url:
type: string type: string
use_subscription:
default: true
type: boolean
username: username:
type: string type: string
required: required:
@@ -168,9 +185,12 @@ opcua_example:
type: object type: object
data: data:
properties: properties:
cache_stats:
type: object
node_value: node_value:
type: string type: string
required: required:
- node_value - node_value
- cache_stats
type: object type: object
version: 1.0.0 version: 1.0.0

View File

@@ -4,81 +4,6 @@ reaction_station.bioyond:
- reaction_station_bioyond - reaction_station_bioyond
class: class:
action_value_mappings: action_value_mappings:
add_time_constraint:
feedback: {}
goal:
duration: duration
end_point: end_point
end_step_key: end_step_key
start_point: start_point
start_step_key: start_step_key
goal_default:
duration: 0
end_point: 0
end_step_key: ''
start_point: 0
start_step_key: ''
handles: {}
result: {}
schema:
description: 添加时间约束 - 在两个工作流之间添加时间约束
properties:
feedback: {}
goal:
properties:
duration:
description: 时间(秒)
type: integer
end_point:
default: Start
description: 终点计时点 (Start=开始前, End=结束后)
enum:
- Start
- End
type: string
end_step_key:
description: 终点步骤Key (可选, 默认为空则自动选择)
type: string
start_point:
default: Start
description: 起点计时点 (Start=开始前, End=结束后)
enum:
- Start
- End
type: string
start_step_key:
description: 起点步骤Key (例如 "feeding", "liquid", 可选, 默认为空则自动选择)
type: string
required:
- duration
type: object
result: {}
required:
- goal
title: add_time_constraint参数
type: object
type: UniLabJsonCommand
auto-clear_workflows:
feedback: {}
goal: {}
goal_default: {}
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties: {}
required: []
type: object
result: {}
required:
- goal
title: clear_workflows参数
type: object
type: UniLabJsonCommand
auto-create_order: auto-create_order:
feedback: {} feedback: {}
goal: {} goal: {}
@@ -206,35 +131,6 @@ reaction_station.bioyond:
title: process_web_workflows参数 title: process_web_workflows参数
type: object type: object
type: UniLabJsonCommand type: UniLabJsonCommand
auto-set_reactor_temperature:
feedback: {}
goal: {}
goal_default:
reactor_id: null
temperature: null
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties:
reactor_id:
type: integer
temperature:
type: number
required:
- reactor_id
- temperature
type: object
result: {}
required:
- goal
title: set_reactor_temperature参数
type: object
type: UniLabJsonCommand
auto-skip_titration_steps: auto-skip_titration_steps:
feedback: {} feedback: {}
goal: {} goal: {}
@@ -260,27 +156,6 @@ reaction_station.bioyond:
title: skip_titration_steps参数 title: skip_titration_steps参数
type: object type: object
type: UniLabJsonCommand type: UniLabJsonCommand
auto-sync_workflow_sequence_from_bioyond:
feedback: {}
goal: {}
goal_default: {}
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties: {}
required: []
type: object
result: {}
required:
- goal
title: sync_workflow_sequence_from_bioyond参数
type: object
type: UniLabJsonCommand
auto-wait_for_multiple_orders_and_get_reports: auto-wait_for_multiple_orders_and_get_reports:
feedback: {} feedback: {}
goal: {} goal: {}
@@ -313,33 +188,6 @@ reaction_station.bioyond:
title: wait_for_multiple_orders_and_get_reports参数 title: wait_for_multiple_orders_and_get_reports参数
type: object type: object
type: UniLabJsonCommand type: UniLabJsonCommand
auto-workflow_sequence:
feedback: {}
goal: {}
goal_default:
value: null
handles: {}
placeholder_keys: {}
result: {}
schema:
description: ''
properties:
feedback: {}
goal:
properties:
value:
items:
type: string
type: array
required:
- value
type: object
result: {}
required:
- goal
title: workflow_sequence参数
type: object
type: UniLabJsonCommand
auto-workflow_step_query: auto-workflow_step_query:
feedback: {} feedback: {}
goal: {} goal: {}
@@ -365,36 +213,6 @@ reaction_station.bioyond:
title: workflow_step_query参数 title: workflow_step_query参数
type: object type: object
type: UniLabJsonCommand type: UniLabJsonCommand
clean_all_server_workflows:
feedback: {}
goal: {}
goal_default: {}
handles: {}
result:
code: code
message: message
schema:
description: 清空服务端所有非核心工作流 (保留核心流程)
properties:
feedback: {}
goal:
properties: {}
required: []
type: object
result:
properties:
code:
description: 操作结果代码(1表示成功)
type: integer
message:
description: 结果描述
type: string
type: object
required:
- goal
title: clean_all_server_workflows参数
type: object
type: UniLabJsonCommand
drip_back: drip_back:
feedback: {} feedback: {}
goal: goal:
@@ -429,19 +247,13 @@ reaction_station.bioyond:
description: 观察时间(分钟) description: 观察时间(分钟)
type: string type: string
titration_type: titration_type:
description: 是否滴定(NO=否, YES=是) description: 是否滴定(1=否, 2=是)
enum:
- 'NO'
- 'YES'
type: string type: string
torque_variation: torque_variation:
description: 是否观察 (NO=否, YES=是) description: 是否观察 (1=否, 2=是)
enum:
- 'NO'
- 'YES'
type: string type: string
volume: volume:
description: 分液公式(mL) description: 分液公式(μL)
type: string type: string
required: required:
- volume - volume
@@ -541,19 +353,13 @@ reaction_station.bioyond:
description: 观察时间(分钟) description: 观察时间(分钟)
type: string type: string
titration_type: titration_type:
description: 是否滴定(NO=否, YES=是) description: 是否滴定(1=否, 2=是)
enum:
- 'NO'
- 'YES'
type: string type: string
torque_variation: torque_variation:
description: 是否观察 (NO=否, YES=是) description: 是否观察 (1=否, 2=是)
enum:
- 'NO'
- 'YES'
type: string type: string
volume: volume:
description: 分液公式(mL) description: 分液公式(μL)
type: string type: string
required: required:
- volume - volume
@@ -597,7 +403,7 @@ reaction_station.bioyond:
label: Solvents Data From Calculation Node label: Solvents Data From Calculation Node
result: {} result: {}
schema: schema:
description: 液体投料-溶剂。可以直接提供volume(mL),或通过solvents对象自动从additional_solvent(mL)计算volume。 description: 液体投料-溶剂。可以直接提供volume(μL),或通过solvents对象自动从additional_solvent(mL)计算volume。
properties: properties:
feedback: {} feedback: {}
goal: goal:
@@ -617,21 +423,15 @@ reaction_station.bioyond:
description: 观察时间(分钟),默认360 description: 观察时间(分钟),默认360
type: string type: string
titration_type: titration_type:
default: 'NO' default: '1'
description: 是否滴定(NO=否, YES=是),默认NO description: 是否滴定(1=否, 2=是),默认1
enum:
- 'NO'
- 'YES'
type: string type: string
torque_variation: torque_variation:
default: 'YES' default: '2'
description: 是否观察 (NO=否, YES=是),默认YES description: 是否观察 (1=否, 2=是),默认2
enum:
- 'NO'
- 'YES'
type: string type: string
volume: volume:
description: 分液量(mL)。可直接提供,或通过solvents参数自动计算 description: 分液量(μL)。可直接提供,或通过solvents参数自动计算
type: string type: string
required: required:
- assign_material_name - assign_material_name
@@ -704,21 +504,15 @@ reaction_station.bioyond:
description: 观察时间(分钟),默认90 description: 观察时间(分钟),默认90
type: string type: string
titration_type: titration_type:
default: 'YES' default: '2'
description: 是否滴定(NO=否, YES=是),默认YES description: 是否滴定(1=否, 2=是),默认2
enum:
- 'NO'
- 'YES'
type: string type: string
torque_variation: torque_variation:
default: 'YES' default: '2'
description: 是否观察 (NO=否, YES=是),默认YES description: 是否观察 (1=否, 2=是),默认2
enum:
- 'NO'
- 'YES'
type: string type: string
volume_formula: volume_formula:
description: 分液公式(mL)。可直接提供固定公式,或留空由系统根据x_value、feeding_order_data、extracted_actuals自动生成 description: 分液公式(μL)。可直接提供固定公式,或留空由系统根据x_value、feeding_order_data、extracted_actuals自动生成
type: string type: string
x_value: x_value:
description: 公式中的x值,手工输入,格式为"{{1-2-3}}"(包含双花括号)。用于自动公式计算 description: 公式中的x值,手工输入,格式为"{{1-2-3}}"(包含双花括号)。用于自动公式计算
@@ -766,19 +560,13 @@ reaction_station.bioyond:
description: 观察时间(分钟) description: 观察时间(分钟)
type: string type: string
titration_type: titration_type:
description: 是否滴定(NO=否, YES=是) description: 是否滴定(1=否, 2=是)
enum:
- 'NO'
- 'YES'
type: string type: string
torque_variation: torque_variation:
description: 是否观察 (NO=否, YES=是) description: 是否观察 (1=否, 2=是)
enum:
- 'NO'
- 'YES'
type: string type: string
volume_formula: volume_formula:
description: 分液公式(mL) description: 分液公式(μL)
type: string type: string
required: required:
- volume_formula - volume_formula
@@ -892,35 +680,6 @@ reaction_station.bioyond:
title: reactor_taken_out参数 title: reactor_taken_out参数
type: object type: object
type: UniLabJsonCommand type: UniLabJsonCommand
scheduler_start:
feedback: {}
goal: {}
goal_default: {}
handles: {}
result:
return_info: return_info
schema:
description: 启动调度器 - 启动Bioyond工作站的任务调度器开始执行队列中的任务
properties:
feedback: {}
goal:
properties: {}
required: []
type: object
result:
properties:
return_info:
description: 调度器启动结果成功返回1失败返回0
type: integer
required:
- return_info
title: scheduler_start结果
type: object
required:
- goal
title: scheduler_start参数
type: object
type: UniLabJsonCommand
solid_feeding_vials: solid_feeding_vials:
feedback: {} feedback: {}
goal: goal:
@@ -947,11 +706,7 @@ reaction_station.bioyond:
description: 物料名称(用于获取试剂瓶位ID) description: 物料名称(用于获取试剂瓶位ID)
type: string type: string
material_id: material_id:
description: 粉末类型IDSalt=盐21分钟Flour=面粉27分钟BTDA=BTDA38分钟 description: 粉末类型ID1=盐21分钟2=面粉27分钟3=BTDA38分钟
enum:
- Salt
- Flour
- BTDA
type: string type: string
temperature: temperature:
description: 温度设定(°C) description: 温度设定(°C)
@@ -960,10 +715,7 @@ reaction_station.bioyond:
description: 观察时间(分钟) description: 观察时间(分钟)
type: string type: string
torque_variation: torque_variation:
description: 是否观察 (NO=否, YES=是) description: 是否观察 (1=否, 2=是)
enum:
- 'NO'
- 'YES'
type: string type: string
required: required:
- assign_material_name - assign_material_name
@@ -978,10 +730,10 @@ reaction_station.bioyond:
title: solid_feeding_vials参数 title: solid_feeding_vials参数
type: object type: object
type: UniLabJsonCommand type: UniLabJsonCommand
module: unilabos.devices.workstation.bioyond_studio.reaction_station.reaction_station:BioyondReactionStation module: unilabos.devices.workstation.bioyond_studio.reaction_station:BioyondReactionStation
protocol_type: [] protocol_type: []
status_types: status_types:
workflow_sequence: str workflow_sequence: String
type: python type: python
config_info: [] config_info: []
description: Bioyond反应站 description: Bioyond反应站
@@ -1001,7 +753,9 @@ reaction_station.bioyond:
data: data:
properties: properties:
workflow_sequence: workflow_sequence:
type: string items:
type: string
type: array
required: required:
- workflow_sequence - workflow_sequence
type: object type: object
@@ -1037,7 +791,7 @@ reaction_station.reactor:
title: update_metrics参数 title: update_metrics参数
type: object type: object
type: UniLabJsonCommand type: UniLabJsonCommand
module: unilabos.devices.workstation.bioyond_studio.reaction_station.reaction_station:BioyondReactor module: unilabos.devices.workstation.bioyond_studio.reaction_station:BioyondReactor
status_types: {} status_types: {}
type: python type: python
config_info: [] config_info: []

View File

@@ -5792,481 +5792,3 @@ virtual_vacuum_pump:
- status - status
type: object type: object
version: 1.0.0 version: 1.0.0
virtual_workbench:
category:
- virtual_device
class:
action_value_mappings:
auto-move_to_heating_station:
feedback: {}
goal: {}
goal_default:
material_number: null
handles:
input:
- data_key: material_number
data_source: handle
data_type: workbench_material
handler_key: material_input
label: 物料编号
output:
- data_key: station_id
data_source: executor
data_type: workbench_station
handler_key: heating_station_output
label: 加热台ID
- data_key: material_number
data_source: executor
data_type: workbench_material
handler_key: material_number_output
label: 物料编号
placeholder_keys: {}
result: {}
schema:
description: 将物料从An位置移动到空闲加热台返回分配的加热台ID
properties:
feedback: {}
goal:
properties:
material_number:
description: 物料编号1-5物料ID自动生成为A{n}
type: integer
required:
- material_number
type: object
result:
$defs:
LabSample:
properties:
extra:
additionalProperties: true
title: Extra
type: object
oss_path:
title: Oss Path
type: string
sample_uuid:
title: Sample Uuid
type: string
required:
- sample_uuid
- oss_path
- extra
title: LabSample
type: object
description: move_to_heating_station 返回类型
properties:
material_id:
title: Material Id
type: string
material_number:
title: Material Number
type: integer
message:
title: Message
type: string
station_id:
description: 分配的加热台ID
title: Station Id
type: integer
success:
title: Success
type: boolean
unilabos_samples:
items:
$ref: '#/$defs/LabSample'
title: Unilabos Samples
type: array
required:
- success
- station_id
- material_id
- material_number
- message
- unilabos_samples
title: MoveToHeatingStationResult
type: object
required:
- goal
title: move_to_heating_station参数
type: object
type: UniLabJsonCommand
auto-move_to_output:
feedback: {}
goal: {}
goal_default:
material_number: null
station_id: null
handles:
input:
- data_key: station_id
data_source: handle
data_type: workbench_station
handler_key: output_station_input
label: 加热台ID
- data_key: material_number
data_source: handle
data_type: workbench_material
handler_key: output_material_input
label: 物料编号
placeholder_keys: {}
result: {}
schema:
description: 将物料从加热台移动到输出位置Cn
properties:
feedback: {}
goal:
properties:
material_number:
description: 物料编号用于确定输出位置Cn
type: integer
station_id:
description: 加热台ID1-3从上一节点传入
type: integer
required:
- station_id
- material_number
type: object
result:
$defs:
LabSample:
properties:
extra:
additionalProperties: true
title: Extra
type: object
oss_path:
title: Oss Path
type: string
sample_uuid:
title: Sample Uuid
type: string
required:
- sample_uuid
- oss_path
- extra
title: LabSample
type: object
description: move_to_output 返回类型
properties:
material_id:
title: Material Id
type: string
station_id:
title: Station Id
type: integer
success:
title: Success
type: boolean
unilabos_samples:
items:
$ref: '#/$defs/LabSample'
title: Unilabos Samples
type: array
required:
- success
- station_id
- material_id
- unilabos_samples
title: MoveToOutputResult
type: object
required:
- goal
title: move_to_output参数
type: object
type: UniLabJsonCommand
auto-prepare_materials:
feedback: {}
goal: {}
goal_default:
count: 5
handles:
output:
- data_key: material_1
data_source: executor
data_type: workbench_material
handler_key: channel_1
label: 实验1
- data_key: material_2
data_source: executor
data_type: workbench_material
handler_key: channel_2
label: 实验2
- data_key: material_3
data_source: executor
data_type: workbench_material
handler_key: channel_3
label: 实验3
- data_key: material_4
data_source: executor
data_type: workbench_material
handler_key: channel_4
label: 实验4
- data_key: material_5
data_source: executor
data_type: workbench_material
handler_key: channel_5
label: 实验5
placeholder_keys: {}
result: {}
schema:
description: 批量准备物料 - 虚拟起始节点生成A1-A5物料输出5个handle供后续节点使用
properties:
feedback: {}
goal:
properties:
count:
default: 5
description: 待生成的物料数量默认5 (生成 A1-A5)
type: integer
required: []
type: object
result:
$defs:
LabSample:
properties:
extra:
additionalProperties: true
title: Extra
type: object
oss_path:
title: Oss Path
type: string
sample_uuid:
title: Sample Uuid
type: string
required:
- sample_uuid
- oss_path
- extra
title: LabSample
type: object
description: prepare_materials 返回类型 - 批量准备物料
properties:
count:
title: Count
type: integer
material_1:
title: Material 1
type: integer
material_2:
title: Material 2
type: integer
material_3:
title: Material 3
type: integer
material_4:
title: Material 4
type: integer
material_5:
title: Material 5
type: integer
message:
title: Message
type: string
success:
title: Success
type: boolean
unilabos_samples:
items:
$ref: '#/$defs/LabSample'
title: Unilabos Samples
type: array
required:
- success
- count
- material_1
- material_2
- material_3
- material_4
- material_5
- message
- unilabos_samples
title: PrepareMaterialsResult
type: object
required:
- goal
title: prepare_materials参数
type: object
type: UniLabJsonCommand
auto-start_heating:
feedback: {}
goal: {}
goal_default:
material_number: null
station_id: null
handles:
input:
- data_key: station_id
data_source: handle
data_type: workbench_station
handler_key: station_id_input
label: 加热台ID
- data_key: material_number
data_source: handle
data_type: workbench_material
handler_key: material_number_input
label: 物料编号
output:
- data_key: station_id
data_source: executor
data_type: workbench_station
handler_key: heating_done_station
label: 加热完成-加热台ID
- data_key: material_number
data_source: executor
data_type: workbench_material
handler_key: heating_done_material
label: 加热完成-物料编号
placeholder_keys: {}
result: {}
schema:
description: 启动指定加热台的加热程序
properties:
feedback: {}
goal:
properties:
material_number:
description: 物料编号,从上一节点传入
type: integer
station_id:
description: 加热台ID1-3从上一节点传入
type: integer
required:
- station_id
- material_number
type: object
result:
$defs:
LabSample:
properties:
extra:
additionalProperties: true
title: Extra
type: object
oss_path:
title: Oss Path
type: string
sample_uuid:
title: Sample Uuid
type: string
required:
- sample_uuid
- oss_path
- extra
title: LabSample
type: object
description: start_heating 返回类型
properties:
material_id:
title: Material Id
type: string
material_number:
title: Material Number
type: integer
message:
title: Message
type: string
station_id:
title: Station Id
type: integer
success:
title: Success
type: boolean
unilabos_samples:
items:
$ref: '#/$defs/LabSample'
title: Unilabos Samples
type: array
required:
- success
- station_id
- material_id
- material_number
- message
- unilabos_samples
title: StartHeatingResult
type: object
required:
- goal
title: start_heating参数
type: object
type: UniLabJsonCommand
module: unilabos.devices.virtual.workbench:VirtualWorkbench
status_types:
active_tasks_count: int
arm_current_task: str
arm_state: str
heating_station_1_material: str
heating_station_1_progress: float
heating_station_1_state: str
heating_station_2_material: str
heating_station_2_progress: float
heating_station_2_state: str
heating_station_3_material: str
heating_station_3_progress: float
heating_station_3_state: str
message: str
status: str
type: python
config_info: []
description: Virtual Workbench with 1 robotic arm and 3 heating stations for concurrent
material processing
handles: []
icon: ''
init_param_schema:
config:
properties:
config:
type: string
device_id:
type: string
required: []
type: object
data:
properties:
active_tasks_count:
type: integer
arm_current_task:
type: string
arm_state:
type: string
heating_station_1_material:
type: string
heating_station_1_progress:
type: number
heating_station_1_state:
type: string
heating_station_2_material:
type: string
heating_station_2_progress:
type: number
heating_station_2_state:
type: string
heating_station_3_material:
type: string
heating_station_3_progress:
type: number
heating_station_3_state:
type: string
message:
type: string
status:
type: string
required:
- status
- arm_state
- arm_current_task
- heating_station_1_state
- heating_station_1_material
- heating_station_1_progress
- heating_station_2_state
- heating_station_2_material
- heating_station_2_progress
- heating_station_3_state
- heating_station_3_material
- heating_station_3_progress
- active_tasks_count
- message
type: object
version: 1.0.0

View File

@@ -4,8 +4,6 @@ import os
import sys import sys
import inspect import inspect
import importlib import importlib
import threading
from concurrent.futures import ThreadPoolExecutor, as_completed
from pathlib import Path from pathlib import Path
from typing import Any, Dict, List, Union, Tuple from typing import Any, Dict, List, Union, Tuple
@@ -62,7 +60,6 @@ class Registry:
self.device_module_to_registry = {} self.device_module_to_registry = {}
self.resource_type_registry = {} self.resource_type_registry = {}
self._setup_called = False # 跟踪setup是否已调用 self._setup_called = False # 跟踪setup是否已调用
self._registry_lock = threading.Lock() # 多线程加载时的锁
# 其他状态变量 # 其他状态变量
# self.is_host_mode = False # 移至BasicConfig中 # self.is_host_mode = False # 移至BasicConfig中
@@ -74,20 +71,6 @@ class Registry:
from unilabos.app.web.utils.action_utils import get_yaml_from_goal_type from unilabos.app.web.utils.action_utils import get_yaml_from_goal_type
# 获取 HostNode 类的增强信息,用于自动生成 action schema
host_node_enhanced_info = get_enhanced_class_info(
"unilabos.ros.nodes.presets.host_node:HostNode", use_dynamic=True
)
# 为 test_latency 生成 schema保留原有 description
test_latency_method_info = host_node_enhanced_info.get("action_methods", {}).get("test_latency", {})
test_latency_schema = self._generate_unilab_json_command_schema(
test_latency_method_info.get("args", []),
"test_latency",
test_latency_method_info.get("return_annotation"),
)
test_latency_schema["description"] = "用于测试延迟的动作,返回延迟时间和时间差。"
self.device_type_registry.update( self.device_type_registry.update(
{ {
"host_node": { "host_node": {
@@ -141,47 +124,28 @@ class Registry:
"output": [ "output": [
{ {
"handler_key": "labware", "handler_key": "labware",
"data_type": "resource",
"label": "Labware", "label": "Labware",
"data_source": "executor",
"data_key": "created_resource_tree.@flatten",
},
{
"handler_key": "liquid_slots",
"data_type": "resource", "data_type": "resource",
"label": "LiquidSlots", "data_source": "handle",
"data_source": "executor", "data_key": "liquid",
"data_key": "liquid_input_resource_tree.@flatten", }
},
{
"handler_key": "materials",
"data_type": "resource",
"label": "AllMaterials",
"data_source": "executor",
"data_key": "[created_resource_tree,liquid_input_resource_tree].@flatten.@flatten",
},
] ]
}, },
"placeholder_keys": { "placeholder_keys": {
"res_id": "unilabos_resources", # 将当前实验室的全部物料id作为下拉框可选择 "res_id": "unilabos_resources", # 将当前实验室的全部物料id作为下拉框可选择
"device_id": "unilabos_devices", # 将当前实验室的全部设备id作为下拉框可选择 "device_id": "unilabos_devices", # 将当前实验室的全部设备id作为下拉框可选择
"parent": "unilabos_nodes", # 将当前实验室的设备/物料作为下拉框可选择 "parent": "unilabos_nodes", # 将当前实验室的设备/物料作为下拉框可选择
"class_name": "unilabos_class",
}, },
}, },
"test_latency": { "test_latency": {
"type": ( "type": self.EmptyIn,
"UniLabJsonCommandAsync"
if test_latency_method_info.get("is_async", False)
else "UniLabJsonCommand"
),
"goal": {}, "goal": {},
"feedback": {}, "feedback": {},
"result": {}, "result": {},
"schema": test_latency_schema, "schema": ros_action_to_json_schema(
"goal_default": { self.EmptyIn, "用于测试延迟的动作,返回延迟时间和时间差。"
arg["name"]: arg["default"] for arg in test_latency_method_info.get("args", []) ),
}, "goal_default": {},
"handles": {}, "handles": {},
}, },
"auto-test_resource": { "auto-test_resource": {
@@ -222,17 +186,7 @@ class Registry:
"resources": "unilabos_resources", "resources": "unilabos_resources",
}, },
"goal_default": {}, "goal_default": {},
"handles": { "handles": {},
"input": [
{
"handler_key": "input_resources",
"data_type": "resource",
"label": "InputResources",
"data_source": "handle",
"data_key": "resources", # 不为空
},
]
},
}, },
}, },
}, },
@@ -264,115 +218,67 @@ class Registry:
# 标记setup已被调用 # 标记setup已被调用
self._setup_called = True self._setup_called = True
def _load_single_resource_file(
self, file: Path, complete_registry: bool, upload_registry: bool
) -> Tuple[Dict[str, Any], Dict[str, Any], bool]:
"""
加载单个资源文件 (线程安全)
Returns:
(data, complete_data, is_valid): 资源数据, 完整数据, 是否有效
"""
try:
with open(file, encoding="utf-8", mode="r") as f:
data = yaml.safe_load(io.StringIO(f.read()))
except Exception as e:
logger.warning(f"[UniLab Registry] 读取资源文件失败: {file}, 错误: {e}")
return {}, {}, False
if not data:
return {}, {}, False
complete_data = {}
for resource_id, resource_info in data.items():
if "version" not in resource_info:
resource_info["version"] = "1.0.0"
if "category" not in resource_info:
resource_info["category"] = [file.stem]
elif file.stem not in resource_info["category"]:
resource_info["category"].append(file.stem)
elif not isinstance(resource_info.get("category"), list):
resource_info["category"] = [resource_info["category"]]
if "config_info" not in resource_info:
resource_info["config_info"] = []
if "icon" not in resource_info:
resource_info["icon"] = ""
if "handles" not in resource_info:
resource_info["handles"] = []
if "init_param_schema" not in resource_info:
resource_info["init_param_schema"] = {}
if "config_info" in resource_info:
del resource_info["config_info"]
if "file_path" in resource_info:
del resource_info["file_path"]
complete_data[resource_id] = copy.deepcopy(dict(sorted(resource_info.items())))
if upload_registry:
class_info = resource_info.get("class", {})
if len(class_info) and "module" in class_info:
if class_info.get("type") == "pylabrobot":
res_class = get_class(class_info["module"])
if callable(res_class) and not isinstance(res_class, type):
res_instance = res_class(res_class.__name__)
res_ulr = tree_to_list([resource_plr_to_ulab(res_instance)])
resource_info["config_info"] = res_ulr
resource_info["registry_type"] = "resource"
resource_info["file_path"] = str(file.absolute()).replace("\\", "/")
complete_data = dict(sorted(complete_data.items()))
complete_data = copy.deepcopy(complete_data)
if complete_registry:
try:
with open(file, "w", encoding="utf-8") as f:
yaml.dump(complete_data, f, allow_unicode=True, default_flow_style=False, Dumper=NoAliasDumper)
except Exception as e:
logger.warning(f"[UniLab Registry] 写入资源文件失败: {file}, 错误: {e}")
return data, complete_data, True
def load_resource_types(self, path: os.PathLike, complete_registry: bool, upload_registry: bool): def load_resource_types(self, path: os.PathLike, complete_registry: bool, upload_registry: bool):
abs_path = Path(path).absolute() abs_path = Path(path).absolute()
resource_path = abs_path / "resources" resource_path = abs_path / "resources"
files = list(resource_path.glob("*/*.yaml")) files = list(resource_path.glob("*/*.yaml"))
logger.debug(f"[UniLab Registry] resources: {resource_path.exists()}, total: {len(files)}") logger.trace(f"[UniLab Registry] load resources? {resource_path.exists()}, total: {len(files)}")
if not files:
return
# 使用线程池并行加载
max_workers = min(8, len(files))
results = []
with ThreadPoolExecutor(max_workers=max_workers) as executor:
future_to_file = {
executor.submit(self._load_single_resource_file, file, complete_registry, upload_registry): file
for file in files
}
for future in as_completed(future_to_file):
file = future_to_file[future]
try:
data, complete_data, is_valid = future.result()
if is_valid:
results.append((file, data))
except Exception as e:
logger.warning(f"[UniLab Registry] 处理资源文件异常: {file}, 错误: {e}")
# 线程安全地更新注册表
current_resource_number = len(self.resource_type_registry) + 1 current_resource_number = len(self.resource_type_registry) + 1
with self._registry_lock: for i, file in enumerate(files):
for i, (file, data) in enumerate(results): with open(file, encoding="utf-8", mode="r") as f:
data = yaml.safe_load(io.StringIO(f.read()))
complete_data = {}
if data:
# 为每个资源添加文件路径信息
for resource_id, resource_info in data.items():
if "version" not in resource_info:
resource_info["version"] = "1.0.0"
if "category" not in resource_info:
resource_info["category"] = [file.stem]
elif file.stem not in resource_info["category"]:
resource_info["category"].append(file.stem)
elif not isinstance(resource_info.get("category"), list):
resource_info["category"] = [resource_info["category"]]
if "config_info" not in resource_info:
resource_info["config_info"] = []
if "icon" not in resource_info:
resource_info["icon"] = ""
if "handles" not in resource_info:
resource_info["handles"] = []
if "init_param_schema" not in resource_info:
resource_info["init_param_schema"] = {}
if "config_info" in resource_info:
del resource_info["config_info"]
if "file_path" in resource_info:
del resource_info["file_path"]
complete_data[resource_id] = copy.deepcopy(dict(sorted(resource_info.items())))
if upload_registry:
class_info = resource_info.get("class", {})
if len(class_info) and "module" in class_info:
if class_info.get("type") == "pylabrobot":
res_class = get_class(class_info["module"])
if callable(res_class) and not isinstance(
res_class, type
): # 有的是类,有的是函数,这里暂时只登记函数类的
res_instance = res_class(res_class.__name__)
res_ulr = tree_to_list([resource_plr_to_ulab(res_instance)])
resource_info["config_info"] = res_ulr
resource_info["registry_type"] = "resource"
resource_info["file_path"] = str(file.absolute()).replace("\\", "/")
complete_data = dict(sorted(complete_data.items()))
complete_data = copy.deepcopy(complete_data)
if complete_registry:
with open(file, "w", encoding="utf-8") as f:
yaml.dump(complete_data, f, allow_unicode=True, default_flow_style=False, Dumper=NoAliasDumper)
self.resource_type_registry.update(data) self.resource_type_registry.update(data)
logger.trace( logger.trace( # type: ignore
f"[UniLab Registry] Resource-{current_resource_number} File-{i+1}/{len(results)} " f"[UniLab Registry] Resource-{current_resource_number} File-{i+1}/{len(files)} "
+ f"Add {list(data.keys())}" + f"Add {list(data.keys())}"
) )
current_resource_number += 1 current_resource_number += 1
else:
# 记录无效文件 logger.debug(f"[UniLab Registry] Res File-{i+1}/{len(files)} Not Valid YAML File: {file.absolute()}")
valid_files = {r[0] for r in results}
for file in files:
if file not in valid_files:
logger.debug(f"[UniLab Registry] Res File Not Valid YAML File: {file.absolute()}")
def _extract_class_docstrings(self, module_string: str) -> Dict[str, str]: def _extract_class_docstrings(self, module_string: str) -> Dict[str, str]:
""" """
@@ -549,11 +455,7 @@ class Registry:
return status_schema return status_schema
def _generate_unilab_json_command_schema( def _generate_unilab_json_command_schema(
self, self, method_args: List[Dict[str, Any]], method_name: str, return_annotation: Any = None
method_args: List[Dict[str, Any]],
method_name: str,
return_annotation: Any = None,
previous_schema: Dict[str, Any] | None = None,
) -> Dict[str, Any]: ) -> Dict[str, Any]:
""" """
根据UniLabJsonCommand方法信息生成JSON Schema暂不支持嵌套类型 根据UniLabJsonCommand方法信息生成JSON Schema暂不支持嵌套类型
@@ -562,7 +464,6 @@ class Registry:
method_args: 方法信息字典包含args等 method_args: 方法信息字典包含args等
method_name: 方法名称 method_name: 方法名称
return_annotation: 返回类型注解用于生成result schema仅支持TypedDict return_annotation: 返回类型注解用于生成result schema仅支持TypedDict
previous_schema: 之前的 schema用于保留 goal/feedback/result 下一级字段的 description
Returns: Returns:
JSON Schema格式的参数schema JSON Schema格式的参数schema
@@ -596,7 +497,7 @@ class Registry:
if return_annotation is not None and self._is_typed_dict(return_annotation): if return_annotation is not None and self._is_typed_dict(return_annotation):
result_schema = self._generate_typed_dict_result_schema(return_annotation) result_schema = self._generate_typed_dict_result_schema(return_annotation)
final_schema = { return {
"title": f"{method_name}参数", "title": f"{method_name}参数",
"description": f"", "description": f"",
"type": "object", "type": "object",
@@ -604,40 +505,6 @@ class Registry:
"required": ["goal"], "required": ["goal"],
} }
# 保留之前 schema 中 goal/feedback/result 下一级字段的 description
if previous_schema:
self._preserve_field_descriptions(final_schema, previous_schema)
return final_schema
def _preserve_field_descriptions(self, new_schema: Dict[str, Any], previous_schema: Dict[str, Any]) -> None:
"""
保留之前 schema 中 goal/feedback/result 下一级字段的 description 和 title
Args:
new_schema: 新生成的 schema会被修改
previous_schema: 之前的 schema
"""
for section in ["goal", "feedback", "result"]:
new_section = new_schema.get("properties", {}).get(section, {})
prev_section = previous_schema.get("properties", {}).get(section, {})
if not new_section or not prev_section:
continue
new_props = new_section.get("properties", {})
prev_props = prev_section.get("properties", {})
for field_name, field_schema in new_props.items():
if field_name in prev_props:
prev_field = prev_props[field_name]
# 保留字段的 description
if "description" in prev_field and prev_field["description"]:
field_schema["description"] = prev_field["description"]
# 保留字段的 title用户自定义的中文名
if "title" in prev_field and prev_field["title"]:
field_schema["title"] = prev_field["title"]
def _is_typed_dict(self, annotation: Any) -> bool: def _is_typed_dict(self, annotation: Any) -> bool:
""" """
检查类型注解是否是TypedDict 检查类型注解是否是TypedDict
@@ -724,244 +591,209 @@ class Registry:
"handles": {}, "handles": {},
} }
def _load_single_device_file(
self, file: Path, complete_registry: bool, get_yaml_from_goal_type
) -> Tuple[Dict[str, Any], Dict[str, Any], bool, List[str]]:
"""
加载单个设备文件 (线程安全)
Returns:
(data, complete_data, is_valid, device_ids): 设备数据, 完整数据, 是否有效, 设备ID列表
"""
try:
with open(file, encoding="utf-8", mode="r") as f:
data = yaml.safe_load(io.StringIO(f.read()))
except Exception as e:
logger.warning(f"[UniLab Registry] 读取设备文件失败: {file}, 错误: {e}")
return {}, {}, False, []
if not data:
return {}, {}, False, []
complete_data = {}
action_str_type_mapping = {
"UniLabJsonCommand": "UniLabJsonCommand",
"UniLabJsonCommandAsync": "UniLabJsonCommandAsync",
}
status_str_type_mapping = {}
device_ids = []
for device_id, device_config in data.items():
if "version" not in device_config:
device_config["version"] = "1.0.0"
if "category" not in device_config:
device_config["category"] = [file.stem]
elif file.stem not in device_config["category"]:
device_config["category"].append(file.stem)
if "config_info" not in device_config:
device_config["config_info"] = []
if "description" not in device_config:
device_config["description"] = ""
if "icon" not in device_config:
device_config["icon"] = ""
if "handles" not in device_config:
device_config["handles"] = []
if "init_param_schema" not in device_config:
device_config["init_param_schema"] = {}
if "class" in device_config:
if "status_types" not in device_config["class"] or device_config["class"]["status_types"] is None:
device_config["class"]["status_types"] = {}
if (
"action_value_mappings" not in device_config["class"]
or device_config["class"]["action_value_mappings"] is None
):
device_config["class"]["action_value_mappings"] = {}
enhanced_info = {}
if complete_registry:
device_config["class"]["status_types"].clear()
enhanced_info = get_enhanced_class_info(device_config["class"]["module"], use_dynamic=True)
if not enhanced_info.get("dynamic_import_success", False):
continue
device_config["class"]["status_types"].update(
{k: v["return_type"] for k, v in enhanced_info["status_methods"].items()}
)
for status_name, status_type in device_config["class"]["status_types"].items():
if isinstance(status_type, tuple) or status_type in ["Any", "None", "Unknown"]:
status_type = "String"
device_config["class"]["status_types"][status_name] = status_type
try:
target_type = self._replace_type_with_class(status_type, device_id, f"状态 {status_name}")
except ROSMsgNotFound:
continue
if target_type in [dict, list]:
target_type = String
status_str_type_mapping[status_type] = target_type
device_config["class"]["status_types"] = dict(sorted(device_config["class"]["status_types"].items()))
if complete_registry:
old_action_configs = {}
for action_name, action_config in device_config["class"]["action_value_mappings"].items():
old_action_configs[action_name] = action_config
device_config["class"]["action_value_mappings"] = {
k: v
for k, v in device_config["class"]["action_value_mappings"].items()
if not k.startswith("auto-")
}
device_config["class"]["action_value_mappings"].update(
{
f"auto-{k}": {
"type": "UniLabJsonCommandAsync" if v["is_async"] else "UniLabJsonCommand",
"goal": {},
"feedback": {},
"result": {},
"schema": self._generate_unilab_json_command_schema(
v["args"],
k,
v.get("return_annotation"),
old_action_configs.get(f"auto-{k}", {}).get("schema"),
),
"goal_default": {i["name"]: i["default"] for i in v["args"]},
"handles": old_action_configs.get(f"auto-{k}", {}).get("handles", []),
"placeholder_keys": {
i["name"]: (
"unilabos_resources"
if i["type"] == "unilabos.registry.placeholder_type:ResourceSlot"
or i["type"] == ("list", "unilabos.registry.placeholder_type:ResourceSlot")
else "unilabos_devices"
)
for i in v["args"]
if i.get("type", "")
in [
"unilabos.registry.placeholder_type:ResourceSlot",
"unilabos.registry.placeholder_type:DeviceSlot",
("list", "unilabos.registry.placeholder_type:ResourceSlot"),
("list", "unilabos.registry.placeholder_type:DeviceSlot"),
]
},
}
for k, v in enhanced_info["action_methods"].items()
if k not in device_config["class"]["action_value_mappings"]
}
)
for action_name, old_config in old_action_configs.items():
if action_name in device_config["class"]["action_value_mappings"]:
old_schema = old_config.get("schema", {})
if "description" in old_schema and old_schema["description"]:
device_config["class"]["action_value_mappings"][action_name]["schema"][
"description"
] = old_schema["description"]
device_config["init_param_schema"] = {}
device_config["init_param_schema"]["config"] = self._generate_unilab_json_command_schema(
enhanced_info["init_params"], "__init__"
)["properties"]["goal"]
device_config["init_param_schema"]["data"] = self._generate_status_types_schema(
enhanced_info["status_methods"]
)
device_config.pop("schema", None)
device_config["class"]["action_value_mappings"] = dict(
sorted(device_config["class"]["action_value_mappings"].items())
)
for action_name, action_config in device_config["class"]["action_value_mappings"].items():
if "handles" not in action_config:
action_config["handles"] = {}
elif isinstance(action_config["handles"], list):
if len(action_config["handles"]):
logger.error(f"设备{device_id} {action_name} 的handles配置错误应该是字典类型")
continue
else:
action_config["handles"] = {}
if "type" in action_config:
action_type_str: str = action_config["type"]
if not action_type_str.startswith("UniLabJsonCommand"):
try:
target_type = self._replace_type_with_class(
action_type_str, device_id, f"动作 {action_name}"
)
except ROSMsgNotFound:
continue
action_str_type_mapping[action_type_str] = target_type
if target_type is not None:
action_config["goal_default"] = yaml.safe_load(
io.StringIO(get_yaml_from_goal_type(target_type.Goal))
)
action_config["schema"] = ros_action_to_json_schema(target_type)
else:
logger.warning(
f"[UniLab Registry] 设备 {device_id} 的动作 {action_name} 类型为空,跳过替换"
)
complete_data[device_id] = copy.deepcopy(dict(sorted(device_config.items())))
for status_name, status_type in device_config["class"]["status_types"].items():
device_config["class"]["status_types"][status_name] = status_str_type_mapping[status_type]
for action_name, action_config in device_config["class"]["action_value_mappings"].items():
if action_config["type"] not in action_str_type_mapping:
continue
action_config["type"] = action_str_type_mapping[action_config["type"]]
self._add_builtin_actions(device_config, device_id)
device_config["file_path"] = str(file.absolute()).replace("\\", "/")
device_config["registry_type"] = "device"
device_ids.append(device_id)
complete_data = dict(sorted(complete_data.items()))
complete_data = copy.deepcopy(complete_data)
try:
with open(file, "w", encoding="utf-8") as f:
yaml.dump(complete_data, f, allow_unicode=True, default_flow_style=False, Dumper=NoAliasDumper)
except Exception as e:
logger.warning(f"[UniLab Registry] 写入设备文件失败: {file}, 错误: {e}")
return data, complete_data, True, device_ids
def load_device_types(self, path: os.PathLike, complete_registry: bool): def load_device_types(self, path: os.PathLike, complete_registry: bool):
# return
abs_path = Path(path).absolute() abs_path = Path(path).absolute()
devices_path = abs_path / "devices" devices_path = abs_path / "devices"
device_comms_path = abs_path / "device_comms" device_comms_path = abs_path / "device_comms"
files = list(devices_path.glob("*.yaml")) + list(device_comms_path.glob("*.yaml")) files = list(devices_path.glob("*.yaml")) + list(device_comms_path.glob("*.yaml"))
logger.trace( logger.trace( # type: ignore
f"[UniLab Registry] devices: {devices_path.exists()}, device_comms: {device_comms_path.exists()}, " f"[UniLab Registry] devices: {devices_path.exists()}, device_comms: {device_comms_path.exists()}, "
+ f"total: {len(files)}" + f"total: {len(files)}"
) )
current_device_number = len(self.device_type_registry) + 1
if not files:
return
from unilabos.app.web.utils.action_utils import get_yaml_from_goal_type from unilabos.app.web.utils.action_utils import get_yaml_from_goal_type
# 使用线程池并行加载 for i, file in enumerate(files):
max_workers = min(8, len(files)) with open(file, encoding="utf-8", mode="r") as f:
results = [] data = yaml.safe_load(io.StringIO(f.read()))
complete_data = {}
with ThreadPoolExecutor(max_workers=max_workers) as executor: action_str_type_mapping = {
future_to_file = { "UniLabJsonCommand": "UniLabJsonCommand",
executor.submit(self._load_single_device_file, file, complete_registry, get_yaml_from_goal_type): file "UniLabJsonCommandAsync": "UniLabJsonCommandAsync",
for file in files
} }
for future in as_completed(future_to_file): status_str_type_mapping = {}
file = future_to_file[future] if data:
try: # 在添加到注册表前处理类型替换
data, complete_data, is_valid, device_ids = future.result() for device_id, device_config in data.items():
if is_valid: # 添加文件路径信息 - 使用规范化的完整文件路径
results.append((file, data, device_ids)) if "version" not in device_config:
except Exception as e: device_config["version"] = "1.0.0"
logger.warning(f"[UniLab Registry] 处理设备文件异常: {file}, 错误: {e}") if "category" not in device_config:
device_config["category"] = [file.stem]
elif file.stem not in device_config["category"]:
device_config["category"].append(file.stem)
if "config_info" not in device_config:
device_config["config_info"] = []
if "description" not in device_config:
device_config["description"] = ""
if "icon" not in device_config:
device_config["icon"] = ""
if "handles" not in device_config:
device_config["handles"] = []
if "init_param_schema" not in device_config:
device_config["init_param_schema"] = {}
if "class" in device_config:
if (
"status_types" not in device_config["class"]
or device_config["class"]["status_types"] is None
):
device_config["class"]["status_types"] = {}
if (
"action_value_mappings" not in device_config["class"]
or device_config["class"]["action_value_mappings"] is None
):
device_config["class"]["action_value_mappings"] = {}
enhanced_info = {}
if complete_registry:
device_config["class"]["status_types"].clear()
enhanced_info = get_enhanced_class_info(device_config["class"]["module"], use_dynamic=True)
if not enhanced_info.get("dynamic_import_success", False):
continue
device_config["class"]["status_types"].update(
{k: v["return_type"] for k, v in enhanced_info["status_methods"].items()}
)
for status_name, status_type in device_config["class"]["status_types"].items():
if isinstance(status_type, tuple) or status_type in ["Any", "None", "Unknown"]:
status_type = "String" # 替换成ROS的String便于显示
device_config["class"]["status_types"][status_name] = status_type
try:
target_type = self._replace_type_with_class(
status_type, device_id, f"状态 {status_name}"
)
except ROSMsgNotFound:
continue
if target_type in [
dict,
list,
]: # 对于嵌套类型返回的对象,暂时处理成字符串,无法直接进行转换
target_type = String
status_str_type_mapping[status_type] = target_type
device_config["class"]["status_types"] = dict(
sorted(device_config["class"]["status_types"].items())
)
if complete_registry:
# 保存原有的description信息
old_descriptions = {}
for action_name, action_config in device_config["class"]["action_value_mappings"].items():
if "description" in action_config.get("schema", {}):
description = action_config["schema"]["description"]
if len(description):
old_descriptions[action_name] = action_config["schema"]["description"]
# 线程安全地更新注册表 device_config["class"]["action_value_mappings"] = {
current_device_number = len(self.device_type_registry) + 1 k: v
with self._registry_lock: for k, v in device_config["class"]["action_value_mappings"].items()
for file, data, device_ids in results: if not k.startswith("auto-")
self.device_type_registry.update(data) }
for device_id in device_ids: # 处理动作值映射
logger.trace( device_config["class"]["action_value_mappings"].update(
f"[UniLab Registry] Device-{current_device_number} Add {device_id} " {
f"auto-{k}": {
"type": "UniLabJsonCommandAsync" if v["is_async"] else "UniLabJsonCommand",
"goal": {},
"feedback": {},
"result": {},
"schema": self._generate_unilab_json_command_schema(
v["args"], k, v.get("return_annotation")
),
"goal_default": {i["name"]: i["default"] for i in v["args"]},
"handles": [],
"placeholder_keys": {
i["name"]: (
"unilabos_resources"
if i["type"] == "unilabos.registry.placeholder_type:ResourceSlot"
or i["type"]
== ("list", "unilabos.registry.placeholder_type:ResourceSlot")
else "unilabos_devices"
)
for i in v["args"]
if i.get("type", "")
in [
"unilabos.registry.placeholder_type:ResourceSlot",
"unilabos.registry.placeholder_type:DeviceSlot",
("list", "unilabos.registry.placeholder_type:ResourceSlot"),
("list", "unilabos.registry.placeholder_type:DeviceSlot"),
]
},
}
# 不生成已配置action的动作
for k, v in enhanced_info["action_methods"].items()
if k not in device_config["class"]["action_value_mappings"]
}
)
# 恢复原有的description信息auto开头的不修改
for action_name, description in old_descriptions.items():
if action_name in device_config["class"]["action_value_mappings"]: # 有一些会被删除
device_config["class"]["action_value_mappings"][action_name]["schema"][
"description"
] = description
device_config["init_param_schema"] = {}
device_config["init_param_schema"]["config"] = self._generate_unilab_json_command_schema(
enhanced_info["init_params"], "__init__"
)["properties"]["goal"]
device_config["init_param_schema"]["data"] = self._generate_status_types_schema(
enhanced_info["status_methods"]
)
device_config.pop("schema", None)
device_config["class"]["action_value_mappings"] = dict(
sorted(device_config["class"]["action_value_mappings"].items())
)
for action_name, action_config in device_config["class"]["action_value_mappings"].items():
if "handles" not in action_config:
action_config["handles"] = {}
elif isinstance(action_config["handles"], list):
if len(action_config["handles"]):
logger.error(f"设备{device_id} {action_name} 的handles配置错误应该是字典类型")
continue
else:
action_config["handles"] = {}
if "type" in action_config:
action_type_str: str = action_config["type"]
# 通过Json发放指令而不是通过特殊的ros action进行处理
if not action_type_str.startswith("UniLabJsonCommand"):
try:
target_type = self._replace_type_with_class(
action_type_str, device_id, f"动作 {action_name}"
)
except ROSMsgNotFound:
continue
action_str_type_mapping[action_type_str] = target_type
if target_type is not None:
action_config["goal_default"] = yaml.safe_load(
io.StringIO(get_yaml_from_goal_type(target_type.Goal))
)
action_config["schema"] = ros_action_to_json_schema(target_type)
else:
logger.warning(
f"[UniLab Registry] 设备 {device_id} 的动作 {action_name} 类型为空,跳过替换"
)
complete_data[device_id] = copy.deepcopy(dict(sorted(device_config.items()))) # 稍后dump到文件
for status_name, status_type in device_config["class"]["status_types"].items():
device_config["class"]["status_types"][status_name] = status_str_type_mapping[status_type]
for action_name, action_config in device_config["class"]["action_value_mappings"].items():
if action_config["type"] not in action_str_type_mapping:
continue
action_config["type"] = action_str_type_mapping[action_config["type"]]
# 添加内置的驱动命令动作
self._add_builtin_actions(device_config, device_id)
device_config["file_path"] = str(file.absolute()).replace("\\", "/")
device_config["registry_type"] = "device"
logger.trace( # type: ignore
f"[UniLab Registry] Device-{current_device_number} File-{i+1}/{len(files)} Add {device_id} "
+ f"[{data[device_id].get('name', '未命名设备')}]" + f"[{data[device_id].get('name', '未命名设备')}]"
) )
current_device_number += 1 current_device_number += 1
complete_data = dict(sorted(complete_data.items()))
# 记录无效文件 complete_data = copy.deepcopy(complete_data)
valid_files = {r[0] for r in results} with open(file, "w", encoding="utf-8") as f:
for file in files: yaml.dump(complete_data, f, allow_unicode=True, default_flow_style=False, Dumper=NoAliasDumper)
if file not in valid_files: self.device_type_registry.update(data)
logger.debug(f"[UniLab Registry] Device File Not Valid YAML File: {file.absolute()}") else:
logger.debug(
f"[UniLab Registry] Device File-{i+1}/{len(files)} Not Valid YAML File: {file.absolute()}"
)
def obtain_registry_device_info(self): def obtain_registry_device_info(self):
devices = [] devices = []

View File

@@ -20,17 +20,6 @@ BIOYOND_PolymerStation_Liquid_Vial:
icon: '' icon: ''
init_param_schema: {} init_param_schema: {}
version: 1.0.0 version: 1.0.0
BIOYOND_PolymerStation_Measurement_Vial:
category:
- bottles
class:
module: unilabos.resources.bioyond.bottles:BIOYOND_PolymerStation_Measurement_Vial
type: pylabrobot
description: 聚合站-测量小瓶(测密度)
handles: []
icon: ''
init_param_schema: {}
version: 1.0.0
BIOYOND_PolymerStation_Reactor: BIOYOND_PolymerStation_Reactor:
category: category:
- bottles - bottles

View File

@@ -84,12 +84,12 @@ def bioyond_warehouse_reagent_storage(name: str) -> WareHouse:
) )
def bioyond_warehouse_tipbox_storage(name: str) -> WareHouse: def bioyond_warehouse_tipbox_storage(name: str) -> WareHouse:
"""创建BioYond站内Tip盒堆栈A01B03, 2行×3列""" """创建BioYond站内Tip盒堆栈A01B03),用于存放枪头盒"""
return warehouse_factory( return warehouse_factory(
name=name, name=name,
num_items_x=3, # 3列01-03 num_items_x=3, # 3列01-03
num_items_y=2, # 2行A-B num_items_y=2, # 2行A-B
num_items_z=1, # 1层 num_items_z=1, # 1层
dx=10.0, dx=10.0,
dy=10.0, dy=10.0,
dz=10.0, dz=10.0,

View File

@@ -193,20 +193,3 @@ def BIOYOND_PolymerStation_Flask(
barcode=barcode, barcode=barcode,
model="BIOYOND_PolymerStation_Flask", model="BIOYOND_PolymerStation_Flask",
) )
def BIOYOND_PolymerStation_Measurement_Vial(
name: str,
diameter: float = 25.0,
height: float = 60.0,
max_volume: float = 20000.0, # 20mL
barcode: str = None,
) -> Bottle:
"""创建测量小瓶"""
return Bottle(
name=name,
diameter=diameter,
height=height,
max_volume=max_volume,
barcode=barcode,
model="BIOYOND_PolymerStation_Measurement_Vial",
)

View File

@@ -18,12 +18,9 @@ from unilabos.resources.bioyond.YB_warehouses import (
bioyond_warehouse_1x8x4, bioyond_warehouse_1x8x4,
bioyond_warehouse_reagent_storage, bioyond_warehouse_reagent_storage,
# bioyond_warehouse_liquid_preparation, # bioyond_warehouse_liquid_preparation,
bioyond_warehouse_tipbox_storage, # 新增Tip盒堆栈
bioyond_warehouse_density_vial, bioyond_warehouse_density_vial,
) )
from unilabos.resources.bioyond.warehouses import (
bioyond_warehouse_tipbox_storage_left, # 新增Tip盒堆栈(左)
bioyond_warehouse_tipbox_storage_right, # 新增Tip盒堆栈(右)
)
class BIOYOND_PolymerReactionStation_Deck(Deck): class BIOYOND_PolymerReactionStation_Deck(Deck):
@@ -50,22 +47,24 @@ class BIOYOND_PolymerReactionStation_Deck(Deck):
"堆栈1右": bioyond_warehouse_1x4x4_right("堆栈1右"), # 右侧堆栈: A05D08 "堆栈1右": bioyond_warehouse_1x4x4_right("堆栈1右"), # 右侧堆栈: A05D08
"站内试剂存放堆栈": bioyond_warehouse_reagent_storage("站内试剂存放堆栈"), # A01A02 "站内试剂存放堆栈": bioyond_warehouse_reagent_storage("站内试剂存放堆栈"), # A01A02
# "移液站内10%分装液体准备仓库": bioyond_warehouse_liquid_preparation("移液站内10%分装液体准备仓库"), # A01B04 # "移液站内10%分装液体准备仓库": bioyond_warehouse_liquid_preparation("移液站内10%分装液体准备仓库"), # A01B04
"站内Tip盒堆栈(左)": bioyond_warehouse_tipbox_storage_left("站内Tip盒堆栈(左)"), # A02B03 "站内Tip盒堆栈": bioyond_warehouse_tipbox_storage("站内Tip盒堆栈"), # A01B03, 存放枪头盒.
"站内Tip盒堆栈(右)": bioyond_warehouse_tipbox_storage_right("站内Tip盒堆栈(右)"), # A01B01
"测量小瓶仓库(测密度)": bioyond_warehouse_density_vial("测量小瓶仓库(测密度)"), # A01B03 "测量小瓶仓库(测密度)": bioyond_warehouse_density_vial("测量小瓶仓库(测密度)"), # A01B03
} }
self.warehouse_locations = { self.warehouse_locations = {
"堆栈1左": Coordinate(-200.0, 400.0, 0.0), # 左侧位置 "堆栈1左": Coordinate(0.0, 430.0, 0.0), # 左侧位置
"堆栈1右": Coordinate(2350.0, 400.0, 0.0), # 右侧位置 "堆栈1右": Coordinate(2500.0, 430.0, 0.0), # 右侧位置
"站内试剂存放堆栈": Coordinate(640.0, 400.0, 0.0), "站内试剂存放堆栈": Coordinate(640.0, 480.0, 0.0),
"站内Tip盒堆栈(左)": Coordinate(300.0, 100.0, 0.0), # "移液站内10%分装液体准备仓库": Coordinate(1200.0, 600.0, 0.0),
"站内Tip盒堆栈(右)": Coordinate(2250.0, 100.0, 0.0), # 向右偏移 2 * item_dx (137.0) "站内Tip盒堆栈": Coordinate(300.0, 150.0, 0.0),
"测量小瓶仓库(测密度)": Coordinate(1000.0, 530.0, 0.0), "测量小瓶仓库(测密度)": Coordinate(922.0, 552.0, 0.0),
} }
self.warehouses["站内试剂存放堆栈"].rotation = Rotation(z=90)
self.warehouses["测量小瓶仓库(测密度)"].rotation = Rotation(z=270)
for warehouse_name, warehouse in self.warehouses.items(): for warehouse_name, warehouse in self.warehouses.items():
self.assign_child_resource(warehouse, location=self.warehouse_locations[warehouse_name]) self.assign_child_resource(warehouse, location=self.warehouse_locations[warehouse_name])
class BIOYOND_PolymerPreparationStation_Deck(Deck): class BIOYOND_PolymerPreparationStation_Deck(Deck):
def __init__( def __init__(
self, self,
@@ -93,9 +92,9 @@ class BIOYOND_PolymerPreparationStation_Deck(Deck):
"溶液堆栈": bioyond_warehouse_1x4x4("溶液堆栈"), # 4行×4列 (A01-D04) "溶液堆栈": bioyond_warehouse_1x4x4("溶液堆栈"), # 4行×4列 (A01-D04)
} }
self.warehouse_locations = { self.warehouse_locations = {
"粉末堆栈": Coordinate(-200.0, 400.0, 0.0), "粉末堆栈": Coordinate(0.0, 450.0, 0.0),
"试剂堆栈": Coordinate(1750.0, 160.0, 0.0), "试剂堆栈": Coordinate(1850.0, 200.0, 0.0),
"溶液堆栈": Coordinate(2350.0, 400.0, 0.0), "溶液堆栈": Coordinate(2500.0, 450.0, 0.0),
} }
for warehouse_name, warehouse in self.warehouses.items(): for warehouse_name, warehouse in self.warehouses.items():
@@ -149,7 +148,6 @@ class BIOYOND_YB_Deck(Deck):
for warehouse_name, warehouse in self.warehouses.items(): for warehouse_name, warehouse in self.warehouses.items():
self.assign_child_resource(warehouse, location=self.warehouse_locations[warehouse_name]) self.assign_child_resource(warehouse, location=self.warehouse_locations[warehouse_name])
def YB_Deck(name: str) -> Deck: def YB_Deck(name: str) -> Deck:
by=BIOYOND_YB_Deck(name=name) by=BIOYOND_YB_Deck(name=name)
by.setup() by.setup()

View File

@@ -46,80 +46,48 @@ def bioyond_warehouse_1x4x4_right(name: str) -> WareHouse:
) )
def bioyond_warehouse_density_vial(name: str) -> WareHouse: def bioyond_warehouse_density_vial(name: str) -> WareHouse:
"""创建测量小瓶仓库(测密度) - 竖向排列2列3行 """创建测量小瓶仓库(测密度) A01B03"""
布局(从下到上,从左到右):
| A03 | B03 | ← 顶部
| A02 | B02 | ← 中部
| A01 | B01 | ← 底部
"""
return warehouse_factory( return warehouse_factory(
name=name, name=name,
num_items_x=2, # 2列(A, B num_items_x=3, # 3列(01-03
num_items_y=3, # 3行(01-03从下到上 num_items_y=2, # 2行(A-B
num_items_z=1, # 1层 num_items_z=1, # 1层
dx=10.0, dx=10.0,
dy=10.0, dy=10.0,
dz=10.0, dz=10.0,
item_dx=40.0, # 列间距A到B的横向距离 item_dx=40.0,
item_dy=40.0, # 行间距01到02到03的竖向距离 item_dy=40.0,
item_dz=50.0, item_dz=50.0,
# ⭐ 竖向warehouse槽位尺寸也是竖向的小瓶已经是正方形无需调整 # 用更小的 resource_size 来表现 "小点的孔位"
resource_size_x=30.0, resource_size_x=30.0,
resource_size_y=30.0, resource_size_y=30.0,
resource_size_z=12.0, resource_size_z=12.0,
category="warehouse", category="warehouse",
col_offset=0, col_offset=0,
layout="vertical-col-major", # ⭐ 竖向warehouse专用布局
)
def bioyond_warehouse_reagent_storage(name: str) -> WareHouse:
"""创建BioYond站内试剂存放堆栈 - 竖向排列1列2行
布局(竖向,从下到上):
| A02 | ← 顶部
| A01 | ← 底部
"""
return warehouse_factory(
name=name,
num_items_x=1, # 1列
num_items_y=2, # 2行01-02从下到上
num_items_z=1, # 1层
dx=10.0,
dy=10.0,
dz=10.0,
item_dx=96.0, # 列间距这里只有1列不重要
item_dy=137.0, # 行间距A01到A02的竖向距离
item_dz=120.0,
# ⭐ 竖向warehouse交换槽位尺寸使槽位框也是竖向的
resource_size_x=86.0, # 原来的 resource_size_y
resource_size_y=127.0, # 原来的 resource_size_x
resource_size_z=25.0,
category="warehouse",
layout="vertical-col-major", # ⭐ 竖向warehouse专用布局
)
def bioyond_warehouse_tipbox_storage_left(name: str) -> WareHouse:
"""创建BioYond站内Tip盒堆栈左侧部分A02B032列2行"""
return warehouse_factory(
name=name,
num_items_x=2, # 2列
num_items_y=2, # 2行A-B
num_items_z=1, # 1层
dx=10.0,
dy=10.0,
dz=10.0,
item_dx=137.0,
item_dy=96.0,
item_dz=120.0,
category="warehouse",
col_offset=1, # 从02开始: A02, A03
layout="row-major", layout="row-major",
) )
def bioyond_warehouse_tipbox_storage_right(name: str) -> WareHouse: def bioyond_warehouse_reagent_storage(name: str) -> WareHouse:
"""创建BioYond站内Tip盒堆栈右侧部分A01B011列2行""" """创建BioYond站内试剂存放堆栈A01A02, 1行×2列"""
return warehouse_factory( return warehouse_factory(
name=name, name=name,
num_items_x=1, # 1列 num_items_x=2, # 2列01-02
num_items_y=1, # 1行A
num_items_z=1, # 1层
dx=10.0,
dy=10.0,
dz=10.0,
item_dx=137.0,
item_dy=96.0,
item_dz=120.0,
category="warehouse",
)
def bioyond_warehouse_tipbox_storage(name: str) -> WareHouse:
"""创建BioYond站内Tip盒堆栈A01B03用于存放枪头盒"""
return warehouse_factory(
name=name,
num_items_x=3, # 3列01-03
num_items_y=2, # 2行A-B num_items_y=2, # 2行A-B
num_items_z=1, # 1层 num_items_z=1, # 1层
dx=10.0, dx=10.0,
@@ -129,7 +97,7 @@ def bioyond_warehouse_tipbox_storage_right(name: str) -> WareHouse:
item_dy=96.0, item_dy=96.0,
item_dz=120.0, item_dz=120.0,
category="warehouse", category="warehouse",
col_offset=0, # 从01开始: A01 col_offset=0,
layout="row-major", layout="row-major",
) )

View File

@@ -27,7 +27,7 @@ class RegularContainer(Container):
def get_regular_container(name="container"): def get_regular_container(name="container"):
r = RegularContainer(name=name) r = RegularContainer(name=name)
r.category = "container" r.category = "container"
return r return RegularContainer(name=name)
# #
# class RegularContainer(object): # class RegularContainer(object):

View File

@@ -151,40 +151,12 @@ def canonicalize_links_ports(links: List[Dict[str, Any]], resource_tree_set: Res
""" """
# 构建 id 到 uuid 的映射 # 构建 id 到 uuid 的映射
id_to_uuid: Dict[str, str] = {} id_to_uuid: Dict[str, str] = {}
uuid_to_id: Dict[str, str] = {}
for node in resource_tree_set.all_nodes: for node in resource_tree_set.all_nodes:
id_to_uuid[node.res_content.id] = node.res_content.uuid id_to_uuid[node.res_content.id] = node.res_content.uuid
uuid_to_id[node.res_content.uuid] = node.res_content.id
# 第三遍处理:为每个 link 添加 source_uuid 和 target_uuid
for link in links:
source_id = link.get("source")
target_id = link.get("target")
# 添加 source_uuid
if source_id and source_id in id_to_uuid:
link["source_uuid"] = id_to_uuid[source_id]
# 添加 target_uuid
if target_id and target_id in id_to_uuid:
link["target_uuid"] = id_to_uuid[target_id]
source_uuid = link.get("source_uuid")
target_uuid = link.get("target_uuid")
# 添加 source_uuid
if source_uuid and source_uuid in uuid_to_id:
link["source"] = uuid_to_id[source_uuid]
# 添加 target_uuid
if target_uuid and target_uuid in uuid_to_id:
link["target"] = uuid_to_id[target_uuid]
# 第一遍处理将字符串类型的port转换为字典格式 # 第一遍处理将字符串类型的port转换为字典格式
for link in links: for link in links:
port = link.get("port") port = link.get("port")
if port is None:
continue
if link.get("type", "physical") == "physical": if link.get("type", "physical") == "physical":
link["type"] = "fluid" link["type"] = "fluid"
if isinstance(port, int): if isinstance(port, int):
@@ -207,15 +179,13 @@ def canonicalize_links_ports(links: List[Dict[str, Any]], resource_tree_set: Res
link["port"] = {link["source"]: None, link["target"]: None} link["port"] = {link["source"]: None, link["target"]: None}
# 构建边字典,键为(source节点, target节点)值为对应的port信息 # 构建边字典,键为(source节点, target节点)值为对应的port信息
edges = {(link["source"], link["target"]): link["port"] for link in links if link.get("port")} edges = {(link["source"], link["target"]): link["port"] for link in links}
# 第二遍处理填充反向边的dest信息 # 第二遍处理填充反向边的dest信息
delete_reverses = [] delete_reverses = []
for i, link in enumerate(links): for i, link in enumerate(links):
s, t = link["source"], link["target"] s, t = link["source"], link["target"]
current_port = link.get("port") current_port = link["port"]
if current_port is None:
continue
if current_port.get(t) is None: if current_port.get(t) is None:
reverse_key = (t, s) reverse_key = (t, s)
reverse_port = edges.get(reverse_key) reverse_port = edges.get(reverse_key)
@@ -230,6 +200,20 @@ def canonicalize_links_ports(links: List[Dict[str, Any]], resource_tree_set: Res
current_port[t] = current_port[s] current_port[t] = current_port[s]
# 删除已被使用反向端口信息的反向边 # 删除已被使用反向端口信息的反向边
standardized_links = [link for i, link in enumerate(links) if i not in delete_reverses] standardized_links = [link for i, link in enumerate(links) if i not in delete_reverses]
# 第三遍处理:为每个 link 添加 source_uuid 和 target_uuid
for link in standardized_links:
source_id = link.get("source")
target_id = link.get("target")
# 添加 source_uuid
if source_id and source_id in id_to_uuid:
link["source_uuid"] = id_to_uuid[source_id]
# 添加 target_uuid
if target_id and target_id in id_to_uuid:
link["target_uuid"] = id_to_uuid[target_id]
return standardized_links return standardized_links
@@ -276,7 +260,7 @@ def read_node_link_json(
resource_tree_set = canonicalize_nodes_data(nodes) resource_tree_set = canonicalize_nodes_data(nodes)
# 标准化边数据 # 标准化边数据
links = data.get("links", data.get("edges", [])) links = data.get("links", [])
standardized_links = canonicalize_links_ports(links, resource_tree_set) standardized_links = canonicalize_links_ports(links, resource_tree_set)
# 构建 NetworkX 图(需要转换回 dict 格式) # 构建 NetworkX 图(需要转换回 dict 格式)
@@ -300,8 +284,6 @@ def modify_to_backend_format(data: list[dict[str, Any]]) -> list[dict[str, Any]]
edge["sourceHandle"] = port[source] edge["sourceHandle"] = port[source]
elif "source_port" in edge: elif "source_port" in edge:
edge["sourceHandle"] = edge.pop("source_port") edge["sourceHandle"] = edge.pop("source_port")
elif "source_handle" in edge:
edge["sourceHandle"] = edge.pop("source_handle")
else: else:
typ = edge.get("type") typ = edge.get("type")
if typ == "communication": if typ == "communication":
@@ -310,8 +292,6 @@ def modify_to_backend_format(data: list[dict[str, Any]]) -> list[dict[str, Any]]
edge["targetHandle"] = port[target] edge["targetHandle"] = port[target]
elif "target_port" in edge: elif "target_port" in edge:
edge["targetHandle"] = edge.pop("target_port") edge["targetHandle"] = edge.pop("target_port")
elif "target_handle" in edge:
edge["targetHandle"] = edge.pop("target_handle")
else: else:
typ = edge.get("type") typ = edge.get("type")
if typ == "communication": if typ == "communication":
@@ -617,8 +597,6 @@ def resource_plr_to_ulab(resource_plr: "ResourcePLR", parent_name: str = None, w
"tube": "tube", "tube": "tube",
"bottle_carrier": "bottle_carrier", "bottle_carrier": "bottle_carrier",
"plate_adapter": "plate_adapter", "plate_adapter": "plate_adapter",
"electrode_sheet": "electrode_sheet",
"material_hole": "material_hole",
} }
if source in replace_info: if source in replace_info:
return replace_info[source] return replace_info[source]
@@ -801,22 +779,6 @@ def resource_bioyond_to_plr(bioyond_materials: list[dict], type_mapping: Dict[st
if not locations: if not locations:
logger.debug(f"[物料位置] {unique_name} 没有location信息跳过warehouse放置") logger.debug(f"[物料位置] {unique_name} 没有location信息跳过warehouse放置")
# ⭐ 预先检查如果物料的任何location在竖向warehouse中提前交换尺寸
# 这样可以避免多个location时尺寸不一致的问题
needs_size_swap = False
for loc in locations:
wh_name_check = loc.get("whName")
if wh_name_check in ["站内试剂存放堆栈", "测量小瓶仓库(测密度)"]:
needs_size_swap = True
break
if needs_size_swap and hasattr(plr_material, 'size_x') and hasattr(plr_material, 'size_y'):
original_x = plr_material.size_x
original_y = plr_material.size_y
plr_material.size_x = original_y
plr_material.size_y = original_x
logger.debug(f" 物料 {unique_name} 将放入竖向warehouse预先交换尺寸: {original_x}×{original_y}{plr_material.size_x}×{plr_material.size_y}")
for loc in locations: for loc in locations:
wh_name = loc.get("whName") wh_name = loc.get("whName")
logger.debug(f"[物料位置] {unique_name} 尝试放置到 warehouse: {wh_name} (Bioyond坐标: x={loc.get('x')}, y={loc.get('y')}, z={loc.get('z')})") logger.debug(f"[物料位置] {unique_name} 尝试放置到 warehouse: {wh_name} (Bioyond坐标: x={loc.get('x')}, y={loc.get('y')}, z={loc.get('z')})")
@@ -833,20 +795,12 @@ def resource_bioyond_to_plr(bioyond_materials: list[dict], type_mapping: Dict[st
logger.warning(f"物料 {material['name']} 的列号 x={x_val} 超出范围无法映射到堆栈1左或堆栈1右") logger.warning(f"物料 {material['name']} 的列号 x={x_val} 超出范围无法映射到堆栈1左或堆栈1右")
continue continue
# 特殊处理: Bioyond的"站内Tip盒堆栈"也需要进行拆分映射
if wh_name == "站内Tip盒堆栈":
y_val = loc.get("y", 1)
if y_val == 1:
wh_name = "站内Tip盒堆栈(右)"
elif y_val in [2, 3]:
wh_name = "站内Tip盒堆栈(左)"
y = y - 1 # 调整列号,因为左侧仓库对应的 Bioyond y=2 实际上是它的第1列
if hasattr(deck, "warehouses") and wh_name in deck.warehouses: if hasattr(deck, "warehouses") and wh_name in deck.warehouses:
warehouse = deck.warehouses[wh_name] warehouse = deck.warehouses[wh_name]
logger.debug(f"[Warehouse匹配] 找到warehouse: {wh_name} (容量: {warehouse.capacity}, 行×列: {warehouse.num_items_x}×{warehouse.num_items_y})") logger.debug(f"[Warehouse匹配] 找到warehouse: {wh_name} (容量: {warehouse.capacity}, 行×列: {warehouse.num_items_x}×{warehouse.num_items_y})")
# Bioyond坐标映射 (重要!): x→行(1=A,2=B...), y→列(1=01,2=02...), z→层(通常=1) # Bioyond坐标映射 (重要!): x→行(1=A,2=B...), y→列(1=01,2=02...), z→层(通常=1)
# PyLabRobot warehouse是列优先存储: A01,B01,C01,D01, A02,B02,C02,D02, ...
x = loc.get("x", 1) # 行号 (1-based: 1=A, 2=B, 3=C, 4=D) x = loc.get("x", 1) # 行号 (1-based: 1=A, 2=B, 3=C, 4=D)
y = loc.get("y", 1) # 列号 (1-based: 1=01, 2=02, 3=03...) y = loc.get("y", 1) # 列号 (1-based: 1=01, 2=02, 3=03...)
z = loc.get("z", 1) # 层号 (1-based, 通常为1) z = loc.get("z", 1) # 层号 (1-based, 通常为1)
@@ -855,23 +809,12 @@ def resource_bioyond_to_plr(bioyond_materials: list[dict], type_mapping: Dict[st
if wh_name == "堆栈1右": if wh_name == "堆栈1右":
y = y - 4 # 将5-8映射到1-4 y = y - 4 # 将5-8映射到1-4
# 特殊处理向warehouse站内试剂存放堆栈、测量小瓶仓库 # 特殊处理对于1行×N列的横向warehouse站内试剂存放堆栈)
# 这些warehouse使用 vertical-col-major 布局 # Bioyond的y坐标表示线性位置序号而不是列号
if wh_name in ["站内试剂存放堆栈", "测量小瓶仓库(测密度)"]: if warehouse.num_items_y == 1:
# vertical-col-major 布局的坐标映射: # 1行warehouse: 直接用y作为线性索引
# - Bioyond的x(1=A,2=B)对应warehouse的列(col, x方向) idx = y - 1
# - Bioyond的y(1=01,2=02,3=03)对应warehouse的行(row, y方向),从下到上 logger.debug(f"1行warehouse {wh_name}: y={y} → idx={idx}")
# vertical-col-major 中: row=0 对应底部row=n-1 对应顶部
# Bioyond y=1(01) 对应底部 → row=0, y=2(02) 对应中间 → row=1
# 索引计算: idx = row * num_cols + col
col_idx = x - 1 # Bioyond的x(A,B) → col索引(0,1)
row_idx = y - 1 # Bioyond的y(01,02,03) → row索引(0,1,2)
layer_idx = z - 1
idx = layer_idx * (warehouse.num_items_x * warehouse.num_items_y) + row_idx * warehouse.num_items_y + col_idx
logger.debug(f"🔍 竖向warehouse {wh_name}: Bioyond(x={x},y={y},z={z}) → warehouse(col={col_idx},row={row_idx},layer={layer_idx}) → idx={idx}, capacity={warehouse.capacity}")
# 普通横向warehouse的处理
else: else:
# 多行warehouse: 根据 layout 使用不同的索引计算 # 多行warehouse: 根据 layout 使用不同的索引计算
row_idx = x - 1 # x表示行: 转为0-based row_idx = x - 1 # x表示行: 转为0-based
@@ -895,7 +838,6 @@ def resource_bioyond_to_plr(bioyond_materials: list[dict], type_mapping: Dict[st
if 0 <= idx < warehouse.capacity: if 0 <= idx < warehouse.capacity:
if warehouse[idx] is None or isinstance(warehouse[idx], ResourceHolder): if warehouse[idx] is None or isinstance(warehouse[idx], ResourceHolder):
# 物料尺寸已在放入warehouse前根据需要进行了交换
warehouse[idx] = plr_material warehouse[idx] = plr_material
logger.debug(f"✅ 物料 {unique_name} 放置到 {wh_name}[{idx}] (Bioyond坐标: x={loc.get('x')}, y={loc.get('y')})") logger.debug(f"✅ 物料 {unique_name} 放置到 {wh_name}[{idx}] (Bioyond坐标: x={loc.get('x')}, y={loc.get('y')})")
else: else:
@@ -1069,24 +1011,11 @@ def resource_plr_to_bioyond(plr_resources: list[ResourcePLR], type_mapping: dict
logger.debug(f" 📭 [单瓶物料] {resource.name} 无液体,使用资源名: {material_name}") logger.debug(f" 📭 [单瓶物料] {resource.name} 无液体,使用资源名: {material_name}")
# 🎯 处理物料默认参数和单位 # 🎯 处理物料默认参数和单位
# 优先级: typeId参数 > 物料名称参数 > 默认值 # 检查是否有该物料名称的默认参数配置
default_unit = "" # 默认单位 default_unit = "" # 默认单位
material_parameters = {} material_parameters = {}
# 1⃣ 首先检查是否有 typeId 对应的参数配置(从 material_params 中获取key 格式为 "type:<typeId>" if material_name in material_params:
type_params_key = f"type:{type_id}"
if type_params_key in material_params:
params_config = material_params[type_params_key].copy()
# 提取 unit 字段(如果有)
if "unit" in params_config:
default_unit = params_config.pop("unit") # 从参数中移除,放到外层
# 剩余的字段放入 Parameters
material_parameters = params_config
logger.debug(f" 🔧 [物料参数-按typeId] 为 typeId={type_id[:8]}... 应用配置: unit={default_unit}, parameters={material_parameters}")
# 2⃣ 其次检查是否有该物料名称的默认参数配置
elif material_name in material_params:
params_config = material_params[material_name].copy() params_config = material_params[material_name].copy()
# 提取 unit 字段(如果有) # 提取 unit 字段(如果有)
@@ -1095,7 +1024,7 @@ def resource_plr_to_bioyond(plr_resources: list[ResourcePLR], type_mapping: dict
# 剩余的字段放入 Parameters # 剩余的字段放入 Parameters
material_parameters = params_config material_parameters = params_config
logger.debug(f" 🔧 [物料参数-按名称] 为 {material_name} 应用配置: unit={default_unit}, parameters={material_parameters}") logger.debug(f" 🔧 [物料参数] 为 {material_name} 应用配置: unit={default_unit}, parameters={material_parameters}")
# 转换为 JSON 字符串 # 转换为 JSON 字符串
parameters_json = json.dumps(material_parameters) if material_parameters else "{}" parameters_json = json.dumps(material_parameters) if material_parameters else "{}"
@@ -1222,7 +1151,11 @@ def initialize_resource(resource_config: dict, resource_type: Any = None) -> Uni
if resource_class_config["type"] == "pylabrobot": if resource_class_config["type"] == "pylabrobot":
resource_plr = RESOURCE(name=resource_config["name"]) resource_plr = RESOURCE(name=resource_config["name"])
if resource_type != ResourcePLR: if resource_type != ResourcePLR:
tree_sets = ResourceTreeSet.from_plr_resources([resource_plr], known_newly_created=True) tree_sets = ResourceTreeSet.from_plr_resources([resource_plr])
# r = resource_plr_to_ulab(resource_plr=resource_plr, parent_name=resource_config.get("parent", None))
# # r = resource_plr_to_ulab(resource_plr=resource_plr)
# if resource_config.get("position") is not None:
# r["position"] = resource_config["position"]
r = tree_sets.dump() r = tree_sets.dump()
else: else:
r = resource_plr r = resource_plr

View File

@@ -50,45 +50,12 @@ class Bottle(Well):
self.barcode = barcode self.barcode = barcode
def serialize(self) -> dict: def serialize(self) -> dict:
# Pylabrobot expects barcode to be an object with serialize(), but here it is a str.
# We temporarily unset it to avoid AttributeError in super().serialize().
_barcode = self.barcode
self.barcode = None
try:
data = super().serialize()
finally:
self.barcode = _barcode
return { return {
**data, **super().serialize(),
"diameter": self.diameter, "diameter": self.diameter,
"height": self.height, "height": self.height,
} }
@classmethod
def deserialize(cls, data: dict, allow_marshal: bool = False):
# Extract barcode before calling parent deserialize to avoid type error
barcode_data = data.pop("barcode", None)
# Call parent deserialize
instance = super(Bottle, cls).deserialize(data, allow_marshal=allow_marshal)
# Set barcode as string (not as Barcode object)
if barcode_data:
if isinstance(barcode_data, str):
instance.barcode = barcode_data
elif isinstance(barcode_data, dict):
# If it's a dict (Barcode serialized format), extract the data field
instance.barcode = barcode_data.get("data", "")
else:
instance.barcode = ""
# Set additional attributes
instance.diameter = data.get("diameter", instance._size_x)
instance.height = data.get("height", instance._size_z)
return instance
T = TypeVar("T", bound=ResourceHolder) T = TypeVar("T", bound=ResourceHolder)
S = TypeVar("S", bound=ResourceHolder) S = TypeVar("S", bound=ResourceHolder)

View File

@@ -1,12 +1,10 @@
import inspect import inspect
import traceback import traceback
import uuid import uuid
from pydantic import BaseModel, field_serializer, field_validator, ValidationError from pydantic import BaseModel, field_serializer, field_validator
from pydantic import Field from pydantic import Field
from typing import List, Tuple, Any, Dict, Literal, Optional, cast, TYPE_CHECKING, Union from typing import List, Tuple, Any, Dict, Literal, Optional, cast, TYPE_CHECKING, Union
from typing_extensions import TypedDict
from unilabos.resources.plr_additional_res_reg import register from unilabos.resources.plr_additional_res_reg import register
from unilabos.utils.log import logger from unilabos.utils.log import logger
@@ -15,29 +13,6 @@ if TYPE_CHECKING:
from pylabrobot.resources import Resource as PLRResource from pylabrobot.resources import Resource as PLRResource
EXTRA_CLASS = "unilabos_resource_class"
EXTRA_SAMPLE_UUID = "sample_uuid"
EXTRA_UNILABOS_SAMPLE_UUID = "unilabos_sample_uuid"
# 函数参数名常量 - 用于自动注入 sample_uuids 列表
PARAM_SAMPLE_UUIDS = "sample_uuids"
# JSON Command 中的系统参数字段名
JSON_UNILABOS_PARAM = "unilabos_param"
# 返回值中的 samples 字段名
RETURN_UNILABOS_SAMPLES = "unilabos_samples"
# sample_uuids 参数类型 (用于 virtual bench 等设备添加 sample_uuids 参数)
SampleUUIDsType = Dict[str, Optional["PLRResource"]]
class LabSample(TypedDict):
sample_uuid: str
oss_path: str
extra: Dict[str, Any]
class ResourceDictPositionSize(BaseModel): class ResourceDictPositionSize(BaseModel):
depth: float = Field(description="Depth", default=0.0) # z depth: float = Field(description="Depth", default=0.0) # z
width: float = Field(description="Width", default=0.0) # x width: float = Field(description="Width", default=0.0) # x
@@ -172,8 +147,8 @@ class ResourceDictInstance(object):
if not content.get("extra"): # MagicCode if not content.get("extra"): # MagicCode
content["extra"] = {} content["extra"] = {}
if "position" in content: if "position" in content:
pose = content.get("pose", {}) pose = content.get("pose",{})
if "position" not in pose: if "position" not in pose :
if "position" in content["position"]: if "position" in content["position"]:
pose["position"] = content["position"]["position"] pose["position"] = content["position"]["position"]
else: else:
@@ -182,14 +157,10 @@ class ResourceDictInstance(object):
pose["size"] = { pose["size"] = {
"width": content["config"].get("size_x", 0), "width": content["config"].get("size_x", 0),
"height": content["config"].get("size_y", 0), "height": content["config"].get("size_y", 0),
"depth": content["config"].get("size_z", 0), "depth": content["config"].get("size_z", 0)
} }
content["pose"] = pose content["pose"] = pose
try: return ResourceDictInstance(ResourceDict.model_validate(content))
res_dict = ResourceDict.model_validate(content)
return ResourceDictInstance(res_dict)
except ValidationError as err:
raise err
def get_plr_nested_dict(self) -> Dict[str, Any]: def get_plr_nested_dict(self) -> Dict[str, Any]:
"""获取资源实例的嵌套字典表示""" """获取资源实例的嵌套字典表示"""
@@ -351,7 +322,7 @@ class ResourceTreeSet(object):
) )
@classmethod @classmethod
def from_plr_resources(cls, resources: List["PLRResource"], known_newly_created=False) -> "ResourceTreeSet": def from_plr_resources(cls, resources: List["PLRResource"]) -> "ResourceTreeSet":
""" """
从plr资源创建ResourceTreeSet 从plr资源创建ResourceTreeSet
""" """
@@ -368,8 +339,6 @@ class ResourceTreeSet(object):
} }
if source in replace_info: if source in replace_info:
return replace_info[source] return replace_info[source]
elif source is None:
return ""
else: else:
print("转换pylabrobot的时候出现未知类型", source) print("转换pylabrobot的时候出现未知类型", source)
return source return source
@@ -380,8 +349,7 @@ class ResourceTreeSet(object):
if not uid: if not uid:
uid = str(uuid.uuid4()) uid = str(uuid.uuid4())
res.unilabos_uuid = uid res.unilabos_uuid = uid
if not known_newly_created: logger.warning(f"{res}没有uuid请设置后再传入默认填充{uid}\n{traceback.format_exc()}")
logger.warning(f"{res}没有uuid请设置后再传入默认填充{uid}\n{traceback.format_exc()}")
# 获取unilabos_extra默认为空字典 # 获取unilabos_extra默认为空字典
extra = getattr(res, "unilabos_extra", {}) extra = getattr(res, "unilabos_extra", {})
@@ -418,7 +386,7 @@ class ResourceTreeSet(object):
"parent": parent_resource, # 直接传入 ResourceDict 对象 "parent": parent_resource, # 直接传入 ResourceDict 对象
"parent_uuid": parent_uuid, # 使用 parent_uuid 而不是 parent 对象 "parent_uuid": parent_uuid, # 使用 parent_uuid 而不是 parent 对象
"type": replace_plr_type(d.get("category", "")), "type": replace_plr_type(d.get("category", "")),
"class": extra.get(EXTRA_CLASS, ""), "class": d.get("class", ""),
"position": pos, "position": pos,
"pose": pos, "pose": pos,
"config": { "config": {
@@ -468,7 +436,7 @@ class ResourceTreeSet(object):
trees.append(tree_instance) trees.append(tree_instance)
return cls(trees) return cls(trees)
def to_plr_resources(self, skip_devices=True) -> List["PLRResource"]: def to_plr_resources(self) -> List["PLRResource"]:
""" """
将 ResourceTreeSet 转换为 PLR 资源列表 将 ResourceTreeSet 转换为 PLR 资源列表
@@ -480,20 +448,13 @@ class ResourceTreeSet(object):
from pylabrobot.utils.object_parsing import find_subclass from pylabrobot.utils.object_parsing import find_subclass
# 类型映射 # 类型映射
TYPE_MAP = { TYPE_MAP = {"plate": "Plate", "well": "Well", "deck": "Deck", "container": "RegularContainer", "tip_spot": "TipSpot"}
"plate": "Plate",
"well": "Well",
"deck": "Deck",
"container": "RegularContainer",
"tip_spot": "TipSpot",
}
def collect_node_data(node: ResourceDictInstance, name_to_uuid: dict, all_states: dict, name_to_extra: dict): def collect_node_data(node: ResourceDictInstance, name_to_uuid: dict, all_states: dict, name_to_extra: dict):
"""一次遍历收集 name_to_uuid, all_states 和 name_to_extra""" """一次遍历收集 name_to_uuid, all_states 和 name_to_extra"""
name_to_uuid[node.res_content.name] = node.res_content.uuid name_to_uuid[node.res_content.name] = node.res_content.uuid
all_states[node.res_content.name] = node.res_content.data all_states[node.res_content.name] = node.res_content.data
name_to_extra[node.res_content.name] = node.res_content.extra name_to_extra[node.res_content.name] = node.res_content.extra
name_to_extra[node.res_content.name][EXTRA_CLASS] = node.res_content.klass
for child in node.children: for child in node.children:
collect_node_data(child, name_to_uuid, all_states, name_to_extra) collect_node_data(child, name_to_uuid, all_states, name_to_extra)
@@ -538,10 +499,7 @@ class ResourceTreeSet(object):
plr_dict = node_to_plr_dict(tree.root_node, has_model) plr_dict = node_to_plr_dict(tree.root_node, has_model)
try: try:
sub_cls = find_subclass(plr_dict["type"], PLRResource) sub_cls = find_subclass(plr_dict["type"], PLRResource)
if skip_devices and plr_dict["type"] == "device": if sub_cls is None:
logger.info(f"跳过更新 {plr_dict['name']} 设备是class")
continue
elif sub_cls is None:
raise ValueError( raise ValueError(
f"无法找到类型 {plr_dict['type']} 对应的 PLR 资源类。原始信息:{tree.root_node.res_content}" f"无法找到类型 {plr_dict['type']} 对应的 PLR 资源类。原始信息:{tree.root_node.res_content}"
) )
@@ -549,11 +507,6 @@ class ResourceTreeSet(object):
if "category" not in spec.parameters: if "category" not in spec.parameters:
plr_dict.pop("category", None) plr_dict.pop("category", None)
plr_resource = sub_cls.deserialize(plr_dict, allow_marshal=True) plr_resource = sub_cls.deserialize(plr_dict, allow_marshal=True)
from pylabrobot.resources import Coordinate
from pylabrobot.serializer import deserialize
location = cast(Coordinate, deserialize(plr_dict["location"]))
plr_resource.location = location
plr_resource.load_all_state(all_states) plr_resource.load_all_state(all_states)
# 使用 DeviceNodeResourceTracker 设置 UUID 和 Extra # 使用 DeviceNodeResourceTracker 设置 UUID 和 Extra
tracker.loop_set_uuid(plr_resource, name_to_uuid) tracker.loop_set_uuid(plr_resource, name_to_uuid)
@@ -655,16 +608,6 @@ class ResourceTreeSet(object):
""" """
return [tree.root_node for tree in self.trees] return [tree.root_node for tree in self.trees]
@property
def root_nodes_uuid(self) -> List[ResourceDictInstance]:
"""
获取所有树的根节点
Returns:
所有根节点的资源实例列表
"""
return [tree.root_node.res_content.uuid for tree in self.trees]
@property @property
def all_nodes(self) -> List[ResourceDictInstance]: def all_nodes(self) -> List[ResourceDictInstance]:
""" """
@@ -975,33 +918,6 @@ class DeviceNodeResourceTracker(object):
return self._traverse_and_process(resource, process) return self._traverse_and_process(resource, process)
def loop_find_with_uuid(self, resource, target_uuid: str):
"""
递归遍历资源树,根据 uuid 查找并返回对应的资源
Args:
resource: 资源对象可以是list、dict或实例
target_uuid: 要查找的uuid
Returns:
找到的资源对象未找到则返回None
"""
found_resource = None
def process(res):
nonlocal found_resource
if found_resource is not None:
return 0 # 已找到,跳过后续处理
current_uuid = self._get_resource_attr(res, "uuid", "unilabos_uuid")
if current_uuid and current_uuid == target_uuid:
found_resource = res
logger.trace(f"找到资源UUID: {target_uuid}")
return 1
return 0
self._traverse_and_process(resource, process)
return found_resource
def loop_set_extra(self, resource, name_to_extra_map: Dict[str, dict]) -> int: def loop_set_extra(self, resource, name_to_extra_map: Dict[str, dict]) -> int:
""" """
递归遍历资源树,根据 name 设置所有节点的 extra 递归遍历资源树,根据 name 设置所有节点的 extra
@@ -1020,7 +936,7 @@ class DeviceNodeResourceTracker(object):
extra = name_to_extra_map[resource_name] extra = name_to_extra_map[resource_name]
self.set_resource_extra(res, extra) self.set_resource_extra(res, extra)
if len(extra): if len(extra):
logger.trace(f"设置资源Extra: {resource_name} -> {extra}") logger.debug(f"设置资源Extra: {resource_name} -> {extra}")
return 1 return 1
return 0 return 0
@@ -1187,7 +1103,7 @@ class DeviceNodeResourceTracker(object):
for key in keys_to_remove: for key in keys_to_remove:
self.resource2parent_resource.pop(key, None) self.resource2parent_resource.pop(key, None)
logger.trace(f"[ResourceTracker] 成功移除资源: {resource}") logger.debug(f"成功移除资源: {resource}")
return True return True
def clear_resource(self): def clear_resource(self):

View File

@@ -43,10 +43,6 @@ def warehouse_factory(
if layout == "row-major": if layout == "row-major":
# 行优先row=0(A行) 应该显示在上方,需要较小的 y 值 # 行优先row=0(A行) 应该显示在上方,需要较小的 y 值
y = dy + row * item_dy y = dy + row * item_dy
elif layout == "vertical-col-major":
# 竖向warehouse: row=0 对应顶部y小row=n-1 对应底部y大
# 但标签 01 应该在底部,所以使用反向映射
y = dy + (num_items_y - row - 1) * item_dy
else: else:
# 列优先保持原逻辑row=0 对应较大的 y # 列优先保持原逻辑row=0 对应较大的 y
y = dy + (num_items_y - row - 1) * item_dy y = dy + (num_items_y - row - 1) * item_dy

View File

@@ -159,14 +159,10 @@ _msg_converter: Dict[Type, Any] = {
else Pose() else Pose()
), ),
config=json.dumps(x.get("config", {})), config=json.dumps(x.get("config", {})),
data=json.dumps(obtain_data_with_uuid(x)), data=json.dumps(x.get("data", {})),
), ),
} }
def obtain_data_with_uuid(x: dict):
data = x.get("data", {})
data["unilabos_uuid"] = x.get("uuid", None)
return data
def json_or_yaml_loads(data: str) -> Any: def json_or_yaml_loads(data: str) -> Any:
try: try:
@@ -770,16 +766,13 @@ def ros_message_to_json_schema(msg_class: Any, field_name: str) -> Dict[str, Any
return schema return schema
def ros_action_to_json_schema( def ros_action_to_json_schema(action_class: Any, description="") -> Dict[str, Any]:
action_class: Any, description="", previous_schema: Optional[Dict[str, Any]] = None
) -> Dict[str, Any]:
""" """
将 ROS Action 类转换为 JSON Schema 将 ROS Action 类转换为 JSON Schema
Args: Args:
action_class: ROS Action 类 action_class: ROS Action 类
description: 描述 description: 描述
previous_schema: 之前的 schema用于保留 goal/feedback/result 下一级字段的 description
Returns: Returns:
完整的 JSON Schema 定义 完整的 JSON Schema 定义
@@ -813,44 +806,9 @@ def ros_action_to_json_schema(
"required": ["goal"], "required": ["goal"],
} }
# 保留之前 schema 中 goal/feedback/result 下一级字段的 description
if previous_schema:
_preserve_field_descriptions(schema, previous_schema)
return schema return schema
def _preserve_field_descriptions(
new_schema: Dict[str, Any], previous_schema: Dict[str, Any]
) -> None:
"""
保留之前 schema 中 goal/feedback/result 下一级字段的 description 和 title
Args:
new_schema: 新生成的 schema会被修改
previous_schema: 之前的 schema
"""
for section in ["goal", "feedback", "result"]:
new_section = new_schema.get("properties", {}).get(section, {})
prev_section = previous_schema.get("properties", {}).get(section, {})
if not new_section or not prev_section:
continue
new_props = new_section.get("properties", {})
prev_props = prev_section.get("properties", {})
for field_name, field_schema in new_props.items():
if field_name in prev_props:
prev_field = prev_props[field_name]
# 保留字段的 description
if "description" in prev_field and prev_field["description"]:
field_schema["description"] = prev_field["description"]
# 保留字段的 title用户自定义的中文名
if "title" in prev_field and prev_field["title"]:
field_schema["title"] = prev_field["title"]
def convert_ros_action_to_jsonschema( def convert_ros_action_to_jsonschema(
action_name_or_type: Union[str, Type], output_file: Optional[str] = None, format: str = "json" action_name_or_type: Union[str, Type], output_file: Optional[str] = None, format: str = "json"
) -> Dict[str, Any]: ) -> Dict[str, Any]:

View File

@@ -4,20 +4,8 @@ import json
import threading import threading
import time import time
import traceback import traceback
from typing import ( from typing import get_type_hints, TypeVar, Generic, Dict, Any, Type, TypedDict, Optional, List, TYPE_CHECKING, Union, \
get_type_hints, Tuple
TypeVar,
Generic,
Dict,
Any,
Type,
TypedDict,
Optional,
List,
TYPE_CHECKING,
Union,
Tuple,
)
from concurrent.futures import ThreadPoolExecutor from concurrent.futures import ThreadPoolExecutor
import asyncio import asyncio
@@ -60,10 +48,8 @@ from unilabos.resources.resource_tracker import (
ResourceTreeSet, ResourceTreeSet,
ResourceTreeInstance, ResourceTreeInstance,
ResourceDictInstance, ResourceDictInstance,
EXTRA_SAMPLE_UUID,
PARAM_SAMPLE_UUIDS,
JSON_UNILABOS_PARAM,
) )
from unilabos.ros.x.rclpyx import get_event_loop
from unilabos.ros.utils.driver_creator import WorkstationNodeCreator, PyLabRobotCreator, DeviceClassCreator from unilabos.ros.utils.driver_creator import WorkstationNodeCreator, PyLabRobotCreator, DeviceClassCreator
from rclpy.task import Task, Future from rclpy.task import Task, Future
from unilabos.utils.import_manager import default_manager from unilabos.utils.import_manager import default_manager
@@ -199,7 +185,7 @@ class PropertyPublisher:
f"创建发布者 {name} 失败,可能由于注册表有误,类型: {msg_type},错误: {ex}\n{traceback.format_exc()}" f"创建发布者 {name} 失败,可能由于注册表有误,类型: {msg_type},错误: {ex}\n{traceback.format_exc()}"
) )
self.timer = node.create_timer(self.timer_period, self.publish_property) self.timer = node.create_timer(self.timer_period, self.publish_property)
self.__loop = ROS2DeviceNode.get_asyncio_loop() self.__loop = get_event_loop()
str_msg_type = str(msg_type)[8:-2] str_msg_type = str(msg_type)[8:-2]
self.node.lab_logger().trace(f"发布属性: {name}, 类型: {str_msg_type}, 周期: {initial_period}秒, QoS: {qos}") self.node.lab_logger().trace(f"发布属性: {name}, 类型: {str_msg_type}, 周期: {initial_period}秒, QoS: {qos}")
@@ -231,15 +217,14 @@ class PropertyPublisher:
def publish_property(self): def publish_property(self):
try: try:
# self.node.lab_logger().trace(f"【.publish_property】开始发布属性: {self.name}") self.node.lab_logger().trace(f"【.publish_property】开始发布属性: {self.name}")
value = self.get_property() value = self.get_property()
if self.print_publish: if self.print_publish:
pass self.node.lab_logger().trace(f"【.publish_property】发布 {self.msg_type}: {value}")
# self.node.lab_logger().trace(f"【.publish_property】发布 {self.msg_type}: {value}")
if value is not None: if value is not None:
msg = convert_to_ros_msg(self.msg_type, value) msg = convert_to_ros_msg(self.msg_type, value)
self.publisher_.publish(msg) self.publisher_.publish(msg)
# self.node.lab_logger().trace(f"【.publish_property】属性 {self.name} 发布成功") self.node.lab_logger().trace(f"【.publish_property】属性 {self.name} 发布成功")
except Exception as e: except Exception as e:
self.node.lab_logger().error( self.node.lab_logger().error(
f"【.publish_property】发布属性 {self.publisher_.topic} 出错: {str(e)}\n{traceback.format_exc()}" f"【.publish_property】发布属性 {self.publisher_.topic} 出错: {str(e)}\n{traceback.format_exc()}"
@@ -377,7 +362,6 @@ class BaseROS2DeviceNode(Node, Generic[T]):
from pylabrobot.resources.deck import Deck from pylabrobot.resources.deck import Deck
from pylabrobot.resources import Coordinate from pylabrobot.resources import Coordinate
from pylabrobot.resources import Plate from pylabrobot.resources import Plate
# 物料传输到对应的node节点 # 物料传输到对应的node节点
client = self._resource_clients["c2s_update_resource_tree"] client = self._resource_clients["c2s_update_resource_tree"]
request = SerialCommand.Request() request = SerialCommand.Request()
@@ -405,27 +389,30 @@ class BaseROS2DeviceNode(Node, Generic[T]):
rts: ResourceTreeSet = ResourceTreeSet.from_raw_dict_list(input_resources) rts: ResourceTreeSet = ResourceTreeSet.from_raw_dict_list(input_resources)
parent_resource = None parent_resource = None
if bind_parent_id != self.node_name: if bind_parent_id != self.node_name:
parent_resource = self.resource_tracker.figure_resource({"name": bind_parent_id}) parent_resource = self.resource_tracker.figure_resource(
for r in rts.root_nodes: {"name": bind_parent_id}
# noinspection PyUnresolvedReferences )
r.res_content.parent_uuid = parent_resource.unilabos_uuid for r in rts.root_nodes:
else: # noinspection PyUnresolvedReferences
for r in rts.root_nodes: r.res_content.parent_uuid = parent_resource.unilabos_uuid
r.res_content.parent_uuid = self.uuid
rts_plr_instances = rts.to_plr_resources() if len(LIQUID_INPUT_SLOT) and LIQUID_INPUT_SLOT[0] == -1 and len(rts.root_nodes) == 1 and isinstance(rts.root_nodes[0], RegularContainer):
if len(rts.root_nodes) == 1 and isinstance(rts_plr_instances[0], RegularContainer):
# noinspection PyTypeChecker # noinspection PyTypeChecker
container_instance: RegularContainer = rts_plr_instances[0] container_instance: RegularContainer = rts.root_nodes[0]
found_resources = self.resource_tracker.figure_resource({"name": container_instance.name}, try_mode=True) found_resources = self.resource_tracker.figure_resource(
{"id": container_instance.name}, try_mode=True
)
if not len(found_resources): if not len(found_resources):
self.resource_tracker.add_resource(container_instance) self.resource_tracker.add_resource(container_instance)
logger.info(f"添加物料{container_instance.name}到资源跟踪器") logger.info(f"添加物料{container_instance.name}到资源跟踪器")
else: else:
assert len(found_resources) == 1, f"找到多个同名物料: {container_instance.name}, 请检查物料系统" assert (
len(found_resources) == 1
), f"找到多个同名物料: {container_instance.name}, 请检查物料系统"
found_resource = found_resources[0] found_resource = found_resources[0]
if isinstance(found_resource, RegularContainer): if isinstance(found_resource, RegularContainer):
logger.info(f"更新物料{container_instance.name}的数据{found_resource.state}") logger.info(f"更新物料{container_instance.name}的数据{found_resource.state}")
found_resource.state.update(container_instance.state) found_resource.state.update(json.loads(container_instance.state))
elif isinstance(found_resource, dict): elif isinstance(found_resource, dict):
raise ValueError("已不支持 字典 版本的RegularContainer") raise ValueError("已不支持 字典 版本的RegularContainer")
else: else:
@@ -433,26 +420,21 @@ class BaseROS2DeviceNode(Node, Generic[T]):
f"更新物料{container_instance.name}出现不支持的数据类型{type(found_resource)} {found_resource}" f"更新物料{container_instance.name}出现不支持的数据类型{type(found_resource)} {found_resource}"
) )
# noinspection PyUnresolvedReferences # noinspection PyUnresolvedReferences
request.command = json.dumps( request.command = json.dumps({
{ "action": "add",
"action": "add", "data": {
"data": { "data": rts.dump(),
"data": rts.dump(), "mount_uuid": parent_resource.unilabos_uuid if parent_resource is not None else "",
"mount_uuid": parent_resource.unilabos_uuid if parent_resource is not None else self.uuid, "first_add": False,
"first_add": False, },
}, })
}
)
tree_response: SerialCommand.Response = await client.call_async(request) tree_response: SerialCommand.Response = await client.call_async(request)
uuid_maps = json.loads(tree_response.response) uuid_maps = json.loads(tree_response.response)
plr_instances = rts.to_plr_resources() self.resource_tracker.loop_update_uuid(input_resources, uuid_maps)
for plr_instance in plr_instances:
self.resource_tracker.loop_update_uuid(plr_instance, uuid_maps)
rts: ResourceTreeSet = ResourceTreeSet.from_plr_resources(plr_instances)
self.lab_logger().info(f"Resource tree added. UUID mapping: {len(uuid_maps)} nodes") self.lab_logger().info(f"Resource tree added. UUID mapping: {len(uuid_maps)} nodes")
final_response = { final_response = {
"created_resource_tree": rts.dump(), "created_resources": rts.dump(),
"liquid_input_resource_tree": [], "liquid_input_resources": [],
} }
res.response = json.dumps(final_response) res.response = json.dumps(final_response)
# 如果driver自己就有assign的方法那就使用driver自己的assign方法 # 如果driver自己就有assign的方法那就使用driver自己的assign方法
@@ -478,15 +460,13 @@ class BaseROS2DeviceNode(Node, Generic[T]):
return res return res
try: try:
if len(rts.root_nodes) == 1 and parent_resource is not None: if len(rts.root_nodes) == 1 and parent_resource is not None:
plr_instance = plr_instances[0] plr_instance = rts.to_plr_resources()[0]
if isinstance(plr_instance, Plate): if isinstance(plr_instance, Plate):
empty_liquid_info_in: List[Tuple[Optional[str], float]] = [(None, 0)] * plr_instance.num_items empty_liquid_info_in: List[Tuple[Optional[str], float]] = [(None, 0)] * plr_instance.num_items
if len(ADD_LIQUID_TYPE) == 1 and len(LIQUID_VOLUME) == 1 and len(LIQUID_INPUT_SLOT) > 1: if len(ADD_LIQUID_TYPE) == 1 and len(LIQUID_VOLUME) == 1 and len(LIQUID_INPUT_SLOT) > 1:
ADD_LIQUID_TYPE = ADD_LIQUID_TYPE * len(LIQUID_INPUT_SLOT) ADD_LIQUID_TYPE = ADD_LIQUID_TYPE * len(LIQUID_INPUT_SLOT)
LIQUID_VOLUME = LIQUID_VOLUME * len(LIQUID_INPUT_SLOT) LIQUID_VOLUME = LIQUID_VOLUME * len(LIQUID_INPUT_SLOT)
self.lab_logger().warning( self.lab_logger().warning(f"增加液体资源时数量为1自动补全为 {len(LIQUID_INPUT_SLOT)}")
f"增加液体资源时数量为1自动补全为 {len(LIQUID_INPUT_SLOT)}"
)
for liquid_type, liquid_volume, liquid_input_slot in zip( for liquid_type, liquid_volume, liquid_input_slot in zip(
ADD_LIQUID_TYPE, LIQUID_VOLUME, LIQUID_INPUT_SLOT ADD_LIQUID_TYPE, LIQUID_VOLUME, LIQUID_INPUT_SLOT
): ):
@@ -505,15 +485,9 @@ class BaseROS2DeviceNode(Node, Generic[T]):
input_wells = [] input_wells = []
for r in LIQUID_INPUT_SLOT: for r in LIQUID_INPUT_SLOT:
input_wells.append(plr_instance.children[r]) input_wells.append(plr_instance.children[r])
final_response["liquid_input_resource_tree"] = ResourceTreeSet.from_plr_resources( final_response["liquid_input_resources"] = ResourceTreeSet.from_plr_resources(input_wells).dump()
input_wells
).dump()
res.response = json.dumps(final_response) res.response = json.dumps(final_response)
if ( if issubclass(parent_resource.__class__, Deck) and hasattr(parent_resource, "assign_child_at_slot") and "slot" in other_calling_param:
issubclass(parent_resource.__class__, Deck)
and hasattr(parent_resource, "assign_child_at_slot")
and "slot" in other_calling_param
):
other_calling_param["slot"] = int(other_calling_param["slot"]) other_calling_param["slot"] = int(other_calling_param["slot"])
parent_resource.assign_child_at_slot(plr_instance, **other_calling_param) parent_resource.assign_child_at_slot(plr_instance, **other_calling_param)
else: else:
@@ -528,16 +502,14 @@ class BaseROS2DeviceNode(Node, Generic[T]):
rts_with_parent = ResourceTreeSet.from_plr_resources([parent_resource]) rts_with_parent = ResourceTreeSet.from_plr_resources([parent_resource])
if rts_with_parent.root_nodes[0].res_content.uuid_parent is None: if rts_with_parent.root_nodes[0].res_content.uuid_parent is None:
rts_with_parent.root_nodes[0].res_content.parent_uuid = self.uuid rts_with_parent.root_nodes[0].res_content.parent_uuid = self.uuid
request.command = json.dumps( request.command = json.dumps({
{ "action": "add",
"action": "add", "data": {
"data": { "data": rts_with_parent.dump(),
"data": rts_with_parent.dump(), "mount_uuid": rts_with_parent.root_nodes[0].res_content.uuid_parent,
"mount_uuid": rts_with_parent.root_nodes[0].res_content.uuid_parent, "first_add": False,
"first_add": False, },
}, })
}
)
tree_response: SerialCommand.Response = await client.call_async(request) tree_response: SerialCommand.Response = await client.call_async(request)
uuid_maps = json.loads(tree_response.response) uuid_maps = json.loads(tree_response.response)
self.resource_tracker.loop_update_uuid(input_resources, uuid_maps) self.resource_tracker.loop_update_uuid(input_resources, uuid_maps)
@@ -647,7 +619,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
) # type: ignore ) # type: ignore
raw_nodes = json.loads(response.response) raw_nodes = json.loads(response.response)
tree_set = ResourceTreeSet.from_raw_dict_list(raw_nodes) tree_set = ResourceTreeSet.from_raw_dict_list(raw_nodes)
self.lab_logger().trace(f"获取资源结果: {len(tree_set.trees)} 个资源树 {tree_set.root_nodes}") self.lab_logger().debug(f"获取资源结果: {len(tree_set.trees)} 个资源树")
return tree_set return tree_set
async def get_resource_with_dir(self, resource_id: str, with_children: bool = True) -> "ResourcePLR": async def get_resource_with_dir(self, resource_id: str, with_children: bool = True) -> "ResourcePLR":
@@ -681,71 +653,61 @@ class BaseROS2DeviceNode(Node, Generic[T]):
def transfer_to_new_resource( def transfer_to_new_resource(
self, plr_resource: "ResourcePLR", tree: ResourceTreeInstance, additional_add_params: Dict[str, Any] self, plr_resource: "ResourcePLR", tree: ResourceTreeInstance, additional_add_params: Dict[str, Any]
) -> Optional["ResourcePLR"]: ):
parent_uuid = tree.root_node.res_content.parent_uuid parent_uuid = tree.root_node.res_content.parent_uuid
if not parent_uuid: if parent_uuid:
self.lab_logger().warning( parent_resource: ResourcePLR = self.resource_tracker.uuid_to_resources.get(parent_uuid)
f"物料{plr_resource} parent未知挂载到当前节点下额外参数{additional_add_params}" if parent_resource is None:
)
return None
if parent_uuid == self.uuid:
self.lab_logger().warning(
f"物料{plr_resource}请求挂载到{self.identifier},额外参数:{additional_add_params}"
)
return None
parent_resource: ResourcePLR = self.resource_tracker.uuid_to_resources.get(parent_uuid)
if parent_resource is None:
self.lab_logger().warning(
f"物料{plr_resource}请求挂载{tree.root_node.res_content.name}的父节点{parent_uuid}不存在"
)
else:
try:
# 特殊兼容所有plr的物料的assign方法和create_resource append_resource后期同步
additional_params = {}
extra = getattr(plr_resource, "unilabos_extra", {})
if len(extra):
self.lab_logger().info(f"发现物料{plr_resource}额外参数: " + str(extra))
if "update_resource_site" in extra:
additional_add_params["site"] = extra["update_resource_site"]
site = additional_add_params.get("site", None)
spec = inspect.signature(parent_resource.assign_child_resource)
if "spot" in spec.parameters:
ordering_dict: Dict[str, Any] = getattr(parent_resource, "_ordering")
if ordering_dict:
site = list(ordering_dict.keys()).index(site)
additional_params["spot"] = site
old_parent = plr_resource.parent
if old_parent is not None:
# plr并不支持同一个deck的加载和卸载
self.lab_logger().warning(f"物料{plr_resource}请求从{old_parent}卸载")
old_parent.unassign_child_resource(plr_resource)
self.lab_logger().warning( self.lab_logger().warning(
f"物料{plr_resource}请求挂载{parent_resource},额外参数:{additional_params}" f"物料{plr_resource}请求挂载{tree.root_node.res_content.name}的父节点{parent_uuid}不存在"
) )
else:
try:
# 特殊兼容所有plr的物料的assign方法和create_resource append_resource后期同步
additional_params = {}
extra = getattr(plr_resource, "unilabos_extra", {})
if len(extra):
self.lab_logger().info(f"发现物料{plr_resource}额外参数: " + str(extra))
if "update_resource_site" in extra:
additional_add_params["site"] = extra["update_resource_site"]
site = additional_add_params.get("site", None)
spec = inspect.signature(parent_resource.assign_child_resource)
if "spot" in spec.parameters:
ordering_dict: Dict[str, Any] = getattr(parent_resource, "_ordering")
if ordering_dict:
site = list(ordering_dict.keys()).index(site)
additional_params["spot"] = site
old_parent = plr_resource.parent
if old_parent is not None:
# plr并不支持同一个deck的加载和卸载
self.lab_logger().warning(f"物料{plr_resource}请求从{old_parent}卸载")
old_parent.unassign_child_resource(plr_resource)
self.lab_logger().warning(
f"物料{plr_resource}请求挂载到{parent_resource},额外参数:{additional_params}"
)
# ⭐ assign 之前,需要从 resources 列表中移除 # ⭐ assign 之前,需要从 resources 列表中移除
# 因为资源将不再是顶级资源,而是成为 parent_resource 的子资源 # 因为资源将不再是顶级资源,而是成为 parent_resource 的子资源
# 如果不移除figure_resource 会找到两次:一次在 resources一次在 parent 的 children # 如果不移除figure_resource 会找到两次:一次在 resources一次在 parent 的 children
resource_id = id(plr_resource) resource_id = id(plr_resource)
for i, r in enumerate(self.resource_tracker.resources): for i, r in enumerate(self.resource_tracker.resources):
if id(r) == resource_id: if id(r) == resource_id:
self.resource_tracker.resources.pop(i) self.resource_tracker.resources.pop(i)
self.lab_logger().debug( self.lab_logger().debug(
f"从顶级资源列表中移除 {plr_resource.name}(即将成为 {parent_resource.name} 的子资源)" f"从顶级资源列表中移除 {plr_resource.name}(即将成为 {parent_resource.name} 的子资源)"
) )
break break
parent_resource.assign_child_resource(plr_resource, location=None, **additional_params) parent_resource.assign_child_resource(plr_resource, location=None, **additional_params)
func = getattr(self.driver_instance, "resource_tree_transfer", None) func = getattr(self.driver_instance, "resource_tree_transfer", None)
if callable(func): if callable(func):
# 分别是 物料的原来父节点当前物料的状态物料的新父节点此时物料已经重新assign了 # 分别是 物料的原来父节点当前物料的状态物料的新父节点此时物料已经重新assign了
func(old_parent, plr_resource, parent_resource) func(old_parent, plr_resource, parent_resource)
return parent_resource except Exception as e:
except Exception as e: self.lab_logger().warning(
self.lab_logger().warning( f"物料{plr_resource}请求挂载{tree.root_node.res_content.name}的父节点{parent_resource}[{parent_uuid}]失败!\n{traceback.format_exc()}"
f"物料{plr_resource}请求挂载{tree.root_node.res_content.name}的父节点{parent_resource}[{parent_uuid}]失败!\n{traceback.format_exc()}" )
)
async def s2c_resource_tree(self, req: SerialCommand_Request, res: SerialCommand_Response): async def s2c_resource_tree(self, req: SerialCommand_Request, res: SerialCommand_Response):
""" """
@@ -760,7 +722,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
def _handle_add( def _handle_add(
plr_resources: List[ResourcePLR], tree_set: ResourceTreeSet, additional_add_params: Dict[str, Any] plr_resources: List[ResourcePLR], tree_set: ResourceTreeSet, additional_add_params: Dict[str, Any]
) -> Tuple[Dict[str, Any], List[ResourcePLR]]: ) -> Dict[str, Any]:
""" """
处理资源添加操作的内部函数 处理资源添加操作的内部函数
@@ -772,20 +734,15 @@ class BaseROS2DeviceNode(Node, Generic[T]):
Returns: Returns:
操作结果字典 操作结果字典
""" """
parents = [] # 放的是被变更的物料 / 被变更的物料父级
for plr_resource, tree in zip(plr_resources, tree_set.trees): for plr_resource, tree in zip(plr_resources, tree_set.trees):
self.resource_tracker.add_resource(plr_resource) self.resource_tracker.add_resource(plr_resource)
parent = self.transfer_to_new_resource(plr_resource, tree, additional_add_params) self.transfer_to_new_resource(plr_resource, tree, additional_add_params)
if parent is not None:
parents.append(parent)
else:
parents.append(plr_resource)
func = getattr(self.driver_instance, "resource_tree_add", None) func = getattr(self.driver_instance, "resource_tree_add", None)
if callable(func): if callable(func):
func(plr_resources) func(plr_resources)
return {"success": True, "action": "add"}, parents return {"success": True, "action": "add"}
def _handle_remove(resources_uuid: List[str]) -> Dict[str, Any]: def _handle_remove(resources_uuid: List[str]) -> Dict[str, Any]:
""" """
@@ -820,11 +777,11 @@ class BaseROS2DeviceNode(Node, Generic[T]):
if plr_resource.parent is not None: if plr_resource.parent is not None:
plr_resource.parent.unassign_child_resource(plr_resource) plr_resource.parent.unassign_child_resource(plr_resource)
self.resource_tracker.remove_resource(plr_resource) self.resource_tracker.remove_resource(plr_resource)
self.lab_logger().info(f"[资源同步] 移除物料 {plr_resource} 及其子节点") self.lab_logger().info(f"移除物料 {plr_resource} 及其子节点")
for other_plr_resource in other_plr_resources: for other_plr_resource in other_plr_resources:
self.resource_tracker.remove_resource(other_plr_resource) self.resource_tracker.remove_resource(other_plr_resource)
self.lab_logger().info(f"[资源同步] 移除物料 {other_plr_resource} 及其子节点") self.lab_logger().info(f"移除物料 {other_plr_resource} 及其子节点")
return { return {
"success": True, "success": True,
@@ -834,9 +791,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
} }
def _handle_update( def _handle_update(
plr_resources: List[Union[ResourcePLR, ResourceDictInstance]], plr_resources: List[Union[ResourcePLR, ResourceDictInstance]], tree_set: ResourceTreeSet, additional_add_params: Dict[str, Any]
tree_set: ResourceTreeSet,
additional_add_params: Dict[str, Any],
) -> Tuple[Dict[str, Any], List[ResourcePLR]]: ) -> Tuple[Dict[str, Any], List[ResourcePLR]]:
""" """
处理资源更新操作的内部函数 处理资源更新操作的内部函数
@@ -858,19 +813,11 @@ class BaseROS2DeviceNode(Node, Generic[T]):
original_instance: ResourcePLR = self.resource_tracker.figure_resource( original_instance: ResourcePLR = self.resource_tracker.figure_resource(
{"uuid": tree.root_node.res_content.uuid}, try_mode=False {"uuid": tree.root_node.res_content.uuid}, try_mode=False
) )
original_parent_resource = original_instance.parent
original_parent_resource_uuid = getattr(original_parent_resource, "unilabos_uuid", None)
target_parent_resource_uuid = tree.root_node.res_content.uuid_parent
not_same_parent = (
original_parent_resource_uuid != target_parent_resource_uuid
and original_parent_resource is not None
)
old_name = original_instance.name
new_name = plr_resource.name
parent_appended = False
# Update操作中包含改名需要先remove再add,这里更新父节点即可 # Update操作中包含改名需要先remove再add
if not not_same_parent and old_name != new_name: if original_instance.name != plr_resource.name:
old_name = original_instance.name
new_name = plr_resource.name
self.lab_logger().info(f"物料改名操作:{old_name} -> {new_name}") self.lab_logger().info(f"物料改名操作:{old_name} -> {new_name}")
# 收集所有相关的uuid包括子节点 # 收集所有相关的uuid包括子节点
@@ -879,10 +826,12 @@ class BaseROS2DeviceNode(Node, Generic[T]):
_handle_add([original_instance], tree_set, additional_add_params) _handle_add([original_instance], tree_set, additional_add_params)
self.lab_logger().info(f"物料改名完成:{old_name} -> {new_name}") self.lab_logger().info(f"物料改名完成:{old_name} -> {new_name}")
original_instances.append(original_parent_resource)
parent_appended = True
# 常规更新:不涉及改名 # 常规更新:不涉及改名
original_parent_resource = original_instance.parent
original_parent_resource_uuid = getattr(original_parent_resource, "unilabos_uuid", None)
target_parent_resource_uuid = tree.root_node.res_content.uuid_parent
self.lab_logger().info( self.lab_logger().info(
f"物料{original_instance} 原始父节点{original_parent_resource_uuid} " f"物料{original_instance} 原始父节点{original_parent_resource_uuid} "
f"目标父节点{target_parent_resource_uuid} 更新" f"目标父节点{target_parent_resource_uuid} 更新"
@@ -893,43 +842,29 @@ class BaseROS2DeviceNode(Node, Generic[T]):
original_instance.unilabos_extra = getattr(plr_resource, "unilabos_extra") # type: ignore # noqa: E501 original_instance.unilabos_extra = getattr(plr_resource, "unilabos_extra") # type: ignore # noqa: E501
# 如果父节点变化,需要重新挂载 # 如果父节点变化,需要重新挂载
if not_same_parent: if (
parent = self.transfer_to_new_resource(original_instance, tree, additional_add_params) original_parent_resource_uuid != target_parent_resource_uuid
original_instances.append(parent) and original_parent_resource is not None
parent_appended = True ):
self.transfer_to_new_resource(original_instance, tree, additional_add_params)
else: else:
# 判断是否变更了resource_site,重新登记 # 判断是否变更了resource_site
target_site = original_instance.unilabos_extra.get("update_resource_site") target_site = original_instance.unilabos_extra.get("update_resource_site")
sites = ( sites = original_instance.parent.sites if original_instance.parent is not None and hasattr(original_instance.parent, "sites") else None
original_instance.parent.sites site_names = list(original_instance.parent._ordering.keys()) if original_instance.parent is not None and hasattr(original_instance.parent, "sites") else []
if original_instance.parent is not None and hasattr(original_instance.parent, "sites")
else None
)
site_names = (
list(original_instance.parent._ordering.keys())
if original_instance.parent is not None and hasattr(original_instance.parent, "sites")
else []
)
if target_site is not None and sites is not None and site_names is not None: if target_site is not None and sites is not None and site_names is not None:
site_index = sites.index(original_instance) site_index = sites.index(original_instance)
site_name = site_names[site_index] site_name = site_names[site_index]
if site_name != target_site: if site_name != target_site:
parent = self.transfer_to_new_resource(original_instance, tree, additional_add_params) self.transfer_to_new_resource(original_instance, tree, additional_add_params)
if parent is not None:
original_instances.append(parent)
parent_appended = True
# 加载状态 # 加载状态
original_instance.location = plr_resource.location
original_instance.rotation = plr_resource.rotation
original_instance.barcode = plr_resource.barcode
original_instance.load_all_state(states) original_instance.load_all_state(states)
child_count = len(original_instance.get_all_children()) child_count = len(original_instance.get_all_children())
self.lab_logger().info( self.lab_logger().info(
f"更新了资源属性 {plr_resource}[{tree.root_node.res_content.uuid}] " f"及其子节点 {child_count}" f"更新了资源属性 {plr_resource}[{tree.root_node.res_content.uuid}] " f"及其子节点 {child_count}"
) )
if not parent_appended: original_instances.append(original_instance)
original_instances.append(original_instance)
# 调用driver的update回调 # 调用driver的update回调
func = getattr(self.driver_instance, "resource_tree_update", None) func = getattr(self.driver_instance, "resource_tree_update", None)
@@ -946,7 +881,9 @@ class BaseROS2DeviceNode(Node, Generic[T]):
action = i.get("action") # remove, add, update action = i.get("action") # remove, add, update
resources_uuid: List[str] = i.get("data") # 资源数据 resources_uuid: List[str] = i.get("data") # 资源数据
additional_add_params = i.get("additional_add_params", {}) # 额外参数 additional_add_params = i.get("additional_add_params", {}) # 额外参数
self.lab_logger().trace(f"[资源同步] 处理 {action}, " f"resources count: {len(resources_uuid)}") self.lab_logger().info(
f"[Resource Tree Update] Processing {action} operation, " f"resources count: {len(resources_uuid)}"
)
tree_set = None tree_set = None
if action in ["add", "update"]: if action in ["add", "update"]:
tree_set = await self.get_resource( tree_set = await self.get_resource(
@@ -957,29 +894,13 @@ class BaseROS2DeviceNode(Node, Generic[T]):
if tree_set is None: if tree_set is None:
raise ValueError("tree_set不能为None") raise ValueError("tree_set不能为None")
plr_resources = tree_set.to_plr_resources() plr_resources = tree_set.to_plr_resources()
result, parents = _handle_add(plr_resources, tree_set, additional_add_params) result = _handle_add(plr_resources, tree_set, additional_add_params)
parents: List[Optional["ResourcePLR"]] = [i for i in parents if i is not None] new_tree_set = ResourceTreeSet.from_plr_resources(plr_resources)
# de_dupe_parents = list(set(parents))
# Fix unhashable type error for WareHouse
de_dupe_parents = []
_seen_ids = set()
for p in parents:
if id(p) not in _seen_ids:
_seen_ids.add(id(p))
de_dupe_parents.append(p)
new_tree_set = ResourceTreeSet.from_plr_resources(de_dupe_parents) # 去重
for tree in new_tree_set.trees:
if tree.root_node.res_content.uuid_parent is None and self.node_name != "host_node":
tree.root_node.res_content.parent_uuid = self.uuid
r = SerialCommand.Request() r = SerialCommand.Request()
r.command = json.dumps( r.command = json.dumps(
{"data": {"data": new_tree_set.dump()}, "action": "update"} {"data": {"data": new_tree_set.dump()}, "action": "update"}) # 和Update Resource一致
) # 和Update Resource一致
response: SerialCommand_Response = await self._resource_clients[ response: SerialCommand_Response = await self._resource_clients[
"c2s_update_resource_tree" "c2s_update_resource_tree"].call_async(r) # type: ignore
].call_async(
r
) # type: ignore
self.lab_logger().info(f"确认资源云端 Add 结果: {response.response}") self.lab_logger().info(f"确认资源云端 Add 结果: {response.response}")
results.append(result) results.append(result)
elif action == "update": elif action == "update":
@@ -993,19 +914,12 @@ class BaseROS2DeviceNode(Node, Generic[T]):
plr_resources.append(ResourceTreeSet([tree]).to_plr_resources()[0]) plr_resources.append(ResourceTreeSet([tree]).to_plr_resources()[0])
result, original_instances = _handle_update(plr_resources, tree_set, additional_add_params) result, original_instances = _handle_update(plr_resources, tree_set, additional_add_params)
if not BasicConfig.no_update_feedback: if not BasicConfig.no_update_feedback:
new_tree_set = ResourceTreeSet.from_plr_resources(original_instances) # 去重 new_tree_set = ResourceTreeSet.from_plr_resources(original_instances)
for tree in new_tree_set.trees:
if tree.root_node.res_content.uuid_parent is None and self.node_name != "host_node":
tree.root_node.res_content.parent_uuid = self.uuid
r = SerialCommand.Request() r = SerialCommand.Request()
r.command = json.dumps( r.command = json.dumps(
{"data": {"data": new_tree_set.dump()}, "action": "update"} {"data": {"data": new_tree_set.dump()}, "action": "update"}) # 和Update Resource一致
) # 和Update Resource一致
response: SerialCommand_Response = await self._resource_clients[ response: SerialCommand_Response = await self._resource_clients[
"c2s_update_resource_tree" "c2s_update_resource_tree"].call_async(r) # type: ignore
].call_async(
r
) # type: ignore
self.lab_logger().info(f"确认资源云端 Update 结果: {response.response}") self.lab_logger().info(f"确认资源云端 Update 结果: {response.response}")
results.append(result) results.append(result)
elif action == "remove": elif action == "remove":
@@ -1020,15 +934,15 @@ class BaseROS2DeviceNode(Node, Generic[T]):
# 返回处理结果 # 返回处理结果
result_json = {"results": results, "total": len(data)} result_json = {"results": results, "total": len(data)}
res.response = json.dumps(result_json, ensure_ascii=False, cls=TypeEncoder) res.response = json.dumps(result_json, ensure_ascii=False, cls=TypeEncoder)
# self.lab_logger().info(f"[Resource Tree Update] Completed processing {len(data)} operations") self.lab_logger().info(f"[Resource Tree Update] Completed processing {len(data)} operations")
except json.JSONDecodeError as e: except json.JSONDecodeError as e:
error_msg = f"Invalid JSON format: {str(e)}" error_msg = f"Invalid JSON format: {str(e)}"
self.lab_logger().error(f"[资源同步] {error_msg}") self.lab_logger().error(f"[Resource Tree Update] {error_msg}")
res.response = json.dumps({"success": False, "error": error_msg}, ensure_ascii=False) res.response = json.dumps({"success": False, "error": error_msg}, ensure_ascii=False)
except Exception as e: except Exception as e:
error_msg = f"Unexpected error: {str(e)}" error_msg = f"Unexpected error: {str(e)}"
self.lab_logger().error(f"[资源同步] {error_msg}") self.lab_logger().error(f"[Resource Tree Update] {error_msg}")
self.lab_logger().error(traceback.format_exc()) self.lab_logger().error(traceback.format_exc())
res.response = json.dumps({"success": False, "error": error_msg}, ensure_ascii=False) res.response = json.dumps({"success": False, "error": error_msg}, ensure_ascii=False)
@@ -1349,8 +1263,7 @@ class BaseROS2DeviceNode(Node, Generic[T]):
ACTION, action_paramtypes = self.get_real_function(self.driver_instance, action_name) ACTION, action_paramtypes = self.get_real_function(self.driver_instance, action_name)
action_kwargs = convert_from_ros_msg_with_mapping(goal, action_value_mapping["goal"]) action_kwargs = convert_from_ros_msg_with_mapping(goal, action_value_mapping["goal"])
self.lab_logger().debug(f"任务 {ACTION.__name__} 接收到原始目标: {str(action_kwargs)[:1000]}") self.lab_logger().debug(f"任务 {ACTION.__name__} 接收到原始目标: {action_kwargs}")
self.lab_logger().trace(f"任务 {ACTION.__name__} 接收到原始目标: {action_kwargs}")
error_skip = False error_skip = False
# 向Host查询物料当前状态如果是host本身的增加物料的请求则直接跳过 # 向Host查询物料当前状态如果是host本身的增加物料的请求则直接跳过
if action_name not in ["create_resource_detailed", "create_resource"]: if action_name not in ["create_resource_detailed", "create_resource"]:
@@ -1364,41 +1277,21 @@ class BaseROS2DeviceNode(Node, Generic[T]):
resource_inputs = action_kwargs[k] if is_sequence else [action_kwargs[k]] resource_inputs = action_kwargs[k] if is_sequence else [action_kwargs[k]]
# 批量查询资源 # 批量查询资源
queried_resources: list = [None] * len(resource_inputs) queried_resources = []
uuid_indices: list[tuple[int, str, dict]] = [] # (index, uuid, resource_data) for resource_data in resource_inputs:
plr_resource = await self.get_resource_with_dir(
# 第一遍处理没有uuid的资源收集有uuid的资源信息 resource_id=resource_data["id"], with_children=True
for idx, resource_data in enumerate(resource_inputs): )
unilabos_uuid = resource_data.get("data", {}).get("unilabos_uuid") if "sample_id" in resource_data:
if unilabos_uuid is None: plr_resource.unilabos_extra["sample_uuid"] = resource_data["sample_id"]
plr_resource = await self.get_resource_with_dir( queried_resources.append(plr_resource)
resource_id=resource_data["id"], with_children=True
)
if "sample_id" in resource_data:
plr_resource.unilabos_extra[EXTRA_SAMPLE_UUID] = resource_data["sample_id"]
queried_resources[idx] = plr_resource
else:
uuid_indices.append((idx, unilabos_uuid, resource_data))
# 第二遍批量查询有uuid的资源
if uuid_indices:
uuids = [item[1] for item in uuid_indices]
resource_tree = await self.get_resource(uuids)
plr_resources = resource_tree.to_plr_resources()
for i, (idx, _, resource_data) in enumerate(uuid_indices):
plr_resource = plr_resources[i]
if "sample_id" in resource_data:
plr_resource.unilabos_extra[EXTRA_SAMPLE_UUID] = resource_data["sample_id"]
queried_resources[idx] = plr_resource
self.lab_logger().debug(f"资源查询结果: 共 {len(queried_resources)} 个资源") self.lab_logger().debug(f"资源查询结果: 共 {len(queried_resources)} 个资源")
# 通过资源跟踪器获取本地实例 # 通过资源跟踪器获取本地实例
final_resources = queried_resources if is_sequence else queried_resources[0] final_resources = queried_resources if is_sequence else queried_resources[0]
if not is_sequence: if not is_sequence:
plr = self.resource_tracker.figure_resource( plr = self.resource_tracker.figure_resource({"name": final_resources.name}, try_mode=False)
{"name": final_resources.name}, try_mode=False
)
# 保留unilabos_extra # 保留unilabos_extra
if hasattr(final_resources, "unilabos_extra") and hasattr(plr, "unilabos_extra"): if hasattr(final_resources, "unilabos_extra") and hasattr(plr, "unilabos_extra"):
plr.unilabos_extra = getattr(final_resources, "unilabos_extra", {}).copy() plr.unilabos_extra = getattr(final_resources, "unilabos_extra", {}).copy()
@@ -1438,9 +1331,6 @@ class BaseROS2DeviceNode(Node, Generic[T]):
except Exception as _: except Exception as _:
execution_error = traceback.format_exc() execution_error = traceback.format_exc()
error( error(
f"异步任务 {ACTION.__name__} 报错了\n{traceback.format_exc()}\n原始输入:{str(action_kwargs)[:1000]}"
)
trace(
f"异步任务 {ACTION.__name__} 报错了\n{traceback.format_exc()}\n原始输入:{action_kwargs}" f"异步任务 {ACTION.__name__} 报错了\n{traceback.format_exc()}\n原始输入:{action_kwargs}"
) )
@@ -1462,9 +1352,6 @@ class BaseROS2DeviceNode(Node, Generic[T]):
except Exception as _: except Exception as _:
execution_error = traceback.format_exc() execution_error = traceback.format_exc()
error( error(
f"同步任务 {ACTION.__name__} 报错了\n{traceback.format_exc()}\n原始输入:{str(action_kwargs)[:1000]}"
)
trace(
f"同步任务 {ACTION.__name__} 报错了\n{traceback.format_exc()}\n原始输入:{action_kwargs}" f"同步任务 {ACTION.__name__} 报错了\n{traceback.format_exc()}\n原始输入:{action_kwargs}"
) )
@@ -1533,18 +1420,11 @@ class BaseROS2DeviceNode(Node, Generic[T]):
if isinstance(rs, list): if isinstance(rs, list):
for r in rs: for r in rs:
res = self.resource_tracker.parent_resource(r) # 获取 resource 对象 res = self.resource_tracker.parent_resource(r) # 获取 resource 对象
if res is None:
res = rs
if id(res) not in seen:
seen.add(id(res))
unique_resources.append(res)
else: else:
res = self.resource_tracker.parent_resource(rs) res = self.resource_tracker.parent_resource(r)
if res is None: if id(res) not in seen:
res = rs seen.add(id(res))
if id(res) not in seen: unique_resources.append(res)
seen.add(id(res))
unique_resources.append(res)
# 使用新的资源树接口 # 使用新的资源树接口
if unique_resources: if unique_resources:
@@ -1596,39 +1476,20 @@ class BaseROS2DeviceNode(Node, Generic[T]):
try: try:
function_name = target["function_name"] function_name = target["function_name"]
function_args = target["function_args"] function_args = target["function_args"]
# 获取 unilabos 系统参数
unilabos_param: Dict[str, Any] = target[JSON_UNILABOS_PARAM]
assert isinstance(function_args, dict), "执行动作时JSON必须为dict类型\n原JSON: {string}" assert isinstance(function_args, dict), "执行动作时JSON必须为dict类型\n原JSON: {string}"
function = getattr(self.driver_instance, function_name) function = getattr(self.driver_instance, function_name)
assert callable( assert callable(
function function
), f"执行动作时JSON中的function_name对应的函数不可调用: {function_name}\n原JSON: {string}" ), f"执行动作时JSON中的function_name对应的函数不可调用: {function_name}\n原JSON: {string}"
# 处理参数(包含 unilabos 系统参数如 sample_uuids # 处理 ResourceSlot 类型参数
args_list = default_manager._analyze_method_signature(function, skip_unilabos_params=False)["args"] args_list = default_manager._analyze_method_signature(function)["args"]
for arg in args_list: for arg in args_list:
arg_name = arg["name"] arg_name = arg["name"]
arg_type = arg["type"] arg_type = arg["type"]
# 跳过不在 function_args 中的参数 # 跳过不在 function_args 中的参数
if arg_name not in function_args: if arg_name not in function_args:
# 处理 sample_uuids 参数注入
if arg_name == PARAM_SAMPLE_UUIDS:
raw_sample_uuids = unilabos_param.get(PARAM_SAMPLE_UUIDS, {})
# 将 material uuid 转换为 resource 实例
# key: sample_uuid, value: material_uuid -> resource 实例
resolved_sample_uuids: Dict[str, Any] = {}
for sample_uuid, material_uuid in raw_sample_uuids.items():
if material_uuid and self.resource_tracker:
resource = self.resource_tracker.uuid_to_resources.get(material_uuid)
resolved_sample_uuids[sample_uuid] = resource if resource else material_uuid
else:
resolved_sample_uuids[sample_uuid] = material_uuid
function_args[PARAM_SAMPLE_UUIDS] = resolved_sample_uuids
self.lab_logger().debug(
f"[JsonCommand] 注入 {PARAM_SAMPLE_UUIDS}: {resolved_sample_uuids}"
)
continue continue
# 处理单个 ResourceSlot # 处理单个 ResourceSlot
@@ -1636,7 +1497,8 @@ class BaseROS2DeviceNode(Node, Generic[T]):
resource_data = function_args[arg_name] resource_data = function_args[arg_name]
if isinstance(resource_data, dict) and "id" in resource_data: if isinstance(resource_data, dict) and "id" in resource_data:
try: try:
function_args[arg_name] = self._convert_resources_sync(resource_data["uuid"])[0] converted_resource = self._convert_resource_sync(resource_data)
function_args[arg_name] = converted_resource
except Exception as e: except Exception as e:
self.lab_logger().error( self.lab_logger().error(
f"转换ResourceSlot参数 {arg_name} 失败: {e}\n{traceback.format_exc()}" f"转换ResourceSlot参数 {arg_name} 失败: {e}\n{traceback.format_exc()}"
@@ -1650,87 +1512,68 @@ class BaseROS2DeviceNode(Node, Generic[T]):
resource_list = function_args[arg_name] resource_list = function_args[arg_name]
if isinstance(resource_list, list): if isinstance(resource_list, list):
try: try:
uuids = [r["uuid"] for r in resource_list if isinstance(r, dict) and "id" in r] converted_resources = []
function_args[arg_name] = self._convert_resources_sync(*uuids) if uuids else [] for resource_data in resource_list:
if isinstance(resource_data, dict) and "id" in resource_data:
converted_resource = self._convert_resource_sync(resource_data)
converted_resources.append(converted_resource)
function_args[arg_name] = converted_resources
except Exception as e: except Exception as e:
self.lab_logger().error( self.lab_logger().error(
f"转换ResourceSlot列表参数 {arg_name} 失败: {e}\n{traceback.format_exc()}" f"转换ResourceSlot列表参数 {arg_name} 失败: {e}\n{traceback.format_exc()}"
) )
raise JsonCommandInitError(f"ResourceSlot列表参数转换失败: {arg_name}") raise JsonCommandInitError(f"ResourceSlot列表参数转换失败: {arg_name}")
# todo: 默认反报送
return function(**function_args) return function(**function_args)
except KeyError as ex: except KeyError as ex:
raise JsonCommandInitError( raise JsonCommandInitError(
f"执行动作时JSON缺少function_name或function_args: {ex}\n原JSON: {string}\n{traceback.format_exc()}" f"执行动作时JSON缺少function_name或function_args: {ex}\n原JSON: {string}\n{traceback.format_exc()}"
) )
def _convert_resources_sync(self, *uuids: str) -> List["ResourcePLR"]: def _convert_resource_sync(self, resource_data: Dict[str, Any]):
"""同步转换资源 UUID 为实例 """同步转换资源数据为实例"""
# 创建资源查询请求
Args: r = SerialCommand.Request()
*uuids: 一个或多个资源 UUID r.command = json.dumps(
{
Returns: "id": resource_data.get("id", None),
单个 UUID 时返回单个资源实例,多个 UUID 时返回资源实例列表 "uuid": resource_data.get("uuid", None),
""" "with_children": True,
if not uuids: }
raise ValueError("至少需要提供一个 UUID")
uuids_list = list(uuids)
future = self._resource_clients["c2s_update_resource_tree"].call_async(
SerialCommand.Request(
command=json.dumps(
{
"data": {"data": uuids_list, "with_children": True},
"action": "get",
}
)
)
) )
# 同步调用资源查询服务
future = self._resource_clients["resource_get"].call_async(r)
# 等待结果使用while循环每次sleep 0.05秒最多等待30秒 # 等待结果使用while循环每次sleep 0.05秒最多等待30秒
timeout = 30.0 timeout = 30.0
elapsed = 0.0 elapsed = 0.0
while not future.done() and elapsed < timeout: while not future.done() and elapsed < timeout:
time.sleep(0.02) time.sleep(0.05)
elapsed += 0.02 elapsed += 0.05
if not future.done(): if not future.done():
raise Exception(f"资源查询超时: {uuids_list}") raise Exception(f"资源查询超时: {resource_data}")
response = future.result() response = future.result()
if response is None: if response is None:
raise Exception(f"资源查询返回空结果: {uuids_list}") raise Exception(f"资源查询返回空结果: {resource_data}")
raw_data = json.loads(response.response) raw_data = json.loads(response.response)
# 转换为 PLR 资源 # 转换为 PLR 资源
tree_set = ResourceTreeSet.from_raw_dict_list(raw_data) tree_set = ResourceTreeSet.from_raw_dict_list(raw_data)
if not len(tree_set.trees): plr_resource = tree_set.to_plr_resources()[0]
raise Exception(f"资源查询返回空树: {raw_data}")
plr_resources = tree_set.to_plr_resources()
# 通过资源跟踪器获取本地实例 # 通过资源跟踪器获取本地实例
figured_resources: List[ResourcePLR] = [] res = self.resource_tracker.figure_resource(plr_resource, try_mode=True)
for plr_resource, tree in zip(plr_resources, tree_set.trees): if len(res) == 0:
res = self.resource_tracker.figure_resource(plr_resource, try_mode=True) self.lab_logger().warning(f"资源转换未能索引到实例: {resource_data},返回新建实例")
if len(res) == 0: return plr_resource
self.lab_logger().warning(f"资源转换未能索引到实例: {tree.root_node.res_content},返回新建实例") elif len(res) == 1:
figured_resources.append(plr_resource) return res[0]
elif len(res) == 1: else:
figured_resources.append(res[0]) raise ValueError(f"资源转换得到多个实例: {res}")
else:
raise ValueError(f"资源转换得到多个实例: {res}")
mapped_plr_resources = []
for uuid in uuids_list:
for plr_resource in figured_resources:
r = self.resource_tracker.loop_find_with_uuid(plr_resource, uuid)
mapped_plr_resources.append(r)
break
return mapped_plr_resources
async def _execute_driver_command_async(self, string: str): async def _execute_driver_command_async(self, string: str):
try: try:
@@ -1745,9 +1588,6 @@ class BaseROS2DeviceNode(Node, Generic[T]):
try: try:
function_name = target["function_name"] function_name = target["function_name"]
function_args = target["function_args"] function_args = target["function_args"]
# 获取 unilabos 系统参数
unilabos_param: Dict[str, Any] = target.get(JSON_UNILABOS_PARAM, {})
assert isinstance(function_args, dict), "执行动作时JSON必须为dict类型\n原JSON: {string}" assert isinstance(function_args, dict), "执行动作时JSON必须为dict类型\n原JSON: {string}"
function = getattr(self.driver_instance, function_name) function = getattr(self.driver_instance, function_name)
assert callable( assert callable(
@@ -1757,30 +1597,14 @@ class BaseROS2DeviceNode(Node, Generic[T]):
function function
), f"执行动作时JSON中的function并非异步: {function_name}\n原JSON: {string}" ), f"执行动作时JSON中的function并非异步: {function_name}\n原JSON: {string}"
# 处理参数(包含 unilabos 系统参数如 sample_uuids # 处理 ResourceSlot 类型参数
args_list = default_manager._analyze_method_signature(function, skip_unilabos_params=False)["args"] args_list = default_manager._analyze_method_signature(function)["args"]
for arg in args_list: for arg in args_list:
arg_name = arg["name"] arg_name = arg["name"]
arg_type = arg["type"] arg_type = arg["type"]
# 跳过不在 function_args 中的参数 # 跳过不在 function_args 中的参数
if arg_name not in function_args: if arg_name not in function_args:
# 处理 sample_uuids 参数注入
if arg_name == PARAM_SAMPLE_UUIDS:
raw_sample_uuids = unilabos_param.get(PARAM_SAMPLE_UUIDS, {})
# 将 material uuid 转换为 resource 实例
# key: sample_uuid, value: material_uuid -> resource 实例
resolved_sample_uuids: Dict[str, Any] = {}
for sample_uuid, material_uuid in raw_sample_uuids.items():
if material_uuid and self.resource_tracker:
resource = self.resource_tracker.uuid_to_resources.get(material_uuid)
resolved_sample_uuids[sample_uuid] = resource if resource else material_uuid
else:
resolved_sample_uuids[sample_uuid] = material_uuid
function_args[PARAM_SAMPLE_UUIDS] = resolved_sample_uuids
self.lab_logger().debug(
f"[JsonCommandAsync] 注入 {PARAM_SAMPLE_UUIDS}: {resolved_sample_uuids}"
)
continue continue
# 处理单个 ResourceSlot # 处理单个 ResourceSlot
@@ -1870,15 +1694,6 @@ class ROS2DeviceNode:
它不继承设备类,而是通过代理模式访问设备类的属性和方法。 它不继承设备类,而是通过代理模式访问设备类的属性和方法。
""" """
# 类变量,用于循环管理
_asyncio_loop = None
_asyncio_loop_running = False
_asyncio_loop_thread = None
@classmethod
def get_asyncio_loop(cls):
return cls._asyncio_loop
@staticmethod @staticmethod
async def safe_task_wrapper(trace_callback, func, **kwargs): async def safe_task_wrapper(trace_callback, func, **kwargs):
try: try:
@@ -1955,11 +1770,6 @@ class ROS2DeviceNode:
print_publish: 是否打印发布信息 print_publish: 是否打印发布信息
driver_is_ros: driver_is_ros:
""" """
# 在初始化时检查循环状态
if ROS2DeviceNode._asyncio_loop_running and ROS2DeviceNode._asyncio_loop_thread is not None:
pass
elif ROS2DeviceNode._asyncio_loop_thread is None:
self._start_loop()
# 保存设备类是否支持异步上下文 # 保存设备类是否支持异步上下文
self._has_async_context = hasattr(driver_class, "__aenter__") and hasattr(driver_class, "__aexit__") self._has_async_context = hasattr(driver_class, "__aenter__") and hasattr(driver_class, "__aexit__")
@@ -2051,19 +1861,6 @@ class ROS2DeviceNode:
except Exception as e: except Exception as e:
self._ros_node.lab_logger().error(f"设备后初始化失败: {e}") self._ros_node.lab_logger().error(f"设备后初始化失败: {e}")
def _start_loop(self):
def run_event_loop():
loop = asyncio.new_event_loop()
ROS2DeviceNode._asyncio_loop = loop
asyncio.set_event_loop(loop)
loop.run_forever()
ROS2DeviceNode._asyncio_loop_thread = threading.Thread(
target=run_event_loop, daemon=True, name="ROS2DeviceNode"
)
ROS2DeviceNode._asyncio_loop_thread.start()
logger.info(f"循环线程已启动")
class DeviceInfoType(TypedDict): class DeviceInfoType(TypedDict):
id: str id: str

View File

@@ -1,17 +1,16 @@
import collections import collections
from dataclasses import dataclass, field
import json import json
import threading import threading
import time import time
import traceback import traceback
import uuid import uuid
from dataclasses import dataclass, field from typing import TYPE_CHECKING, Optional, Dict, Any, List, ClassVar, Set, TypedDict, Union
from typing import TYPE_CHECKING, Optional, Dict, Any, List, ClassVar, Set, Union
from action_msgs.msg import GoalStatus from action_msgs.msg import GoalStatus
from geometry_msgs.msg import Point from geometry_msgs.msg import Point
from rclpy.action import ActionClient, get_action_server_names_and_types_by_node from rclpy.action import ActionClient, get_action_server_names_and_types_by_node
from rclpy.service import Service from rclpy.service import Service
from typing_extensions import TypedDict
from unilabos_msgs.msg import Resource # type: ignore from unilabos_msgs.msg import Resource # type: ignore
from unilabos_msgs.srv import ( from unilabos_msgs.srv import (
ResourceAdd, ResourceAdd,
@@ -23,20 +22,9 @@ from unilabos_msgs.srv import (
from unilabos_msgs.srv._serial_command import SerialCommand_Request, SerialCommand_Response from unilabos_msgs.srv._serial_command import SerialCommand_Request, SerialCommand_Response
from unique_identifier_msgs.msg import UUID from unique_identifier_msgs.msg import UUID
from unilabos.registry.placeholder_type import ResourceSlot, DeviceSlot
from unilabos.registry.registry import lab_registry from unilabos.registry.registry import lab_registry
from unilabos.resources.container import RegularContainer
from unilabos.resources.graphio import initialize_resource from unilabos.resources.graphio import initialize_resource
from unilabos.resources.registry import add_schema from unilabos.resources.registry import add_schema
from unilabos.resources.resource_tracker import (
ResourceDict,
ResourceDictInstance,
ResourceTreeSet,
ResourceTreeInstance,
RETURN_UNILABOS_SAMPLES,
JSON_UNILABOS_PARAM,
PARAM_SAMPLE_UUIDS,
)
from unilabos.ros.initialize_device import initialize_device_from_dict from unilabos.ros.initialize_device import initialize_device_from_dict
from unilabos.ros.msgs.message_converter import ( from unilabos.ros.msgs.message_converter import (
get_msg_type, get_msg_type,
@@ -47,10 +35,17 @@ from unilabos.ros.msgs.message_converter import (
) )
from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode, ROS2DeviceNode, DeviceNodeResourceTracker from unilabos.ros.nodes.base_device_node import BaseROS2DeviceNode, ROS2DeviceNode, DeviceNodeResourceTracker
from unilabos.ros.nodes.presets.controller_node import ControllerNode from unilabos.ros.nodes.presets.controller_node import ControllerNode
from unilabos.resources.resource_tracker import (
ResourceDict,
ResourceDictInstance,
ResourceTreeSet,
ResourceTreeInstance,
)
from unilabos.utils import logger from unilabos.utils import logger
from unilabos.utils.exception import DeviceClassInvalid from unilabos.utils.exception import DeviceClassInvalid
from unilabos.utils.log import warning from unilabos.utils.log import warning
from unilabos.utils.type_check import serialize_result_info from unilabos.utils.type_check import serialize_result_info
from unilabos.registry.placeholder_type import ResourceSlot, DeviceSlot
if TYPE_CHECKING: if TYPE_CHECKING:
from unilabos.app.ws_client import QueueItem from unilabos.app.ws_client import QueueItem
@@ -66,18 +61,6 @@ class TestResourceReturn(TypedDict):
devices: List[DeviceSlot] devices: List[DeviceSlot]
class TestLatencyReturn(TypedDict):
"""test_latency方法的返回值类型"""
avg_rtt_ms: float
avg_time_diff_ms: float
max_time_error_ms: float
task_delay_ms: float
raw_delay_ms: float
test_count: int
status: str
class HostNode(BaseROS2DeviceNode): class HostNode(BaseROS2DeviceNode):
""" """
主机节点类,负责管理设备、资源和控制器 主机节点类,负责管理设备、资源和控制器
@@ -378,7 +361,8 @@ class HostNode(BaseROS2DeviceNode):
request.command = "" request.command = ""
future = sclient.call_async(request) future = sclient.call_async(request)
# Use timeout for result as well # Use timeout for result as well
future.result() future.result(timeout_sec=5.0)
self.lab_logger().debug(f"[Host Node] Re-register completed for {device_namespace}")
except Exception as e: except Exception as e:
# Gracefully handle destruction during shutdown # Gracefully handle destruction during shutdown
if "destruction was requested" in str(e) or self._shutting_down: if "destruction was requested" in str(e) or self._shutting_down:
@@ -602,10 +586,11 @@ class HostNode(BaseROS2DeviceNode):
) )
try: try:
assert len(response) == 1, "Create Resource应当只返回一个结果" new_li = []
for i in response: for i in response:
res = json.loads(i) res = json.loads(i)
return res new_li.append(res)
return {"resources": new_li, "liquid_input_resources": new_li}
except Exception as ex: except Exception as ex:
pass pass
_n = "\n" _n = "\n"
@@ -758,7 +743,6 @@ class HostNode(BaseROS2DeviceNode):
item: "QueueItem", item: "QueueItem",
action_type: str, action_type: str,
action_kwargs: Dict[str, Any], action_kwargs: Dict[str, Any],
sample_material: Dict[str, str],
server_info: Optional[Dict[str, Any]] = None, server_info: Optional[Dict[str, Any]] = None,
) -> None: ) -> None:
""" """
@@ -776,14 +760,14 @@ class HostNode(BaseROS2DeviceNode):
if action_name.startswith("auto-"): if action_name.startswith("auto-"):
action_name = action_name[5:] action_name = action_name[5:]
action_id = f"/devices/{device_id}/_execute_driver_command" action_id = f"/devices/{device_id}/_execute_driver_command"
json_command: Dict[str, Any] = { action_kwargs = {
"function_name": action_name, "string": json.dumps(
"function_args": action_kwargs, {
JSON_UNILABOS_PARAM: { "function_name": action_name,
PARAM_SAMPLE_UUIDS: sample_material, "function_args": action_kwargs,
}, }
)
} }
action_kwargs = {"string": json.dumps(json_command)}
if action_type.startswith("UniLabJsonCommandAsync"): if action_type.startswith("UniLabJsonCommandAsync"):
action_id = f"/devices/{device_id}/_execute_driver_command_async" action_id = f"/devices/{device_id}/_execute_driver_command_async"
else: else:
@@ -794,11 +778,24 @@ class HostNode(BaseROS2DeviceNode):
raise ValueError(f"ActionClient {action_id} not found.") raise ValueError(f"ActionClient {action_id} not found.")
action_client: ActionClient = self._action_clients[action_id] action_client: ActionClient = self._action_clients[action_id]
# 遍历action_kwargs下的所有子dict将"sample_uuid"的值赋给"sample_id"
def assign_sample_id(obj):
if isinstance(obj, dict):
if "sample_uuid" in obj:
obj["sample_id"] = obj["sample_uuid"]
obj.pop("sample_uuid")
for k, v in obj.items():
if k != "unilabos_extra":
assign_sample_id(v)
elif isinstance(obj, list):
for item in obj:
assign_sample_id(item)
assign_sample_id(action_kwargs)
goal_msg = convert_to_ros_msg(action_client._action_type.Goal(), action_kwargs) goal_msg = convert_to_ros_msg(action_client._action_type.Goal(), action_kwargs)
# self.lab_logger().trace(f"[Host Node] Sending goal for {action_id}: {str(goal_msg)[:1000]}") self.lab_logger().info(f"[Host Node] Sending goal for {action_id}: {goal_msg}")
self.lab_logger().trace(f"[Host Node] Sending goal for {action_id}: {action_kwargs}")
self.lab_logger().trace(f"[Host Node] Sending goal for {action_id}: {goal_msg}")
action_client.wait_for_server() action_client.wait_for_server()
goal_uuid_obj = UUID(uuid=list(u.bytes)) goal_uuid_obj = UUID(uuid=list(u.bytes))
@@ -856,14 +853,9 @@ class HostNode(BaseROS2DeviceNode):
# 适配后端的一些额外处理 # 适配后端的一些额外处理
return_value = return_info.get("return_value") return_value = return_info.get("return_value")
if isinstance(return_value, dict): if isinstance(return_value, dict):
unilabos_samples = return_value.pop(RETURN_UNILABOS_SAMPLES, None) unilabos_samples = return_info.get("unilabos_samples")
if isinstance(unilabos_samples, list) and unilabos_samples: if isinstance(unilabos_samples, list):
self.lab_logger().info( return_info["unilabos_samples"] = unilabos_samples
f"[Host Node] Job {job_id[:8]} returned {len(unilabos_samples)} sample(s): "
f"{[s.get('name', s.get('id', 'unknown')) if isinstance(s, dict) else str(s)[:20] for s in unilabos_samples[:5]]}"
f"{'...' if len(unilabos_samples) > 5 else ''}"
)
return_info["samples"] = unilabos_samples
suc = return_info.get("suc", False) suc = return_info.get("suc", False)
if not suc: if not suc:
status = "failed" status = "failed"
@@ -889,7 +881,7 @@ class HostNode(BaseROS2DeviceNode):
# 清理 _goals 中的记录 # 清理 _goals 中的记录
if job_id in self._goals: if job_id in self._goals:
del self._goals[job_id] del self._goals[job_id]
self.lab_logger().trace(f"[Host Node] Removed goal {job_id[:8]} from _goals") self.lab_logger().debug(f"[Host Node] Removed goal {job_id[:8]} from _goals")
# 存储结果供 HTTP API 查询 # 存储结果供 HTTP API 查询
try: try:
@@ -1141,11 +1133,11 @@ class HostNode(BaseROS2DeviceNode):
接收序列化的 ResourceTreeSet 数据并进行处理 接收序列化的 ResourceTreeSet 数据并进行处理
""" """
self.lab_logger().info(f"[Host Node-Resource] Resource tree add request received")
try: try:
# 解析请求数据 # 解析请求数据
data = json.loads(request.command) data = json.loads(request.command)
action = data["action"] action = data["action"]
self.lab_logger().info(f"[Host Node-Resource] Resource tree {action} request received")
data = data["data"] data = data["data"]
if action == "add": if action == "add":
await self._resource_tree_action_add_callback(data, response) await self._resource_tree_action_add_callback(data, response)
@@ -1169,7 +1161,7 @@ class HostNode(BaseROS2DeviceNode):
""" """
更新节点信息回调 更新节点信息回调
""" """
self.lab_logger().trace(f"[Host Node] Node info update request received: {request}") # self.lab_logger().info(f"[Host Node] Node info update request received: {request}")
try: try:
from unilabos.app.communication import get_communication_client from unilabos.app.communication import get_communication_client
from unilabos.app.web.client import HTTPClient, http_client from unilabos.app.web.client import HTTPClient, http_client
@@ -1251,7 +1243,7 @@ class HostNode(BaseROS2DeviceNode):
data = json.loads(request.command) data = json.loads(request.command)
if "uuid" in data and data["uuid"] is not None: if "uuid" in data and data["uuid"] is not None:
http_req = http_client.resource_tree_get([data["uuid"]], data["with_children"]) http_req = http_client.resource_tree_get([data["uuid"]], data["with_children"])
elif "id" in data: elif "id" in data and data["id"].startswith("/"):
http_req = http_client.resource_get(data["id"], data["with_children"]) http_req = http_client.resource_get(data["id"], data["with_children"])
else: else:
raise ValueError("没有使用正确的物料 id 或 uuid") raise ValueError("没有使用正确的物料 id 或 uuid")
@@ -1334,20 +1326,10 @@ class HostNode(BaseROS2DeviceNode):
self.lab_logger().debug(f"[Host Node-Resource] List parameters: {request}") self.lab_logger().debug(f"[Host Node-Resource] List parameters: {request}")
return response return response
def test_latency(self) -> TestLatencyReturn: def test_latency(self):
""" """
测试网络延迟的action实现 测试网络延迟的action实现
通过5次ping-pong机制校对时间误差并计算实际延迟 通过5次ping-pong机制校对时间误差并计算实际延迟
Returns:
TestLatencyReturn: 包含延迟测试结果的字典,包括:
- avg_rtt_ms: 平均往返时间(毫秒)
- avg_time_diff_ms: 平均时间差(毫秒)
- max_time_error_ms: 最大时间误差(毫秒)
- task_delay_ms: 实际任务延迟(毫秒),-1表示无法计算
- raw_delay_ms: 原始时间差(毫秒),-1表示无法计算
- test_count: 有效测试次数
- status: 测试状态,"success"表示成功,"all_timeout"表示全部超时
""" """
import uuid as uuid_module import uuid as uuid_module
@@ -1410,15 +1392,7 @@ class HostNode(BaseROS2DeviceNode):
if not ping_results: if not ping_results:
self.lab_logger().error("❌ 所有ping-pong测试都失败了") self.lab_logger().error("❌ 所有ping-pong测试都失败了")
return { return {"status": "all_timeout"}
"avg_rtt_ms": -1.0,
"avg_time_diff_ms": -1.0,
"max_time_error_ms": -1.0,
"task_delay_ms": -1.0,
"raw_delay_ms": -1.0,
"test_count": 0,
"status": "all_timeout",
}
# 统计分析 # 统计分析
rtts = [r["rtt_ms"] for r in ping_results] rtts = [r["rtt_ms"] for r in ping_results]
@@ -1426,7 +1400,7 @@ class HostNode(BaseROS2DeviceNode):
avg_rtt_ms = sum(rtts) / len(rtts) avg_rtt_ms = sum(rtts) / len(rtts)
avg_time_diff_ms = sum(time_diffs) / len(time_diffs) avg_time_diff_ms = sum(time_diffs) / len(time_diffs)
max_time_diff_error_ms: float = max(abs(min(time_diffs)), abs(max(time_diffs))) max_time_diff_error_ms = max(abs(min(time_diffs)), abs(max(time_diffs)))
self.lab_logger().info("-" * 50) self.lab_logger().info("-" * 50)
self.lab_logger().info("[测试统计]") self.lab_logger().info("[测试统计]")
@@ -1466,7 +1440,7 @@ class HostNode(BaseROS2DeviceNode):
self.lab_logger().info("=" * 60) self.lab_logger().info("=" * 60)
res: TestLatencyReturn = { return {
"avg_rtt_ms": avg_rtt_ms, "avg_rtt_ms": avg_rtt_ms,
"avg_time_diff_ms": avg_time_diff_ms, "avg_time_diff_ms": avg_time_diff_ms,
"max_time_error_ms": max_time_diff_error_ms, "max_time_error_ms": max_time_diff_error_ms,
@@ -1477,23 +1451,12 @@ class HostNode(BaseROS2DeviceNode):
"test_count": len(ping_results), "test_count": len(ping_results),
"status": "success", "status": "success",
} }
return res
def test_resource( def test_resource(
self, self, resource: ResourceSlot, resources: List[ResourceSlot], device: DeviceSlot, devices: List[DeviceSlot]
resource: ResourceSlot = None,
resources: List[ResourceSlot] = None,
device: DeviceSlot = None,
devices: List[DeviceSlot] = None,
) -> TestResourceReturn: ) -> TestResourceReturn:
if resources is None:
resources = []
if devices is None:
devices = []
if resource is None:
resource = RegularContainer("test_resource传入None")
return { return {
"resources": ResourceTreeSet.from_plr_resources([resource, *resources], known_newly_created=True).dump(), "resources": ResourceTreeSet.from_plr_resources([resource, *resources]).dump(),
"devices": [device, *devices], "devices": [device, *devices],
} }
@@ -1545,9 +1508,7 @@ class HostNode(BaseROS2DeviceNode):
# 构建服务地址 # 构建服务地址
srv_address = f"/srv{namespace}/s2c_resource_tree" srv_address = f"/srv{namespace}/s2c_resource_tree"
self.lab_logger().trace( self.lab_logger().info(f"[Host Node-Resource] Notifying {device_id} for resource tree {action} operation")
f"[Host Node-Resource] Host -> {device_id} ResourceTree {action} operation started -------"
)
# 创建服务客户端 # 创建服务客户端
sclient = self.create_client(SerialCommand, srv_address) sclient = self.create_client(SerialCommand, srv_address)
@@ -1582,8 +1543,8 @@ class HostNode(BaseROS2DeviceNode):
time.sleep(0.05) time.sleep(0.05)
response = future.result() response = future.result()
self.lab_logger().trace( self.lab_logger().info(
f"[Host Node-Resource] Host -> {device_id} ResourceTree {action} operation completed -------" f"[Host Node-Resource] Resource tree {action} notification completed for {device_id}"
) )
return True return True

View File

@@ -6,6 +6,8 @@ from typing import List, Dict, Any, Optional, TYPE_CHECKING
import rclpy import rclpy
from rosidl_runtime_py import message_to_ordereddict from rosidl_runtime_py import message_to_ordereddict
from unilabos_msgs.msg import Resource
from unilabos_msgs.srv import ResourceUpdate
from unilabos.messages import * # type: ignore # protocol names from unilabos.messages import * # type: ignore # protocol names
from rclpy.action import ActionServer, ActionClient from rclpy.action import ActionServer, ActionClient
@@ -13,6 +15,7 @@ from rclpy.action.server import ServerGoalHandle
from unilabos_msgs.srv._serial_command import SerialCommand_Request, SerialCommand_Response from unilabos_msgs.srv._serial_command import SerialCommand_Request, SerialCommand_Response
from unilabos.compile import action_protocol_generators from unilabos.compile import action_protocol_generators
from unilabos.resources.graphio import nested_dict_to_list
from unilabos.ros.initialize_device import initialize_device_from_dict from unilabos.ros.initialize_device import initialize_device_from_dict
from unilabos.ros.msgs.message_converter import ( from unilabos.ros.msgs.message_converter import (
get_action_type, get_action_type,
@@ -228,15 +231,15 @@ class ROS2WorkstationNode(BaseROS2DeviceNode):
try: try:
# 统一处理单个或多个资源 # 统一处理单个或多个资源
resource_id = ( resource_id = (
protocol_kwargs[k]["id"] protocol_kwargs[k]["id"] if v == "unilabos_msgs/Resource" else protocol_kwargs[k][0]["id"]
if v == "unilabos_msgs/Resource"
else protocol_kwargs[k][0]["id"]
) )
resource_uuid = protocol_kwargs[k].get("uuid", None) resource_uuid = protocol_kwargs[k].get("uuid", None)
r = SerialCommand_Request() r = SerialCommand_Request()
r.command = json.dumps({"id": resource_id, "uuid": resource_uuid, "with_children": True}) r.command = json.dumps({"id": resource_id, "uuid": resource_uuid, "with_children": True})
# 发送请求并等待响应 # 发送请求并等待响应
response: SerialCommand_Response = await self._resource_clients["resource_get"].call_async( response: SerialCommand_Response = await self._resource_clients[
"resource_get"
].call_async(
r r
) # type: ignore ) # type: ignore
raw_data = json.loads(response.response) raw_data = json.loads(response.response)
@@ -304,54 +307,12 @@ class ROS2WorkstationNode(BaseROS2DeviceNode):
# 向Host更新物料当前状态 # 向Host更新物料当前状态
for k, v in goal.get_fields_and_field_types().items(): for k, v in goal.get_fields_and_field_types().items():
if v not in ["unilabos_msgs/Resource", "sequence<unilabos_msgs/Resource>"]: if v in ["unilabos_msgs/Resource", "sequence<unilabos_msgs/Resource>"]:
continue r = ResourceUpdate.Request()
self.lab_logger().info(f"更新资源状态: {k}") r.resources = [
try: convert_to_ros_msg(Resource, rs) for rs in nested_dict_to_list(protocol_kwargs[k])
# 去重:使用 seen 集合获取唯一的资源对象 ]
seen = set() response = await self._resource_clients["resource_update"].call_async(r)
unique_resources = []
# 获取资源数据,统一转换为列表
resource_data = protocol_kwargs[k]
is_sequence = v != "unilabos_msgs/Resource"
if not is_sequence:
resource_list = [resource_data] if isinstance(resource_data, dict) else resource_data
else:
# 处理序列类型,可能是嵌套列表
resource_list = []
if isinstance(resource_data, list):
for item in resource_data:
if isinstance(item, list):
resource_list.extend(item)
else:
resource_list.append(item)
else:
resource_list = [resource_data]
for res_data in resource_list:
if not isinstance(res_data, dict):
continue
res_name = res_data.get("id") or res_data.get("name")
if not res_name:
continue
# 使用 resource_tracker 获取本地 PLR 实例
plr = self.resource_tracker.figure_resource({"name": res_name}, try_mode=False)
# 获取父资源
res = self.resource_tracker.parent_resource(plr)
if res is None:
res = plr
if id(res) not in seen:
seen.add(id(res))
unique_resources.append(res)
# 使用新的资源树接口更新
if unique_resources:
await self.update_resource(unique_resources)
except Exception as e:
self.lab_logger().error(f"资源更新失败: {e}")
self.lab_logger().error(traceback.format_exc())
# 设置成功状态和返回值 # 设置成功状态和返回值
execution_success = True execution_success = True

View File

@@ -52,8 +52,7 @@ class DeviceClassCreator(Generic[T]):
if self.device_instance is not None: if self.device_instance is not None:
for c in self.children: for c in self.children:
if c.res_content.type != "device": if c.res_content.type != "device":
res = ResourceTreeSet([ResourceTreeInstance(c)]).to_plr_resources()[0] self.resource_tracker.add_resource(c.get_plr_nested_dict())
self.resource_tracker.add_resource(res)
def create_instance(self, data: Dict[str, Any]) -> T: def create_instance(self, data: Dict[str, Any]) -> T:
""" """
@@ -120,7 +119,7 @@ class PyLabRobotCreator(DeviceClassCreator[T]):
# return resource, source_type # return resource, source_type
def _process_resource_references( def _process_resource_references(
self, data: Any, processed_child_names: Optional[Dict[str, Any]], to_dict=False, states=None, prefix_path="", name_to_uuid=None self, data: Any, to_dict=False, states=None, prefix_path="", name_to_uuid=None
) -> Any: ) -> Any:
""" """
递归处理资源引用替换_resource_child_name对应的资源 递归处理资源引用替换_resource_child_name对应的资源
@@ -165,7 +164,6 @@ class PyLabRobotCreator(DeviceClassCreator[T]):
states[prefix_path] = resource_instance.serialize_all_state() states[prefix_path] = resource_instance.serialize_all_state()
return serialized return serialized
else: else:
processed_child_names[child_name] = resource_instance
self.resource_tracker.add_resource(resource_instance) self.resource_tracker.add_resource(resource_instance)
# 立即设置UUIDstate已经在resource_ulab_to_plr中处理过了 # 立即设置UUIDstate已经在resource_ulab_to_plr中处理过了
if name_to_uuid: if name_to_uuid:
@@ -184,12 +182,12 @@ class PyLabRobotCreator(DeviceClassCreator[T]):
result = {} result = {}
for key, value in data.items(): for key, value in data.items():
new_prefix = f"{prefix_path}.{key}" if prefix_path else key new_prefix = f"{prefix_path}.{key}" if prefix_path else key
result[key] = self._process_resource_references(value, processed_child_names, to_dict, states, new_prefix, name_to_uuid) result[key] = self._process_resource_references(value, to_dict, states, new_prefix, name_to_uuid)
return result return result
elif isinstance(data, list): elif isinstance(data, list):
return [ return [
self._process_resource_references(item, processed_child_names, to_dict, states, f"{prefix_path}[{i}]", name_to_uuid) self._process_resource_references(item, to_dict, states, f"{prefix_path}[{i}]", name_to_uuid)
for i, item in enumerate(data) for i, item in enumerate(data)
] ]
@@ -236,7 +234,7 @@ class PyLabRobotCreator(DeviceClassCreator[T]):
# 首先处理资源引用 # 首先处理资源引用
states = {} states = {}
processed_data = self._process_resource_references( processed_data = self._process_resource_references(
data, {}, to_dict=True, states=states, name_to_uuid=name_to_uuid data, to_dict=True, states=states, name_to_uuid=name_to_uuid
) )
try: try:
@@ -272,12 +270,7 @@ class PyLabRobotCreator(DeviceClassCreator[T]):
arg_value = spec_args[param_name].annotation arg_value = spec_args[param_name].annotation
data[param_name]["_resource_type"] = self.device_cls.__module__ + ":" + arg_value data[param_name]["_resource_type"] = self.device_cls.__module__ + ":" + arg_value
logger.debug(f"自动补充 _resource_type: {data[param_name]['_resource_type']}") logger.debug(f"自动补充 _resource_type: {data[param_name]['_resource_type']}")
processed_child_names = {} processed_data = self._process_resource_references(data, to_dict=False, name_to_uuid=name_to_uuid)
processed_data = self._process_resource_references(data, processed_child_names, to_dict=False, name_to_uuid=name_to_uuid)
for child_name, resource_instance in processed_data.items():
for ind, name in enumerate([child.res_content.name for child in self.children]):
if name == child_name:
self.children.pop(ind)
self.device_instance = super(PyLabRobotCreator, self).create_instance(processed_data) # 补全变量后直接调用调用的自身的attach_resource self.device_instance = super(PyLabRobotCreator, self).create_instance(processed_data) # 补全变量后直接调用调用的自身的attach_resource
except Exception as e: except Exception as e:
logger.error(f"PyLabRobot创建实例失败: {e}") logger.error(f"PyLabRobot创建实例失败: {e}")
@@ -349,10 +342,9 @@ class WorkstationNodeCreator(DeviceClassCreator[T]):
try: try:
# 创建实例额外补充一个给protocol node的字段后面考虑取消 # 创建实例额外补充一个给protocol node的字段后面考虑取消
data["children"] = self.children data["children"] = self.children
# super(WorkstationNodeCreator, self).create_instance(data)的时候会attach for child in self.children:
# for child in self.children: if child.res_content.type != "device":
# if child.res_content.type != "device": self.resource_tracker.add_resource(child.get_plr_nested_dict())
# self.resource_tracker.add_resource(child.get_plr_nested_dict())
deck_dict = data.get("deck") deck_dict = data.get("deck")
if deck_dict: if deck_dict:
from pylabrobot.resources import Deck, Resource from pylabrobot.resources import Deck, Resource

182
unilabos/ros/x/rclpyx.py Normal file
View File

@@ -0,0 +1,182 @@
import asyncio
from asyncio import events
import threading
import rclpy
from rclpy.impl.implementation_singleton import rclpy_implementation as _rclpy
from rclpy.executors import await_or_execute, Executor
from rclpy.action import ActionClient, ActionServer
from rclpy.action.server import ServerGoalHandle, GoalResponse, GoalInfo, GoalStatus
from std_msgs.msg import String
from action_tutorials_interfaces.action import Fibonacci
# Module-global asyncio loop; populated by run_event_loop() / run_event_loop_in_thread().
loop = None


def get_event_loop():
    """Return the module-level event loop started by ``run_event_loop``, or None if not started."""
    global loop
    return loop
async def default_handle_accepted_callback_async(goal_handle):
    """Default accepted-goal hook: start executing the goal immediately."""
    execution = goal_handle.execute()
    await execution
class ServerGoalHandleX(ServerGoalHandle):
    """ServerGoalHandle whose ``execute`` awaits the server's async notify path.

    Used by :class:`ActionServerX` so goal execution is scheduled on the running
    asyncio event loop instead of rclpy's default synchronous dispatch.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
    async def execute(self, execute_callback=None):
        """Transition the goal to EXECUTING (unless cancel was requested) and run the callback.

        :param execute_callback: optional override for the server's registered
            execute callback; ``None`` falls back to the registered one inside
            ``notify_execute_async``.
        """
        # It's possible that there has been a request to cancel the goal prior to executing.
        # In this case we want to avoid the illegal state transition to EXECUTING
        # but still call the users execute callback to let them handle canceling the goal.
        if not self.is_cancel_requested:
            self._update_state(_rclpy.GoalEvent.EXECUTE)
        await self._action_server.notify_execute_async(self, execute_callback)
class ActionServerX(ActionServer):
    """ActionServer variant that schedules goal handling as asyncio tasks.

    The goal-request path is copied from rclpy's ``ActionServer`` with two
    changes: accepted goals get a :class:`ServerGoalHandleX`, and the
    accepted/execute callbacks are launched with ``asyncio.create_task``
    instead of rclpy's own dispatch, so user callbacks can be coroutines
    running on the surrounding asyncio event loop.
    NOTE(review): copied from rclpy internals — keep in sync with the rclpy
    version this project pins; verify against upstream on upgrades.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Replace the default (sync) accepted-goal hook with the async one.
        self.register_handle_accepted_callback(default_handle_accepted_callback_async)
    async def _execute_goal_request(self, request_header_and_message):
        """Validate, accept/reject, and acknowledge an incoming goal request."""
        request_header, goal_request = request_header_and_message
        goal_uuid = goal_request.goal_id
        goal_info = GoalInfo()
        goal_info.goal_id = goal_uuid
        self._node.get_logger().debug('New goal request with ID: {0}'.format(goal_uuid.uuid))
        # Check if goal ID is already being tracked by this action server
        with self._lock:
            goal_id_exists = self._handle.goal_exists(goal_info)
        accepted = False
        if not goal_id_exists:
            # Call user goal callback
            response = await await_or_execute(self._goal_callback, goal_request.goal)
            if not isinstance(response, GoalResponse):
                self._node.get_logger().warning(
                    'Goal request callback did not return a GoalResponse type. Rejecting goal.')
            else:
                accepted = GoalResponse.ACCEPT == response
        if accepted:
            # Stamp time of acceptance
            goal_info.stamp = self._node.get_clock().now().to_msg()
            # Create a goal handle
            try:
                with self._lock:
                    # Use the async-aware handle subclass instead of rclpy's default.
                    goal_handle = ServerGoalHandleX(self, goal_info, goal_request.goal)
            except RuntimeError as e:
                self._node.get_logger().error(
                    'Failed to accept new goal with ID {0}: {1}'.format(goal_uuid.uuid, e))
                accepted = False
            else:
                self._goal_handles[bytes(goal_uuid.uuid)] = goal_handle
        # Send response
        response_msg = self._action_type.Impl.SendGoalService.Response()
        response_msg.accepted = accepted
        response_msg.stamp = goal_info.stamp
        self._handle.send_goal_response(request_header, response_msg)
        if not accepted:
            self._node.get_logger().debug('New goal rejected: {0}'.format(goal_uuid.uuid))
            return
        self._node.get_logger().debug('New goal accepted: {0}'.format(goal_uuid.uuid))
        # Provide the user a reference to the goal handle
        # await await_or_execute(self._handle_accepted_callback, goal_handle)
        asyncio.create_task(self._handle_accepted_callback(goal_handle))
    async def notify_execute_async(self, goal_handle, execute_callback):
        """Schedule the user's execute callback for *goal_handle* as an asyncio task.

        :param goal_handle: the accepted goal handle to execute.
        :param execute_callback: explicit callback; ``None`` uses the server's
            registered one, and if neither exists the call is a no-op.
        """
        # Use provided callback, defaulting to a previously registered callback
        if execute_callback is None:
            if self._execute_callback is None:
                return
            execute_callback = self._execute_callback
        # Schedule user callback for execution
        # NOTE(review): debug logging of the current running loop — consider removing.
        self._node.get_logger().info(f"{events.get_running_loop()}")
        asyncio.create_task(self._execute_goal(execute_callback, goal_handle))
        # loop = asyncio.new_event_loop()
        # asyncio.set_event_loop(loop)
        # task = loop.create_task(self._execute_goal(execute_callback, goal_handle))
        # await task
class ActionClientX(ActionClient):
    """ActionClient variant exposing feedback and the final result as an async generator.

    ``send_goal_async`` yields ``(feedback, None)`` tuples while the goal runs
    and finally ``(None, result)`` once the goal completes.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Fix: the queue was a class attribute, shared by every ActionClientX
        # instance (interleaving feedback of unrelated goals) and created at
        # import time. Make it per-instance, created when the client is built.
        self.feedback_queue = asyncio.Queue()

    async def feedback_cb(self, msg):
        """Forward a feedback message into the queue drained by ``send_goal_async``."""
        await self.feedback_queue.put(msg)

    async def send_goal_async(self, goal_msg):
        """Send *goal_msg*; yield ``(feedback, None)`` pairs, then ``(None, result)``.

        :param goal_msg: the action goal message to send.
        :raises Exception: if the server rejects the goal.
        """
        goal_future = super().send_goal_async(
            goal_msg,
            feedback_callback=self.feedback_cb
        )
        client_goal_handle = await asyncio.ensure_future(goal_future)
        if not client_goal_handle.accepted:
            raise Exception("Goal rejected.")
        result_future = client_goal_handle.get_result_async()
        while True:
            feedback_future = asyncio.ensure_future(self.feedback_queue.get())
            await asyncio.wait(
                [result_future, feedback_future], return_when=asyncio.FIRST_COMPLETED
            )
            if result_future.done():
                # Fix: cancel the still-pending queue read so the task is not
                # leaked after the generator finishes.
                feedback_future.cancel()
                yield (None, result_future.result().result)
                break
            else:
                yield (feedback_future.result().feedback, None)
async def main(node):
    """Demo: send a Fibonacci goal via ActionClientX and print streamed feedback, then the result."""
    print('Node started.')
    client = ActionClientX(node, Fibonacci, 'fibonacci')
    goal = Fibonacci.Goal()
    goal.order = 10
    async for feedback, result in client.send_goal_async(goal):
        if feedback is None:
            print(f'Result: {result}')
        else:
            print(f'Feedback: {feedback}')
    print('Finished.')
async def ros_loop_node(node):
    """Cooperatively spin a single rclpy node from inside a running asyncio loop."""
    spin_once = rclpy.spin_once
    still_ok = rclpy.ok
    while still_ok():
        # Non-blocking spin, then yield so other asyncio tasks can run.
        spin_once(node, timeout_sec=0)
        await asyncio.sleep(1e-4)
async def ros_loop(executor: Executor):
    """Cooperatively drive an rclpy Executor from inside a running asyncio loop."""
    spin_once = executor.spin_once
    while rclpy.ok():
        # Non-blocking spin, then yield control back to asyncio.
        spin_once(timeout_sec=0)
        await asyncio.sleep(1e-4)
def run_event_loop():
    """Create a fresh event loop, publish it via the module-global ``loop``, and run it forever."""
    global loop
    new_loop = asyncio.new_event_loop()
    loop = new_loop
    asyncio.set_event_loop(new_loop)
    new_loop.run_forever()
def run_event_loop_in_thread():
    """Start the module event loop (``run_event_loop``) in a background thread.

    Fixes: the original discarded the Thread handle (impossible to monitor or
    join) and left the thread non-daemon, so the forever-running loop blocked
    interpreter exit.

    :return: the started ``threading.Thread`` running the event loop.
    """
    thread = threading.Thread(
        target=run_event_loop, name="rclpyx-event-loop", daemon=True
    )
    thread.start()
    return thread
if __name__ == "__main__":
    rclpy.init()
    node = rclpy.create_node('async_subscriber')
    # Fixes: the original passed the bare node to ros_loop(), which expects an
    # rclpy Executor (ros_loop_node() is the node variant), and called main()
    # without its required `node` argument. asyncio.wait() also no longer
    # accepts raw coroutines (Python 3.11+), so the coroutines are combined
    # with gather() on an explicitly created loop.
    runner = asyncio.new_event_loop()
    asyncio.set_event_loop(runner)
    try:
        runner.run_until_complete(asyncio.gather(ros_loop_node(node), main(node)))
    finally:
        runner.close()

View File

@@ -9,125 +9,49 @@
"parent": null, "parent": null,
"type": "device", "type": "device",
"class": "bioyond_dispensing_station", "class": "bioyond_dispensing_station",
"position": {
"x": 0,
"y": 0,
"z": 0
},
"config": { "config": {
"api_key": "YOUR_API_KEY", "config": {
"api_host": "http://your-api-host:port", "api_key": "DE9BDDA0",
"material_type_mappings": { "api_host": "http://192.168.1.200:44388",
"BIOYOND_PolymerStation_1FlaskCarrier": [ "material_type_mappings": {
"烧杯", "BIOYOND_PolymerStation_1FlaskCarrier": [
"uuid-placeholder-flask" "烧杯",
], "3a14196b-24f2-ca49-9081-0cab8021bf1a"
"BIOYOND_PolymerStation_1BottleCarrier": [ ],
"试剂瓶", "BIOYOND_PolymerStation_1BottleCarrier": [
"uuid-placeholder-bottle" "试剂瓶",
], "3a14196b-8bcf-a460-4f74-23f21ca79e72"
"BIOYOND_PolymerStation_6StockCarrier": [ ],
"分装板", "BIOYOND_PolymerStation_6StockCarrier": [
"uuid-placeholder-stock-6" "分装板",
], "3a14196e-5dfe-6e21-0c79-fe2036d052c4"
"BIOYOND_PolymerStation_Liquid_Vial": [ ],
"10%分装小瓶", "BIOYOND_PolymerStation_Liquid_Vial": [
"uuid-placeholder-liquid-vial" "10%分装小瓶",
], "3a14196c-76be-2279-4e22-7310d69aed68"
"BIOYOND_PolymerStation_Solid_Vial": [ ],
"90%分装小瓶", "BIOYOND_PolymerStation_Solid_Vial": [
"uuid-placeholder-solid-vial" "90%分装小瓶",
], "3a14196c-cdcf-088d-dc7d-5cf38f0ad9ea"
"BIOYOND_PolymerStation_8StockCarrier": [ ],
"样品板", "BIOYOND_PolymerStation_8StockCarrier": [
"uuid-placeholder-stock-8" "样品板",
], "3a14196e-b7a0-a5da-1931-35f3000281e9"
"BIOYOND_PolymerStation_Solid_Stock": [ ],
"样品瓶", "BIOYOND_PolymerStation_Solid_Stock": [
"uuid-placeholder-solid-stock" "样品瓶",
] "3a14196a-cf7d-8aea-48d8-b9662c7dba94"
}, ]
"warehouse_mapping": {
"粉末堆栈": {
"uuid": "uuid-placeholder-powder-stack",
"site_uuids": {
"A01": "uuid-placeholder-powder-A01",
"A02": "uuid-placeholder-powder-A02",
"A03": "uuid-placeholder-powder-A03",
"A04": "uuid-placeholder-powder-A04",
"B01": "uuid-placeholder-powder-B01",
"B02": "uuid-placeholder-powder-B02",
"B03": "uuid-placeholder-powder-B03",
"B04": "uuid-placeholder-powder-B04",
"C01": "uuid-placeholder-powder-C01",
"C02": "uuid-placeholder-powder-C02",
"C03": "uuid-placeholder-powder-C03",
"C04": "uuid-placeholder-powder-C04",
"D01": "uuid-placeholder-powder-D01",
"D02": "uuid-placeholder-powder-D02",
"D03": "uuid-placeholder-powder-D03",
"D04": "uuid-placeholder-powder-D04"
}
},
"溶液堆栈": {
"uuid": "uuid-placeholder-liquid-stack",
"site_uuids": {
"A01": "uuid-placeholder-liquid-A01",
"A02": "uuid-placeholder-liquid-A02",
"A03": "uuid-placeholder-liquid-A03",
"A04": "uuid-placeholder-liquid-A04",
"B01": "uuid-placeholder-liquid-B01",
"B02": "uuid-placeholder-liquid-B02",
"B03": "uuid-placeholder-liquid-B03",
"B04": "uuid-placeholder-liquid-B04",
"C01": "uuid-placeholder-liquid-C01",
"C02": "uuid-placeholder-liquid-C02",
"C03": "uuid-placeholder-liquid-C03",
"C04": "uuid-placeholder-liquid-C04",
"D01": "uuid-placeholder-liquid-D01",
"D02": "uuid-placeholder-liquid-D02",
"D03": "uuid-placeholder-liquid-D03",
"D04": "uuid-placeholder-liquid-D04"
}
},
"试剂堆栈": {
"uuid": "uuid-placeholder-reagent-stack",
"site_uuids": {
"A01": "uuid-placeholder-reagent-A01",
"A02": "uuid-placeholder-reagent-A02",
"A03": "uuid-placeholder-reagent-A03",
"A04": "uuid-placeholder-reagent-A04",
"B01": "uuid-placeholder-reagent-B01",
"B02": "uuid-placeholder-reagent-B02",
"B03": "uuid-placeholder-reagent-B03",
"B04": "uuid-placeholder-reagent-B04"
}
} }
}, },
"http_service_config": { "deck": {
"http_service_host": "127.0.0.1", "data": {
"http_service_port": 8080 "_resource_child_name": "Bioyond_Dispensing_Deck",
}, "_resource_type": "unilabos.resources.bioyond.decks:BIOYOND_PolymerPreparationStation_Deck"
"material_default_parameters": {
"NMP": {
"unit": "毫升",
"density": "1.03",
"densityUnit": "g/mL",
"description": "N-甲基吡咯烷酮 (N-Methyl-2-pyrrolidone)"
} }
}, },
"material_type_parameters": {} "protocol_type": []
}, },
"deck": {
"data": {
"_resource_child_name": "Bioyond_Dispensing_Deck",
"_resource_type": "unilabos.resources.bioyond.decks:BIOYOND_PolymerPreparationStation_Deck"
}
},
"size_x": 2700.0,
"size_y": 1080.0,
"size_z": 1500.0,
"protocol_type": [],
"data": {} "data": {}
}, },
{ {

View File

@@ -1,795 +0,0 @@
{
"nodes": [
{
"id": "PRCXI",
"name": "PRCXI",
"type": "device",
"class": "liquid_handler.prcxi",
"parent": "",
"pose": {
"size": {
"width": 562,
"height": 394,
"depth": 0
}
},
"config": {
"axis": "Left",
"deck": {
"_resource_type": "unilabos.devices.liquid_handling.prcxi.prcxi:PRCXI9300Deck",
"_resource_child_name": "PRCXI_Deck"
},
"host": "10.20.30.184",
"port": 9999,
"debug": true,
"setup": true,
"is_9320": true,
"timeout": 10,
"matrix_id": "5de524d0-3f95-406c-86dd-f83626ebc7cb",
"simulator": true,
"channel_num": 2
},
"data": {
"reset_ok": true
},
"schema": {},
"description": "",
"model": null,
"position": {
"x": 0,
"y": 240,
"z": 0
}
},
{
"id": "PRCXI_Deck",
"name": "PRCXI_Deck",
"children": [],
"parent": "PRCXI",
"type": "deck",
"class": "",
"position": {
"x": 10,
"y": 10,
"z": 0
},
"config": {
"type": "PRCXI9300Deck",
"size_x": 542,
"size_y": 374,
"size_z": 0,
"rotation": {
"x": 0,
"y": 0,
"z": 0,
"type": "Rotation"
},
"category": "deck",
"barcode": null
},
"data": {}
},
{
"id": "T1",
"name": "T1",
"children": [],
"parent": "PRCXI_Deck",
"type": "plate",
"class": "",
"position": {
"x": 0,
"y": 288,
"z": 0
},
"config": {
"type": "PRCXI9300Container",
"size_x": 127,
"size_y": 85.5,
"size_z": 10,
"rotation": {
"x": 0,
"y": 0,
"z": 0,
"type": "Rotation"
},
"category": "plate",
"model": null,
"barcode": null,
"ordering": {},
"sites": [
{
"label": "T1",
"visible": true,
"position": { "x": 0, "y": 0, "z": 0 },
"size": { "width": 128.0, "height": 86, "depth": 0 },
"content_type": [
"plate",
"tip_rack",
"plates",
"tip_racks",
"tube_rack"
]
}
]
},
"data": {}
},
{
"id": "T2",
"name": "T2",
"children": [],
"parent": "PRCXI_Deck",
"type": "plate",
"class": "",
"position": {
"x": 138,
"y": 288,
"z": 0
},
"config": {
"type": "PRCXI9300Container",
"size_x": 127,
"size_y": 85.5,
"size_z": 10,
"rotation": {
"x": 0,
"y": 0,
"z": 0,
"type": "Rotation"
},
"category": "plate",
"model": null,
"barcode": null,
"ordering": {},
"sites": [
{
"label": "T2",
"visible": true,
"position": { "x": 0, "y": 0, "z": 0 },
"size": { "width": 128.0, "height": 86, "depth": 0 },
"content_type": [
"plate",
"tip_rack",
"plates",
"tip_racks",
"tube_rack"
]
}
]
},
"data": {}
},
{
"id": "T3",
"name": "T3",
"children": [],
"parent": "PRCXI_Deck",
"type": "plate",
"class": "",
"position": {
"x": 276,
"y": 288,
"z": 0
},
"config": {
"type": "PRCXI9300Container",
"size_x": 127,
"size_y": 85.5,
"size_z": 10,
"rotation": {
"x": 0,
"y": 0,
"z": 0,
"type": "Rotation"
},
"category": "plate",
"model": null,
"barcode": null,
"ordering": {},
"sites": [
{
"label": "T3",
"visible": true,
"position": { "x": 0, "y": 0, "z": 0 },
"size": { "width": 128.0, "height": 86, "depth": 0 },
"content_type": [
"plate",
"tip_rack",
"plates",
"tip_racks",
"tube_rack"
]
}
]
},
"data": {}
},
{
"id": "T4",
"name": "T4",
"children": [],
"parent": "PRCXI_Deck",
"type": "plate",
"class": "",
"position": {
"x": 414,
"y": 288,
"z": 0
},
"config": {
"type": "PRCXI9300Container",
"size_x": 127,
"size_y": 85.5,
"size_z": 10,
"rotation": {
"x": 0,
"y": 0,
"z": 0,
"type": "Rotation"
},
"category": "plate",
"model": null,
"barcode": null,
"ordering": {},
"sites": [
{
"label": "T4",
"visible": true,
"position": { "x": 0, "y": 0, "z": 0 },
"size": { "width": 128.0, "height": 86, "depth": 0 },
"content_type": [
"plate",
"tip_rack",
"plates",
"tip_racks",
"tube_rack"
]
}
]
},
"data": {}
},
{
"id": "T5",
"name": "T5",
"children": [],
"parent": "PRCXI_Deck",
"type": "plate",
"class": "",
"position": {
"x": 0,
"y": 192,
"z": 0
},
"config": {
"type": "PRCXI9300Container",
"size_x": 127,
"size_y": 85.5,
"size_z": 10,
"rotation": {
"x": 0,
"y": 0,
"z": 0,
"type": "Rotation"
},
"category": "plate",
"model": null,
"barcode": null,
"ordering": {},
"sites": [
{
"label": "T5",
"visible": true,
"position": { "x": 0, "y": 0, "z": 0 },
"size": { "width": 128.0, "height": 86, "depth": 0 },
"content_type": [
"plate",
"tip_rack",
"plates",
"tip_racks",
"tube_rack"
]
}
]
},
"data": {}
},
{
"id": "T6",
"name": "T6",
"children": [],
"parent": "PRCXI_Deck",
"type": "plate",
"class": "",
"position": {
"x": 138,
"y": 192,
"z": 0
},
"config": {
"type": "PRCXI9300Container",
"size_x": 127,
"size_y": 85.5,
"size_z": 10,
"rotation": {
"x": 0,
"y": 0,
"z": 0,
"type": "Rotation"
},
"category": "plate",
"model": null,
"barcode": null,
"ordering": {},
"sites": [
{
"label": "T6",
"visible": true,
"position": { "x": 0, "y": 0, "z": 0 },
"size": { "width": 128.0, "height": 86, "depth": 0 },
"content_type": [
"plate",
"tip_rack",
"plates",
"tip_racks",
"tube_rack"
]
}
]
},
"data": {}
},
{
"id": "T7",
"name": "T7",
"children": [],
"parent": "PRCXI_Deck",
"type": "plate",
"class": "",
"position": {
"x": 276,
"y": 192,
"z": 0
},
"config": {
"type": "PRCXI9300Container",
"size_x": 127,
"size_y": 85.5,
"size_z": 10,
"rotation": {
"x": 0,
"y": 0,
"z": 0,
"type": "Rotation"
},
"category": "plate",
"model": null,
"barcode": null,
"ordering": {},
"sites": [
{
"label": "T7",
"visible": true,
"position": { "x": 0, "y": 0, "z": 0 },
"size": { "width": 128.0, "height": 86, "depth": 0 },
"content_type": [
"plate",
"tip_rack",
"plates",
"tip_racks",
"tube_rack"
]
}
]
},
"data": {}
},
{
"id": "T8",
"name": "T8",
"children": [],
"parent": "PRCXI_Deck",
"type": "plate",
"class": "",
"position": {
"x": 414,
"y": 192,
"z": 0
},
"config": {
"type": "PRCXI9300Container",
"size_x": 127,
"size_y": 85.5,
"size_z": 10,
"rotation": {
"x": 0,
"y": 0,
"z": 0,
"type": "Rotation"
},
"category": "plate",
"model": null,
"barcode": null,
"ordering": {},
"sites": [
{
"label": "T8",
"visible": true,
"position": { "x": 0, "y": 0, "z": 0 },
"size": { "width": 128.0, "height": 86, "depth": 0 },
"content_type": [
"plate",
"tip_rack",
"plates",
"tip_racks",
"tube_rack"
]
}
]
},
"data": {}
},
{
"id": "T9",
"name": "T9",
"children": [],
"parent": "PRCXI_Deck",
"type": "plate",
"class": "",
"position": {
"x": 0,
"y": 96,
"z": 0
},
"config": {
"type": "PRCXI9300Container",
"size_x": 127,
"size_y": 85.5,
"size_z": 10,
"rotation": {
"x": 0,
"y": 0,
"z": 0,
"type": "Rotation"
},
"category": "plate",
"model": null,
"barcode": null,
"ordering": {},
"sites": [
{
"label": "T9",
"visible": true,
"position": { "x": 0, "y": 0, "z": 0 },
"size": { "width": 128.0, "height": 86, "depth": 0 },
"content_type": [
"plate",
"tip_rack",
"plates",
"tip_racks",
"tube_rack"
]
}
]
},
"data": {}
},
{
"id": "T10",
"name": "T10",
"children": [],
"parent": "PRCXI_Deck",
"type": "plate",
"class": "",
"position": {
"x": 138,
"y": 96,
"z": 0
},
"config": {
"type": "PRCXI9300Container",
"size_x": 127,
"size_y": 85.5,
"size_z": 10,
"rotation": {
"x": 0,
"y": 0,
"z": 0,
"type": "Rotation"
},
"category": "plate",
"model": null,
"barcode": null,
"ordering": {},
"sites": [
{
"label": "T10",
"visible": true,
"position": { "x": 0, "y": 0, "z": 0 },
"size": { "width": 128.0, "height": 86, "depth": 0 },
"content_type": [
"plate",
"tip_rack",
"plates",
"tip_racks",
"tube_rack"
]
}
]
},
"data": {}
},
{
"id": "T11",
"name": "T11",
"children": [],
"parent": "PRCXI_Deck",
"type": "plate",
"class": "",
"position": {
"x": 276,
"y": 96,
"z": 0
},
"config": {
"type": "PRCXI9300Container",
"size_x": 127,
"size_y": 85.5,
"size_z": 10,
"rotation": {
"x": 0,
"y": 0,
"z": 0,
"type": "Rotation"
},
"category": "plate",
"model": null,
"barcode": null,
"ordering": {},
"sites": [
{
"label": "T11",
"visible": true,
"position": { "x": 0, "y": 0, "z": 0 },
"size": { "width": 128.0, "height": 86, "depth": 0 },
"content_type": [
"plate",
"tip_rack",
"plates",
"tip_racks",
"tube_rack"
]
}
]
},
"data": {}
},
{
"id": "T12",
"name": "T12",
"children": [],
"parent": "PRCXI_Deck",
"type": "plate",
"class": "",
"position": {
"x": 414,
"y": 96,
"z": 0
},
"config": {
"type": "PRCXI9300Container",
"size_x": 127,
"size_y": 85.5,
"size_z": 10,
"rotation": {
"x": 0,
"y": 0,
"z": 0,
"type": "Rotation"
},
"category": "plate",
"model": null,
"barcode": null,
"ordering": {},
"sites": [
{
"label": "T12",
"visible": true,
"position": { "x": 0, "y": 0, "z": 0 },
"size": { "width": 128.0, "height": 86, "depth": 0 },
"content_type": [
"plate",
"tip_rack",
"plates",
"tip_racks",
"tube_rack"
]
}
]
},
"data": {}
},
{
"id": "T13",
"name": "T13",
"children": [],
"parent": "PRCXI_Deck",
"type": "plate",
"class": "",
"position": {
"x": 0,
"y": 0,
"z": 0
},
"config": {
"type": "PRCXI9300Container",
"size_x": 127,
"size_y": 85.5,
"size_z": 10,
"rotation": {
"x": 0,
"y": 0,
"z": 0,
"type": "Rotation"
},
"category": "plate",
"model": null,
"barcode": null,
"ordering": {},
"sites": [
{
"label": "T13",
"visible": true,
"position": { "x": 0, "y": 0, "z": 0 },
"size": { "width": 128.0, "height": 86, "depth": 0 },
"content_type": [
"plate",
"tip_rack",
"plates",
"tip_racks",
"tube_rack"
]
}
]
},
"data": {}
},
{
"id": "T14",
"name": "T14",
"children": [],
"parent": "PRCXI_Deck",
"type": "plate",
"class": "",
"position": {
"x": 138,
"y": 0,
"z": 0
},
"config": {
"type": "PRCXI9300Container",
"size_x": 127,
"size_y": 85.5,
"size_z": 10,
"rotation": {
"x": 0,
"y": 0,
"z": 0,
"type": "Rotation"
},
"category": "plate",
"model": null,
"barcode": null,
"ordering": {},
"sites": [
{
"label": "T14",
"visible": true,
"position": { "x": 0, "y": 0, "z": 0 },
"size": { "width": 128.0, "height": 86, "depth": 0 },
"content_type": [
"plate",
"tip_rack",
"plates",
"tip_racks",
"tube_rack"
]
}
]
},
"data": {}
},
{
"id": "T15",
"name": "T15",
"children": [],
"parent": "PRCXI_Deck",
"type": "plate",
"class": "",
"position": {
"x": 276,
"y": 0,
"z": 0
},
"config": {
"type": "PRCXI9300Container",
"size_x": 127,
"size_y": 85.5,
"size_z": 10,
"rotation": {
"x": 0,
"y": 0,
"z": 0,
"type": "Rotation"
},
"category": "plate",
"model": null,
"barcode": null,
"ordering": {},
"sites": [
{
"label": "T15",
"visible": true,
"position": { "x": 0, "y": 0, "z": 0 },
"size": { "width": 128.0, "height": 86, "depth": 0 },
"content_type": [
"plate",
"tip_rack",
"plates",
"tip_racks",
"tube_rack"
]
}
]
},
"data": {}
},
{
"id": "T16",
"name": "T16",
"children": [],
"parent": "PRCXI_Deck",
"type": "plate",
"class": "",
"position": {
"x": 414,
"y": 0,
"z": 0
},
"config": {
"type": "PRCXI9300Container",
"size_x": 127,
"size_y": 85.5,
"size_z": 10,
"rotation": {
"x": 0,
"y": 0,
"z": 0,
"type": "Rotation"
},
"category": "plate",
"model": null,
"barcode": null,
"ordering": {},
"sites": [
{
"label": "T16",
"visible": true,
"position": { "x": 0, "y": 0, "z": 0 },
"size": { "width": 128.0, "height": 86, "depth": 0 },
"content_type": [
"plate",
"tip_rack",
"plates",
"tip_racks",
"tube_rack"
]
}
]
},
"data": {}
}
],
"edges": []
}

View File

@@ -14,200 +14,60 @@
], ],
"type": "device", "type": "device",
"class": "reaction_station.bioyond", "class": "reaction_station.bioyond",
"position": { "position": {"x": 0, "y": 3800, "z": 0},
"x": 0,
"y": 1100,
"z": 0
},
"config": { "config": {
"api_key": "YOUR_API_KEY", "config": {
"api_host": "http://your-api-host:port", "api_key": "DE9BDDA0",
"workflow_mappings": { "api_host": "http://192.168.1.200:44402",
"reactor_taken_out": "workflow-uuid-reactor-out", "workflow_mappings": {
"reactor_taken_in": "workflow-uuid-reactor-in", "reactor_taken_out": "3a16081e-4788-ca37-eff4-ceed8d7019d1",
"Solid_feeding_vials": "workflow-uuid-solid-vials", "reactor_taken_in": "3a160df6-76b3-0957-9eb0-cb496d5721c6",
"Liquid_feeding_vials(non-titration)": "workflow-uuid-liquid-vials", "Solid_feeding_vials": "3a160877-87e7-7699-7bc6-ec72b05eb5e6",
"Liquid_feeding_solvents": "workflow-uuid-solvents", "Liquid_feeding_vials(non-titration)": "3a167d99-6158-c6f0-15b5-eb030f7d8e47",
"Liquid_feeding(titration)": "workflow-uuid-titration", "Liquid_feeding_solvents": "3a160824-0665-01ed-285a-51ef817a9046",
"liquid_feeding_beaker": "workflow-uuid-beaker", "Liquid_feeding(titration)": "3a16082a-96ac-0449-446a-4ed39f3365b6",
"Drip_back": "workflow-uuid-drip-back" "liquid_feeding_beaker": "3a16087e-124f-8ddb-8ec1-c2dff09ca784",
}, "Drip_back": "3a162cf9-6aac-565a-ddd7-682ba1796a4a"
"material_type_mappings": {
"BIOYOND_PolymerStation_Reactor": [
"反应器",
"uuid-placeholder-reactor"
],
"BIOYOND_PolymerStation_1BottleCarrier": [
"试剂瓶",
"uuid-placeholder-bottle"
],
"BIOYOND_PolymerStation_1FlaskCarrier": [
"烧杯",
"uuid-placeholder-beaker"
],
"BIOYOND_PolymerStation_6StockCarrier": [
"样品板",
"uuid-placeholder-sample-plate"
],
"BIOYOND_PolymerStation_Solid_Vial": [
"90%分装小瓶",
"uuid-placeholder-solid-vial"
],
"BIOYOND_PolymerStation_Liquid_Vial": [
"10%分装小瓶",
"uuid-placeholder-liquid-vial"
],
"BIOYOND_PolymerStation_TipBox": [
"枪头盒",
"uuid-placeholder-tipbox"
],
"BIOYOND_PolymerStation_Measurement_Vial": [
"测量小瓶",
"uuid-placeholder-measure-vial"
]
},
"warehouse_mapping": {
"堆栈1左": {
"uuid": "uuid-placeholder-stack1-left",
"site_uuids": {
"A01": "uuid-placeholder-site-A01",
"A02": "uuid-placeholder-site-A02",
"A03": "uuid-placeholder-site-A03",
"A04": "uuid-placeholder-site-A04",
"B01": "uuid-placeholder-site-B01",
"B02": "uuid-placeholder-site-B02",
"B03": "uuid-placeholder-site-B03",
"B04": "uuid-placeholder-site-B04",
"C01": "uuid-placeholder-site-C01",
"C02": "uuid-placeholder-site-C02",
"C03": "uuid-placeholder-site-C03",
"C04": "uuid-placeholder-site-C04",
"D01": "uuid-placeholder-site-D01",
"D02": "uuid-placeholder-site-D02",
"D03": "uuid-placeholder-site-D03",
"D04": "uuid-placeholder-site-D04"
}
}, },
"堆栈1右": { "material_type_mappings": {
"uuid": "uuid-placeholder-stack1-right", "BIOYOND_PolymerStation_Reactor": [
"site_uuids": { "反应器",
"A05": "uuid-placeholder-site-A05", "3a14233b-902d-0d7b-4533-3f60f1c41c1b"
"A06": "uuid-placeholder-site-A06", ],
"A07": "uuid-placeholder-site-A07", "BIOYOND_PolymerStation_1BottleCarrier": [
"A08": "uuid-placeholder-site-A08", "试剂瓶",
"B05": "uuid-placeholder-site-B05", "3a14233b-56e3-6c53-a8ab-fcaac163a9ba"
"B06": "uuid-placeholder-site-B06", ],
"B07": "uuid-placeholder-site-B07", "BIOYOND_PolymerStation_1FlaskCarrier": [
"B08": "uuid-placeholder-site-B08", "烧杯",
"C05": "uuid-placeholder-site-C05", "3a14233b-f0a9-ba84-eaa9-0d4718b361b6"
"C06": "uuid-placeholder-site-C06", ],
"C07": "uuid-placeholder-site-C07", "BIOYOND_PolymerStation_6StockCarrier": [
"C08": "uuid-placeholder-site-C08", "样品板",
"D05": "uuid-placeholder-site-D05", "3a142339-80de-8f25-6093-1b1b1b6c322e"
"D06": "uuid-placeholder-site-D06", ],
"D07": "uuid-placeholder-site-D07", "BIOYOND_PolymerStation_Solid_Vial": [
"D08": "uuid-placeholder-site-D08" "90%分装小瓶",
} "3a14233a-26e1-28f8-af6a-60ca06ba0165"
}, ],
"站内试剂存放堆栈": { "BIOYOND_PolymerStation_Liquid_Vial": [
"uuid": "uuid-placeholder-reagent-stack", "10%分装小瓶",
"site_uuids": { "3a14233a-84a3-088d-6676-7cb4acd57c64"
"A01": "uuid-placeholder-reagent-A01", ],
"A02": "uuid-placeholder-reagent-A02" "BIOYOND_PolymerStation_TipBox": [
} "枪头盒",
}, "3a143890-9d51-60ac-6d6f-6edb43c12041"
"测量小瓶仓库(测密度)": { ]
"uuid": "uuid-placeholder-density-stack",
"site_uuids": {
"A01": "uuid-placeholder-density-A01",
"A02": "uuid-placeholder-density-A02",
"A03": "uuid-placeholder-density-A03",
"B01": "uuid-placeholder-density-B01",
"B02": "uuid-placeholder-density-B02",
"B03": "uuid-placeholder-density-B03"
}
},
"站内Tip盒堆栈(左)": {
"uuid": "uuid-placeholder-tipstack-left",
"site_uuids": {
"A02": "uuid-placeholder-tip-A02",
"A03": "uuid-placeholder-tip-A03",
"B02": "uuid-placeholder-tip-B02",
"B03": "uuid-placeholder-tip-B03"
}
},
"站内Tip盒堆栈(右)": {
"uuid": "uuid-placeholder-tipstack-right",
"site_uuids": {
"A01": "uuid-placeholder-tip-A01",
"B01": "uuid-placeholder-tip-B01"
}
} }
}, },
"workflow_to_section_map": { "deck": {
"reactor_taken_in": "反应器放入", "data": {
"reactor_taken_out": "反应器取出", "_resource_child_name": "Bioyond_Deck",
"Solid_feeding_vials": "固体投料-小瓶", "_resource_type": "unilabos.resources.bioyond.decks:BIOYOND_PolymerReactionStation_Deck"
"Liquid_feeding_vials(non-titration)": "液体投料-小瓶(非滴定)",
"Liquid_feeding_solvents": "液体投料-溶剂",
"Liquid_feeding(titration)": "液体投料-滴定",
"liquid_feeding_beaker": "液体投料-烧杯",
"Drip_back": "液体回滴"
},
"action_names": {
"reactor_taken_in": {
"config": "通量-配置",
"stirring": "反应模块-开始搅拌"
},
"solid_feeding_vials": {
"feeding": "粉末加样模块-投料",
"observe": "反应模块-观察搅拌结果"
},
"liquid_feeding_vials_non_titration": {
"liquid": "稀释液瓶加液位-液体投料",
"observe": "反应模块-滴定结果观察"
},
"liquid_feeding_solvents": {
"liquid": "试剂AB放置位-试剂吸液分液",
"observe": "反应模块-观察搅拌结果"
},
"liquid_feeding_titration": {
"liquid": "稀释液瓶加液位-稀释液吸液分液",
"observe": "反应模块-滴定结果观察"
},
"liquid_feeding_beaker": {
"liquid": "烧杯溶液放置位-烧杯吸液分液",
"observe": "反应模块-观察搅拌结果"
},
"drip_back": {
"liquid": "试剂AB放置位-试剂吸液分液",
"observe": "反应模块-向下滴定结果观察"
} }
}, },
"http_service_config": { "protocol_type": []
"http_service_host": "127.0.0.1",
"http_service_port": 8080
},
"material_default_parameters": {
"NMP": {
"unit": "毫升",
"density": "1.03",
"densityUnit": "g/mL",
"description": "N-甲基吡咯烷酮 (N-Methyl-2-pyrrolidone)"
}
},
"material_type_parameters": {}
}, },
"deck": {
"data": {
"_resource_child_name": "Bioyond_Deck",
"_resource_type": "unilabos.resources.bioyond.decks:BIOYOND_PolymerReactionStation_Deck"
}
},
"size_x": 2700.0,
"size_y": 1080.0,
"size_z": 2500.0,
"protocol_type": [],
"data": {} "data": {}
}, },
{ {
@@ -217,11 +77,7 @@
"parent": "reaction_station_bioyond", "parent": "reaction_station_bioyond",
"type": "device", "type": "device",
"class": "reaction_station.reactor", "class": "reaction_station.reactor",
"position": { "position": {"x": 1150, "y": 380, "z": 0},
"x": 1150,
"y": 300,
"z": 0
},
"config": {}, "config": {},
"data": {} "data": {}
}, },
@@ -232,11 +88,7 @@
"parent": "reaction_station_bioyond", "parent": "reaction_station_bioyond",
"type": "device", "type": "device",
"class": "reaction_station.reactor", "class": "reaction_station.reactor",
"position": { "position": {"x": 1365, "y": 380, "z": 0},
"x": 1365,
"y": 300,
"z": 0
},
"config": {}, "config": {},
"data": {} "data": {}
}, },
@@ -247,11 +99,7 @@
"parent": "reaction_station_bioyond", "parent": "reaction_station_bioyond",
"type": "device", "type": "device",
"class": "reaction_station.reactor", "class": "reaction_station.reactor",
"position": { "position": {"x": 1580, "y": 380, "z": 0},
"x": 1580,
"y": 300,
"z": 0
},
"config": {}, "config": {},
"data": {} "data": {}
}, },
@@ -262,11 +110,7 @@
"parent": "reaction_station_bioyond", "parent": "reaction_station_bioyond",
"type": "device", "type": "device",
"class": "reaction_station.reactor", "class": "reaction_station.reactor",
"position": { "position": {"x": 1790, "y": 380, "z": 0},
"x": 1790,
"y": 300,
"z": 0
},
"config": {}, "config": {},
"data": {} "data": {}
}, },
@@ -277,11 +121,7 @@
"parent": "reaction_station_bioyond", "parent": "reaction_station_bioyond",
"type": "device", "type": "device",
"class": "reaction_station.reactor", "class": "reaction_station.reactor",
"position": { "position": {"x": 2010, "y": 380, "z": 0},
"x": 2010,
"y": 300,
"z": 0
},
"config": {}, "config": {},
"data": {} "data": {}
}, },
@@ -294,7 +134,7 @@
"class": "BIOYOND_PolymerReactionStation_Deck", "class": "BIOYOND_PolymerReactionStation_Deck",
"position": { "position": {
"x": 0, "x": 0,
"y": 1100, "y": 0,
"z": 0 "z": 0
}, },
"config": { "config": {

View File

@@ -1,28 +0,0 @@
{
"nodes": [
{
"id": "workbench_1",
"name": "虚拟工作台",
"children": [],
"parent": null,
"type": "device",
"class": "virtual_workbench",
"position": {
"x": 400,
"y": 300,
"z": 0
},
"config": {
"arm_operation_time": 3.0,
"heating_time": 10.0,
"num_heating_stations": 3
},
"data": {
"status": "Ready",
"arm_state": "idle",
"message": "工作台就绪"
}
}
],
"links": []
}

View File

@@ -0,0 +1,187 @@
# UniLabOS 日志配置说明
> **文件位置**: `unilabos/utils/log.py`
> **最后更新**: 2026-01-11
> **维护者**: Uni-Lab-OS 开发团队
本文档说明 UniLabOS 日志系统中对第三方库和内部模块的日志级别配置,避免控制台被过多的 DEBUG 日志淹没。
---
## 📋 已屏蔽的日志
以下库/模块的日志已被设置为 **WARNING** 或 **INFO** 级别,不再显示 DEBUG 日志:
### 1. pymodbusModbus 通信库)
**配置位置**: `log.py` 第196-200行
```python
# pymodbus 库的日志太详细,设置为 WARNING
logging.getLogger('pymodbus').setLevel(logging.WARNING)
logging.getLogger('pymodbus.logging').setLevel(logging.WARNING)
logging.getLogger('pymodbus.logging.base').setLevel(logging.WARNING)
logging.getLogger('pymodbus.logging.decoders').setLevel(logging.WARNING)
```
**屏蔽原因**:
- pymodbus 在 DEBUG 级别会输出每一次 Modbus 通信的详细信息
- 包括 `Processing: 0x5 0x1e 0x0 0x0...` 等原始数据
- 包括 `decoded PDU function_code(3 sub -1) -> ReadHoldingRegistersResponse(...)` 等解码信息
- 这些信息对日常使用价值不大,但会快速刷屏
**典型被屏蔽的日志**:
```
[DEBUG] Processing: 0x5 0x1e 0x0 0x0 0x0 0x7 0x1 0x3 0x4 0x0 0x0 0x0 0x0 [handleFrame:72] [pymodbus.logging.base]
[DEBUG] decoded PDU function_code(3 sub -1) -> ReadHoldingRegistersResponse(...) [decode:79] [pymodbus.logging.decoders]
```
---
### 2. websocketsWebSocket 库)
**配置位置**: `log.py` 第202-205行
```python
# websockets 库的日志输出较多,设置为 WARNING
logging.getLogger('websockets').setLevel(logging.WARNING)
logging.getLogger('websockets.client').setLevel(logging.WARNING)
logging.getLogger('websockets.server').setLevel(logging.WARNING)
```
**屏蔽原因**:
- WebSocket 连接、断开、心跳等信息在 DEBUG 级别会频繁输出
- 对于长时间运行的服务,这些日志意义不大
---
### 3. ROS Host Node设备状态更新
**配置位置**: `log.py` 第207-208行
```python
# ROS 节点的状态更新日志过于频繁,设置为 INFO
logging.getLogger('unilabos.ros.nodes.presets.host_node').setLevel(logging.INFO)
```
**屏蔽原因**:
- 设备状态更新(如手套箱压力)每隔几秒就会更新一次
- DEBUG 日志会记录每一次状态变化,导致日志刷屏
- 这些频繁的状态更新对调试价值不大
**典型被屏蔽的日志**:
```
[DEBUG] [/devices/host_node] Status updated: BatteryStation.data_glove_box_pressure = 4.229457855224609 [property_callback:666] [unilabos.ros.nodes.presets.host_node]
```
---
### 4. asyncio 和 urllib3
**配置位置**: `log.py` 第224-225行
```python
logging.getLogger("asyncio").setLevel(logging.INFO)
logging.getLogger("urllib3").setLevel(logging.INFO)
```
**屏蔽原因**:
- asyncio: 异步 IO 的内部调试信息
- urllib3: HTTP 请求库的连接池、重试等详细信息
---
## 🔧 如何临时启用这些日志(调试用)
### 方法1: 修改 log.py永久启用
在 `log.py` 的 `configure_logger()` 函数中,将对应库的日志级别改为 `logging.DEBUG`:
```python
# 临时启用 pymodbus 的 DEBUG 日志
logging.getLogger('pymodbus').setLevel(logging.DEBUG)
logging.getLogger('pymodbus.logging').setLevel(logging.DEBUG)
logging.getLogger('pymodbus.logging.base').setLevel(logging.DEBUG)
logging.getLogger('pymodbus.logging.decoders').setLevel(logging.DEBUG)
```
### 方法2: 在代码中临时启用(单次调试)
在需要调试的代码文件中添加:
```python
import logging
# 临时启用 pymodbus DEBUG 日志
logging.getLogger('pymodbus').setLevel(logging.DEBUG)
# 你的 Modbus 调试代码
...
# 调试完成后恢复
logging.getLogger('pymodbus').setLevel(logging.WARNING)
```
### 方法3: 使用环境变量或配置文件(推荐)
未来可以考虑在启动参数中添加 `--debug-modbus` 等选项来动态控制。
---
## 📊 日志级别说明
| 级别 | 数值 | 用途 | 是否显示 |
|------|------|------|---------|
| TRACE | 5 | 最详细的跟踪信息 | ✅ |
| DEBUG | 10 | 调试信息 | ✅ |
| INFO | 20 | 一般信息 | ✅ |
| WARNING | 30 | 警告信息 | ✅ |
| ERROR | 40 | 错误信息 | ✅ |
| CRITICAL | 50 | 严重错误 | ✅ |
**当前配置**:
- UniLabOS 自身代码: DEBUG 及以上全部显示
- pymodbus/websockets: **WARNING** 及以上显示(屏蔽 DEBUG/INFO)
- ROS host_node: **INFO** 及以上显示(屏蔽 DEBUG)
---
## ⚠️ 重要提示
### 修改生效时间
- 修改 `log.py` 后需要 **重启 unilab 服务** 才能生效
- 不需要重新安装或重新编译
### 调试 Modbus 通信问题
如果需要调试 Modbus 通信故障,应该:
1. 临时启用 pymodbus DEBUG 日志方法2
2. 复现问题
3. 查看详细的通信日志
4. 调试完成后记得恢复 WARNING 级别
### 调试设备状态问题
如果需要调试设备状态更新问题:
```python
logging.getLogger('unilabos.ros.nodes.presets.host_node').setLevel(logging.DEBUG)
```
---
## 📝 维护记录
| 日期 | 修改内容 | 操作人 |
|------|---------|--------|
| 2026-01-11 | 初始创建,添加 pymodbus、websockets、ROS host_node 屏蔽 | - |
| 2026-01-07 | 添加 pymodbus 和 websockets 屏蔽log-0107.py | - |
---
## 🔗 相关文件
- `log.py` - 日志配置主文件
- `unilabos/devices/workstation/coin_cell_assembly/` - 使用 Modbus 的扣电工作站代码
- `unilabos/ros/nodes/presets/host_node.py` - ROS 主机节点代码
---
**维护提示**: 如果添加了新的第三方库或发现新的日志刷屏问题,请在此文档中记录并更新 `log.py` 配置。

View File

@@ -182,49 +182,3 @@ def get_all_subscriptions(instance) -> list:
except Exception: except Exception:
pass pass
return subscriptions return subscriptions
def not_action(func: F) -> F:
    """Decorator that marks a method as *not* being a device action.

    Methods on a driver class decorated with ``@not_action`` are skipped
    when ``complete_registry`` scans for actions.  Use it for helper or
    internal utility methods that should stay public but must not be
    exposed as device actions.

    Example:
        class MyDriver:
            @not_action
            def helper_method(self):
                # not registered as an action
                pass

            def actual_action(self, param: str):
                # registered as an action
                self.helper_method()

    Note:
        - Can be combined with other decorators; place ``@not_action``
          outermost.
        - Only affects action discovery in ``complete_registry``; the
          method itself can still be called normally.
    """

    @wraps(func)
    def _proxy(*args, **kwargs):
        return func(*args, **kwargs)

    # Attach the marker flag that is_not_action() / the registry checks.
    _proxy._is_not_action = True  # type: ignore[attr-defined]
    return _proxy  # type: ignore[return-value]
def is_not_action(func) -> bool:
    """Check whether *func* carries the ``@not_action`` marker.

    Args:
        func: The callable to inspect.

    Returns:
        True if the function was decorated with ``@not_action``,
        False otherwise.
    """
    try:
        return func._is_not_action
    except AttributeError:
        return False

View File

@@ -24,7 +24,6 @@ class EnvironmentChecker:
"msgcenterpy": "msgcenterpy", "msgcenterpy": "msgcenterpy",
"opentrons_shared_data": "opentrons_shared_data", "opentrons_shared_data": "opentrons_shared_data",
"typing_extensions": "typing_extensions", "typing_extensions": "typing_extensions",
"crcmod": "crcmod-plus",
} }
# 特殊安装包(需要特殊处理的包) # 特殊安装包(需要特殊处理的包)

View File

@@ -27,9 +27,7 @@ __all__ = [
from ast import Constant from ast import Constant
from unilabos.resources.resource_tracker import PARAM_SAMPLE_UUIDS
from unilabos.utils import logger from unilabos.utils import logger
from unilabos.utils.decorator import is_not_action
class ImportManager: class ImportManager:
@@ -277,9 +275,6 @@ class ImportManager:
method_info = self._analyze_method_signature(method) method_info = self._analyze_method_signature(method)
result["status_methods"][actual_name] = method_info result["status_methods"][actual_name] = method_info
elif not name.startswith("_"): elif not name.startswith("_"):
# 检查是否被 @not_action 装饰器标记
if is_not_action(method):
continue
# 其他非_开头的方法归类为action # 其他非_开头的方法归类为action
method_info = self._analyze_method_signature(method) method_info = self._analyze_method_signature(method)
result["action_methods"][name] = method_info result["action_methods"][name] = method_info
@@ -335,25 +330,17 @@ class ImportManager:
if actual_name not in result["status_methods"]: if actual_name not in result["status_methods"]:
result["status_methods"][actual_name] = method_info result["status_methods"][actual_name] = method_info
else: else:
# 检查是否被 @not_action 装饰器标记
if self._is_not_action_method(node):
continue
# 其他非_开头的方法归类为action # 其他非_开头的方法归类为action
result["action_methods"][method_name] = method_info result["action_methods"][method_name] = method_info
return result return result
def _analyze_method_signature(self, method, skip_unilabos_params: bool = True) -> Dict[str, Any]: def _analyze_method_signature(self, method) -> Dict[str, Any]:
""" """
分析方法签名,提取具体的命名参数信息 分析方法签名,提取具体的命名参数信息
注意:此方法会跳过*args和**kwargs只提取具体的命名参数 注意:此方法会跳过*args和**kwargs只提取具体的命名参数
这样可以确保通过**dict方式传参时的准确性 这样可以确保通过**dict方式传参时的准确性
Args:
method: 要分析的方法
skip_unilabos_params: 是否跳过 unilabos 系统参数(如 sample_uuids
registry 补全时为 TrueJsonCommand 执行时为 False
示例用法: 示例用法:
method_info = self._analyze_method_signature(some_method) method_info = self._analyze_method_signature(some_method)
params = {"param1": "value1", "param2": "value2"} params = {"param1": "value1", "param2": "value2"}
@@ -374,10 +361,6 @@ class ImportManager:
if param.kind == param.VAR_KEYWORD: # **kwargs if param.kind == param.VAR_KEYWORD: # **kwargs
continue continue
# 跳过 sample_uuids 参数由系统自动注入registry 补全时跳过)
if skip_unilabos_params and param_name == PARAM_SAMPLE_UUIDS:
continue
is_required = param.default == inspect.Parameter.empty is_required = param.default == inspect.Parameter.empty
if is_required: if is_required:
num_required += 1 num_required += 1
@@ -467,13 +450,6 @@ class ImportManager:
return True return True
return False return False
def _is_not_action_method(self, node: ast.FunctionDef) -> bool:
"""检查是否是@not_action装饰的方法"""
for decorator in node.decorator_list:
if isinstance(decorator, ast.Name) and decorator.id == "not_action":
return True
return False
def _get_property_name_from_setter(self, node: ast.FunctionDef) -> str: def _get_property_name_from_setter(self, node: ast.FunctionDef) -> str:
"""从setter装饰器中获取属性名""" """从setter装饰器中获取属性名"""
for decorator in node.decorator_list: for decorator in node.decorator_list:
@@ -573,9 +549,6 @@ class ImportManager:
for i, arg in enumerate(node.args.args): for i, arg in enumerate(node.args.args):
if arg.arg == "self": if arg.arg == "self":
continue continue
# 跳过 sample_uuids 参数(由系统自动注入)
if arg.arg == PARAM_SAMPLE_UUIDS:
continue
arg_info = { arg_info = {
"name": arg.arg, "name": arg.arg,
"type": None, "type": None,

View File

@@ -192,6 +192,21 @@ def configure_logger(loglevel=None, working_dir=None):
# 添加处理器到根日志记录器 # 添加处理器到根日志记录器
root_logger.addHandler(console_handler) root_logger.addHandler(console_handler)
# 降低第三方库的日志级别,避免过多输出
# pymodbus 库的日志太详细,设置为 WARNING
logging.getLogger('pymodbus').setLevel(logging.WARNING)
logging.getLogger('pymodbus.logging').setLevel(logging.WARNING)
logging.getLogger('pymodbus.logging.base').setLevel(logging.WARNING)
logging.getLogger('pymodbus.logging.decoders').setLevel(logging.WARNING)
# websockets 库的日志输出较多,设置为 WARNING
logging.getLogger('websockets').setLevel(logging.WARNING)
logging.getLogger('websockets.client').setLevel(logging.WARNING)
logging.getLogger('websockets.server').setLevel(logging.WARNING)
# ROS 节点的状态更新日志过于频繁,设置为 INFO
logging.getLogger('unilabos.ros.nodes.presets.host_node').setLevel(logging.INFO)
# 如果指定了工作目录,添加文件处理器 # 如果指定了工作目录,添加文件处理器
if working_dir is not None: if working_dir is not None:
logs_dir = os.path.join(working_dir, "logs") logs_dir = os.path.join(working_dir, "logs")

View File

@@ -1,11 +1,7 @@
import psutil import psutil
import pywinauto import pywinauto
try: from pywinauto_recorder import UIApplication
from pywinauto_recorder import UIApplication from pywinauto_recorder.player import UIPath, click, focus_on_application, exists, find, get_wrapper_path
from pywinauto_recorder.player import UIPath, click, focus_on_application, exists, find, get_wrapper_path
except ImportError:
print("未安装pywinauto_recorder部分功能无法使用安装时注意enum")
pass
from pywinauto.controls.uiawrapper import UIAWrapper from pywinauto.controls.uiawrapper import UIAWrapper
from pywinauto.application import WindowSpecification from pywinauto.application import WindowSpecification
from pywinauto import findbestmatch from pywinauto import findbestmatch

View File

@@ -1,18 +0,0 @@
networkx
typing_extensions
websockets
msgcenterpy>=0.1.5
opentrons_shared_data
pint
fastapi
jinja2
requests
uvicorn
pyautogui
opcua
pyserial
pandas
crcmod-plus
pymodbus
matplotlib
pylibftdi

View File

@@ -1,104 +1,3 @@
"""
工作流转换模块 - JSON 到 WorkflowGraph 的转换流程
==================== 输入格式 (JSON) ====================
{
"workflow": [
{"action": "transfer_liquid", "action_args": {"sources": "cell_lines", "targets": "Liquid_1", "asp_vol": 100.0, "dis_vol": 74.75, ...}},
...
],
"reagent": {
"cell_lines": {"slot": 4, "well": ["A1", "A3", "A5"], "labware": "DRUG + YOYO-MEDIA"},
"Liquid_1": {"slot": 1, "well": ["A4", "A7", "A10"], "labware": "rep 1"},
...
}
}
==================== 转换步骤 ====================
第一步: 按 slot 去重创建 create_resource 节点(创建板子)
--------------------------------------------------------------------------------
- 首先创建一个 Group 节点type="Group", minimized=true用于包含所有 create_resource 节点
- 遍历所有 reagent按 slot 去重,为每个唯一的 slot 创建一个板子
- 所有 create_resource 节点的 parent_uuid 指向 Group 节点minimized=true
- 生成参数:
res_id: plate_slot_{slot}
device_id: /PRCXI
class_name: PRCXI_BioER_96_wellplate
parent: /PRCXI/PRCXI_Deck/T{slot}
slot_on_deck: "{slot}"
- 输出端口: labware用于连接 set_liquid_from_plate
- 控制流: create_resource 之间通过 ready 端口串联
示例: slot=1, slot=4 -> 创建 1 个 Group + 2 个 create_resource 节点
第二步: 为每个 reagent 创建 set_liquid_from_plate 节点(设置液体)
--------------------------------------------------------------------------------
- 首先创建一个 Group 节点type="Group", minimized=true用于包含所有 set_liquid_from_plate 节点
- 遍历所有 reagent为每个试剂创建 set_liquid_from_plate 节点
- 所有 set_liquid_from_plate 节点的 parent_uuid 指向 Group 节点minimized=true
- 生成参数:
plate: [](通过连接传递,来自 create_resource 的 labware
well_names: ["A1", "A3", "A5"](来自 reagent 的 well 数组)
liquid_names: ["cell_lines", "cell_lines", "cell_lines"](与 well 数量一致)
volumes: [1e5, 1e5, 1e5](与 well 数量一致,默认体积)
- 输入连接: create_resource (labware) -> set_liquid_from_plate (input_plate)
- 输出端口: output_wells用于连接 transfer_liquid
- 控制流: set_liquid_from_plate 连接在所有 create_resource 之后,通过 ready 端口串联
第三步: 解析 workflow创建 transfer_liquid 等动作节点
--------------------------------------------------------------------------------
- 遍历 workflow 数组,为每个动作创建步骤节点
- 参数重命名: asp_vol -> asp_vols, dis_vol -> dis_vols, asp_flow_rate -> asp_flow_rates, dis_flow_rate -> dis_flow_rates
- 参数扩展: 根据 targets 的 wells 数量,将单值扩展为数组
例: asp_vol=100.0, targets 有 3 个 wells -> asp_vols=[100.0, 100.0, 100.0]
- 连接处理: 如果 sources/targets 已通过 set_liquid_from_plate 连接,参数值改为 []
- 输入连接: set_liquid_from_plate (output_wells) -> transfer_liquid (sources_identifier / targets_identifier)
- 输出端口: sources_out, targets_out用于连接下一个 transfer_liquid
==================== 连接关系图 ====================
控制流 (ready 端口串联):
- create_resource 之间: 无 ready 连接
- set_liquid_from_plate 之间: 无 ready 连接
- create_resource 与 set_liquid_from_plate 之间: 无 ready 连接
- transfer_liquid 之间: 通过 ready 端口串联
transfer_liquid_1 -> transfer_liquid_2 -> transfer_liquid_3 -> ...
物料流:
[create_resource] --labware--> [set_liquid_from_plate] --output_wells--> [transfer_liquid] --sources_out/targets_out--> [下一个 transfer_liquid]
(slot=1) (cell_lines) (input_plate) (sources_identifier) (sources_identifier)
(slot=4) (Liquid_1) (targets_identifier) (targets_identifier)
==================== 端口映射 ====================
create_resource:
输出: labware
set_liquid_from_plate:
输入: input_plate
输出: output_plate, output_wells
transfer_liquid:
输入: sources -> sources_identifier, targets -> targets_identifier
输出: sources -> sources_out, targets -> targets_out
==================== 设备名配置 (device_name) ====================
每个节点都有 device_name 字段,指定在哪个设备上执行:
- create_resource: device_name = "host_node"(固定)
- set_liquid_from_plate: device_name = "PRCXI"(可配置,见 DEVICE_NAME_DEFAULT
- transfer_liquid 等动作: device_name = "PRCXI"(可配置,见 DEVICE_NAME_DEFAULT
==================== 校验规则 ====================
- 检查 sources/targets 是否在 reagent 中定义
- 检查 sources 和 targets 的 wells 数量是否匹配
- 检查参数数组长度是否与 wells 数量一致
- 如有问题,在 footer 中添加 [WARN: ...] 标记
"""
import re import re
import uuid import uuid
@@ -109,35 +8,6 @@ from typing import Dict, List, Any, Tuple, Optional
Json = Dict[str, Any] Json = Dict[str, Any]
# ==================== 默认配置 ====================
# 设备名配置
DEVICE_NAME_HOST = "host_node" # create_resource 固定在 host_node 上执行
DEVICE_NAME_DEFAULT = "PRCXI" # transfer_liquid, set_liquid_from_plate 等动作的默认设备名
# 节点类型
NODE_TYPE_DEFAULT = "ILab" # 所有节点的默认类型
# create_resource 节点默认参数
CREATE_RESOURCE_DEFAULTS = {
"device_id": "/PRCXI",
"parent_template": "/PRCXI/PRCXI_Deck/T{slot}", # {slot} 会被替换为实际的 slot 值
"class_name": "PRCXI_BioER_96_wellplate",
}
# 默认液体体积 (uL)
DEFAULT_LIQUID_VOLUME = 1e5
# 参数重命名映射:单数 -> 复数(用于 transfer_liquid 等动作)
PARAM_RENAME_MAPPING = {
"asp_vol": "asp_vols",
"dis_vol": "dis_vols",
"asp_flow_rate": "asp_flow_rates",
"dis_flow_rate": "dis_flow_rates",
}
# ---------------- Graph ---------------- # ---------------- Graph ----------------
@@ -358,7 +228,7 @@ def refactor_data(
def build_protocol_graph( def build_protocol_graph(
labware_info: Dict[str, Dict[str, Any]], labware_info: List[Dict[str, Any]],
protocol_steps: List[Dict[str, Any]], protocol_steps: List[Dict[str, Any]],
workstation_name: str, workstation_name: str,
action_resource_mapping: Optional[Dict[str, str]] = None, action_resource_mapping: Optional[Dict[str, str]] = None,
@@ -366,260 +236,112 @@ def build_protocol_graph(
"""统一的协议图构建函数,根据设备类型自动选择构建逻辑 """统一的协议图构建函数,根据设备类型自动选择构建逻辑
Args: Args:
labware_info: labware 信息字典,格式为 {name: {slot, well, labware, ...}, ...} labware_info: labware 信息字典
protocol_steps: 协议步骤列表 protocol_steps: 协议步骤列表
workstation_name: 工作站名称 workstation_name: 工作站名称
action_resource_mapping: action 到 resource_name 的映射字典,可选 action_resource_mapping: action 到 resource_name 的映射字典,可选
""" """
G = WorkflowGraph() G = WorkflowGraph()
resource_last_writer = {} # reagent_name -> "node_id:port" resource_last_writer = {}
slot_to_create_resource = {} # slot -> create_resource node_id
protocol_steps = refactor_data(protocol_steps, action_resource_mapping) protocol_steps = refactor_data(protocol_steps, action_resource_mapping)
# 有机化学&移液站协议图构建
WORKSTATION_ID = workstation_name
# ==================== 第一步:按 slot 去重创建 create_resource 节点 ==================== # 为所有labware创建资源节点
# 收集所有唯一的 slot
slots_info = {} # slot -> {labware, res_id}
for labware_id, item in labware_info.items():
slot = str(item.get("slot", ""))
if slot and slot not in slots_info:
res_id = f"plate_slot_{slot}"
slots_info[slot] = {
"labware": item.get("labware", ""),
"res_id": res_id,
}
# 创建 Group 节点,包含所有 create_resource 节点
group_node_id = str(uuid.uuid4())
G.add_node(
group_node_id,
name="Resources Group",
type="Group",
parent_uuid="",
lab_node_type="Device",
template_name="",
resource_name="",
footer="",
minimized=True,
param=None,
)
# 为每个唯一的 slot 创建 create_resource 节点
res_index = 0 res_index = 0
for slot, info in slots_info.items(): for labware_id, item in labware_info.items():
# item_id = item.get("id") or item.get("name", f"item_{uuid.uuid4()}")
node_id = str(uuid.uuid4()) node_id = str(uuid.uuid4())
res_id = info["res_id"]
# 判断节点类型
if "Rack" in str(labware_id) or "Tip" in str(labware_id):
lab_node_type = "Labware"
description = f"Prepare Labware: {labware_id}"
liquid_type = []
liquid_volume = []
elif item.get("type") == "hardware" or "reactor" in str(labware_id).lower():
if "reactor" not in str(labware_id).lower():
continue
lab_node_type = "Sample"
description = f"Prepare Reactor: {labware_id}"
liquid_type = []
liquid_volume = []
else:
lab_node_type = "Reagent"
description = f"Add Reagent to Flask: {labware_id}"
liquid_type = [labware_id]
liquid_volume = [1e5]
res_index += 1 res_index += 1
G.add_node( G.add_node(
node_id, node_id,
template_name="create_resource", template_name="create_resource",
resource_name="host_node", resource_name="host_node",
name=f"Plate {res_index}", name=f"Res {res_index}",
description=f"Create plate on slot {slot}", description=description,
lab_node_type="Labware", lab_node_type=lab_node_type,
footer="create_resource-host_node", footer="create_resource-host_node",
device_name=DEVICE_NAME_HOST,
type=NODE_TYPE_DEFAULT,
parent_uuid=group_node_id, # 指向 Group 节点
minimized=True, # 折叠显示
param={ param={
"res_id": res_id, "res_id": labware_id,
"device_id": CREATE_RESOURCE_DEFAULTS["device_id"], "device_id": WORKSTATION_ID,
"class_name": CREATE_RESOURCE_DEFAULTS["class_name"], "class_name": "container",
"parent": CREATE_RESOURCE_DEFAULTS["parent_template"].format(slot=slot), "parent": WORKSTATION_ID,
"bind_locations": {"x": 0.0, "y": 0.0, "z": 0.0}, "bind_locations": {"x": 0.0, "y": 0.0, "z": 0.0},
"slot_on_deck": slot, "liquid_input_slot": [-1],
"liquid_type": liquid_type,
"liquid_volume": liquid_volume,
"slot_on_deck": "",
}, },
) )
slot_to_create_resource[slot] = node_id resource_last_writer[labware_id] = f"{node_id}:labware"
# create_resource 之间不需要 ready 连接
# ==================== 第二步:为每个 reagent 创建 set_liquid_from_plate 节点 ====================
# 创建 Group 节点,包含所有 set_liquid_from_plate 节点
set_liquid_group_id = str(uuid.uuid4())
G.add_node(
set_liquid_group_id,
name="SetLiquid Group",
type="Group",
parent_uuid="",
lab_node_type="Device",
template_name="",
resource_name="",
footer="",
minimized=True,
param=None,
)
set_liquid_index = 0
for labware_id, item in labware_info.items():
# 跳过 Tip/Rack 类型
if "Rack" in str(labware_id) or "Tip" in str(labware_id):
continue
if item.get("type") == "hardware":
continue
slot = str(item.get("slot", ""))
wells = item.get("well", [])
if not wells or not slot:
continue
# res_id 不能有空格
res_id = str(labware_id).replace(" ", "_")
well_count = len(wells)
node_id = str(uuid.uuid4())
set_liquid_index += 1
G.add_node(
node_id,
template_name="set_liquid_from_plate",
resource_name="liquid_handler.prcxi",
name=f"SetLiquid {set_liquid_index}",
description=f"Set liquid: {labware_id}",
lab_node_type="Reagent",
footer="set_liquid_from_plate-liquid_handler.prcxi",
device_name=DEVICE_NAME_DEFAULT,
type=NODE_TYPE_DEFAULT,
parent_uuid=set_liquid_group_id, # 指向 Group 节点
minimized=True, # 折叠显示
param={
"plate": [], # 通过连接传递
"well_names": wells, # 孔位名数组,如 ["A1", "A3", "A5"]
"liquid_names": [res_id] * well_count,
"volumes": [DEFAULT_LIQUID_VOLUME] * well_count,
},
)
# set_liquid_from_plate 之间不需要 ready 连接
# 物料流create_resource 的 labware -> set_liquid_from_plate 的 input_plate
create_res_node_id = slot_to_create_resource.get(slot)
if create_res_node_id:
G.add_edge(create_res_node_id, node_id, source_port="labware", target_port="input_plate")
# set_liquid_from_plate 的输出 output_wells 用于连接 transfer_liquid
resource_last_writer[labware_id] = f"{node_id}:output_wells"
# transfer_liquid 之间通过 ready 串联,从 None 开始
last_control_node_id = None last_control_node_id = None
# 端口名称映射JSON 字段名 -> 实际 handle key
INPUT_PORT_MAPPING = {
"sources": "sources_identifier",
"targets": "targets_identifier",
"vessel": "vessel",
"to_vessel": "to_vessel",
"from_vessel": "from_vessel",
"reagent": "reagent",
"solvent": "solvent",
"compound": "compound",
}
OUTPUT_PORT_MAPPING = {
"sources": "sources_out", # 输出端口是 xxx_out
"targets": "targets_out", # 输出端口是 xxx_out
"vessel": "vessel_out",
"to_vessel": "to_vessel_out",
"from_vessel": "from_vessel_out",
"filtrate_vessel": "filtrate_out",
"reagent": "reagent",
"solvent": "solvent",
"compound": "compound",
}
# 需要根据 wells 数量扩展的参数列表(复数形式)
EXPAND_BY_WELLS_PARAMS = ["asp_vols", "dis_vols", "asp_flow_rates", "dis_flow_rates"]
# 处理协议步骤 # 处理协议步骤
for step in protocol_steps: for step in protocol_steps:
node_id = str(uuid.uuid4()) node_id = str(uuid.uuid4())
params = step.get("param", {}).copy() # 复制一份,避免修改原数据 G.add_node(node_id, **step)
connected_params = set() # 记录被连接的参数
warnings = [] # 收集警告信息
# 参数重命名:单数 -> 复数
for old_name, new_name in PARAM_RENAME_MAPPING.items():
if old_name in params:
params[new_name] = params.pop(old_name)
# 处理输入连接
for param_key, target_port in INPUT_PORT_MAPPING.items():
resource_name = params.get(param_key)
if resource_name and resource_name in resource_last_writer:
source_node, source_port = resource_last_writer[resource_name].split(":")
G.add_edge(source_node, node_id, source_port=source_port, target_port=target_port)
connected_params.add(param_key)
elif resource_name and resource_name not in resource_last_writer:
# 资源名在 labware_info 中不存在
warnings.append(f"{param_key}={resource_name} 未找到")
# 获取 targets 对应的 wells 数量,用于扩展参数
targets_name = params.get("targets")
sources_name = params.get("sources")
targets_wells_count = 1
sources_wells_count = 1
if targets_name and targets_name in labware_info:
target_wells = labware_info[targets_name].get("well", [])
targets_wells_count = len(target_wells) if target_wells else 1
elif targets_name:
warnings.append(f"targets={targets_name} 未在 reagent 中定义")
if sources_name and sources_name in labware_info:
source_wells = labware_info[sources_name].get("well", [])
sources_wells_count = len(source_wells) if source_wells else 1
elif sources_name:
warnings.append(f"sources={sources_name} 未在 reagent 中定义")
# 检查 sources 和 targets 的 wells 数量是否匹配
if targets_wells_count != sources_wells_count and targets_name and sources_name:
warnings.append(f"wells 数量不匹配: sources={sources_wells_count}, targets={targets_wells_count}")
# 使用 targets 的 wells 数量来扩展参数
wells_count = targets_wells_count
# 扩展单值参数为数组(根据 targets 的 wells 数量)
for expand_param in EXPAND_BY_WELLS_PARAMS:
if expand_param in params:
value = params[expand_param]
# 如果是单个值,扩展为数组
if not isinstance(value, list):
params[expand_param] = [value] * wells_count
# 如果已经是数组但长度不对,记录警告
elif len(value) != wells_count:
warnings.append(f"{expand_param} 数量({len(value)})与 wells({wells_count})不匹配")
# 如果 sources/targets 已通过连接传递,将参数值改为空数组
for param_key in connected_params:
if param_key in params:
params[param_key] = []
# 更新 step 的 param、footer、device_name 和 type
step_copy = step.copy()
step_copy["param"] = params
step_copy["device_name"] = DEVICE_NAME_DEFAULT # 动作节点使用默认设备名
step_copy["type"] = NODE_TYPE_DEFAULT # 节点类型
# 如果有警告,修改 footer 添加警告标记(警告放前面)
if warnings:
original_footer = step.get("footer", "")
step_copy["footer"] = f"[WARN: {'; '.join(warnings)}] {original_footer}"
G.add_node(node_id, **step_copy)
# 控制流 # 控制流
if last_control_node_id is not None: if last_control_node_id is not None:
G.add_edge(last_control_node_id, node_id, source_port="ready", target_port="ready") G.add_edge(last_control_node_id, node_id, source_port="ready", target_port="ready")
last_control_node_id = node_id last_control_node_id = node_id
# 处理输出:更新 resource_last_writer # 物料流
for param_key, output_port in OUTPUT_PORT_MAPPING.items(): params = step.get("param", {})
resource_name = step.get("param", {}).get(param_key) # 使用原始参数值 input_resources_possible_names = [
"vessel",
"to_vessel",
"from_vessel",
"reagent",
"solvent",
"compound",
"sources",
"targets",
]
for target_port in input_resources_possible_names:
resource_name = params.get(target_port)
if resource_name and resource_name in resource_last_writer:
source_node, source_port = resource_last_writer[resource_name].split(":")
G.add_edge(source_node, node_id, source_port=source_port, target_port=target_port)
output_resources = {
"vessel_out": params.get("vessel"),
"from_vessel_out": params.get("from_vessel"),
"to_vessel_out": params.get("to_vessel"),
"filtrate_out": params.get("filtrate_vessel"),
"reagent": params.get("reagent"),
"solvent": params.get("solvent"),
"compound": params.get("compound"),
"sources_out": params.get("sources"),
"targets_out": params.get("targets"),
}
for source_port, resource_name in output_resources.items():
if resource_name: if resource_name:
resource_last_writer[resource_name] = f"{node_id}:{output_port}" resource_last_writer[resource_name] = f"{node_id}:{source_port}"
return G return G

Some files were not shown because too many files have changed in this diff Show More