From 61140b3795bb6f6ba1f752af569629e35c074302 Mon Sep 17 00:00:00 2001
From: Jeff Fifield
Date: Tue, 22 Oct 2024 14:30:23 -0600
Subject: [PATCH] test

---
 .github/workflows/buildAndTestRyzenAI.yml   | 190 +++++++++++------
 .github/workflows/buildAndTestRyzenAISw.yml |  11 +-
 .github/workflows/buildRyzenWheels.yml      | 222 ++++++++++++--------
 utils/mlir_aie_wheels/setup.py              |   2 +-
 4 files changed, 265 insertions(+), 160 deletions(-)

diff --git a/.github/workflows/buildAndTestRyzenAI.yml b/.github/workflows/buildAndTestRyzenAI.yml
index 8fd09812de..57293830ec 100644
--- a/.github/workflows/buildAndTestRyzenAI.yml
+++ b/.github/workflows/buildAndTestRyzenAI.yml
@@ -4,25 +4,10 @@ on:
   push:
     branches:
       - main
-      - test-ryzen-ai
+#      - test-ryzen-ai
   pull_request:
   merge_group:
-
-  # Allows you to run this workflow manually from the Actions tab by
-  # selecting CI and then "Run workflow" menu on the right branch
-  # and clicking on "launch_tmate_terminal_for_debug".
-  # Unfortunately this works only for the default branch.
-  # So you can either
-  # - change the default branch of the PR on the GitHub repository owning the PR
-  #   and launching in Actions tab;
-  # - or edit directly the step below which runs tmate and push to the
-  #   PR, ignoring the manual workflow launch.
   workflow_dispatch:
-    launch_tmate_terminal_for_debug:
-      type: boolean
-      description: 'Run the build with tmate debugging enabled (https://github.com/marketplace/actions/debugging-with-tmate)'
-      required: false
-      default: false

 defaults:
   run:
@@ -39,7 +24,7 @@ env:
   DEBIAN_FRONTEND: noninteractive
   XILINXD_LICENSE_FILE: /opt/xilinx/Xilinx.lic
   VITIS: /opt/ryzen_ai-1.3.0/vitis_aie_essentials
-  CMAKE_ARGS: |
+  STATIC_CMAKE_ARGS: |
     -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \
     -DCMAKE_C_COMPILER_LAUNCHER=ccache \
     -DCMAKE_EXE_LINKER_FLAGS_INIT="-fuse-ld=lld" \
     -DCMAKE_MODULE_LINKER_FLAGS_INIT="-fuse-ld=lld" \
     -DCMAKE_SHARED_LINKER_FLAGS_INIT="-fuse-ld=lld" \
@@ -54,83 +39,160 @@ env:
   LIT_OPTS: -sv --time-tests -j12 --timeout 600 --show-unsupported --show-excluded

 jobs:
-  build-tests:
-    name: Run Tests on Ryzen AI
+  build-mlir-aie:
+    name: Build for Ryzen AI
+    runs-on: ubuntu-latest
+    steps:
+      - name: Free disk space
+        uses: descriptinc/free-disk-space@main
+        with:
+          tool-cache: true
+          android: true
+          dotnet: true
+          haskell: true
+          large-packages: true
+          swap-storage: false
+
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+
+      - uses: uraimo/run-on-arch-action@v2.5.0
+        name: Build mlir-aie
+        id: runcmd
+        with:
+          distro: none
+          arch: none
+          base_image: ghcr.io/xilinx/mlir-aie/ubuntu22-ryzenai-1.3.0ea:1.1
+          githubToken: ${{ github.token }}
+          dockerRunArgs: |
+            --mac-address 02:42:ac:11:00:02
+          env: |
+            VITIS: ${{ env.VITIS }}
+            XILINXD_LICENSE_FILE: ${{ env.XILINXD_LICENSE_FILE }}
+          run: |
+            python -m venv ${{ github.workspace }}/aie-venv
+            source ${{ github.workspace }}/aie-venv/bin/activate
+
+            echo "Installing vitis_aie_essentials ..."
+            pushd /opt
+            tar xfz /workspace/vaie.tgz
+            popd
+
+            echo "Installing llvm-aie ..."
+            pip -q download llvm-aie -f https://github.com/Xilinx/llvm-aie/releases/expanded_assets/nightly
+            unzip -q llvm_aie*.whl
+            rm -rf llvm_aie*.whl
+            export PEANO_INSTALL_DIR=`realpath llvm-aie`
+
+            VERSION=$(utils/clone-llvm.sh --get-wheel-version)
+            echo "Installing mlir version $VERSION ..."
+            pip -q download mlir==$VERSION \
+              -f https://github.com/Xilinx/mlir-aie/releases/expanded_assets/mlir-distro
+            unzip -q mlir-*.whl
+            rm mlir-*.whl
+            find mlir -exec touch -a -m -t 201108231405.14 {} \;
+
+            pip install -r python/requirements.txt
+            pip install -r python/requirements_ml.txt
+            HOST_MLIR_PYTHON_PACKAGE_PREFIX=aie pip install -r python/requirements_extras.txt
+
+            mkdir build
+            pushd build
+
+            export PATH=$VITIS/bin:$VITIS/aietools/bin:$PATH
+            cmake ${{ github.workspace }} -G Ninja \
+              -DPython3_EXECUTABLE=$(which python) \
+              -DLLVM_EXTERNAL_LIT=$(which lit) \
+              -DCMAKE_BUILD_TYPE=Release \
+              -DCMAKE_INSTALL_PREFIX=${{ github.workspace }}/build/install \
+              -DCMAKE_MODULE_PATH=${{ github.workspace }}/cmake/modulesXilinx \
+              -DMLIR_DIR=${{ github.workspace }}/mlir/lib/cmake/mlir \
+              -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \
+              -DCMAKE_C_COMPILER_LAUNCHER=ccache \
+              -DCMAKE_EXE_LINKER_FLAGS_INIT="-fuse-ld=lld" \
+              -DCMAKE_MODULE_LINKER_FLAGS_INIT="-fuse-ld=lld" \
+              -DCMAKE_SHARED_LINKER_FLAGS_INIT="-fuse-ld=lld" \
+              -DXRT_ROOT=/opt/xilinx/xrt \
+              -DPEANO_INSTALL_DIR=$PEANO_INSTALL_DIR \
+              -DAIE_ENABLE_PYTHON_PASSES=OFF \
+              -DAIE_ENABLE_XRT_PYTHON_BINDINGS=ON \
+              -DAIE_INCLUDE_INTEGRATION_TESTS=OFF \
+              -DAIE_VITIS_COMPONENTS=AIE2
+
+            ninja install
+            chown -R github.github *
+            popd
+            tar cf build.tar build
+
+      - name: Upload mlir_aie
+        uses: actions/upload-artifact@v4
+        with:
+          name: mlir_aie
+          path: build.tar
+
+  test-mlir-aie:
+    name: Test on Ryzen AI
     runs-on: amd7940hs
+    needs: build-mlir-aie
+    strategy:
+      fail-fast: false
+      matrix:
+        build_type: [ llvm-aie, xchesscc ]
     steps:
+
       - uses: actions/checkout@v4
         with:
           submodules: "true"

-      # Launch an ssh session via a proxy server if there is a need
-      # for debug. This seems to live for 35 min max
-      # https://github.com/mxschmitt/action-tmate
-      - name: Setup tmate session
-        uses: mxschmitt/action-tmate@v3
-        # To run this, launch it manually on the default branch and
-        # click on "launch_tmate_terminal_for_debug"
-        if: github.event_name == 'workflow_dispatch'
-          && inputs.launch_tmate_terminal_for_debug
-
-      - name: Run commands
+      - uses: actions/download-artifact@v4
+        with:
+          name: mlir_aie
+          path: .
+
+      - name: Run mlir-aie tests
         run: |
+          export PATH=$VITIS/bin:$VITIS/aietools/bin:$PATH
+          export PEANO_INSTALL_DIR=
+          if [ "${{ matrix.build_type }}" == "llvm-aie" ]; then
+            echo "Installing peano ..."
+            pip -q download llvm-aie -f https://github.com/Xilinx/llvm-aie/releases/expanded_assets/nightly
+            unzip -q llvm_aie*.whl
+            rm -rf llvm_aie*.whl
+            export PEANO_INSTALL_DIR=`realpath llvm-aie`
+          fi

-          pip cache purge
-          source /opt/xilinx/xrt/setup.sh
           python -m venv aie-venv
           source aie-venv/bin/activate
+
           pip install -r python/requirements.txt
-          HOST_MLIR_PYTHON_PACKAGE_PREFIX=aie pip install -r python/requirements_extras.txt
           pip install -r python/requirements_ml.txt
-          pip install jupyter
-          sed -i.bak 's/OUTPUT_TIMEOUT = 10/OUTPUT_TIMEOUT = 100/g' \
-            $(python -c 'import site; print(site.getsitepackages()[0])')/jupyter_client/runapp.py
+          HOST_MLIR_PYTHON_PACKAGE_PREFIX=aie pip install -r python/requirements_extras.txt

           VERSION=$(utils/clone-llvm.sh --get-wheel-version)
           pip -q download mlir==$VERSION \
             -f https://github.com/Xilinx/mlir-aie/releases/expanded_assets/mlir-distro
           unzip -q mlir-*.whl
+          rm mlir-*.whl
           # I have no clue why but the system clock on GHA containers is like 12 hours ahead.
           # That means wheels have file with time stamps in the future which makes ninja loop
           # forever when configuring. Set the time to some arbitrary stamp in the past just to be safe.
           find mlir -exec touch -a -m -t 201108231405.14 {} \;

-          mkdir build
-          pushd build
-
-          export PATH=$VITIS/bin:$VITIS/aietools/bin:$PATH
-          cmake .. -G Ninja \
-            -DPython3_EXECUTABLE=$(which python) \
-            -DLLVM_EXTERNAL_LIT=$(which lit) \
-            -DCMAKE_INSTALL_PREFIX=$PWD/../mlir_aie \
-            -DCMAKE_MODULE_PATH=$PWD/../cmake/modulesXilinx \
-            -DMLIR_DIR=$PWD/../mlir/lib/cmake/mlir \
-            $CMAKE_ARGS
-
-          ninja install
-          ninja check-aie
-
-          popd
+          tar xf build.tar
+          rm build.tar
+          cd build
+          lit ${{ env.LIT_OPTS }} test programming_guide programming_examples

   build-quick-setup:
-    name: Run Examples on Ryzen AI
+    name: Test Examples on Ryzen AI
     runs-on: amd7940hs
     steps:
       - uses: actions/checkout@v4
         with:
           submodules: "true"

-      # Launch an ssh session via a proxy server if there is a need
-      # for debug. This seems to live for 35 min max
-      # https://github.com/mxschmitt/action-tmate
-      - name: Setup tmate session
-        uses: mxschmitt/action-tmate@v3
-        # To run this, launch it manually on the default branch and
-        # click on "launch_tmate_terminal_for_debug"
-        if: github.event_name == 'workflow_dispatch'
-          && inputs.launch_tmate_terminal_for_debug
-
-      - name: Run commands
+      - name: Build and Test mlir-aie
         run: |

           pip cache purge
diff --git a/.github/workflows/buildAndTestRyzenAISw.yml b/.github/workflows/buildAndTestRyzenAISw.yml
index aace4d8739..3ff2405e29 100644
--- a/.github/workflows/buildAndTestRyzenAISw.yml
+++ b/.github/workflows/buildAndTestRyzenAISw.yml
@@ -4,7 +4,7 @@ on:
   push:
     branches:
       - main
-      - ryzen-ai-sw-test
+      - test-ryzen-ai
 #  pull_request:
   workflow_dispatch:
     inputs:
@@ -38,7 +38,9 @@ jobs:
     name: Build and Test with Ryzen AI Software

     runs-on: ubuntu-latest
-
+    strategy:
+      matrix:
+        ubuntu_version: [ "ubuntu24" ]
     steps:

       - name: Free disk space
@@ -64,7 +66,7 @@ jobs:
         id: runcmd
         with:
           distro: none
-          base_image: ghcr.io/xilinx/mlir-aie/ubuntu22-ryzenai-1.3.0ea
+          base_image: ghcr.io/xilinx/mlir-aie/${{ matrix.ubuntu_version }}-ryzenai-1.3.0ea:1.0
           githubToken: ${{ github.token }}
           dockerRunArgs: |
             --mac-address ${{ secrets.XILINX_MAC }}
@@ -78,15 +80,16 @@ jobs:
             echo -n "${{ secrets.XILINX_LIC }}" | base64 --decode > /workspace/Xilinx.lic
             export XILINXD_LICENSE_FILE=/workspace/Xilinx.lic
-
             ccrypt -d -K ${{ secrets.RYZEN_AI_SW_KEY }} /workspace/ryzen_ai-1.3.0ea1.tgz.cpt
             tar xvf /workspace/ryzen_ai-1.3.0ea1.tgz
             pushd /workspace/ryzen_ai-1.3.0
+            sed -i 's/python -/python3.10 -/g' install_ryzen_ai_1_3.sh
             ./install_ryzen_ai_1_3.sh -a yes -p /workspace/venv -l
             popd

             export LD_LIBRARY_PATH=
             export PYTHONPATH=
             source /workspace/venv/bin/activate
+            export LD_LIBRARY_PATH=
             source /opt/xilinx/xrt/setup.sh

             popd
diff --git a/.github/workflows/buildRyzenWheels.yml b/.github/workflows/buildRyzenWheels.yml
index fe47854e3b..2512d41150 100644
--- a/.github/workflows/buildRyzenWheels.yml
+++ b/.github/workflows/buildRyzenWheels.yml
@@ -23,65 +23,93 @@ concurrency:
 env:
   DEBIAN_FRONTEND: noninteractive
   XILINXD_LICENSE_FILE: /opt/xilinx/Xilinx.lic
+  VITIS: /opt/ryzen_ai-1.3.0/vitis_aie_essentials

 jobs:
   build-repo:
     name: Build and upload mlir_aie wheels

-    runs-on: amd7940hs
+    runs-on: ubuntu-latest

     permissions:
       id-token: write
       contents: write
+      packages: read

     steps:
+      - name: Free disk space
+        uses: descriptinc/free-disk-space@main
+        with:
+          tool-cache: true
+          android: true
+          dotnet: true
+          haskell: true
+          large-packages: true
+          swap-storage: false

       - uses: actions/checkout@v4
         with:
           submodules: "true"

-      - uses: actions/setup-python@v5
+      - uses: uraimo/run-on-arch-action@v2.5.0
+        name: Build mlir-aie
+        id: runcmd
         with:
-          python-version: '3.10'
-
-      - name: Build mlir-aie distro
-        run: |
-
-          pip cache purge
-
-          python -m venv aie-venv
-          source aie-venv/bin/activate
-          pip install -r python/requirements.txt
-          HOST_MLIR_PYTHON_PACKAGE_PREFIX=aie pip install -r python/requirements_extras.txt
-
-          VERSION=$(utils/clone-llvm.sh --get-wheel-version)
-          pip -q download mlir==$VERSION \
-            -f https://github.com/Xilinx/mlir-aie/releases/expanded_assets/mlir-distro
-          unzip -q mlir-*.whl
-          # I have no clue why but the system clock on GHA containers is like 12 hours ahead.
-          # That means wheels have file with time stamps in the future which makes ninja loop
-          # forever when configuring. Set the time to some arbitrary stamp in the past just to be safe.
-          find mlir -exec touch -a -m -t 201108231405.14 {} \;
-
-          export PATH=/opt/Xilinx/Vitis/2023.2/bin:/opt/Xilinx/Vitis/2023.2/aietools/bin:$PATH
-          export MLIR_INSTALL_ABS_PATH=$PWD/mlir
-          export MLIR_AIE_SOURCE_DIR=$PWD
-          export WHEELHOUSE_DIR=$PWD/wheelhouse
-          export CMAKE_MODULE_PATH=$PWD/cmake/modulesXilinx
-          export XRT_ROOT=/opt/xilinx/xrt
-          export AIE_PROJECT_COMMIT=$(git rev-parse --short HEAD)
-          export DATETIME=$(date +"%Y%m%d%H")
-
-          pushd utils/mlir_aie_wheels
-
-          pip install wheel auditwheel patchelf importlib_metadata
-          CIBW_ARCHS=x86_64 pip wheel . -v -w $WHEELHOUSE_DIR --no-build-isolation
-
-          popd
-
-          auditwheel repair -w $WHEELHOUSE_DIR/repaired_wheel $WHEELHOUSE_DIR/mlir_aie-*.whl --plat manylinux_2_35_x86_64 --exclude libcdo_driver.so --exclude libmlir_float16_utils.so
-          WHL_FN=$(ls $WHEELHOUSE_DIR/repaired_wheel/mlir_aie*whl)
-          mv "$WHL_FN" "`echo $WHL_FN | sed "s/cp310-cp310/py3-none/"`"
+          distro: none
+          arch: none
+          base_image: ghcr.io/xilinx/mlir-aie/ubuntu22-ryzenai-1.3.0ea:1.1
+          githubToken: ${{ github.token }}
+          dockerRunArgs: |
+            --mac-address 02:42:ac:11:00:02
+          env: |
+            VITIS: ${{ env.VITIS }}
+            XILINXD_LICENSE_FILE: ${{ env.XILINXD_LICENSE_FILE }}
+          run: |
+            git config --global --add safe.directory $PWD
+            MLIR_VERSION=$(git rev-parse --short HEAD)
+            echo "Building mlir-aie version $MLIR_VERSION"
+
+            python -m venv ${{ github.workspace }}/aie-venv
+            source ${{ github.workspace }}/aie-venv/bin/activate
+
+            echo "Installing vitis_aie_essentials ..."
+            pushd /opt
+            tar xfz /workspace/vaie.tgz
+            popd
+
+            pip install -r python/requirements.txt
+            pip install -r python/requirements_ml.txt
+            HOST_MLIR_PYTHON_PACKAGE_PREFIX=aie pip install -r python/requirements_extras.txt
+
+            VERSION=$(utils/clone-llvm.sh --get-wheel-version)
+            pip -q download mlir==$VERSION \
+              -f https://github.com/Xilinx/mlir-aie/releases/expanded_assets/mlir-distro
+            unzip -q mlir-*.whl
+            # I have no clue why but the system clock on GHA containers is like 12 hours ahead.
+            # That means wheels have file with time stamps in the future which makes ninja loop
+            # forever when configuring. Set the time to some arbitrary stamp in the past just to be safe.
+            find mlir -exec touch -a -m -t 201108231405.14 {} \;
+
+            export PATH=$VITIS/bin:$VITIS/aietools/bin:$PATH
+            export MLIR_INSTALL_ABS_PATH=$PWD/mlir
+            export MLIR_AIE_SOURCE_DIR=$PWD
+            export WHEELHOUSE_DIR=$PWD/wheelhouse
+            export CMAKE_MODULE_PATH=$PWD/cmake/modulesXilinx
+            export XRT_ROOT=/opt/xilinx/xrt
+            export AIE_PROJECT_COMMIT=$MLIR_VERSION
+            export AIE_VITIS_COMPONENTS=AIE2
+            export DATETIME=$(date +"%Y%m%d%H")
+
+            pushd utils/mlir_aie_wheels
+
+            pip install wheel auditwheel patchelf importlib_metadata
+            CIBW_ARCHS=x86_64 pip wheel . -v -w $WHEELHOUSE_DIR --no-build-isolation
+
+            popd
+
+            auditwheel repair -w $WHEELHOUSE_DIR/repaired_wheel $WHEELHOUSE_DIR/mlir_aie-*.whl --plat manylinux_2_35_x86_64 --exclude libcdo_driver.so --exclude libmlir_float16_utils.so
+            WHL_FN=$(ls $WHEELHOUSE_DIR/repaired_wheel/mlir_aie*whl)
+            mv "$WHL_FN" "`echo $WHL_FN | sed "s/cp310-cp310/py3-none/"`"

       - name: Upload mlir_aie
         uses: actions/upload-artifact@v3
@@ -104,14 +132,13 @@ jobs:

   build-wheel:
     name: Build wheel
-
-    runs-on: amd7940hs
-
+    runs-on: ubuntu-latest
     needs: build-repo

     permissions:
       id-token: write
       contents: write
+      packages: read

     strategy:
       fail-fast: false
@@ -127,10 +154,6 @@
           fetch-depth: 2
           submodules: "true"

-      - uses: actions/setup-python@v5
-        with:
-          python-version: ${{ matrix.python_version }}
-
      - uses: actions/download-artifact@v3
         with:
           # unpacks default artifact into dist/
           name: mlir_aie
           path: .

-      - name: Build mlir-aie python bindings
-        run: |
-
-          # faster to do this twice instead of upload the directory with ~4000 files in it...
-          VERSION=$(utils/clone-llvm.sh --get-wheel-version)
-          pip -q download mlir==$VERSION \
-            -f https://github.com/Xilinx/mlir-aie/releases/expanded_assets/mlir-distro
-          unzip -q mlir-*.whl
-          # I have no clue why but the system clock on GHA containers is like 12 hours ahead.
-          # That means wheels have file with time stamps in the future which makes ninja loop
-          # forever when configuring. Set the time to some arbitrary stamp in the past just to be safe.
-          find mlir -exec touch -a -m -t 201108231405.14 {} \;
-
-          unzip -q mlir_aie-*.whl
-          find mlir_aie -exec touch -a -m -t 201108231405.14 {} \;
-
-          python -m venv aie-venv
-          source aie-venv/bin/activate
-          pip install -r python/requirements.txt
-          HOST_MLIR_PYTHON_PACKAGE_PREFIX=aie pip install -r python/requirements_extras.txt
-          source aie-venv/bin/activate
-
-          export MLIR_INSTALL_ABS_PATH=$PWD/mlir
-          export MLIR_AIE_INSTALL_ABS_PATH=$PWD/mlir_aie
-          export WHEELHOUSE_DIR=$PWD/wheelhouse
-          export CMAKE_MODULE_PATH=$PWD/cmake/modulesXilinx
-          export PATH=/opt/Xilinx/Vitis/2023.2/bin:/opt/Xilinx/Vitis/2023.2/aietools/bin:$PATH
-          export XRT_ROOT=/opt/xilinx/xrt
-          export AIE_PROJECT_COMMIT=$(git rev-parse --short HEAD)
-          export DATETIME=$(date +"%Y%m%d%H")
-
-          cp python/requirements.txt utils/mlir_aie_wheels/python_bindings
-
-          pushd utils/mlir_aie_wheels/python_bindings
-
-          pip install wheel auditwheel patchelf
-          CIBW_ARCHS=x86_64 pip wheel . -v -w $WHEELHOUSE_DIR --no-build-isolation
-          DEBUG=1 CIBW_ARCHS=x86_64 pip wheel . -v -w $WHEELHOUSE_DIR --no-build-isolation
-
-          popd
-
-          auditwheel repair -w $WHEELHOUSE_DIR/repaired_wheel $WHEELHOUSE_DIR/aie_python_bindings*whl --plat manylinux_2_35_x86_64
+      - uses: uraimo/run-on-arch-action@v2.5.0
+        name: Build mlir-aie python bindings
+        id: runcmd
+        with:
+          distro: none
+          arch: none
+          base_image: ghcr.io/xilinx/mlir-aie/ubuntu22-ryzenai-1.3.0ea:1.1
+          githubToken: ${{ github.token }}
+          dockerRunArgs: |
+            --mac-address 02:42:ac:11:00:02
+          env: |
+            VITIS: ${{ env.VITIS }}
+            XILINXD_LICENSE_FILE: ${{ env.XILINXD_LICENSE_FILE }}
+          run: |
+            git config --global --add safe.directory $PWD
+            MLIR_VERSION=$(git rev-parse --short HEAD)
+            echo "Building mlir-aie version $MLIR_VERSION ..."
+
+            # faster to do this twice instead of upload the directory with ~4000 files in it...
+            VERSION=$(utils/clone-llvm.sh --get-wheel-version)
+            pip -q download mlir==$VERSION \
+              -f https://github.com/Xilinx/mlir-aie/releases/expanded_assets/mlir-distro
+            unzip -q mlir-*.whl
+            # I have no clue why but the system clock on GHA containers is like 12 hours ahead.
+            # That means wheels have file with time stamps in the future which makes ninja loop
+            # forever when configuring. Set the time to some arbitrary stamp in the past just to be safe.
+            find mlir -exec touch -a -m -t 201108231405.14 {} \;
+
+            unzip -q mlir_aie-*.whl
+            find mlir_aie -exec touch -a -m -t 201108231405.14 {} \;
+
+            python${{ matrix.python_version }} -m venv aie-venv
+            source aie-venv/bin/activate
+
+            pip install -r python/requirements.txt
+            HOST_MLIR_PYTHON_PACKAGE_PREFIX=aie pip install -r python/requirements_extras.txt
+            source aie-venv/bin/activate
+
+            export MLIR_INSTALL_ABS_PATH=$PWD/mlir
+            export MLIR_AIE_INSTALL_ABS_PATH=$PWD/mlir_aie
+            export WHEELHOUSE_DIR=$PWD/wheelhouse
+            export CMAKE_MODULE_PATH=$PWD/cmake/modulesXilinx
+
+            export PATH=$VITIS/bin:$VITIS/aietools/bin:$PATH
+            export XRT_ROOT=/opt/xilinx/xrt
+            export AIE_PROJECT_COMMIT=$MLIR_VERSION
+            export DATETIME=$(date +"%Y%m%d%H")
+
+            cp python/requirements.txt utils/mlir_aie_wheels/python_bindings
+
+            pushd utils/mlir_aie_wheels/python_bindings
+
+            pip install wheel auditwheel patchelf
+            CIBW_ARCHS=x86_64 pip wheel . -v -w $WHEELHOUSE_DIR --no-build-isolation
+            DEBUG=1 CIBW_ARCHS=x86_64 pip wheel . -v -w $WHEELHOUSE_DIR --no-build-isolation
+
+            popd
+
+            auditwheel repair -w $WHEELHOUSE_DIR/repaired_wheel $WHEELHOUSE_DIR/aie_python_bindings*whl --plat manylinux_2_35_x86_64

       - uses: geekyeggo/delete-artifact@v4
         if: github.event_name == 'pull_request'
diff --git a/utils/mlir_aie_wheels/setup.py b/utils/mlir_aie_wheels/setup.py
index c801f50951..60ae6df5d1 100644
--- a/utils/mlir_aie_wheels/setup.py
+++ b/utils/mlir_aie_wheels/setup.py
@@ -142,7 +142,7 @@ def build_extension(self, ext: CMakeExtension) -> None:
             "-DCMAKE_PLATFORM_NO_VERSIONED_SONAME=ON",
             "-DLLVM_CCACHE_BUILD=ON",
             f"-DLLVM_ENABLE_RTTI={os.getenv('ENABLE_RTTI', 'ON')}",
-            "-DAIE_VITIS_COMPONENTS=AIE2;AIE2P",
+            f"-DAIE_VITIS_COMPONENTS={os.getenv('AIE_VITIS_COMPONENTS', 'AIE2')}",
             "-DAIE_ENABLE_BINDINGS_PYTHON=ON",
             "-DAIE_ENABLE_PYTHON_PASSES=OFF",
             "-DMLIR_DETECT_PYTHON_ENV_PRIME_SEARCH=ON",
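
Note on the final hunk: utils/mlir_aie_wheels/setup.py now reads the Vitis component list from the environment instead of hard-coding "AIE2;AIE2P", falling back to "AIE2" when the variable is unset (the build-repo job above exports AIE_VITIS_COMPONENTS=AIE2 before invoking pip wheel). A minimal standalone Python sketch of that lookup pattern, for illustration only and not part of the patch:

    import os

    # Fall back to "AIE2" when AIE_VITIS_COMPONENTS is not exported,
    # mirroring the default used in the modified setup.py.
    components = os.getenv("AIE_VITIS_COMPONENTS", "AIE2")
    cmake_arg = f"-DAIE_VITIS_COMPONENTS={components}"
    print(cmake_arg)  # prints -DAIE_VITIS_COMPONENTS=AIE2 unless overridden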