Compare commits

..

1 Commits
main ... v1.7.0

Author SHA1 Message Date
gastoner 1e88d4ca8b chore: 🥁 tagging v1.7.0 🥳 2025-05-20 06:05:00 +00:00
71 changed files with 2680 additions and 3663 deletions

View File

@ -26,9 +26,14 @@ on:
- 'packages/backend/src/assets/ai.json'
workflow_dispatch:
inputs:
podman_desktop_repo_args:
default: 'REPO=podman-desktop,FORK=podman-desktop,BRANCH=main'
description: 'Podman Desktop repo fork and branch'
fork:
default: 'containers'
description: 'Podman Desktop repo fork'
type: string
required: true
branch:
default: 'main'
description: 'Podman Desktop repo branch'
type: string
required: true
ext_repo_options:
@ -67,26 +72,24 @@ on:
type: string
required: true
azure_vm_size:
default: ''
default: 'Standard_D8s_v4'
description: 'Azure VM size (Standard_E4as_v5 is cheapest, 4core AMD, 32GB RAM)'
type: choice
required: false
required: true
options:
- ''
- Standard_D8as_v5
- Standard_D8s_v4
- Standard_E8as_v5
- Standard_E4as_v5
mapt_params:
default: 'IMAGE=quay.io/redhat-developer/mapt,VERSION_TAG=v0.9.5,CPUS=4,MEMORY=32,EXCLUDED_REGIONS="westindia,centralindia,southindia,australiacentral,australiacentral2,australiaeast,australiasoutheast,southafricanorth,southafricawest"'
description: 'MAPT image, version tag, cpus and memory request, and excluded regions in format IMAGE=xxx,VERSION_TAG=xxx,CPUS=xxx,MEMORY=xxx,EXCLUDED_REGIONS=xxx'
required: true
type: string
jobs:
windows:
name: windows-${{ matrix.windows-version }}-${{ matrix.windows-featurepack }}
runs-on: ubuntu-latest
env:
MAPT_VERSION: v0.7.4
MAPT_IMAGE: quay.io/redhat-developer/mapt
MAPT_EXCLUDED_REGIONS: 'westindia,centralindia,southindia,australiacentral,australiacentral2,australiaeast,australiasoutheast,southafricanorth,southafricawest'
strategy:
fail-fast: false
matrix:
@ -105,10 +108,10 @@ jobs:
version=$(curl https://raw.githubusercontent.com/containers/podman-desktop/main/extensions/podman/packages/extension/src/podman5.json | jq -r '.version')
echo "Default Podman Version from Podman Desktop: ${version}"
echo "PD_PODMAN_VERSION=${version}" >> $GITHUB_ENV
- name: Set the default env. variables
env:
DEFAULT_PODMAN_DESKTOP_REPO_ARGS: 'REPO=podman-desktop,FORK=podman-desktop,BRANCH=main'
DEFAULT_FORK: 'containers'
DEFAULT_BRANCH: 'main'
DEFAULT_NPM_TARGET: 'test:e2e'
DEFAULT_ENV_VARS: 'TEST_PODMAN_MACHINE=true,ELECTRON_ENABLE_INSPECT=true'
DEFAULT_PODMAN_OPTIONS: 'INIT=1,START=1,ROOTFUL=1,NETWORKING=0'
@ -117,75 +120,44 @@ jobs:
DEFAULT_PODMAN_VERSION: "${{ env.PD_PODMAN_VERSION || '5.3.2' }}"
DEFAULT_URL: "https://github.com/containers/podman/releases/download/v$DEFAULT_PODMAN_VERSION/podman-$DEFAULT_PODMAN_VERSION-setup.exe"
DEFAULT_PDE2E_IMAGE_VERSION: 'v0.0.3-windows'
DEFAULT_MAPT_PARAMS: "IMAGE=${{ vars.MAPT_IMAGE || 'quay.io/redhat-developer/mapt' }},VERSION_TAG=${{ vars.MAPT_VERSION_TAG || 'v0.9.5' }},CPUS=${{ vars.MAPT_CPUS || '4' }},MEMORY=${{ vars.MAPT_MEMORY || '32' }},EXCLUDED_REGIONS=\"${{ vars.MAPT_EXCLUDED_REGIONS || 'westindia,centralindia,southindia,australiacentral,australiacentral2,australiaeast,australiasoutheast,southafricanorth,southafricawest' }}\""
DEFAULT_AZURE_VM_SIZE: 'Standard_D8s_v4'
run: |
echo "FORK=${{ github.event.inputs.fork || env.DEFAULT_FORK }}" >> $GITHUB_ENV
echo "BRANCH=${{ github.event.inputs.branch || env.DEFAULT_BRANCH }}" >> $GITHUB_ENV
echo "NPM_TARGET=${{ github.event.inputs.npm_target || env.DEFAULT_NPM_TARGET }}" >> $GITHUB_ENV
echo "ENV_VARS=${{ github.event.inputs.env_vars || env.DEFAULT_ENV_VARS }}" >> $GITHUB_ENV
echo "PODMAN_URL=${{ github.event.inputs.podman_remote_url || env.DEFAULT_URL }}" >> $GITHUB_ENV
echo "PDE2E_IMAGE_VERSION=${{ github.event.inputs.pde2e_image_version || env.DEFAULT_PDE2E_IMAGE_VERSION }}" >> $GITHUB_ENV
echo "${{ github.event.inputs.podman_desktop_repo_args || env.DEFAULT_PODMAN_DESKTOP_REPO_ARGS }}" | awk -F ',' \
'{for (i=1; i<=NF; i++) {split($i, kv, "="); print "PD_"kv[1]"="kv[2]}}' >> $GITHUB_ENV
echo "${{ github.event.inputs.ext_tests_options || env.DEFAULT_EXT_TESTS_OPTIONS }}" | awk -F ',' \
'{for (i=1; i<=NF; i++) {split($i, kv, "="); print kv[1]"="kv[2]}}' >> $GITHUB_ENV
echo "${{ github.event.inputs.podman_options || env.DEFAULT_PODMAN_OPTIONS }}" | awk -F ',' \
'{for (i=1; i<=NF; i++) {split($i, kv, "="); print "PODMAN_"kv[1]"="kv[2]}}' >> $GITHUB_ENV
echo "${{ github.event.inputs.ext_repo_options || env.DEFAULT_EXT_REPO_OPTIONS }}" | awk -F ',' \
'{for (i=1; i<=NF; i++) {split($i, kv, "="); print "EXT_"kv[1]"="kv[2]}}' >> $GITHUB_ENV
echo "MAPT_VM_SIZE=${{ github.event.inputs.azure_vm_size || '' }}" >> $GITHUB_ENV
echo "${{ github.event.inputs.mapt_params || env.DEFAULT_MAPT_PARAMS }}" | awk -F ',' \
'{for (i=1; i<=NF; i++) {split($i, kv, "="); print "MAPT_"kv[1]"="kv[2]}}' >> $GITHUB_ENV
echo "AZURE_VM_SIZE=${{ github.event.inputs.azure_vm_size || env.DEFAULT_AZURE_VM_SIZE }}" >> $GITHUB_ENV
- name: Create instance
run: |
# Create instance
if [ -z "${{ env.MAPT_VM_SIZE }}" ]; then
echo "MAPT_VM_SIZE is not set, using resources approach"
podman run -d --name windows-create --rm \
-v ${PWD}:/workspace:z \
-e ARM_TENANT_ID=${{ secrets.ARM_TENANT_ID }} \
-e ARM_SUBSCRIPTION_ID=${{ secrets.ARM_SUBSCRIPTION_ID }} \
-e ARM_CLIENT_ID=${{ secrets.ARM_CLIENT_ID }} \
-e ARM_CLIENT_SECRET='${{ secrets.ARM_CLIENT_SECRET }}' \
--user 0 \
${{ env.MAPT_IMAGE }}:${{ env.MAPT_VERSION_TAG }} azure \
windows create \
--project-name 'windows-desktop' \
--backed-url 'file:///workspace' \
--conn-details-output '/workspace' \
--windows-version '${{ matrix.windows-version }}' \
--windows-featurepack '${{ matrix.windows-featurepack }}' \
--cpus ${{ env.MAPT_CPUS }} \
--memory ${{env.MAPT_MEMORY}} \
--nested-virt \
--tags project=podman-desktop \
--spot-excluded-regions ${{ env.MAPT_EXCLUDED_REGIONS }} \
--spot
# Check logs
podman logs -f windows-create
else
echo "MAPT_VM_SIZE is set to '${{ env.MAPT_VM_SIZE }}', using size approach"
# Create instance with VM size
podman run -d --name windows-create --rm \
-v ${PWD}:/workspace:z \
-e ARM_TENANT_ID=${{ secrets.ARM_TENANT_ID }} \
-e ARM_SUBSCRIPTION_ID=${{ secrets.ARM_SUBSCRIPTION_ID }} \
-e ARM_CLIENT_ID=${{ secrets.ARM_CLIENT_ID }} \
-e ARM_CLIENT_SECRET='${{ secrets.ARM_CLIENT_SECRET }}' \
--user 0 \
${{ env.MAPT_IMAGE }}:${{ env.MAPT_VERSION_TAG }} azure \
windows create \
--project-name 'windows-desktop' \
--backed-url 'file:///workspace' \
--conn-details-output '/workspace' \
--windows-version '${{ matrix.windows-version }}' \
--windows-featurepack '${{ matrix.windows-featurepack }}' \
--vmsize '${{ env.MAPT_VM_SIZE }}' \
--tags project=podman-desktop \
--spot-excluded-regions ${{ env.MAPT_EXCLUDED_REGIONS }} \
--spot
# Check logs
podman logs -f windows-create
fi
podman run -d --name windows-create --rm \
-v ${PWD}:/workspace:z \
-e ARM_TENANT_ID=${{ secrets.ARM_TENANT_ID }} \
-e ARM_SUBSCRIPTION_ID=${{ secrets.ARM_SUBSCRIPTION_ID }} \
-e ARM_CLIENT_ID=${{ secrets.ARM_CLIENT_ID }} \
-e ARM_CLIENT_SECRET='${{ secrets.ARM_CLIENT_SECRET }}' \
${{ env.MAPT_IMAGE }}:${{ env.MAPT_VERSION }} azure \
windows create \
--project-name 'windows-desktop' \
--backed-url 'file:///workspace' \
--conn-details-output '/workspace' \
--windows-version '${{ matrix.windows-version }}' \
--windows-featurepack '${{ matrix.windows-featurepack }}' \
--vmsize '${{ env.AZURE_VM_SIZE }}' \
--tags project=podman-desktop \
--spot-excluded-regions ${{ env.MAPT_EXCLUDED_REGIONS }} \
--spot
# Check logs
podman logs -f windows-create
- name: Check instance system info
run: |
@ -249,8 +221,8 @@ jobs:
pd-e2e/builder.ps1 \
-targetFolder pd-e2e \
-resultsFolder results \
-fork ${{ env.PD_FORK }} \
-branch ${{ env.PD_BRANCH }} \
-fork ${{ env.FORK }} \
-branch ${{ env.BRANCH }} \
-envVars ${{ env.ENV_VARS }}
# check logs
podman logs -f pde2e-builder-run
@ -272,8 +244,8 @@ jobs:
-resultsFolder results \
-podmanPath $(cat results/podman-location.log) \
-pdPath "$(cat results/pde2e-binary-path.log | tr '\n' " ")" \
-fork ${{ env.PD_FORK }} \
-branch ${{ env.PD_BRANCH }} \
-fork ${{ env.FORK }} \
-branch ${{ env.BRANCH }} \
-extRepo ${{ env.EXT_REPO }} \
-extFork ${{ env.EXT_FORK }} \
-extBranch ${{ env.EXT_BRANCH }} \
@ -298,8 +270,7 @@ jobs:
-e ARM_SUBSCRIPTION_ID=${{ secrets.ARM_SUBSCRIPTION_ID }} \
-e ARM_CLIENT_ID=${{ secrets.ARM_CLIENT_ID }} \
-e ARM_CLIENT_SECRET='${{ secrets.ARM_CLIENT_SECRET }}' \
--user 0 \
${{ env.MAPT_IMAGE }}:${{ env.MAPT_VERSION_TAG }} azure \
${{ env.MAPT_IMAGE }}:${{ env.MAPT_VERSION }} azure \
windows destroy \
--project-name 'windows-desktop' \
--backed-url 'file:///workspace'

View File

@ -26,7 +26,7 @@ jobs:
build:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
- uses: pnpm/action-setup@v4
name: Install pnpm

View File

@ -15,7 +15,7 @@ jobs:
# Steps represent a sequence of tasks that will be executed as part of the job
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
# Runs a single command using the runners shell
- name: Compute model size
run: ./tools/compute-model-sizes.sh

View File

@ -46,20 +46,20 @@ jobs:
name: Run E2E tests ${{ github.event_name == 'schedule' && '[nightly]' || '' }}
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
if: github.event_name == 'workflow_dispatch'
with:
repository: ${{ github.event.inputs.organization }}/${{ github.event.inputs.repositoryName }}
ref: ${{ github.event.inputs.branch }}
path: ${{ github.event.inputs.repositoryName }}
- uses: actions/checkout@v5
- uses: actions/checkout@v4
if: github.event_name == 'push' || github.event_name == 'schedule'
with:
path: podman-desktop-extension-ai-lab
# Checkout podman desktop
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
repository: containers/podman-desktop
ref: main
@ -81,18 +81,15 @@ jobs:
- name: Update podman
run: |
echo "ubuntu version from kubic repository to install podman we need (v5)"
ubuntu_version='23.10'
echo "Add unstable kubic repo into list of available sources and get the repo key"
# ubuntu version from kubic repository to install podman we need (v5)
ubuntu_version='23.04'
sudo sh -c "echo 'deb https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/unstable/xUbuntu_${ubuntu_version}/ /' > /etc/apt/sources.list.d/devel:kubic:libcontainers:unstable.list"
curl -L "https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/unstable/xUbuntu_${ubuntu_version}/Release.key" | sudo apt-key add -
echo "Updating database of packages..."
sudo apt-get update -qq
echo "install necessary dependencies for criu package which is not part of ${ubuntu_version}"
# install necessary dependencies for criu package which is not part of 23.04
sudo apt-get install -qq libprotobuf32t64 python3-protobuf libnet1
echo "install criu manually from static location"
curl -sLO http://archive.ubuntu.com/ubuntu/pool/universe/c/criu/criu_3.16.1-2_amd64.deb && sudo dpkg -i criu_3.16.1-2_amd64.deb
echo "installing/update podman package..."
# install criu manually from static location
curl -sLO http://cz.archive.ubuntu.com/ubuntu/pool/universe/c/criu/criu_3.16.1-2_amd64.deb && sudo dpkg -i criu_3.16.1-2_amd64.deb
sudo apt-get update -qq
sudo apt-get -qq -y install podman || { echo "Start fallback steps for podman nightly installation from a static mirror" && \
sudo sh -c "echo 'deb http://ftp.lysator.liu.se/pub/opensuse/repositories/devel:/kubic:/libcontainers:/unstable/xUbuntu_${ubuntu_version}/ /' > /etc/apt/sources.list.d/devel:kubic:libcontainers:unstable.list" && \
curl -L "http://ftp.lysator.liu.se/pub/opensuse/repositories/devel:/kubic:/libcontainers:/unstable/xUbuntu_${ubuntu_version}/Release.key" | sudo apt-key add - && \

View File

@ -1,53 +0,0 @@
#
# Copyright (C) 2025 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
name: llama-stack-playground
on:
workflow_dispatch:
inputs:
version:
description: 'llama-stack tag to use (e.g. main, v0.2.8,...)'
type: string
required: true
jobs:
publish:
name: publish
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
with:
repository: meta-llama/llama-stack
ref: ${{ github.event.inputs.version }}
- name: Install qemu dependency
run: |
sudo apt-get update
sudo apt-get install -y qemu-user-static
- name: Build manifest and images
run: |
podman manifest create quay.io/podman-ai-lab/llama-stack-playground:${{ github.event.inputs.version }}
podman build --platform linux/amd64,linux/arm64 llama_stack/distribution/ui --manifest quay.io/podman-ai-lab/llama-stack-playground:${{ github.event.inputs.version }}
- name: Login to quay.io
run: podman login quay.io --username ${{ secrets.QUAY_USERNAME }} --password ${{ secrets.QUAY_PASSWORD }}
- name: Push manifest and images to quay.io
run: podman manifest push quay.io/podman-ai-lab/llama-stack-playground:${{ github.event.inputs.version }}

View File

@ -29,7 +29,7 @@ jobs:
matrix:
os: [windows-2022, ubuntu-22.04, macos-14]
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
- uses: pnpm/action-setup@v4
name: Install pnpm
@ -74,7 +74,7 @@ jobs:
env:
SKIP_INSTALLATION: true
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
path: podman-desktop-extension-ai-lab
# Set up pnpm
@ -88,7 +88,7 @@ jobs:
with:
node-version: 22
# Checkout podman desktop
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
repository: containers/podman-desktop
ref: main
@ -96,18 +96,15 @@ jobs:
- name: Update podman
run: |
echo "ubuntu version from kubic repository to install podman we need (v5)"
ubuntu_version='23.10'
echo "Add unstable kubic repo into list of available sources and get the repo key"
# ubuntu version from kubic repository to install podman we need (v5)
ubuntu_version='23.04'
sudo sh -c "echo 'deb https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/unstable/xUbuntu_${ubuntu_version}/ /' > /etc/apt/sources.list.d/devel:kubic:libcontainers:unstable.list"
curl -L "https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/unstable/xUbuntu_${ubuntu_version}/Release.key" | sudo apt-key add -
echo "Updating database of packages..."
sudo apt-get update -qq
echo "install necessary dependencies for criu package which is not part of ${ubuntu_version}"
# install necessary dependencies for criu package which is not part of 23.04
sudo apt-get install -qq libprotobuf32t64 python3-protobuf libnet1
echo "install criu manually from static location"
curl -sLO http://archive.ubuntu.com/ubuntu/pool/universe/c/criu/criu_3.16.1-2_amd64.deb && sudo dpkg -i criu_3.16.1-2_amd64.deb
echo "installing/update podman package..."
# install criu manually from static location
curl -sLO http://cz.archive.ubuntu.com/ubuntu/pool/universe/c/criu/criu_3.16.1-2_amd64.deb && sudo dpkg -i criu_3.16.1-2_amd64.deb
sudo apt-get update -qq
sudo apt-get -qq -y install podman || { echo "Start fallback steps for podman nightly installation from a static mirror" && \
sudo sh -c "echo 'deb http://ftp.lysator.liu.se/pub/opensuse/repositories/devel:/kubic:/libcontainers:/unstable/xUbuntu_${ubuntu_version}/ /' > /etc/apt/sources.list.d/devel:kubic:libcontainers:unstable.list" && \
curl -L "http://ftp.lysator.liu.se/pub/opensuse/repositories/devel:/kubic:/libcontainers:/unstable/xUbuntu_${ubuntu_version}/Release.key" | sudo apt-key add - && \

View File

@ -36,7 +36,7 @@ jobs:
env:
SKIP_INSTALLATION: true
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
path: podman-desktop-extension-ai-lab
# Set up pnpm
@ -50,7 +50,7 @@ jobs:
with:
node-version: 22
# Checkout podman desktop
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
repository: podman-desktop/podman-desktop
ref: main
@ -58,18 +58,15 @@ jobs:
- name: Update podman
run: |
echo "ubuntu version from kubic repository to install podman we need (v5)"
ubuntu_version='23.10'
echo "Add unstable kubic repo into list of available sources and get the repo key"
# ubuntu version from kubic repository to install podman we need (v5)
ubuntu_version='23.04'
sudo sh -c "echo 'deb https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/unstable/xUbuntu_${ubuntu_version}/ /' > /etc/apt/sources.list.d/devel:kubic:libcontainers:unstable.list"
curl -L "https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/unstable/xUbuntu_${ubuntu_version}/Release.key" | sudo apt-key add -
echo "Updating database of packages..."
sudo apt-get update -qq
echo "install necessary dependencies for criu package which is not part of ${ubuntu_version}"
# install necessary dependencies for criu package which is not part of 23.04
sudo apt-get install -qq libprotobuf32t64 python3-protobuf libnet1
echo "install criu manually from static location"
curl -sLO http://archive.ubuntu.com/ubuntu/pool/universe/c/criu/criu_3.16.1-2_amd64.deb && sudo dpkg -i criu_3.16.1-2_amd64.deb
echo "installing/update podman package..."
# install criu manually from static location
curl -sLO http://cz.archive.ubuntu.com/ubuntu/pool/universe/c/criu/criu_3.16.1-2_amd64.deb && sudo dpkg -i criu_3.16.1-2_amd64.deb
sudo apt-get update -qq
sudo apt-get -qq -y install podman || { echo "Start fallback steps for podman nightly installation from a static mirror" && \
sudo sh -c "echo 'deb http://ftp.lysator.liu.se/pub/opensuse/repositories/devel:/kubic:/libcontainers:/unstable/xUbuntu_${ubuntu_version}/ /' > /etc/apt/sources.list.d/devel:kubic:libcontainers:unstable.list" && \
curl -L "http://ftp.lysator.liu.se/pub/opensuse/repositories/devel:/kubic:/libcontainers:/unstable/xUbuntu_${ubuntu_version}/Release.key" | sudo apt-key add - && \
@ -111,7 +108,7 @@ jobs:
- name: Update ramalama image references in AI Lab Extension
working-directory: ./podman-desktop-extension-ai-lab
run: sed -i -E "s/(@sha256:[0-9a-f]+)/:${{ github.event_name != 'workflow_dispatch' && 'latest' || github.event.inputs.tag }}/g" packages/backend/src/assets/inference-images.json
run: sed -i -E "s/(@sha256:[0-9a-f]+)/:${{ github.event.inputs.tag }}/g" packages/backend/src/assets/inference-images.json
- name: Build Image
working-directory: ./podman-desktop-extension-ai-lab

View File

@ -1,49 +0,0 @@
name: recipe-catalog-change-cleanup
on:
workflow_run:
workflows: ["recipe-catalog-change-windows-trigger"]
types:
- completed
jobs:
extract-context:
runs-on: ubuntu-24.04
outputs:
extract-context: ${{ steps.prepare-context.outputs.extract-context }}
trigger-template: ${{ steps.prepare-context.outputs.trigger-template }}
steps:
- name: Prepare context
id: prepare-context
env:
WORKFLOW_RUN: ${{ toJson(github.event.workflow_run) }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
echo "Workflow run ID: ${{ fromJson(env.WORKFLOW_RUN).id }}"
echo "Fork owner: ${{ fromJson(env.WORKFLOW_RUN).head_repository.owner.login }}"
echo "Fork repo: ${{ fromJson(env.WORKFLOW_RUN).head_repository.name }}"
echo "Fork branch: ${{ fromJson(env.WORKFLOW_RUN).head_branch }}"
echo "Commit SHA: ${{ fromJson(env.WORKFLOW_RUN).head_sha }}"
echo "Base repo: ${{ fromJson(env.WORKFLOW_RUN).repository.full_name }}"
echo "Conclusion: ${{ fromJson(env.WORKFLOW_RUN).conclusion }}"
# Fetch job conclusions using the GitHub CLI
echo "Fetching jobs for workflow run ID: ${{ fromJson(env.WORKFLOW_RUN).id }}"
gh api \
repos/${{ github.repository }}/actions/runs/${{ fromJson(env.WORKFLOW_RUN).id }}/jobs \
--jq '.jobs[] | "\(.name)=\(.conclusion)"' | while read -r line; do
echo "$line" >> $GITHUB_OUTPUT
done
cat $GITHUB_OUTPUT
cleanup:
runs-on: ubuntu-24.04
needs: extract-context
if: ${{ github.event.workflow_run.conclusion == 'skipped' || (github.event.workflow_run.conclusion == 'success' && needs.extract-context.outputs.trigger-template == 'skipped') }}
steps:
- name: Remove skipped or cancelled workflow run
env:
WORKFLOW_RUN: ${{ toJson(github.event.workflow_run) }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
echo "Cleaning up workflow run ID: ${{ fromJson(env.WORKFLOW_RUN).id }}"
gh run delete ${{ fromJson(env.WORKFLOW_RUN).id }} --repo ${{ fromJson(env.WORKFLOW_RUN).repository.full_name }}
echo "Workflow run ID ${{ fromJson(env.WORKFLOW_RUN).id }} has been cleaned up."

View File

@ -45,7 +45,7 @@ on:
pde2e-image-version:
required: false
type: string
mapt_params:
azure-vm-size:
required: false
type: string
@ -53,6 +53,10 @@ jobs:
windows:
name: recipe-catalog-windows-${{ matrix.windows-version }}-${{ matrix.windows-featurepack }}
runs-on: ubuntu-24.04
env:
MAPT_VERSION: v0.7.4
MAPT_IMAGE: quay.io/redhat-developer/mapt
MAPT_EXCLUDED_REGIONS: 'westindia,centralindia,southindia,australiacentral,australiacentral2,australiaeast,australiasoutheast,southafricanorth,southafricawest'
strategy:
fail-fast: false
matrix:
@ -64,14 +68,14 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
status_context="catalog-change-windows-matrix-${{ matrix.windows-version }}-${{ matrix.windows-featurepack }}"
status_context="ci/gh/e2e/windows-matrix-${{ matrix.windows-version }}-${{ matrix.windows-featurepack }}"
echo "status_context=${status_context}" >> "$GITHUB_ENV"
set -xuo
# Status msg
data="{\"state\":\"pending\""
data="${data},\"description\":\"Running recipe tests on catalog change on Windows ${{ matrix.windows-version }}-${{ matrix.windows-featurepack }}\""
data="${data},\"context\":\"$status_context\""
data="${data},\"target_url\":\"https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}\"}"
data="${data},\"target_url\":\"https://github.com/${{ inputs.trigger-workflow-base-repo }}/actions/runs/${{ inputs.trigger-workflow-run-id }}\"}"
# Create status by API call
curl -L -v -X POST \
-H "Accept: application/vnd.github+json" \
@ -92,12 +96,12 @@ jobs:
DEFAULT_NPM_TARGET: 'test:e2e'
DEFAULT_ENV_VARS: 'TEST_PODMAN_MACHINE=true,ELECTRON_ENABLE_INSPECT=true'
DEFAULT_PODMAN_OPTIONS: 'INIT=1,START=1,ROOTFUL=1,NETWORKING=0'
DEFAULT_EXT_TESTS_OPTIONS: 'EXT_RUN_TESTS_FROM_EXTENSION=1,EXT_RUN_TESTS_AS_ADMIN=1,EXT_TEST_GPU_SUPPORT_ENABLED=0'
DEFAULT_EXT_TESTS_OPTIONS: 'EXT_RUN_TESTS_FROM_EXTENSION=1,EXT_RUN_TESTS_AS_ADMIN=1'
DEFAULT_EXT_REPO_OPTIONS: 'REPO=podman-desktop-extension-ai-lab,FORK=containers,BRANCH=main'
DEFAULT_PODMAN_VERSION: "${{ env.PD_PODMAN_VERSION || '5.3.2' }}"
DEFAULT_URL: "https://github.com/containers/podman/releases/download/v$DEFAULT_PODMAN_VERSION/podman-$DEFAULT_PODMAN_VERSION-setup.exe"
DEFAULT_PDE2E_IMAGE_VERSION: 'v0.0.3-windows'
DEFAULT_MAPT_PARAMS: "IMAGE=${{ vars.MAPT_IMAGE || 'quay.io/redhat-developer/mapt' }},VERSION_TAG=${{ vars.MAPT_VERSION_TAG || 'v0.9.5' }},CPUS=${{ vars.MAPT_CPUS || '4' }},MEMORY=${{ vars.MAPT_MEMORY || '32' }},EXCLUDED_REGIONS=\"${{ vars.MAPT_EXCLUDED_REGIONS || 'westindia,centralindia,southindia,australiacentral,australiacentral2,australiaeast,australiasoutheast,southafricanorth,southafricawest' }}\""
DEFAULT_AZURE_VM_SIZE: 'Standard_D8as_v5'
run: |
echo "FORK=${{ inputs.pd-fork || env.DEFAULT_FORK }}" >> $GITHUB_ENV
echo "BRANCH=${{ inputs.pd-branch || env.DEFAULT_BRANCH }}" >> $GITHUB_ENV
@ -109,13 +113,12 @@ jobs:
echo "DEFAULT_EXT_REPO_OPTIONS=REPO=${{ inputs.trigger-workflow-repo-name }},FORK=${{ inputs.trigger-workflow-fork }},BRANCH=${{ inputs.trigger-workflow-branch }}" >> $GITHUB_ENV
fi
echo "${{ github.event.inputs.ext_tests_options || env.DEFAULT_EXT_TESTS_OPTIONS }}" | awk -F ',' \
'{for (i=1; i<=NF; i++) {split($i, kv, "="); print kv[1]"="kv[2]}}' >> $GITHUB_ENV
'{for (i=1; i<=NF; i++) {split($i, kv, "="); print kv[1]"="kv[2]}}' >> $GITHUB_ENV
echo "${{ env.DEFAULT_PODMAN_OPTIONS }}" | awk -F ',' \
'{for (i=1; i<=NF; i++) {split($i, kv, "="); print "PODMAN_"kv[1]"="kv[2]}}' >> $GITHUB_ENV
'{for (i=1; i<=NF; i++) {split($i, kv, "="); print "PODMAN_"kv[1]"="kv[2]}}' >> $GITHUB_ENV
echo "${{ inputs.podman-options || env.DEFAULT_EXT_REPO_OPTIONS }}" | awk -F ',' \
'{for (i=1; i<=NF; i++) {split($i, kv, "="); print "EXT_"kv[1]"="kv[2]}}' >> $GITHUB_ENV
echo "${{ github.event.inputs.mapt_params || env.DEFAULT_MAPT_PARAMS }}" | awk -F ',' \
'{for (i=1; i<=NF; i++) {split($i, kv, "="); print "MAPT_"kv[1]"="kv[2]}}' >> $GITHUB_ENV
'{for (i=1; i<=NF; i++) {split($i, kv, "="); print "EXT_"kv[1]"="kv[2]}}' >> $GITHUB_ENV
echo "AZURE_VM_SIZE=${{ inputs.azure-vm-size || env.DEFAULT_AZURE_VM_SIZE }}" >> $GITHUB_ENV
- name: Create instance
run: |
@ -126,17 +129,14 @@ jobs:
-e ARM_SUBSCRIPTION_ID=${{ secrets.ARM_SUBSCRIPTION_ID }} \
-e ARM_CLIENT_ID=${{ secrets.ARM_CLIENT_ID }} \
-e ARM_CLIENT_SECRET='${{ secrets.ARM_CLIENT_SECRET }}' \
--user 0 \
${{ env.MAPT_IMAGE }}:${{ env.MAPT_VERSION_TAG }} azure \
${{ env.MAPT_IMAGE }}:${{ env.MAPT_VERSION }} azure \
windows create \
--project-name 'windows-desktop' \
--backed-url 'file:///workspace' \
--conn-details-output '/workspace' \
--windows-version '${{ matrix.windows-version }}' \
--windows-featurepack '${{ matrix.windows-featurepack }}' \
--cpus ${{ env.MAPT_CPUS }} \
--memory ${{ env.MAPT_MEMORY }} \
--nested-virt \
--vmsize '${{ env.AZURE_VM_SIZE }}' \
--tags project=podman-desktop \
--spot-excluded-regions ${{ env.MAPT_EXCLUDED_REGIONS }} \
--spot
@ -268,8 +268,8 @@ jobs:
data="{\"state\":\"failure\""
fi
data="${data},\"description\":\"Finished recipe tests on catalog change on Windows ${{ matrix.windows-version }}-${{ matrix.windows-featurepack }}\""
data="${data},\"context\":\"${{ env.status_context }}\""
data="${data},\"target_url\":\"https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}\"}"
data="${data},\"context\":\"$status_context\""
data="${data},\"target_url\":\"https://github.com/${{ inputs.trigger-workflow-base-repo }}/actions/runs/${{ inputs.trigger-workflow-run-id }}\"}"
# Create status by API call
curl -L -v -X POST \
-H "Accept: application/vnd.github+json" \
@ -287,8 +287,7 @@ jobs:
-e ARM_SUBSCRIPTION_ID=${{ secrets.ARM_SUBSCRIPTION_ID }} \
-e ARM_CLIENT_ID=${{ secrets.ARM_CLIENT_ID }} \
-e ARM_CLIENT_SECRET='${{ secrets.ARM_CLIENT_SECRET }}' \
--user 0 \
${{ env.MAPT_IMAGE }}:${{ env.MAPT_VERSION_TAG }} azure \
${{ env.MAPT_IMAGE }}:${{ env.MAPT_VERSION }} azure \
windows destroy \
--project-name 'windows-desktop' \
--backed-url 'file:///workspace'

View File

@ -49,6 +49,7 @@ jobs:
else
echo "No changes detected in ai.json"
echo "changes-detected=false" >> $GITHUB_OUTPUT
gh run cancel ${{ github.run_id}}
fi
trigger-template:
@ -64,5 +65,5 @@ jobs:
trigger-workflow-branch: ${{ needs.extract-context.outputs.fork-branch }}
trigger-workflow-commit-sha: ${{ needs.extract-context.outputs.commit-sha }}
trigger-workflow-base-repo: ${{ needs.extract-context.outputs.base-repo }}
ext_tests_options: 'EXT_RUN_TESTS_FROM_EXTENSION=1,EXT_RUN_TESTS_AS_ADMIN=0,EXT_TEST_GPU_SUPPORT_ENABLED=0'
ext_tests_options: 'EXT_RUN_TESTS_FROM_EXTENSION=1,EXT_RUN_TESTS_AS_ADMIN=0'
secrets: inherit

View File

@ -41,7 +41,7 @@ jobs:
releaseId: ${{ steps.create_release.outputs.id}}
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
ref: ${{ github.event.inputs.branch }}
- name: Generate tag utilities
@ -116,7 +116,7 @@ jobs:
needs: [tag]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
ref: ${{ needs.tag.outputs.githubTag }}
@ -150,7 +150,7 @@ jobs:
release:
needs: [tag, build]
name: Release
runs-on: ubuntu-24.04
runs-on: ubuntu-20.04
steps:
- name: id
run: echo the release id is ${{ needs.tag.outputs.releaseId}}

View File

@ -1,54 +0,0 @@
#!/usr/bin/env bash
#
# Copyright (C) 2025 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
# Script to update ramalama image references in inference-images.json
set -euo pipefail
JSON_PATH="packages/backend/src/assets/inference-images.json"
TMP_JSON="${JSON_PATH}.tmp"
TAG=$1
# Images and their keys in the JSON
IMAGES=(
"whispercpp:ramalama/ramalama-whisper-server:default"
"llamacpp:ramalama/ramalama-llama-server:default"
"llamacpp:ramalama/cuda-llama-server:cuda"
"openvino:ramalama/openvino:default"
)
cp "$JSON_PATH" "$TMP_JSON"
for entry in "${IMAGES[@]}"; do
IFS=":" read -r key image jsonkey <<< "$entry"
digest=$(curl -s "https://quay.io/v2/$image/manifests/$TAG" -H 'Accept: application/vnd.oci.image.index.v1+json' --head | grep -i Docker-Content-Digest | awk -e '{ print $2 }' | tr -d '\r')
# Update the JSON file with the new digest
jq --arg img "quay.io/$image" --arg dig "$digest" --arg key "$key" --arg jsonkey "$jsonkey" \
'(.[$key][$jsonkey]) = ($img + "@" + $dig)' \
"$TMP_JSON" > "$TMP_JSON.new" && mv "$TMP_JSON.new" "$TMP_JSON"
done
# Compare and update if changed
if cmp -s "$JSON_PATH" "$TMP_JSON"; then
echo "No update needed: digests are up to date."
rm "$TMP_JSON"
exit 0
else
mv "$TMP_JSON" "$JSON_PATH"
echo "Updated inference-images.json with latest digests."
exit 10
fi

View File

@ -1,87 +0,0 @@
#
# Copyright (C) 2025 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
# This workflow automatically updates ramalama image digests in inference-images.json
# and creates a pull request with the changes.
name: update-ramalama-references
on:
schedule:
- cron: '0 3 * * *' # Runs daily at 03:00 UTC
workflow_dispatch:
permissions:
contents: write
jobs:
update-references:
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Get latest ramalama version
id: get_ramalama_version
run: |
RAMALAMA_VERSION=$(curl -s https://quay.io/v2/ramalama/ramalama-llama-server/tags/list -s | jq .tags[] | grep -E '^"[0-9]+\.[0-9]+\.[0-9]+"$' | sort -V | tail -n 1 | tr -d '"')
echo "RAMALAMA_VERSION=${RAMALAMA_VERSION}" >> $GITHUB_OUTPUT
- name: Check if PR already exists
id: pr_exists
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const branch = `update-ramalama-references-${{ steps.get_ramalama_version.outputs.RAMALAMA_VERSION }}`;
const { data: pulls } = await github.rest.pulls.list({
owner: context.repo.owner,
repo: context.repo.repo,
head: `${context.repo.owner}:${branch}`,
state: 'open',
});
if (pulls.length > 0) {
core.setOutput('exists', 'true');
} else {
core.setOutput('exists', 'false');
}
- name: Update ramalama image references in inference-images.json
id: update_digests
if: steps.pr_exists.outputs.exists == 'false'
run: |
bash .github/workflows/update-ramalama-references.sh "${{ steps.get_ramalama_version.outputs.RAMALAMA_VERSION }}"
continue-on-error: true
- name: Commit changes
if: steps.pr_exists.outputs.exists == 'false' && steps.update_digests.outcome == 'failure'
run: |
git config --global user.email "github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"
git checkout -b "update-ramalama-references-${{ steps.get_ramalama_version.outputs.RAMALAMA_VERSION }}"
git add packages/backend/src/assets/inference-images.json
git commit -m "chore: update ramalama image references ${{ steps.get_ramalama_version.outputs.RAMALAMA_VERSION }}"
git push origin "update-ramalama-references-${{ steps.get_ramalama_version.outputs.RAMALAMA_VERSION }}"
- name: Create Pull Request
if: steps.pr_exists.outputs.exists == 'false' && steps.update_digests.outcome == 'failure'
run: |
echo -e "update ramalama image references to ${{ steps.get_ramalama_version.outputs.RAMALAMA_VERSION }}" > /tmp/pr-title
pullRequestUrl=$(gh pr create --title "chore: update ramalama image references to ${{ steps.get_ramalama_version.outputs.RAMALAMA_VERSION }}" --body-file /tmp/pr-title --head "update-ramalama-references-${{ steps.get_ramalama_version.outputs.RAMALAMA_VERSION }}" --base "main")
echo "📢 Pull request created: ${pullRequestUrl}"
echo "➡️ Flag the PR as being ready for review"
gh pr ready "${pullRequestUrl}"
env:
GITHUB_TOKEN: ${{ secrets.PODMAN_DESKTOP_BOT_TOKEN }}

View File

@ -21,7 +21,6 @@ COPY packages/backend/package.json /extension/
COPY packages/backend/media/ /extension/media
COPY LICENSE /extension/
COPY packages/backend/icon.png /extension/
COPY packages/backend/brain.woff2 /extension/
COPY README.md /extension/
COPY api/openapi.yaml /extension/api/

View File

@ -3,7 +3,7 @@
"displayName": "ai-lab-monorepo",
"description": "ai-lab-monorepo",
"publisher": "redhat",
"version": "1.9.0-next",
"version": "1.7.0",
"license": "Apache-2.0",
"private": true,
"engines": {
@ -24,7 +24,6 @@
"test:unit": "pnpm run test:backend && pnpm run test:shared && pnpm run test:frontend",
"test:e2e": "cd tests/playwright && pnpm run test:e2e",
"test:e2e:smoke": "cd tests/playwright && pnpm run test:e2e:smoke",
"test:e2e:instructlab": "cd tests/playwright && pnpm run test:e2e:instructlab",
"typecheck:shared": "tsc --noEmit --project packages/shared",
"typecheck:frontend": "tsc --noEmit --project packages/frontend",
"typecheck:backend": "cd packages/backend && pnpm run typecheck",
@ -46,14 +45,14 @@
"devDependencies": {
"@commitlint/cli": "^19.8.1",
"@commitlint/config-conventional": "^19.8.1",
"@eslint/compat": "^1.3.2",
"@typescript-eslint/eslint-plugin": "^8.40.0",
"@typescript-eslint/parser": "^8.40.0",
"@vitest/coverage-v8": "^3.2.3",
"@eslint/compat": "^1.2.9",
"@typescript-eslint/eslint-plugin": "^8.32.1",
"@typescript-eslint/parser": "^8.32.1",
"@vitest/coverage-v8": "^3.0.5",
"autoprefixer": "^10.4.21",
"commitlint": "^19.8.1",
"concurrently": "^9.1.2",
"eslint": "^9.33.0",
"eslint": "^9.27.0",
"eslint-import-resolver-custom-alias": "^1.3.2",
"eslint-import-resolver-typescript": "^4.3.5",
"eslint-plugin-etc": "^2.0.3",
@ -61,19 +60,19 @@
"eslint-plugin-no-null": "^1.0.2",
"eslint-plugin-redundant-undefined": "^1.0.0",
"eslint-plugin-simple-import-sort": "^12.1.1",
"eslint-plugin-sonarjs": "^3.0.3",
"eslint-plugin-svelte": "^3.11.0",
"eslint-plugin-unicorn": "^60.0.0",
"eslint-plugin-sonarjs": "^3.0.2",
"eslint-plugin-svelte": "^3.8.1",
"eslint-plugin-unicorn": "^59.0.1",
"globals": "^16.1.0",
"husky": "^9.1.7",
"lint-staged": "^16.1.5",
"lint-staged": "^16.0.0",
"prettier": "^3.5.3",
"prettier-plugin-svelte": "^3.4.0",
"svelte-check": "^4.3.1",
"svelte-eslint-parser": "^1.3.1",
"typescript": "5.9.2",
"typescript-eslint": "^8.40.0",
"vite": "^7.1.3",
"svelte-check": "^4.2.1",
"svelte-eslint-parser": "^1.2.0",
"typescript": "5.8.3",
"typescript-eslint": "^8.32.1",
"vite": "^6.3.5",
"vitest": "^3.0.5"
},
"workspaces": {
@ -91,15 +90,7 @@
"pnpm": {
"overrides": {
"postman-collection>semver": "^7.5.2"
},
"ignoredBuiltDependencies": [
"@scarf/scarf",
"@tailwindcss/oxide",
"esbuild",
"postman-code-generators",
"svelte-preprocess",
"unrs-resolver"
]
}
},
"packageManager": "pnpm@10.12.4+sha512.5ea8b0deed94ed68691c9bad4c955492705c5eeb8a87ef86bc62c74a26b037b08ff9570f108b2e4dbd1dd1a9186fea925e527f141c648e85af45631074680184"
"packageManager": "pnpm@9.9.0+sha512.60c18acd138bff695d339be6ad13f7e936eea6745660d4cc4a776d5247c540d0edee1a563695c183a66eb917ef88f2b4feb1fc25f32a7adcadc7aaf3438e99c1"
}

View File

@ -2,7 +2,7 @@
"name": "ai-lab",
"displayName": "Podman AI Lab",
"description": "Podman AI Lab lets you work with LLMs locally, exploring AI fundamentals, experimenting with models and prompts, and serving models while maintaining data security and privacy.",
"version": "1.9.0-next",
"version": "1.7.0",
"icon": "icon.png",
"type": "module",
"publisher": "redhat",
@ -110,22 +110,22 @@
"typecheck": "pnpm run generate && tsc --noEmit"
},
"dependencies": {
"@ai-sdk/openai-compatible": "^0.2.16",
"@huggingface/gguf": "^0.2.1",
"@huggingface/hub": "^2.4.1",
"ai": "^4.3.19",
"@ai-sdk/openai-compatible": "^0.2.14",
"@huggingface/gguf": "^0.1.17",
"@huggingface/hub": "^2.1.0",
"ai": "^4.3.16",
"express": "^4.21.2",
"express-openapi-validator": "^5.5.8",
"isomorphic-git": "^1.33.0",
"express-openapi-validator": "^5.5.1",
"isomorphic-git": "^1.30.1",
"js-yaml": "^4.1.0",
"mustache": "^4.2.0",
"openai": "^5.15.0",
"openai": "^4.99.0",
"postman-code-generators": "^1.14.1",
"postman-collection": "^5.1.0",
"postman-collection": "^5.0.2",
"semver": "^7.7.2",
"swagger-ui-dist": "^5.27.1",
"swagger-ui-dist": "^5.21.0",
"swagger-ui-express": "^5.0.1",
"systeminformation": "^5.27.7",
"systeminformation": "^5.25.11",
"xml-js": "^1.6.11"
},
"devDependencies": {
@ -140,8 +140,8 @@
"@types/supertest": "^6.0.3",
"@types/swagger-ui-dist": "^3.30.5",
"@types/swagger-ui-express": "^4.1.8",
"openapi-typescript": "^7.9.1",
"supertest": "^7.1.4",
"openapi-typescript": "^7.8.0",
"supertest": "^7.1.1",
"vitest": "^3.0.5"
}
}

File diff suppressed because one or more lines are too long

View File

@ -1,12 +1,12 @@
{
"whispercpp": {
"default": "quay.io/ramalama/ramalama-whisper-server@sha256:010aa34d8734e5e698fb4c5e852e43e5909baa928e3b6e991e1038a1973909ba"
"default": "quay.io/ramalama/ramalama-whisper-server@sha256:72bce4bed86e7f72e41c60960dd7b1fd9b5115328f520ddcae5dbdd689376995"
},
"llamacpp": {
"default": "quay.io/ramalama/ramalama-llama-server@sha256:4409a5c964382408f3bc08be1314754edaf2dfec1626f31974e34379bfeec41e",
"cuda": "quay.io/ramalama/cuda-llama-server@sha256:5e1a3a2508e4b802c8d8c3ecb97ad1778a1b4288fd114562b51fd411bad91841"
"default": "quay.io/ramalama/ramalama-llama-server@sha256:4e56101073e0bd6f2f2e15839b64315656d0dbfc1331a3385f2ae722e13f2279",
"cuda": "quay.io/ramalama/cuda-llama-server@sha256:56efc824e5b3ae6a6a11e9537ed9e2ac05f9f9fc6f2e81a55eb67b662c94fe95"
},
"openvino": {
"default": "quay.io/ramalama/openvino@sha256:705f3e0a44dcdc2c7b81c3931e42d5ee19d2502bdb5ebddf3f186932a2658e83"
"default": "quay.io/ramalama/openvino@sha256:670d91cc322933cc4263606459317cd4ca3fcfb16d59a46b11dcd498c2cd7cb5"
}
}

View File

@ -1,3 +1,3 @@
{
"default": "ghcr.io/containers/podman-ai-lab-stack:8d6a4a9a7c587c0a8e44703dd750355256e7a796"
"default": "quay.io/podman-ai-lab/distribution-podman-ai-lab@sha256:12a86f62e8623aaeb2a86120a77d274c0e52496d307d2a399969cc1f8f5260c5"
}

View File

@ -65,8 +65,6 @@ const modelsManager = {
getModelsInfo: vi.fn(),
isModelOnDisk: vi.fn(),
createDownloader: vi.fn(),
getLocalModelsFromDisk: vi.fn(),
sendModelsInfo: vi.fn(),
} as unknown as ModelsManager;
const catalogManager = {
@ -280,8 +278,6 @@ describe.each([undefined, true, false])('/api/pull endpoint, stream is %o', stre
});
test('/api/pull downloads model and returns success', async () => {
const getLocalModelsSpy = vi.spyOn(modelsManager, 'getLocalModelsFromDisk').mockResolvedValue();
const sendModelsInfoSpy = vi.spyOn(modelsManager, 'sendModelsInfo').mockResolvedValue();
expect(server.getListener()).toBeDefined();
vi.mocked(catalogManager.getModelByName).mockReturnValue({
id: 'modelId',
@ -316,8 +312,6 @@ describe.each([undefined, true, false])('/api/pull endpoint, stream is %o', stre
expect(lines[2]).toEqual('{"status":"success"}');
expect(lines[3]).toEqual('');
}
expect(getLocalModelsSpy).toHaveBeenCalledTimes(1);
expect(sendModelsInfoSpy).toHaveBeenCalledTimes(1);
});
test('/api/pull should return an error if an error occurs during download', async () => {

View File

@ -342,9 +342,7 @@ export class ApiServer implements Disposable {
downloader
.perform(modelName)
.then(async () => {
await this.modelsManager.getLocalModelsFromDisk();
await this.modelsManager.sendModelsInfo();
.then(() => {
this.sendResult(
res,
{
@ -507,7 +505,7 @@ export class ApiServer implements Disposable {
res.write(
JSON.stringify({
model: modelName,
response: chunk.choices[0].delta.content ?? '',
response: chunk.choices[0].delta.content,
done: chunk.choices[0].finish_reason === 'stop',
done_reason: chunk.choices[0].finish_reason === 'stop' ? 'stop' : undefined,
}) + '\n',
@ -518,7 +516,7 @@ export class ApiServer implements Disposable {
onNonStreamResponse: response => {
res.status(200).json({
model: modelName,
response: response.choices[0].message.content ?? '',
response: response.choices[0].message.content,
done: true,
done_reason: 'stop',
});
@ -573,7 +571,7 @@ export class ApiServer implements Disposable {
model: modelName,
message: {
role: 'assistant',
content: chunk.choices[0].delta.content ?? '',
content: chunk.choices[0].delta.content,
},
done: chunk.choices[0].finish_reason === 'stop',
done_reason: chunk.choices[0].finish_reason === 'stop' ? 'stop' : undefined,
@ -587,7 +585,7 @@ export class ApiServer implements Disposable {
model: modelName,
message: {
role: 'assistant',
content: response.choices[0].message.content ?? '',
content: response.choices[0].message.content,
},
done: true,
done_reason: 'stop',

View File

@ -31,8 +31,6 @@ import { VMType } from '@shared/models/IPodman';
import { POD_LABEL_MODEL_ID, POD_LABEL_RECIPE_ID } from '../../utils/RecipeConstants';
import type { InferenceServer } from '@shared/models/IInference';
import type { RpcExtension } from '@shared/messages/MessageProxy';
import type { LlamaStackManager } from '../llama-stack/llamaStackManager';
import type { ApplicationOptions } from '../../models/ApplicationOptions';
const taskRegistryMock = {
createTask: vi.fn(),
@ -77,10 +75,6 @@ const recipeManager = {
buildRecipe: vi.fn(),
} as unknown as RecipeManager;
const llamaStackManager = {
getLlamaStackContainer: vi.fn(),
} as unknown as LlamaStackManager;
vi.mock('@podman-desktop/api', () => ({
window: {
withProgress: vi.fn(),
@ -145,11 +139,6 @@ beforeEach(() => {
id: 'fake-task',
}));
vi.mocked(modelsManagerMock.uploadModelToPodmanMachine).mockResolvedValue('downloaded-model-path');
vi.mocked(llamaStackManager.getLlamaStackContainer).mockResolvedValue({
containerId: 'container1',
port: 10001,
playgroundPort: 10002,
});
});
function getInitializedApplicationManager(): ApplicationManager {
@ -162,7 +151,6 @@ function getInitializedApplicationManager(): ApplicationManager {
telemetryMock,
podManager,
recipeManager,
llamaStackManager,
);
manager.init();
@ -172,11 +160,11 @@ function getInitializedApplicationManager(): ApplicationManager {
describe('requestPullApplication', () => {
test('task should be set to error if pull application raise an error', async () => {
vi.mocked(window.withProgress).mockRejectedValue(new Error('pull application error'));
const trackingId = await getInitializedApplicationManager().requestPullApplication({
connection: connectionMock,
recipe: recipeMock,
model: remoteModelMock,
});
const trackingId = await getInitializedApplicationManager().requestPullApplication(
connectionMock,
recipeMock,
remoteModelMock,
);
// ensure the task is created
await vi.waitFor(() => {
@ -302,67 +290,40 @@ describe('startApplication', () => {
});
});
describe.each([true, false])('pullApplication, with model is %o', withModel => {
let applicationOptions: ApplicationOptions;
beforeEach(() => {
applicationOptions = withModel
? {
connection: connectionMock,
recipe: recipeMock,
model: remoteModelMock,
}
: {
connection: connectionMock,
recipe: recipeMock,
dependencies: {
llamaStack: true,
},
};
});
describe('pullApplication', () => {
test('labels should be propagated', async () => {
await getInitializedApplicationManager().pullApplication(applicationOptions, {
await getInitializedApplicationManager().pullApplication(connectionMock, recipeMock, remoteModelMock, {
'test-label': 'test-value',
});
// clone the recipe
expect(recipeManager.cloneRecipe).toHaveBeenCalledWith(recipeMock, {
'test-label': 'test-value',
'model-id': withModel ? remoteModelMock.id : '<none>',
'model-id': remoteModelMock.id,
});
// download model
expect(modelsManagerMock.requestDownloadModel).toHaveBeenCalledWith(remoteModelMock, {
'test-label': 'test-value',
'recipe-id': recipeMock.id,
'model-id': remoteModelMock.id,
});
// upload model to podman machine
expect(modelsManagerMock.uploadModelToPodmanMachine).toHaveBeenCalledWith(connectionMock, remoteModelMock, {
'test-label': 'test-value',
'recipe-id': recipeMock.id,
'model-id': remoteModelMock.id,
});
if (withModel) {
// download model
expect(modelsManagerMock.requestDownloadModel).toHaveBeenCalledWith(remoteModelMock, {
'test-label': 'test-value',
'recipe-id': recipeMock.id,
'model-id': remoteModelMock.id,
});
// upload model to podman machine
expect(modelsManagerMock.uploadModelToPodmanMachine).toHaveBeenCalledWith(connectionMock, remoteModelMock, {
'test-label': 'test-value',
'recipe-id': recipeMock.id,
'model-id': remoteModelMock.id,
});
}
// build the recipe
expect(recipeManager.buildRecipe).toHaveBeenCalledWith(
{
connection: connectionMock,
recipe: recipeMock,
model: withModel ? remoteModelMock : undefined,
dependencies: applicationOptions.dependencies,
},
{
'test-label': 'test-value',
'recipe-id': recipeMock.id,
'model-id': withModel ? remoteModelMock.id : '<none>',
},
);
expect(recipeManager.buildRecipe).toHaveBeenCalledWith(connectionMock, recipeMock, remoteModelMock, {
'test-label': 'test-value',
'recipe-id': recipeMock.id,
'model-id': remoteModelMock.id,
});
// create AI App task must be created
expect(taskRegistryMock.createTask).toHaveBeenCalledWith('Creating AI App', 'loading', {
'test-label': 'test-value',
'recipe-id': recipeMock.id,
'model-id': withModel ? remoteModelMock.id : '<none>',
'model-id': remoteModelMock.id,
});
// a pod must have been created
@ -371,7 +332,7 @@ describe.each([true, false])('pullApplication, with model is %o', withModel => {
name: expect.any(String),
portmappings: [],
labels: {
[POD_LABEL_MODEL_ID]: withModel ? remoteModelMock.id : '<none>',
[POD_LABEL_MODEL_ID]: remoteModelMock.id,
[POD_LABEL_RECIPE_ID]: recipeMock.id,
},
});
@ -379,7 +340,7 @@ describe.each([true, false])('pullApplication, with model is %o', withModel => {
expect(containerEngine.createContainer).toHaveBeenCalledWith('test-engine-id', {
Image: recipeImageInfoMock.id,
name: expect.any(String),
Env: withModel ? [] : ['MODEL_ENDPOINT=http://host.containers.internal:10001'],
Env: [],
HealthCheck: undefined,
HostConfig: undefined,
Detach: true,
@ -400,45 +361,34 @@ describe.each([true, false])('pullApplication, with model is %o', withModel => {
},
} as InferenceServer,
});
vi.mocked(modelsManagerMock.requestDownloadModel).mockResolvedValue('/path/to/model');
await getInitializedApplicationManager().pullApplication(applicationOptions, {
await getInitializedApplicationManager().pullApplication(connectionMock, recipeMock, remoteModelMock, {
'test-label': 'test-value',
});
// clone the recipe
expect(recipeManager.cloneRecipe).toHaveBeenCalledWith(recipeMock, {
'test-label': 'test-value',
'model-id': withModel ? remoteModelMock.id : '<none>',
'model-id': remoteModelMock.id,
});
if (withModel) {
// download model
expect(modelsManagerMock.requestDownloadModel).toHaveBeenCalledWith(remoteModelMock, {
'test-label': 'test-value',
'recipe-id': recipeMock.id,
'model-id': remoteModelMock.id,
});
// upload model to podman machine
expect(modelsManagerMock.uploadModelToPodmanMachine).not.toHaveBeenCalled();
}
// download model
expect(modelsManagerMock.requestDownloadModel).toHaveBeenCalledWith(remoteModelMock, {
'test-label': 'test-value',
'recipe-id': recipeMock.id,
'model-id': remoteModelMock.id,
});
// upload model to podman machine
expect(modelsManagerMock.uploadModelToPodmanMachine).not.toHaveBeenCalled();
// build the recipe
expect(recipeManager.buildRecipe).toHaveBeenCalledWith(
{
connection: connectionMock,
recipe: recipeMock,
model: withModel ? remoteModelMock : undefined,
dependencies: applicationOptions.dependencies,
},
{
'test-label': 'test-value',
'recipe-id': recipeMock.id,
'model-id': withModel ? remoteModelMock.id : '<none>',
},
);
expect(recipeManager.buildRecipe).toHaveBeenCalledWith(connectionMock, recipeMock, remoteModelMock, {
'test-label': 'test-value',
'recipe-id': recipeMock.id,
'model-id': remoteModelMock.id,
});
// create AI App task must be created
expect(taskRegistryMock.createTask).toHaveBeenCalledWith('Creating AI App', 'loading', {
'test-label': 'test-value',
'recipe-id': recipeMock.id,
'model-id': withModel ? remoteModelMock.id : '<none>',
'model-id': remoteModelMock.id,
});
// a pod must have been created
@ -447,7 +397,7 @@ describe.each([true, false])('pullApplication, with model is %o', withModel => {
name: expect.any(String),
portmappings: [],
labels: {
[POD_LABEL_MODEL_ID]: withModel ? remoteModelMock.id : '<none>',
[POD_LABEL_MODEL_ID]: remoteModelMock.id,
[POD_LABEL_RECIPE_ID]: recipeMock.id,
},
});
@ -455,9 +405,7 @@ describe.each([true, false])('pullApplication, with model is %o', withModel => {
expect(containerEngine.createContainer).toHaveBeenCalledWith('test-engine-id', {
Image: recipeImageInfoMock.id,
name: expect.any(String),
Env: withModel
? ['MODEL_ENDPOINT=http://host.containers.internal:56001']
: ['MODEL_ENDPOINT=http://host.containers.internal:10001'],
Env: ['MODEL_ENDPOINT=http://host.containers.internal:56001'],
HealthCheck: undefined,
HostConfig: undefined,
Detach: true,
@ -479,12 +427,12 @@ describe.each([true, false])('pullApplication, with model is %o', withModel => {
},
} as unknown as PodInfo);
await getInitializedApplicationManager().pullApplication(applicationOptions);
await getInitializedApplicationManager().pullApplication(connectionMock, recipeMock, remoteModelMock);
// removing existing application should create a task to notify the user
expect(taskRegistryMock.createTask).toHaveBeenCalledWith('Removing AI App', 'loading', {
'recipe-id': recipeMock.id,
'model-id': withModel ? remoteModelMock.id : '<none>',
'model-id': remoteModelMock.id,
});
// the remove pod should have been called
expect(podManager.removePod).toHaveBeenCalledWith('test-engine-id', 'test-pod-id-existing');
@ -508,24 +456,22 @@ describe.each([true, false])('pullApplication, with model is %o', withModel => {
],
});
await getInitializedApplicationManager().pullApplication(applicationOptions);
await getInitializedApplicationManager().pullApplication(connectionMock, recipeMock, remoteModelMock);
// the remove pod should have been called
expect(containerEngine.createContainer).toHaveBeenCalledWith(
recipeImageInfoMock.engineId,
expect.objectContaining({
HostConfig: withModel
? {
Mounts: [
{
Mode: 'Z',
Source: 'downloaded-model-path',
Target: '/downloaded-model-path',
Type: 'bind',
},
],
}
: undefined,
HostConfig: {
Mounts: [
{
Mode: 'Z',
Source: 'downloaded-model-path',
Target: '/downloaded-model-path',
Type: 'bind',
},
],
},
}),
);
});

View File

@ -16,7 +16,7 @@
* SPDX-License-Identifier: Apache-2.0
***********************************************************************/
import type { RecipeComponents, RecipeImage } from '@shared/models/IRecipe';
import type { Recipe, RecipeComponents, RecipeImage } from '@shared/models/IRecipe';
import * as path from 'node:path';
import { containerEngine, Disposable, window, ProgressLocation } from '@podman-desktop/api';
import type {
@ -28,6 +28,7 @@ import type {
PodContainerInfo,
ContainerProviderConnection,
} from '@podman-desktop/api';
import type { ModelInfo } from '@shared/models/IModelInfo';
import type { ModelsManager } from '../modelsManager';
import { getPortsFromLabel, getPortsInfo } from '../../utils/ports';
import { getDurationSecondsSince, timeout } from '../../utils/utils';
@ -54,8 +55,6 @@ import { RECIPE_START_ROUTE } from '../../registries/NavigationRegistry';
import type { RpcExtension } from '@shared/messages/MessageProxy';
import { TaskRunner } from '../TaskRunner';
import { getInferenceType } from '../../utils/inferenceUtils';
import type { LlamaStackManager } from '../llama-stack/llamaStackManager';
import { isApplicationOptionsWithModelInference, type ApplicationOptions } from '../../models/ApplicationOptions';
export class ApplicationManager extends Publisher<ApplicationState[]> implements Disposable {
#applications: ApplicationRegistry<ApplicationState>;
@ -72,7 +71,6 @@ export class ApplicationManager extends Publisher<ApplicationState[]> implements
private telemetry: TelemetryLogger,
private podManager: PodManager,
private recipeManager: RecipeManager,
private llamaStackManager: LlamaStackManager,
) {
super(rpcExtension, MSG_APPLICATIONS_STATE_UPDATE, () => this.getApplicationsState());
this.#applications = new ApplicationRegistry<ApplicationState>();
@ -80,7 +78,11 @@ export class ApplicationManager extends Publisher<ApplicationState[]> implements
this.#disposables = [];
}
async requestPullApplication(options: ApplicationOptions): Promise<string> {
async requestPullApplication(
connection: ContainerProviderConnection,
recipe: Recipe,
model: ModelInfo,
): Promise<string> {
// create a tracking id to put in the labels
const trackingId: string = getRandomString();
@ -92,23 +94,23 @@ export class ApplicationManager extends Publisher<ApplicationState[]> implements
.runAsTask(
{
...labels,
'recipe-pulling': options.recipe.id, // this label should only be on the master task
'recipe-pulling': recipe.id, // this label should only be on the master task
},
{
loadingLabel: `Pulling ${options.recipe.name} recipe`,
errorMsg: err => `Something went wrong while pulling ${options.recipe.name}: ${String(err)}`,
loadingLabel: `Pulling ${recipe.name} recipe`,
errorMsg: err => `Something went wrong while pulling ${recipe.name}: ${String(err)}`,
},
() =>
window.withProgress(
{
location: ProgressLocation.TASK_WIDGET,
title: `Pulling ${options.recipe.name}.`,
title: `Pulling ${recipe.name}.`,
details: {
routeId: RECIPE_START_ROUTE,
routeArgs: [options.recipe.id, trackingId],
routeArgs: [recipe.id, trackingId],
},
},
() => this.pullApplication(options, labels),
() => this.pullApplication(connection, recipe, model, labels),
),
)
.catch(() => {});
@ -116,43 +118,37 @@ export class ApplicationManager extends Publisher<ApplicationState[]> implements
return trackingId;
}
async pullApplication(options: ApplicationOptions, labels: Record<string, string> = {}): Promise<void> {
let modelId: string;
if (isApplicationOptionsWithModelInference(options)) {
modelId = options.model.id;
} else {
modelId = '<none>';
}
async pullApplication(
connection: ContainerProviderConnection,
recipe: Recipe,
model: ModelInfo,
labels: Record<string, string> = {},
): Promise<void> {
// clear any existing status / tasks related to the pair recipeId-modelId.
this.taskRegistry.deleteByLabels({
'recipe-id': options.recipe.id,
'model-id': modelId,
'recipe-id': recipe.id,
'model-id': model.id,
});
const startTime = performance.now();
try {
// init application (git clone, models download etc.)
const podInfo: PodInfo = await this.initApplication(options, labels);
const podInfo: PodInfo = await this.initApplication(connection, recipe, model, labels);
// start the pod
await this.runApplication(podInfo, {
...labels,
'recipe-id': options.recipe.id,
'model-id': modelId,
'recipe-id': recipe.id,
'model-id': model.id,
});
// measure init + start time
const durationSeconds = getDurationSecondsSince(startTime);
this.telemetry.logUsage('recipe.pull', {
'recipe.id': options.recipe.id,
'recipe.name': options.recipe.name,
durationSeconds,
});
this.telemetry.logUsage('recipe.pull', { 'recipe.id': recipe.id, 'recipe.name': recipe.name, durationSeconds });
} catch (err: unknown) {
const durationSeconds = getDurationSecondsSince(startTime);
this.telemetry.logError('recipe.pull', {
'recipe.id': options.recipe.id,
'recipe.name': options.recipe.name,
'recipe.id': recipe.id,
'recipe.name': recipe.name,
durationSeconds,
message: 'error pulling application',
error: err,
@ -177,54 +173,48 @@ export class ApplicationManager extends Publisher<ApplicationState[]> implements
* @param labels
* @private
*/
private async initApplication(options: ApplicationOptions, labels: Record<string, string> = {}): Promise<PodInfo> {
let modelId: string;
if (isApplicationOptionsWithModelInference(options)) {
modelId = options.model.id;
} else {
modelId = '<none>';
}
private async initApplication(
connection: ContainerProviderConnection,
recipe: Recipe,
model: ModelInfo,
labels: Record<string, string> = {},
): Promise<PodInfo> {
// clone the recipe
await this.recipeManager.cloneRecipe(options.recipe, { ...labels, 'model-id': modelId });
await this.recipeManager.cloneRecipe(recipe, { ...labels, 'model-id': model.id });
let modelPath: string | undefined;
if (isApplicationOptionsWithModelInference(options)) {
// get model by downloading it or retrieving locally
modelPath = await this.modelsManager.requestDownloadModel(options.model, {
...labels,
'recipe-id': options.recipe.id,
'model-id': modelId,
});
}
// build all images, one per container (for a basic sample we should have 2 containers = sample app + model service)
const recipeComponents = await this.recipeManager.buildRecipe(options, {
// get model by downloading it or retrieving locally
let modelPath = await this.modelsManager.requestDownloadModel(model, {
...labels,
'recipe-id': options.recipe.id,
'model-id': modelId,
'recipe-id': recipe.id,
'model-id': model.id,
});
if (isApplicationOptionsWithModelInference(options)) {
// upload model to podman machine if user system is supported
if (!recipeComponents.inferenceServer) {
modelPath = await this.modelsManager.uploadModelToPodmanMachine(options.connection, options.model, {
...labels,
'recipe-id': options.recipe.id,
'model-id': modelId,
});
}
// build all images, one per container (for a basic sample we should have 2 containers = sample app + model service)
const recipeComponents = await this.recipeManager.buildRecipe(connection, recipe, model, {
...labels,
'recipe-id': recipe.id,
'model-id': model.id,
});
// upload model to podman machine if user system is supported
if (!recipeComponents.inferenceServer) {
modelPath = await this.modelsManager.uploadModelToPodmanMachine(connection, model, {
...labels,
'recipe-id': recipe.id,
'model-id': model.id,
});
}
// first delete any existing pod with matching labels
if (await this.hasApplicationPod(options.recipe.id, modelId)) {
await this.removeApplication(options.recipe.id, modelId);
if (await this.hasApplicationPod(recipe.id, model.id)) {
await this.removeApplication(recipe.id, model.id);
}
// create a pod containing all the containers to run the application
return this.createApplicationPod(options, recipeComponents, modelPath, {
return this.createApplicationPod(connection, recipe, model, recipeComponents, modelPath, {
...labels,
'recipe-id': options.recipe.id,
'model-id': modelId,
'recipe-id': recipe.id,
'model-id': model.id,
});
}
@ -267,9 +257,11 @@ export class ApplicationManager extends Publisher<ApplicationState[]> implements
}
protected async createApplicationPod(
options: ApplicationOptions,
connection: ContainerProviderConnection,
recipe: Recipe,
model: ModelInfo,
components: RecipeComponents,
modelPath: string | undefined,
modelPath: string,
labels?: { [key: string]: string },
): Promise<PodInfo> {
return this.#taskRunner.runAsTask<PodInfo>(
@ -279,25 +271,25 @@ export class ApplicationManager extends Publisher<ApplicationState[]> implements
errorMsg: err => `Something went wrong while creating pod: ${String(err)}`,
},
async ({ updateLabels }): Promise<PodInfo> => {
const podInfo = await this.createPod(options, components.images);
const podInfo = await this.createPod(connection, recipe, model, components.images);
updateLabels(labels => ({
...labels,
'pod-id': podInfo.Id,
}));
await this.createContainerAndAttachToPod(options, podInfo, components, modelPath, labels);
await this.createContainerAndAttachToPod(connection, podInfo, components, model, modelPath);
return podInfo;
},
);
}
protected async createContainerAndAttachToPod(
options: ApplicationOptions,
connection: ContainerProviderConnection,
podInfo: PodInfo,
components: RecipeComponents,
modelPath: string | undefined,
labels?: { [key: string]: string },
modelInfo: ModelInfo,
modelPath: string,
): Promise<void> {
const vmType = options.connection.vmType ?? VMType.UNKNOWN;
const vmType = connection.vmType ?? VMType.UNKNOWN;
// temporary check to set Z flag or not - to be removed when switching to podman 5
await Promise.all(
components.images.map(async image => {
@ -305,39 +297,28 @@ export class ApplicationManager extends Publisher<ApplicationState[]> implements
let envs: string[] = [];
let healthcheck: HealthConfig | undefined = undefined;
// if it's a model service we mount the model as a volume
if (modelPath && isApplicationOptionsWithModelInference(options)) {
if (image.modelService) {
const modelName = path.basename(modelPath);
hostConfig = {
Mounts: [
{
Target: `/${modelName}`,
Source: modelPath,
Type: 'bind',
Mode: vmType === VMType.QEMU ? undefined : 'Z',
},
],
};
envs = [`MODEL_PATH=/${modelName}`];
envs.push(...getModelPropertiesForEnvironment(options.model));
} else if (components.inferenceServer) {
const endPoint = `http://host.containers.internal:${components.inferenceServer.connection.port}`;
if (image.modelService) {
const modelName = path.basename(modelPath);
hostConfig = {
Mounts: [
{
Target: `/${modelName}`,
Source: modelPath,
Type: 'bind',
Mode: vmType === VMType.QEMU ? undefined : 'Z',
},
],
};
envs = [`MODEL_PATH=/${modelName}`];
envs.push(...getModelPropertiesForEnvironment(modelInfo));
} else if (components.inferenceServer) {
const endPoint = `http://host.containers.internal:${components.inferenceServer.connection.port}`;
envs = [`MODEL_ENDPOINT=${endPoint}`];
} else {
const modelService = components.images.find(image => image.modelService);
if (modelService && modelService.ports.length > 0) {
const endPoint = `http://localhost:${modelService.ports[0]}`;
envs = [`MODEL_ENDPOINT=${endPoint}`];
} else {
const modelService = components.images.find(image => image.modelService);
if (modelService && modelService.ports.length > 0) {
const endPoint = `http://localhost:${modelService.ports[0]}`;
envs = [`MODEL_ENDPOINT=${endPoint}`];
}
}
} else if (options.dependencies?.llamaStack) {
let stack = await this.llamaStackManager.getLlamaStackContainer();
if (!stack) {
await this.llamaStackManager.createLlamaStackContainer(options.connection, labels ?? {});
stack = await this.llamaStackManager.getLlamaStackContainer();
}
if (stack) {
envs = [`MODEL_ENDPOINT=http://host.containers.internal:${stack.port}`];
}
}
if (image.ports.length > 0) {
@ -365,7 +346,12 @@ export class ApplicationManager extends Publisher<ApplicationState[]> implements
);
}
protected async createPod(options: ApplicationOptions, images: RecipeImage[]): Promise<PodInfo> {
protected async createPod(
connection: ContainerProviderConnection,
recipe: Recipe,
model: ModelInfo,
images: RecipeImage[],
): Promise<PodInfo> {
// find the exposed port of the sample app so we can open its ports on the new pod
const sampleAppImageInfo = images.find(image => !image.modelService);
if (!sampleAppImageInfo) {
@ -392,14 +378,9 @@ export class ApplicationManager extends Publisher<ApplicationState[]> implements
// create new pod
const labels: Record<string, string> = {
[POD_LABEL_RECIPE_ID]: options.recipe.id,
[POD_LABEL_RECIPE_ID]: recipe.id,
[POD_LABEL_MODEL_ID]: model.id,
};
if (isApplicationOptionsWithModelInference(options)) {
labels[POD_LABEL_MODEL_ID] = options.model.id;
} else {
labels[POD_LABEL_MODEL_ID] = '<none>';
}
// collecting all modelService ports
const modelPorts = images
.filter(img => img.modelService)
@ -417,7 +398,7 @@ export class ApplicationManager extends Publisher<ApplicationState[]> implements
labels[POD_LABEL_APP_PORTS] = appPorts.join(',');
}
const { engineId, Id } = await this.podManager.createPod({
provider: options.connection,
provider: connection,
name: getRandomName(`pod-${sampleAppImageInfo.appName}`),
portmappings: portmappings,
labels,
@ -654,28 +635,15 @@ export class ApplicationManager extends Publisher<ApplicationState[]> implements
const appPod = await this.getApplicationPod(recipeId, modelId);
await this.removeApplication(recipeId, modelId);
const recipe = this.catalogManager.getRecipeById(recipeId);
let opts: ApplicationOptions;
if (appPod.Labels[POD_LABEL_MODEL_ID] === '<none>') {
opts = {
connection,
recipe,
};
} else {
const model = this.catalogManager.getModelById(appPod.Labels[POD_LABEL_MODEL_ID]);
opts = {
connection,
recipe,
model,
};
}
const model = this.catalogManager.getModelById(appPod.Labels[POD_LABEL_MODEL_ID]);
// init the recipe
const podInfo = await this.initApplication(opts);
const podInfo = await this.initApplication(connection, recipe, model);
// start the pod
return this.runApplication(podInfo, {
'recipe-id': recipeId,
'model-id': modelId,
'recipe-id': recipe.id,
'model-id': model.id,
});
}

View File

@ -96,7 +96,7 @@ beforeEach(async () => {
describe('invalid user catalog', () => {
beforeEach(async () => {
vi.mocked(promises.readFile).mockResolvedValue('invalid json');
await catalogManager.init();
catalogManager.init();
});
test('expect correct model is returned with valid id', () => {
@ -116,7 +116,7 @@ describe('invalid user catalog', () => {
test('expect correct model is returned from default catalog with valid id when no user catalog exists', async () => {
vi.mocked(existsSync).mockReturnValue(false);
await catalogManager.init();
catalogManager.init();
await vi.waitUntil(() => catalogManager.getRecipes().length > 0);
const model = catalogManager.getModelById('llama-2-7b-chat.Q5_K_S');
@ -132,7 +132,7 @@ test('expect correct model is returned with valid id when the user catalog is va
vi.mocked(existsSync).mockReturnValue(true);
vi.mocked(promises.readFile).mockResolvedValue(JSON.stringify(userContent));
await catalogManager.init();
catalogManager.init();
await vi.waitUntil(() => catalogManager.getModels().some(model => model.id === 'model1'));
const model = catalogManager.getModelById('model1');
@ -146,7 +146,7 @@ test('expect to call writeFile in addLocalModelsToCatalog with catalog updated',
vi.mocked(existsSync).mockReturnValue(true);
vi.mocked(promises.readFile).mockResolvedValue(JSON.stringify(userContent));
await catalogManager.init();
catalogManager.init();
await vi.waitUntil(() => catalogManager.getRecipes().length > 0);
const mtimeDate = new Date('2024-04-03T09:51:15.766Z');
@ -174,7 +174,7 @@ test('expect to call writeFile in removeLocalModelFromCatalog with catalog updat
vi.mocked(promises.readFile).mockResolvedValue(JSON.stringify(userContent));
vi.mocked(path.resolve).mockReturnValue('path');
await catalogManager.init();
catalogManager.init();
await vi.waitUntil(() => catalogManager.getRecipes().length > 0);
vi.mocked(promises.writeFile).mockResolvedValue();
@ -196,7 +196,7 @@ test('catalog should be the combination of user catalog and default catalog', as
vi.mocked(promises.readFile).mockResolvedValue(JSON.stringify(userContent));
vi.mocked(path.resolve).mockReturnValue('path');
await catalogManager.init();
catalogManager.init();
await vi.waitUntil(() => catalogManager.getModels().length > userContent.models.length);
const mtimeDate = new Date('2024-04-03T09:51:15.766Z');
@ -238,7 +238,7 @@ test('catalog should use user items in favour of default', async () => {
vi.mocked(promises.readFile).mockResolvedValue(JSON.stringify(overwriteFullCatalog));
await catalogManager.init();
catalogManager.init();
await vi.waitUntil(() => catalogManager.getModels().length > 0);
const mtimeDate = new Date('2024-04-03T09:51:15.766Z');
@ -330,7 +330,7 @@ test('filter recipes by language', async () => {
vi.mocked(existsSync).mockReturnValue(true);
vi.mocked(promises.readFile).mockResolvedValue(JSON.stringify(userContent));
await catalogManager.init();
catalogManager.init();
await vi.waitUntil(() => catalogManager.getModels().some(model => model.id === 'model1'));
const result1 = catalogManager.filterRecipes({
languages: ['lang1'],
@ -375,7 +375,7 @@ test('filter recipes by tool', async () => {
vi.mocked(existsSync).mockReturnValue(true);
vi.mocked(promises.readFile).mockResolvedValue(JSON.stringify(userContent));
await catalogManager.init();
catalogManager.init();
await vi.waitUntil(() => catalogManager.getModels().some(model => model.id === 'model1'));
const result1 = catalogManager.filterRecipes({
@ -445,7 +445,7 @@ test('filter recipes by framework', async () => {
vi.mocked(existsSync).mockReturnValue(true);
vi.mocked(promises.readFile).mockResolvedValue(JSON.stringify(userContent));
await catalogManager.init();
catalogManager.init();
await vi.waitUntil(() => catalogManager.getModels().some(model => model.id === 'model1'));
const result1 = catalogManager.filterRecipes({
@ -519,7 +519,7 @@ test('filter recipes by language and framework', async () => {
vi.mocked(existsSync).mockReturnValue(true);
vi.mocked(promises.readFile).mockResolvedValue(JSON.stringify(userContent));
await catalogManager.init();
catalogManager.init();
await vi.waitUntil(() => catalogManager.getModels().some(model => model.id === 'model1'));
const result1 = catalogManager.filterRecipes({
@ -546,7 +546,7 @@ test('filter recipes by language, tool and framework', async () => {
vi.mocked(existsSync).mockReturnValue(true);
vi.mocked(promises.readFile).mockResolvedValue(JSON.stringify(userContent));
await catalogManager.init();
catalogManager.init();
await vi.waitUntil(() => catalogManager.getModels().some(model => model.id === 'model1'));
const result1 = catalogManager.filterRecipes({
@ -567,15 +567,3 @@ test('filter recipes by language, tool and framework', async () => {
tools: [{ name: 'tool1', count: 1 }],
});
});
test('models are loaded as soon as init is finished when no user catalog', async () => {
await catalogManager.init();
expect(catalogManager.getModels()).toHaveLength(3);
});
test('models are loaded as soon as init is finished when user catalog exists', async () => {
vi.mocked(promises.readFile).mockResolvedValue(JSON.stringify(userContent));
vi.mocked(existsSync).mockReturnValue(true);
await catalogManager.init();
expect(catalogManager.getModels()).toHaveLength(5);
});

View File

@ -60,21 +60,16 @@ export class CatalogManager extends Publisher<ApplicationCatalog> implements Dis
/**
* The init method will start a watcher on the user catalog.json
*/
async init(): Promise<void> {
return new Promise<void>(resolve => {
// Creating a json watcher
this.#jsonWatcher = new JsonWatcher(this.getUserCatalogPath(), {
version: CatalogFormat.CURRENT,
recipes: [],
models: [],
categories: [],
});
this.#jsonWatcher.onContentUpdated(content => {
this.onUserCatalogUpdate(content);
resolve();
});
this.#jsonWatcher.init();
init(): void {
// Creating a json watcher
this.#jsonWatcher = new JsonWatcher(this.getUserCatalogPath(), {
version: CatalogFormat.CURRENT,
recipes: [],
models: [],
categories: [],
});
this.#jsonWatcher.onContentUpdated(content => this.onUserCatalogUpdate(content));
this.#jsonWatcher.init();
}
private loadDefaultCatalog(): void {

View File

@ -98,15 +98,6 @@ export class InferenceManager extends Publisher<InferenceServer[]> implements Di
return Array.from(this.#servers.values());
}
/**
* Get the Unique registered Inference provider types
*/
public getRegisteredProviders(): InferenceType[] {
const types: InferenceType[] = this.inferenceProviderRegistry.getAll().map(provider => provider.type);
return [...new Set(types)];
}
/**
* return an inference server
* @param containerId the containerId of the inference server

View File

@ -225,7 +225,7 @@ test('getModelsInfo should get models in local directory', async () => {
modelHandlerRegistry,
);
modelHandlerRegistry.register(new URLModelHandler(manager, modelsDir));
await manager.init();
manager.init();
await manager.loadLocalModels();
expect(manager.getModelsInfo()).toEqual([
{
@ -277,7 +277,7 @@ test('getModelsInfo should return an empty array if the models folder does not e
modelHandlerRegistry,
);
modelHandlerRegistry.register(new URLModelHandler(manager, modelsDir));
await manager.init();
manager.init();
await manager.getLocalModelsFromDisk();
expect(manager.getModelsInfo()).toEqual([]);
if (process.platform === 'win32') {
@ -318,7 +318,7 @@ test('getLocalModelsFromDisk should return undefined Date and size when stat fai
modelHandlerRegistry,
);
modelHandlerRegistry.register(new URLModelHandler(manager, modelsDir));
await manager.init();
manager.init();
await manager.loadLocalModels();
expect(manager.getModelsInfo()).toEqual([
{
@ -377,7 +377,7 @@ test('getLocalModelsFromDisk should skip folders containing tmp files', async ()
modelHandlerRegistry,
);
modelHandlerRegistry.register(new URLModelHandler(manager, modelsDir));
await manager.init();
manager.init();
await manager.loadLocalModels();
expect(manager.getModelsInfo()).toEqual([
{
@ -417,7 +417,7 @@ test('loadLocalModels should post a message with the message on disk and on cata
modelHandlerRegistry,
);
modelHandlerRegistry.register(new URLModelHandler(manager, modelsDir));
await manager.init();
manager.init();
await manager.loadLocalModels();
expect(rpcExtensionMock.fire).toHaveBeenNthCalledWith(2, MSG_NEW_MODELS_STATE, [
{
@ -464,7 +464,7 @@ test('deleteModel deletes the model folder', async () => {
modelHandlerRegistry,
);
modelHandlerRegistry.register(new URLModelHandler(manager, modelsDir));
await manager.init();
manager.init();
await manager.loadLocalModels();
await manager.deleteModel('model-id-1');
// check that the model's folder is removed from disk
@ -525,7 +525,7 @@ describe('deleting models', () => {
modelHandlerRegistry,
);
modelHandlerRegistry.register(new URLModelHandler(manager, modelsDir));
await manager.init();
manager.init();
await manager.loadLocalModels();
await manager.deleteModel('model-id-1');
// check that the model's folder is removed from disk
@ -899,7 +899,7 @@ describe('getModelMetadata', () => {
modelHandlerRegistry,
);
await manager.init();
manager.init();
const fakeMetadata: Record<string, string> = {
hello: 'world',
@ -939,7 +939,7 @@ describe('getModelMetadata', () => {
modelHandlerRegistry,
);
await manager.init();
manager.init();
const fakeMetadata: Record<string, string> = {
hello: 'world',
@ -995,7 +995,7 @@ describe('uploadModelToPodmanMachine', () => {
modelHandlerRegistry,
);
await manager.init();
manager.init();
const result = await manager.uploadModelToPodmanMachine(connectionMock, modelMock);
expect(result).toBe('uploader-result');
expect(performMock).toHaveBeenCalledWith(modelMock.id);
@ -1028,7 +1028,7 @@ describe('uploadModelToPodmanMachine', () => {
modelHandlerRegistry,
);
await manager.init();
manager.init();
await manager.uploadModelToPodmanMachine(connectionMock, modelMock);
expect(Uploader).not.toHaveBeenCalled();
});

View File

@ -63,7 +63,7 @@ export class ModelsManager implements Disposable {
this.modelHandlerRegistry.getAll().forEach(handler => handler.onUpdate(this.loadLocalModels));
}
async init(): Promise<void> {
init(): void {
const disposable = this.catalogManager.onUpdate(() => {
this.loadLocalModels().catch((err: unknown) => {
console.error(`Something went wrong when loading local models`, err);
@ -71,11 +71,9 @@ export class ModelsManager implements Disposable {
});
this.#disposables.push(disposable);
try {
await this.loadLocalModels();
} catch (err: unknown) {
this.loadLocalModels().catch((err: unknown) => {
console.error('Something went wrong while trying to load local models', err);
}
});
}
dispose(): void {

View File

@ -30,7 +30,6 @@ import { goarch } from '../../utils/arch';
import { VMType } from '@shared/models/IPodman';
import type { InferenceManager } from '../inference/inferenceManager';
import type { ModelInfo } from '@shared/models/IModelInfo';
import type { ApplicationOptions } from '../../models/ApplicationOptions';
const taskRegistryMock = {
createTask: vi.fn(),
@ -185,34 +184,21 @@ describe('cloneRecipe', () => {
});
});
describe.each([true, false])('buildRecipe, with model is %o', withModel => {
let applicationOptions: ApplicationOptions;
beforeEach(() => {
applicationOptions = withModel
? {
connection: connectionMock,
recipe: recipeMock,
model: modelInfoMock,
}
: {
connection: connectionMock,
recipe: recipeMock,
};
});
describe('buildRecipe', () => {
test('error in build propagate it', async () => {
vi.mocked(builderManagerMock.build).mockRejectedValue(new Error('build error'));
const manager = await getInitializedRecipeManager();
await expect(() => {
return manager.buildRecipe(applicationOptions);
return manager.buildRecipe(connectionMock, recipeMock, modelInfoMock);
}).rejects.toThrowError('build error');
});
test('labels should be propagated', async () => {
const manager = await getInitializedRecipeManager();
await manager.buildRecipe(applicationOptions, {
await manager.buildRecipe(connectionMock, recipeMock, modelInfoMock, {
'test-label': 'test-value',
});

View File

@ -26,12 +26,12 @@ import { parseYamlFile } from '../../models/AIConfig';
import { existsSync, statSync } from 'node:fs';
import { goarch } from '../../utils/arch';
import type { BuilderManager } from './BuilderManager';
import type { Disposable } from '@podman-desktop/api';
import type { ContainerProviderConnection, Disposable } from '@podman-desktop/api';
import { CONFIG_FILENAME } from '../../utils/RecipeConstants';
import type { InferenceManager } from '../inference/inferenceManager';
import type { ModelInfo } from '@shared/models/IModelInfo';
import { withDefaultConfiguration } from '../../utils/inferenceUtils';
import type { InferenceServer } from '@shared/models/IInference';
import { type ApplicationOptions, isApplicationOptionsWithModelInference } from '../../models/ApplicationOptions';
export interface AIContainers {
aiConfigFile: AIConfigFile;
@ -96,70 +96,73 @@ export class RecipeManager implements Disposable {
});
}
public async buildRecipe(options: ApplicationOptions, labels?: { [key: string]: string }): Promise<RecipeComponents> {
const localFolder = path.join(this.appUserDirectory, options.recipe.id);
public async buildRecipe(
connection: ContainerProviderConnection,
recipe: Recipe,
model: ModelInfo,
labels?: { [key: string]: string },
): Promise<RecipeComponents> {
const localFolder = path.join(this.appUserDirectory, recipe.id);
let inferenceServer: InferenceServer | undefined;
if (isApplicationOptionsWithModelInference(options)) {
// if the recipe has a defined backend, we gives priority to using an inference server
if (options.recipe.backend && options.recipe.backend === options.model.backend) {
let task: Task | undefined;
try {
inferenceServer = this.inferenceManager.findServerByModel(options.model);
task = this.taskRegistry.createTask('Starting Inference server', 'loading', labels);
if (!inferenceServer) {
const inferenceContainerId = await this.inferenceManager.createInferenceServer(
await withDefaultConfiguration({
modelsInfo: [options.model],
}),
);
inferenceServer = this.inferenceManager.get(inferenceContainerId);
this.taskRegistry.updateTask({
...task,
labels: {
...task.labels,
containerId: inferenceContainerId,
},
});
} else if (inferenceServer.status === 'stopped') {
await this.inferenceManager.startInferenceServer(inferenceServer.container.containerId);
}
task.state = 'success';
} catch (e) {
// we only skip the task update if the error is that we do not support this backend.
// If so, we build the image for the model service
if (task && String(e) !== 'no enabled provider could be found.') {
task.state = 'error';
task.error = `Something went wrong while starting the inference server: ${String(e)}`;
throw e;
}
} finally {
if (task) {
this.taskRegistry.updateTask(task);
}
// if the recipe has a defined backend, we gives priority to using an inference server
if (recipe.backend && recipe.backend === model.backend) {
let task: Task | undefined;
try {
inferenceServer = this.inferenceManager.findServerByModel(model);
task = this.taskRegistry.createTask('Starting Inference server', 'loading', labels);
if (!inferenceServer) {
const inferenceContainerId = await this.inferenceManager.createInferenceServer(
await withDefaultConfiguration({
modelsInfo: [model],
}),
);
inferenceServer = this.inferenceManager.get(inferenceContainerId);
this.taskRegistry.updateTask({
...task,
labels: {
...task.labels,
containerId: inferenceContainerId,
},
});
} else if (inferenceServer.status === 'stopped') {
await this.inferenceManager.startInferenceServer(inferenceServer.container.containerId);
}
task.state = 'success';
} catch (e) {
// we only skip the task update if the error is that we do not support this backend.
// If so, we build the image for the model service
if (task && String(e) !== 'no enabled provider could be found.') {
task.state = 'error';
task.error = `Something went wrong while starting the inference server: ${String(e)}`;
throw e;
}
} finally {
if (task) {
this.taskRegistry.updateTask(task);
}
}
}
// load and parse the recipe configuration file and filter containers based on architecture
const configAndFilteredContainers = this.getConfigAndFilterContainers(
options.recipe.basedir,
recipe.basedir,
localFolder,
!!inferenceServer,
{
...labels,
'recipe-id': options.recipe.id,
'recipe-id': recipe.id,
},
);
const images = await this.builderManager.build(
options.connection,
options.recipe,
connection,
recipe,
configAndFilteredContainers.containers,
configAndFilteredContainers.aiConfigFile.path,
{
...labels,
'recipe-id': options.recipe.id,
'recipe-id': recipe.id,
},
);

View File

@ -1,39 +0,0 @@
/**********************************************************************
* Copyright (C) 2025 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
***********************************************************************/
import type { ContainerProviderConnection } from '@podman-desktop/api';
import type { ModelInfo } from '@shared/models/IModelInfo';
import type { Recipe, RecipeDependencies } from '@shared/models/IRecipe';
export type ApplicationOptions = ApplicationOptionsDefault | ApplicationOptionsWithModelInference;
export interface ApplicationOptionsDefault {
connection: ContainerProviderConnection;
recipe: Recipe;
dependencies?: RecipeDependencies;
}
export type ApplicationOptionsWithModelInference = ApplicationOptionsDefault & {
model: ModelInfo;
};
export function isApplicationOptionsWithModelInference(
options: ApplicationOptions,
): options is ApplicationOptionsWithModelInference {
return 'model' in options;
}

View File

@ -170,42 +170,36 @@ beforeEach(async () => {
} as unknown as EventEmitter<unknown>);
});
describe.each([true, false])('with model is %o', withModel => {
test('expect requestPullApplication to provide a tracking id', async () => {
const connectionMock = {
name: 'Podman machine',
} as unknown as ContainerProviderConnection;
vi.mocked(podmanConnectionMock.findRunningContainerProviderConnection).mockReturnValue(connectionMock);
vi.spyOn(catalogManager, 'getRecipes').mockReturnValue([
{
id: 'recipe 1',
} as unknown as Recipe,
]);
vi.spyOn(catalogManager, 'getModelById').mockReturnValue({
id: 'model 1',
} as unknown as ModelInfo);
test('expect requestPullApplication to provide a tracking id', async () => {
const connectionMock = {
name: 'Podman machine',
} as unknown as ContainerProviderConnection;
vi.mocked(podmanConnectionMock.findRunningContainerProviderConnection).mockReturnValue(connectionMock);
vi.spyOn(catalogManager, 'getRecipes').mockReturnValue([
{
id: 'recipe 1',
} as unknown as Recipe,
]);
vi.spyOn(catalogManager, 'getModelById').mockReturnValue({
id: 'model 1',
} as unknown as ModelInfo);
vi.mocked(applicationManager.requestPullApplication).mockResolvedValue('dummy-tracker');
vi.mocked(applicationManager.requestPullApplication).mockResolvedValue('dummy-tracker');
const recipeId = 'recipe 1';
let modelId: string | undefined;
if (withModel) {
modelId = 'model1';
}
const trackingId = await studioApiImpl.requestPullApplication(withModel ? { recipeId, modelId } : { recipeId });
expect(applicationManager.requestPullApplication).toHaveBeenCalledWith({
connection: connectionMock,
recipe: expect.objectContaining({
id: 'recipe 1',
}),
model: withModel
? expect.objectContaining({
id: 'model 1',
})
: undefined,
});
expect(trackingId).toBe('dummy-tracker');
const trackingId = await studioApiImpl.requestPullApplication({
modelId: 'model1',
recipeId: 'recipe 1',
});
expect(applicationManager.requestPullApplication).toHaveBeenCalledWith(
connectionMock,
expect.objectContaining({
id: 'recipe 1',
}),
expect.objectContaining({
id: 'model 1',
}),
);
expect(trackingId).toBe('dummy-tracker');
});
test('requestRemoveApplication should ask confirmation', async () => {

View File

@ -30,7 +30,7 @@ import type { TaskRegistry } from './registries/TaskRegistry';
import type { LocalRepository } from '@shared/models/ILocalRepository';
import type { LocalRepositoryRegistry } from './registries/LocalRepositoryRegistry';
import path from 'node:path';
import type { InferenceServer, InferenceType } from '@shared/models/IInference';
import type { InferenceServer } from '@shared/models/IInference';
import type { CreationInferenceServerOptions } from '@shared/models/InferenceServerConfig';
import type { InferenceManager } from './managers/inference/inferenceManager';
import type { Conversation } from '@shared/models/IPlaygroundMessage';
@ -53,11 +53,10 @@ import type { ExtensionConfiguration } from '@shared/models/IExtensionConfigurat
import type { ConfigurationRegistry } from './registries/ConfigurationRegistry';
import type { RecipeManager } from './managers/recipes/RecipeManager';
import type { PodmanConnection } from './managers/podmanConnection';
import { isRecipePullOptionsWithModelInference, type RecipePullOptions } from '@shared/models/IRecipe';
import type { RecipePullOptions } from '@shared/models/IRecipe';
import type { ContainerProviderConnection } from '@podman-desktop/api';
import type { NavigationRegistry } from './registries/NavigationRegistry';
import type { FilterRecipesResult, RecipeFilters } from '@shared/models/FilterRecipesResult';
import type { ApplicationOptions } from './models/ApplicationOptions';
interface PortQuickPickItem extends podmanDesktopApi.QuickPickItem {
port: number;
@ -144,10 +143,6 @@ export class StudioApiImpl implements StudioAPI {
return this.inferenceManager.getServers();
}
async getRegisteredProviders(): Promise<InferenceType[]> {
return this.inferenceManager.getRegisteredProviders();
}
async requestDeleteInferenceServer(...containerIds: string[]): Promise<void> {
// Do not wait on the promise as the api would probably timeout before the user answer.
if (containerIds.length === 0) throw new Error('At least one container id should be provided.');
@ -234,6 +229,8 @@ export class StudioApiImpl implements StudioAPI {
const recipe = this.catalogManager.getRecipes().find(recipe => recipe.id === options.recipeId);
if (!recipe) throw new Error(`recipe with if ${options.recipeId} not found`);
const model = this.catalogManager.getModelById(options.modelId);
let connection: ContainerProviderConnection | undefined = undefined;
if (options.connection) {
connection = this.podmanConnection.getContainerProviderConnection(options.connection);
@ -243,25 +240,7 @@ export class StudioApiImpl implements StudioAPI {
if (!connection) throw new Error('no running container provider connection found.');
let model: ModelInfo | undefined;
let opts: ApplicationOptions;
if (isRecipePullOptionsWithModelInference(options)) {
model = this.catalogManager.getModelById(options.modelId);
opts = {
connection,
recipe,
dependencies: options.dependencies,
model,
};
} else {
opts = {
connection,
recipe,
dependencies: options.dependencies,
};
}
return this.applicationManager.requestPullApplication(opts);
return this.applicationManager.requestPullApplication(connection, recipe, model);
}
async getModelsInfo(): Promise<ModelInfo[]> {

View File

@ -21,12 +21,10 @@
import { afterEach, beforeEach, expect, test, vi, describe, type MockInstance } from 'vitest';
import { Studio } from './studio';
import { type ExtensionContext, EventEmitter, version } from '@podman-desktop/api';
import { CatalogManager } from './managers/catalogManager';
import * as fs from 'node:fs';
vi.mock('./managers/modelsManager');
vi.mock('./managers/catalogManager');
const mockedExtensionContext = {
subscriptions: [],
@ -126,12 +124,6 @@ beforeEach(() => {
} as unknown as EventEmitter<unknown>);
mocks.postMessage.mockResolvedValue(undefined);
vi.mocked(CatalogManager).mockReturnValue({
onUpdate: vi.fn(),
init: vi.fn(),
getRecipes: vi.fn().mockReturnValue([]),
} as unknown as CatalogManager);
});
afterEach(() => {

View File

@ -212,7 +212,7 @@ export class Studio {
* Create catalog manager, responsible for loading the catalog files and watching for changes
*/
this.#catalogManager = new CatalogManager(this.#rpcExtension, appUserDirectory);
await this.#catalogManager.init();
this.#catalogManager.init();
/**
* The builder manager is handling the building tasks, create corresponding tasks
@ -251,7 +251,7 @@ export class Studio {
const hfModelHandler = new HuggingFaceModelHandler(this.#modelsManager);
this.#extensionContext.subscriptions.push(hfModelHandler);
this.#extensionContext.subscriptions.push(modelHandlerRegistry.register(hfModelHandler));
await this.#modelsManager.init();
this.#modelsManager.init();
this.#extensionContext.subscriptions.push(this.#modelsManager);
/**
@ -358,7 +358,6 @@ export class Studio {
this.#telemetry,
this.#podManager,
this.#recipeManager,
this.#llamaStackManager,
);
this.#applicationManager.init();
this.#extensionContext.subscriptions.push(this.#applicationManager);

View File

@ -1,7 +1,7 @@
{
"name": "frontend-app",
"displayName": "UI for AI Lab",
"version": "1.9.0-next",
"version": "1.7.0",
"type": "module",
"license": "Apache-2.0",
"scripts": {
@ -18,32 +18,32 @@
"@fortawesome/free-regular-svg-icons": "^6.7.2",
"@podman-desktop/ui-svelte": "1.16.0-202501131429-9076680fda2",
"tinro": "^0.6.12",
"filesize": "^11.0.2",
"filesize": "^10.1.6",
"humanize-duration": "^3.32.2",
"moment": "^2.30.1",
"semver": "^7.7.2"
},
"devDependencies": {
"@sveltejs/vite-plugin-svelte": "5.1.0",
"@sveltejs/vite-plugin-svelte": "5.0.3",
"@tailwindcss/typography": "^0.5.16",
"@tailwindcss/vite": "^4.1.12",
"@testing-library/dom": "^10.4.1",
"@testing-library/jest-dom": "^6.8.0",
"@tailwindcss/vite": "^4.1.7",
"@testing-library/dom": "^10.4.0",
"@testing-library/jest-dom": "^6.6.3",
"@testing-library/svelte": "^5.2.8",
"@testing-library/user-event": "^14.6.1",
"@tsconfig/svelte": "^5.0.5",
"@tsconfig/svelte": "^5.0.4",
"@types/humanize-duration": "^3.27.4",
"@typescript-eslint/eslint-plugin": "8.40.0",
"@typescript-eslint/eslint-plugin": "8.32.1",
"jsdom": "^26.1.0",
"monaco-editor": "^0.52.2",
"postcss": "^8.5.6",
"postcss": "^8.5.3",
"postcss-load-config": "^6.0.1",
"svelte": "5.38.2",
"svelte": "5.31.0",
"svelte-fa": "^4.0.4",
"svelte-select": "^5.8.3",
"svelte-markdown": "^0.4.1",
"svelte-preprocess": "^6.0.3",
"tailwindcss": "^4.1.12",
"tailwindcss": "^4.1.7",
"vitest": "^3.0.5"
}
}

View File

@ -117,7 +117,7 @@ function toggleExpanded(): void {
class="w-full flex flex-row gap-2 py-2"
class:overflow-hidden={!expanded}
class:flex-wrap={expanded}>
{#each TAGS as tag, i (i)}
{#each TAGS as tag, i (tag)}
<div bind:this={divTags[i]}>
<Badge class="{getBGColor(tag)} {getTextColor(tag)}" content={updateContent(tag)} />
</div>

View File

@ -21,8 +21,8 @@ import { gte } from 'semver';
const USE_CASES = ['natural-language-processing', 'audio', 'computer-vision'];
const LANGUAGES = ['java', 'javascript', 'python'];
export const FRAMEWORKS = ['langchain', 'langchain4j', 'quarkus', 'react', 'streamlit', 'vectordb', 'llama-stack-sdk'];
export const TOOLS = ['none', 'llama-cpp', 'whisper-cpp', 'llama-stack'];
export const FRAMEWORKS = ['langchain', 'langchain4j', 'quarkus', 'react', 'streamlit', 'vectordb'];
export const TOOLS = ['none', 'llama-cpp', 'whisper-cpp'];
// Defaulting to Podman Desktop min version we need to run
let version: string = '1.8.0';

View File

@ -1,99 +0,0 @@
/**********************************************************************
* Copyright (C) 2025 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
***********************************************************************/
import '@testing-library/jest-dom/vitest';
import { beforeEach, vi, test, expect } from 'vitest';
import { render, fireEvent, within } from '@testing-library/svelte';
import InferenceRuntimeSelect from '/@/lib/select/InferenceRuntimeSelect.svelte';
import { InferenceType } from '@shared/models/IInference';
const providers: InferenceType[] = [InferenceType.LLAMA_CPP, InferenceType.OPENVINO, InferenceType.WHISPER_CPP];
beforeEach(() => {
// mock scrollIntoView
window.HTMLElement.prototype.scrollIntoView = vi.fn();
});
test('Lists all runtime options', async () => {
const { container } = render(InferenceRuntimeSelect, {
value: undefined,
providers,
disabled: false,
});
const input = within(container).getByLabelText('Select Inference Runtime');
await fireEvent.pointerUp(input);
const items = container.querySelectorAll('div[class~="list-item"]');
const expectedOptions = providers;
expect(items.length).toBe(expectedOptions.length);
expectedOptions.forEach((option, i) => {
expect(items[i]).toHaveTextContent(option);
});
});
test('Selected value should be visible', async () => {
const { container } = render(InferenceRuntimeSelect, {
value: undefined,
providers,
disabled: false,
});
const input = within(container).getByLabelText('Select Inference Runtime');
await fireEvent.pointerUp(input);
const items = container.querySelectorAll('div[class~="list-item"]');
const expectedOptions = providers;
await fireEvent.click(items[0]);
const valueContainer = container.querySelector('.value-container');
if (!(valueContainer instanceof HTMLElement)) throw new Error('Missing value container');
const selectedLabel = within(valueContainer).getByText(expectedOptions[0]);
expect(selectedLabel).toBeDefined();
});
test('Exclude specific runtime from list', async () => {
const excluded = [InferenceType.WHISPER_CPP, InferenceType.OPENVINO];
const { container } = render(InferenceRuntimeSelect, {
value: undefined,
providers,
disabled: false,
exclude: excluded,
});
const input = within(container).getByLabelText('Select Inference Runtime');
await fireEvent.pointerUp(input);
const items = container.querySelectorAll('div[class~="list-item"]');
const itemTexts = Array.from(items).map(item => item.textContent?.trim());
excluded.forEach(excludedType => {
expect(itemTexts).not.toContain(excludedType);
});
const expected = providers.filter(type => !excluded.includes(type));
expected.forEach(included => {
expect(itemTexts).toContain(included);
});
});

View File

@ -1,34 +0,0 @@
<script lang="ts">
import Select from '/@/lib/select/Select.svelte';
import type { InferenceType } from '@shared/models/IInference';
interface Props {
disabled?: boolean;
value: InferenceType | undefined;
providers: InferenceType[];
exclude?: InferenceType[];
}
let { value = $bindable(), disabled, providers, exclude = [] }: Props = $props();
// Filter options based on optional exclude list
const options = $derived(() =>
providers.filter(type => !exclude.includes(type)).map(type => ({ value: type, label: type })),
);
function handleOnChange(nValue: { value: string } | undefined): void {
if (nValue) {
value = nValue.value as InferenceType;
} else {
value = undefined;
}
}
</script>
<Select
label="Select Inference Runtime"
name="select-inference-runtime"
disabled={disabled}
value={value ? { label: value, value: value } : undefined}
onchange={handleOnChange}
placeholder="Select Inference Runtime to use"
items={options()} />

View File

@ -421,47 +421,3 @@ test('model-id query should be used to select default model', async () => {
});
});
});
test('models with backend "none" should be filtered out', async () => {
const modelsInfoList = writable<ModelInfo[]>([
{
id: 'model-valid',
name: 'Valid Model',
description: 'A model with a valid backend',
backend: 'llama-cpp',
file: {
file: 'file',
path: '/valid-path',
},
} as unknown as ModelInfo,
{
id: 'model-none',
name: 'None Backend Model',
description: 'A model with backend none',
backend: 'none',
file: {
file: 'file',
path: '/none-path',
},
} as unknown as ModelInfo,
]);
vi.mocked(ModelsInfoStore).modelsInfo = modelsInfoList;
router.location.query.set('model-id', 'model-valid');
render(CreateService);
expect(screen.queryByText('None Backend Model')).toBeNull();
const createBtn = screen.getByTitle('Create service');
await vi.waitFor(() => {
expect(createBtn).toBeEnabled();
});
await fireEvent.click(createBtn);
expect(vi.mocked(studioClient.requestCreateInferenceServer)).toHaveBeenCalledWith(
expect.objectContaining({
modelsInfo: [expect.objectContaining({ id: 'model-valid' })],
}),
);
});

View File

@ -25,8 +25,8 @@ interface Props {
let { trackingId }: Props = $props();
// List of the models available locally exlude models with none backend
let localModels: ModelInfo[] = $derived($modelsInfo.filter(model => model.file && model.backend !== 'none'));
// List of the models available locally
let localModels: ModelInfo[] = $derived($modelsInfo.filter(model => model.file));
// The container provider connection to use
let containerProviderConnection: ContainerProviderConnectionInfo | undefined = $state(undefined);

View File

@ -16,7 +16,6 @@ import { tasks } from '/@/stores/tasks';
import ModelStatusIcon from '../lib/icons/ModelStatusIcon.svelte';
import { router } from 'tinro';
import { faBookOpen, faFileImport } from '@fortawesome/free-solid-svg-icons';
import { SvelteSet } from 'svelte/reactivity';
const columns = [
new TableColumn<ModelInfo>('Status', {
@ -25,21 +24,21 @@ const columns = [
comparator: (a, b): number => (a.file ? 0 : 1) - (b.file ? 0 : 1),
}),
new TableColumn<ModelInfo>('Name', {
width: 'minmax(100px,1fr)',
width: '3fr',
renderer: ModelColumnName,
comparator: (a, b): number => b.name.localeCompare(a.name),
}),
new TableColumn<ModelInfo>('Size', {
width: 'minmax(10px,50px)',
width: '50px',
renderer: ModelColumnSize,
comparator: (a, b): number => (a.file?.size ?? 0) - (b.file?.size ?? 0),
}),
new TableColumn<ModelInfo>('Age', {
width: 'minmax(10px,70px)',
width: '70px',
renderer: ModelColumnAge,
comparator: (a, b): number => (a.file?.creation?.getTime() ?? 0) - (b.file?.creation?.getTime() ?? 0),
}),
new TableColumn<ModelInfo>('', { width: 'minmax(50px,175px)', align: 'right', renderer: ModelColumnLabels }),
new TableColumn<ModelInfo>('', { width: '225px', align: 'right', renderer: ModelColumnLabels }),
new TableColumn<ModelInfo>('Actions', { align: 'right', width: '120px', renderer: ModelColumnActions }),
];
const row = new TableRow<ModelInfo>({});
@ -71,7 +70,7 @@ onMount(() => {
// Subscribe to the tasks store
const tasksUnsubscribe = tasks.subscribe(value => {
// Filter out duplicates
const modelIds = new SvelteSet<string>();
const modelIds = new Set<string>();
pullingTasks = value.reduce((filtered: Task[], task: Task) => {
if (
(task.state === 'loading' || task.state === 'error') &&

View File

@ -55,24 +55,11 @@ const dummyWhisperCppModel: ModelInfo = {
backend: InferenceType.WHISPER_CPP,
};
const dummyOpenVinoModel: ModelInfo = {
id: 'openvino-model-id',
name: 'Dummy Openvino model',
file: {
file: 'file',
path: path.resolve(os.tmpdir(), 'path'),
},
properties: {},
description: '',
backend: InferenceType.OPENVINO,
};
vi.mock('../utils/client', async () => {
return {
studioClient: {
requestCreatePlayground: vi.fn(),
getExtensionConfiguration: vi.fn().mockResolvedValue({}),
getRegisteredProviders: vi.fn().mockResolvedValue([]),
},
rpcBrowser: {
subscribe: (): unknown => {
@ -101,58 +88,28 @@ beforeEach(() => {
const tasksList = writable<Task[]>([]);
vi.mocked(tasksStore).tasks = tasksList;
vi.mocked(studioClient.getRegisteredProviders).mockResolvedValue([
InferenceType.LLAMA_CPP,
InferenceType.WHISPER_CPP,
InferenceType.OPENVINO,
]);
});
test('model should be selected by default when runtime is set', async () => {
test('model should be selected by default', () => {
const modelsInfoList = writable<ModelInfo[]>([dummyLlamaCppModel]);
vi.mocked(modelsInfoStore).modelsInfo = modelsInfoList;
vi.mocked(studioClient.requestCreatePlayground).mockRejectedValue('error creating playground');
const { container } = render(PlaygroundCreate, { props: { exclude: [InferenceType.NONE] } });
// Select our runtime
const dropdown = within(container).getByLabelText('Select Inference Runtime');
await userEvent.click(dropdown);
const llamacppOption = within(container).getByText(InferenceType.LLAMA_CPP);
await userEvent.click(llamacppOption);
const { container } = render(PlaygroundCreate);
const model = within(container).getByText(dummyLlamaCppModel.name);
expect(model).toBeInTheDocument();
});
test('selecting a runtime filters the displayed models', async () => {
const modelsInfoList = writable<ModelInfo[]>([dummyLlamaCppModel, dummyWhisperCppModel, dummyOpenVinoModel]);
vi.mocked(modelsInfoStore).modelsInfo = modelsInfoList;
const { container } = render(PlaygroundCreate, { props: { exclude: [InferenceType.NONE] } });
// Select our runtime
const dropdown = within(container).getByLabelText('Select Inference Runtime');
await userEvent.click(dropdown);
const openvinoOption = within(container).getByText(InferenceType.OPENVINO);
await userEvent.click(openvinoOption);
expect(within(container).queryByText(dummyOpenVinoModel.name)).toBeInTheDocument();
expect(within(container).queryByText(dummyLlamaCppModel.name)).toBeNull();
expect(within(container).queryByText(dummyWhisperCppModel.name)).toBeNull();
});
test('should show warning when no local models are available', () => {
const modelsInfoList = writable<ModelInfo[]>([]);
test('models with incompatible backend should not be listed', async () => {
const modelsInfoList = writable<ModelInfo[]>([dummyWhisperCppModel]);
vi.mocked(modelsInfoStore).modelsInfo = modelsInfoList;
const { container } = render(PlaygroundCreate);
const warning = within(container).getByText(/You don't have any models downloaded/);
expect(warning).toBeInTheDocument();
const model = within(container).queryByText(dummyWhisperCppModel.name);
expect(model).toBeNull();
});
test('should display error message if createPlayground fails', async () => {
@ -166,13 +123,6 @@ test('should display error message if createPlayground fails', async () => {
const errorMessage = within(container).queryByLabelText('Error Message Content');
expect(errorMessage).not.toBeInTheDocument();
// Select the runtime first
const runtimeDropdown = within(container).getByLabelText('Select Inference Runtime');
await userEvent.click(runtimeDropdown);
const runtimeOption = within(container).getByText(InferenceType.LLAMA_CPP);
await userEvent.click(runtimeOption);
const createButton = within(container).getByTitle('Create playground');
await userEvent.click(createButton);

View File

@ -14,34 +14,9 @@ import type { Unsubscriber } from 'svelte/store';
import { Button, ErrorMessage, FormPage, Input } from '@podman-desktop/ui-svelte';
import ModelSelect from '/@/lib/select/ModelSelect.svelte';
import { InferenceType } from '@shared/models/IInference';
import InferenceRuntimeSelect from '/@/lib/select/InferenceRuntimeSelect.svelte';
import { configuration } from '../stores/extensionConfiguration';
// Get recommended runtime
let runtime: InferenceType | undefined = undefined;
// Exlude certain runtimes from selection
export let exclude: InferenceType[] = [InferenceType.NONE, InferenceType.WHISPER_CPP];
// Get registered list of providers
let providers: InferenceType[] = [];
onMount(async () => {
providers = await studioClient.getRegisteredProviders();
const inferenceRuntime = $configuration?.inferenceRuntime;
if (
Object.values(InferenceType).includes(inferenceRuntime as InferenceType) &&
!exclude.includes(inferenceRuntime as InferenceType)
) {
runtime = inferenceRuntime as InferenceType;
}
});
let localModels: ModelInfo[];
$: localModels = $modelsInfo.filter(
model => model.file && (!runtime || model.backend === runtime) && !exclude.includes(model.backend as InferenceType),
);
$: localModels = $modelsInfo.filter(model => model.file && model.backend !== InferenceType.WHISPER_CPP);
$: availModels = $modelsInfo.filter(model => !model.file);
let model: ModelInfo | undefined = undefined;
let submitted: boolean = false;
@ -55,11 +30,10 @@ let trackingId: string | undefined = undefined;
// The trackedTasks are the tasks linked to the trackingId
let trackedTasks: Task[] = [];
// Preset model selection depending on runtime
$: if (localModels.length > 0) {
model = localModels[0];
} else {
model = undefined;
$: {
if (!model && localModels.length > 0) {
model = localModels[0];
}
}
function openModelsPage(): void {
@ -171,12 +145,6 @@ export function goToUpPage(): void {
placeholder="Leave blank to generate a name"
aria-label="playgroundName" />
<!-- inference runtime -->
<label for="inference-runtime" class="pt-4 block mb-2 font-bold text-[var(--pd-content-card-header-text)]">
Inference Runtime
</label>
<InferenceRuntimeSelect bind:value={runtime} providers={providers} exclude={exclude} />
<!-- model input -->
<label for="model" class="pt-4 block mb-2 font-bold text-[var(--pd-content-card-header-text)]">Model</label>
<ModelSelect models={localModels} disabled={submitted} bind:value={model} />

View File

@ -10,7 +10,6 @@ import { studioClient } from '../utils/client';
import type { CatalogFilterKey, Choice, RecipeChoices, RecipeFilters } from '@shared/models/FilterRecipesResult';
import { onMount } from 'svelte';
import { configuration } from '../stores/extensionConfiguration';
import { SvelteMap } from 'svelte/reactivity';
// filters available in the dropdowns for the user to select
let choices: RecipeChoices = $state({});
@ -54,7 +53,7 @@ let groups: Map<Category, Recipe[]> = $derived.by(() => {
if (!Object.keys(categoryDict).length) {
return new Map();
}
const output: Map<Category, Recipe[]> = new SvelteMap();
const output: Map<Category, Recipe[]> = new Map();
for (const recipe of recipes) {
if (recipe.categories.length === 0) {
output.set(UNCLASSIFIED, [...(output.get(UNCLASSIFIED) ?? []), recipe]);

View File

@ -69,14 +69,6 @@ const fakeRecipe: Recipe = {
categories: [],
} as unknown as Recipe;
const fakeLlamaStackRecipe: Recipe = {
id: 'dummy-llama-stack-recipe-id',
backend: 'llama-stack',
name: 'Dummy Llama Stack Recipe',
description: 'Dummy description',
categories: [],
} as unknown as Recipe;
const fakeRecommendedModel: ModelInfo = {
id: 'dummy-model-1',
backend: InferenceType.LLAMA_CPP,
@ -108,7 +100,7 @@ beforeEach(() => {
router.location.query.clear();
vi.mocked(CatalogStore).catalog = readable<ApplicationCatalog>({
recipes: [fakeRecipe, fakeLlamaStackRecipe],
recipes: [fakeRecipe],
models: [],
categories: [],
version: '',
@ -155,7 +147,7 @@ test('Recipe Local Repository should be visible when defined', async () => {
expect(span.textContent).toBe('dummy-recipe-path');
});
test('Submit button should be disabled when model is required and no model is selected', async () => {
test('Submit button should be disabled when no model is selected', async () => {
vi.mocked(ModelsInfoStore).modelsInfo = readable([]);
render(StartRecipe, {
@ -167,18 +159,6 @@ test('Submit button should be disabled when model is required and no model is se
expect(button).toBeDisabled();
});
test('Submit button should be enabled when model is not required', async () => {
vi.mocked(ModelsInfoStore).modelsInfo = readable([]);
render(StartRecipe, {
recipeId: 'dummy-llama-stack-recipe-id',
});
const button = screen.getByTitle(`Start ${fakeLlamaStackRecipe.name} recipe`);
expect(button).toBeDefined();
expect(button).toBeEnabled();
});
test('First recommended model should be selected as default model', async () => {
const { container } = render(StartRecipe, {
recipeId: 'dummy-recipe-id',
@ -285,29 +265,6 @@ test('Submit button should call requestPullApplication with proper arguments', a
connection: containerProviderConnection,
recipeId: fakeRecipe.id,
modelId: fakeRecommendedModel.id,
dependencies: {
llamaStack: false,
},
});
});
});
test('Submit button should call requestPullApplication with proper arguments for llama-stack recipe', async () => {
render(StartRecipe, {
recipeId: 'dummy-llama-stack-recipe-id',
});
const button = screen.getByTitle(`Start ${fakeLlamaStackRecipe.name} recipe`);
expect(button).toBeEnabled();
await fireEvent.click(button);
await vi.waitFor(() => {
expect(studioClient.requestPullApplication).toHaveBeenCalledWith({
connection: containerProviderConnection,
recipeId: fakeLlamaStackRecipe.id,
dependencies: {
llamaStack: true,
},
});
});
});

View File

@ -2,7 +2,7 @@
import { faFolder, faRocket, faUpRightFromSquare, faWarning } from '@fortawesome/free-solid-svg-icons';
import { catalog } from '/@/stores/catalog';
import Fa from 'svelte-fa';
import type { Recipe, RecipePullOptions, RecipePullOptionsWithModelInference } from '@shared/models/IRecipe';
import type { Recipe } from '@shared/models/IRecipe';
import type { LocalRepository } from '@shared/models/ILocalRepository';
import { findLocalRepositoryByRecipeId } from '/@/utils/localRepositoriesUtils';
import { localRepositories } from '/@/stores/localRepositories';
@ -53,16 +53,6 @@ let completed: boolean = $state(false);
let errorMsg: string | undefined = $state(undefined);
let formValid = $derived.by<boolean>((): boolean => {
if (!recipe) {
return false;
}
if (!isModelNeeded(recipe)) {
return true;
}
return !!model;
});
$effect(() => {
// Select default connection
if (!containerProviderConnection && startedContainerProviderConnectionInfo.length > 0) {
@ -110,22 +100,16 @@ function populateModelFromTasks(trackedTasks: Task[]): void {
}
async function submit(): Promise<void> {
if (!recipe || !formValid) return;
if (!recipe || !model) return;
errorMsg = undefined;
try {
const options: RecipePullOptions = {
const trackingId = await studioClient.requestPullApplication({
recipeId: $state.snapshot(recipe.id),
modelId: $state.snapshot(model.id),
connection: $state.snapshot(containerProviderConnection),
dependencies: {
llamaStack: recipe.backend === 'llama-stack',
},
};
if (model) {
(options as RecipePullOptionsWithModelInference).modelId = $state.snapshot(model.id);
}
const trackingId = await studioClient.requestPullApplication(options);
});
router.location.query.set('trackingId', trackingId);
} catch (err: unknown) {
console.error('Something wrong while trying to create the inference server.', err);
@ -140,10 +124,6 @@ export function goToUpPage(): void {
function handleOnClick(): void {
router.goto(`/recipe/${recipeId}/running`);
}
function isModelNeeded(recipe: Recipe): boolean {
return recipe.backend !== 'llama-stack';
}
</script>
<FormPage
@ -203,18 +183,17 @@ function isModelNeeded(recipe: Recipe): boolean {
bind:value={containerProviderConnection}
containerProviderConnections={startedContainerProviderConnectionInfo} />
{/if}
{#if isModelNeeded(recipe)}
<!-- model form -->
<label for="select-model" class="pt-4 block mb-2 font-bold text-[var(--pd-content-card-header-text)]"
>Model</label>
<ModelSelect bind:value={model} disabled={loading} recommended={recipe.recommended} models={models} />
{#if model && model.file === undefined}
<div class="text-gray-800 text-sm flex items-center">
<Fa class="mr-2" icon={faWarning} />
<span role="alert"
>The selected model will be downloaded. This action can take some time depending on your connection</span>
</div>
{/if}
<!-- model form -->
<label for="select-model" class="pt-4 block mb-2 font-bold text-[var(--pd-content-card-header-text)]"
>Model</label>
<ModelSelect bind:value={model} disabled={loading} recommended={recipe.recommended} models={models} />
{#if model && model.file === undefined}
<div class="text-gray-800 text-sm flex items-center">
<Fa class="mr-2" icon={faWarning} />
<span role="alert"
>The selected model will be downloaded. This action can take some time depending on your connection</span>
</div>
{/if}
</div>
@ -230,7 +209,7 @@ function isModelNeeded(recipe: Recipe): boolean {
title="Start {recipe.name} recipe"
inProgress={loading}
on:click={submit}
disabled={!formValid || loading || !containerProviderConnection}
disabled={!model || loading || !containerProviderConnection}
icon={faRocket}>
Start {recipe.name} recipe
</Button>

View File

@ -98,7 +98,8 @@ test('Instructions block should be displayed if Llama Stack container is found',
});
render(StartLlamaStackContainer);
await vi.waitFor(() => screen.getByText('Instructions'));
await tick();
screen.getByText('Instructions');
});
test('start button should be displayed and enabled', async () => {

View File

@ -17,7 +17,6 @@
***********************************************************************/
import type { ModelInfo } from './models/IModelInfo';
import type { InferenceType } from '@shared/models/IInference';
import type { ApplicationCatalog } from './models/IApplicationCatalog';
import type { OpenDialogOptions, Uri } from '@podman-desktop/api';
import type { ApplicationState } from './models/IApplicationState';
@ -122,11 +121,6 @@ export interface StudioAPI {
*/
getInferenceServers(): Promise<InferenceServer[]>;
/**
* Get inference providers
*/
getRegisteredProviders(): Promise<InferenceType[]>;
/**
* Request to start an inference server
* @param options The options to use

View File

@ -19,26 +19,10 @@ import type { ContainerProviderConnectionInfo } from './IContainerConnectionInfo
import type { InferenceServer } from './IInference';
export type RecipePullOptions = RecipePullOptionsDefault | RecipePullOptionsWithModelInference;
export interface RecipePullOptionsDefault {
export interface RecipePullOptions {
connection?: ContainerProviderConnectionInfo;
recipeId: string;
dependencies?: RecipeDependencies;
}
export type RecipePullOptionsWithModelInference = RecipePullOptionsDefault & {
modelId: string;
};
export interface RecipeDependencies {
llamaStack?: boolean;
}
export function isRecipePullOptionsWithModelInference(
options: RecipePullOptions,
): options is RecipePullOptionsWithModelInference {
return 'modelId' in options;
}
export interface RecipeComponents {

File diff suppressed because it is too large Load Diff

View File

@ -1,19 +1,20 @@
{
"name": "ai-lab-tests-playwright",
"version": "1.9.0-next",
"version": "1.7.0",
"description": "Podman Desktop AI Lab extension Playwright E2E tests",
"scripts": {
"test:e2e": "xvfb-maybe --auto-servernum --server-args='-screen 0 1280x960x24' -- npx playwright test src/",
"test:e2e:smoke": "xvfb-maybe --auto-servernum --server-args='-screen 0 1280x960x24' -- npx playwright test src/ -g @smoke",
"test:e2e:instructlab": "xvfb-maybe --auto-servernum --server-args='-screen 0 1280x960x24' -- npx playwright test src/ -g @instructlab"
"test:e2e:smoke": "xvfb-maybe --auto-servernum --server-args='-screen 0 1280x960x24' -- npx playwright test src/ -g @smoke"
},
"author": "Red Hat",
"license": "Apache-2.0",
"devDependencies": {
"@playwright/test": "^1.55.0",
"@podman-desktop/tests-playwright": "1.21.0",
"@playwright/test": "^1.52.0",
"@podman-desktop/tests-playwright": "1.18.1",
"@types/node": "^22",
"typescript": "^5.9.2",
"electron": "^36.2.1",
"typescript": "^5.8.3",
"vitest": "^3.0.5",
"xvfb-maybe": "^0.2.1"
},
"type": "module"

View File

@ -21,7 +21,6 @@ import { defineConfig, devices } from '@playwright/test';
export default defineConfig({
outputDir: './output/',
workers: 1,
timeout: 60_000,
reporter: [
['list'],

View File

@ -16,84 +16,39 @@
* SPDX-License-Identifier: Apache-2.0
***********************************************************************/
/**
* The 'test-audio-to-text.wav' file used in this test was sourced from the
* whisper.cpp project (https://github.com/ggml-org/whisper.cpp).
* It is licensed under the MIT License (see https://github.com/ggml-org/whisper.cpp/blob/master/LICENSE for details).
* This specific WAV file is used solely for Playwright testing purposes within this repository.
*/
import type { APIResponse, Locator } from '@playwright/test';
import type { Locator, Page } from '@playwright/test';
import type { NavigationBar, ExtensionsPage } from '@podman-desktop/tests-playwright';
import {
ContainerDetailsPage,
ContainerState,
expect as playExpect,
test,
RunnerOptions,
isWindows,
waitForPodmanMachineStartup,
isLinux,
isMac,
isCI,
resetPodmanMachinesFromCLI,
} from '@podman-desktop/tests-playwright';
import type { AILabDashboardPage } from './model/ai-lab-dashboard-page';
import { AILabPage } from './model/ai-lab-page';
import type { AILabRecipesCatalogPage } from './model/ai-lab-recipes-catalog-page';
import { AILabExtensionDetailsPage } from './model/podman-extension-ai-lab-details-page';
import type { AILabCatalogPage } from './model/ai-lab-catalog-page';
import { handleWebview } from './utils/webviewHandler';
import type { AILabServiceDetailsPage } from './model/ai-lab-service-details-page';
import type { AILabPlaygroundsPage } from './model/ai-lab-playgrounds-page';
import type { AILabPlaygroundDetailsPage } from './model/ai-lab-playground-details-page';
import {
getExtensionCard,
getExtensionVersion,
openAILabExtensionDetails,
openAILabPreferences,
reopenAILabDashboard,
waitForExtensionToInitialize,
} from './utils/aiLabHandler';
import * as fs from 'node:fs';
import * as path from 'node:path';
import { fileURLToPath } from 'node:url';
import type { AILabTryInstructLabPage } from './model/ai-lab-try-instructlab-page';
const AI_LAB_EXTENSION_OCI_IMAGE =
process.env.EXTENSION_OCI_IMAGE ?? 'ghcr.io/containers/podman-desktop-extension-ai-lab:nightly';
const AI_LAB_EXTENSION_PREINSTALLED: boolean = process.env.EXTENSION_PREINSTALLED === 'true';
const AI_LAB_CATALOG_EXTENSION_LABEL: string = 'redhat.ai-lab';
const AI_LAB_CATALOG_EXTENSION_NAME: string = 'Podman AI Lab extension';
const AI_LAB_CATALOG_STATUS_ACTIVE: string = 'ACTIVE';
let aiLabPage: AILabDashboardPage;
let webview: Page;
let aiLabPage: AILabPage;
const runnerOptions = {
customFolder: 'ai-lab-tests-pd',
aiLabModelUploadDisabled: isWindows ? true : false,
};
interface AiApp {
appName: string;
appModel: string;
}
const AI_APPS: AiApp[] = [
{ appName: 'Audio to Text', appModel: 'ggerganov/whisper.cpp' },
{ appName: 'ChatBot', appModel: 'ibm-granite/granite-3.3-8b-instruct-GGUF' },
{ appName: 'Summarizer', appModel: 'ibm-granite/granite-3.3-8b-instruct-GGUF' },
{ appName: 'Code Generation', appModel: 'ibm-granite/granite-3.3-8b-instruct-GGUF' },
{ appName: 'RAG Chatbot', appModel: 'ibm-granite/granite-3.3-8b-instruct-GGUF' },
{ appName: 'Function calling', appModel: 'ibm-granite/granite-3.3-8b-instruct-GGUF' },
{ appName: 'Object Detection', appModel: 'facebook/detr-resnet-101' },
];
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const TEST_AUDIO_FILE_PATH: string = path.resolve(
__dirname,
'..',
'..',
'playwright',
'resources',
`test-audio-to-text.wav`,
);
test.use({
runnerOptions: new RunnerOptions(runnerOptions),
});
@ -108,13 +63,13 @@ test.beforeAll(async ({ runner, welcomePage, page }) => {
});
test.afterAll(async ({ runner }) => {
test.setTimeout(180_000);
await resetPodmanMachinesFromCLI();
test.setTimeout(120_000);
await cleanupServiceModels();
await runner.close();
});
test.describe.serial(`AI Lab extension installation and verification`, () => {
test.describe.serial(`AI Lab extension installation`, { tag: ['@smoke', '@instructLab'] }, () => {
test.describe.serial(`AI Lab extension installation`, { tag: '@smoke' }, () => {
let extensionsPage: ExtensionsPage;
test(`Open Settings -> Extensions page`, async ({ navigationBar }) => {
@ -131,63 +86,40 @@ test.describe.serial(`AI Lab extension installation and verification`, () => {
});
test('Extension (card) is installed, present and active', async ({ navigationBar }) => {
await waitForExtensionToInitialize(navigationBar);
const extensionCard = await getExtensionCard(navigationBar);
const extensions = await navigationBar.openExtensions();
await playExpect
.poll(async () => await extensions.extensionIsInstalled(AI_LAB_CATALOG_EXTENSION_LABEL), { timeout: 30000 })
.toBeTruthy();
const extensionCard = await extensions.getInstalledExtension(
AI_LAB_CATALOG_EXTENSION_NAME,
AI_LAB_CATALOG_EXTENSION_LABEL,
);
await playExpect(extensionCard.status).toHaveText(AI_LAB_CATALOG_STATUS_ACTIVE);
});
test(`Extension's details show correct status, no error`, async ({ navigationBar }) => {
const aiLabExtensionDetailsPage = await openAILabExtensionDetails(navigationBar);
await aiLabExtensionDetailsPage.waitForLoad();
await aiLabExtensionDetailsPage.checkIsActive(AI_LAB_CATALOG_STATUS_ACTIVE);
await aiLabExtensionDetailsPage.checkForErrors();
test(`Extension's details show correct status, no error`, async ({ page, navigationBar }) => {
const extensions = await navigationBar.openExtensions();
const extensionCard = await extensions.getInstalledExtension('ai-lab', AI_LAB_CATALOG_EXTENSION_LABEL);
await extensionCard.openExtensionDetails(AI_LAB_CATALOG_EXTENSION_NAME);
const details = new AILabExtensionDetailsPage(page);
await playExpect(details.heading).toBeVisible();
await playExpect(details.status).toHaveText(AI_LAB_CATALOG_STATUS_ACTIVE);
const errorTab = details.tabs.getByRole('button', { name: 'Error' });
// we would like to propagate the error's stack trace into test failure message
let stackTrace = '';
if ((await errorTab.count()) > 0) {
await details.activateTab('Error');
stackTrace = await details.errorStackTrace.innerText();
}
await playExpect(errorTab, `Error Tab was present with stackTrace: ${stackTrace}`).not.toBeVisible();
});
test(`Verify AI Lab is accessible`, async ({ runner, page, navigationBar }) => {
aiLabPage = await reopenAILabDashboard(runner, page, navigationBar);
test(`Verify AI Lab extension is installed`, async ({ runner, page, navigationBar }) => {
[page, webview] = await handleWebview(runner, page, navigationBar);
aiLabPage = new AILabPage(page, webview);
await aiLabPage.navigationBar.waitForLoad();
});
});
test.describe.serial(`AI Lab extension GPU preferences`, { tag: '@smoke' }, () => {
test(`Verify GPU support banner is visible, preferences are disabled`, async ({ page, navigationBar }) => {
test.setTimeout(15_000);
await playExpect(aiLabPage.gpuSupportBanner).toBeVisible();
await playExpect(aiLabPage.enableGpuButton).toBeVisible();
await playExpect(aiLabPage.dontDisplayButton).toBeVisible();
const preferencesPage = await openAILabPreferences(navigationBar, page);
await preferencesPage.waitForLoad();
playExpect(await preferencesPage.isGPUPreferenceEnabled()).toBeFalsy();
});
test(`Enable GPU support and verify preferences`, async ({ runner, page, navigationBar }) => {
test.setTimeout(30_000);
aiLabPage = await reopenAILabDashboard(runner, page, navigationBar);
await aiLabPage.waitForLoad();
await aiLabPage.enableGpuSupport();
const preferencesPage = await openAILabPreferences(navigationBar, page);
await preferencesPage.waitForLoad();
playExpect(await preferencesPage.isGPUPreferenceEnabled()).toBeTruthy();
});
test.afterAll(
`Disable GPU support, return to AI Lab Dashboard and hide banner`,
async ({ runner, page, navigationBar }) => {
test.setTimeout(30_000);
const preferencesPage = await openAILabPreferences(navigationBar, page);
await preferencesPage.waitForLoad();
await preferencesPage.disableGPUPreference();
playExpect(await preferencesPage.isGPUPreferenceEnabled()).toBeFalsy();
aiLabPage = await reopenAILabDashboard(runner, page, navigationBar);
await playExpect(aiLabPage.gpuSupportBanner).toBeVisible();
await playExpect(aiLabPage.enableGpuButton).toBeVisible();
await playExpect(aiLabPage.dontDisplayButton).toBeVisible();
await aiLabPage.dontDisplayButton.click();
await playExpect(aiLabPage.gpuSupportBanner).toBeHidden();
},
);
});
test.describe.serial('AI Lab API endpoint e2e test', { tag: '@smoke' }, () => {
let localServerPort: string;
let extensionVersion: string | undefined;
@ -195,8 +127,11 @@ test.describe.serial(`AI Lab extension installation and verification`, () => {
test.beforeAll(
'Get AI Lab extension version and open AI Lab navigation bar',
async ({ page, runner, navigationBar }) => {
extensionVersion = await getExtensionVersion(navigationBar);
aiLabPage = await reopenAILabDashboard(runner, page, navigationBar);
const extensions = await navigationBar.openExtensions();
extensionVersion = await extensions.getInstalledExtensionVersion('ai-lab', AI_LAB_CATALOG_EXTENSION_LABEL);
[page, webview] = await handleWebview(runner, page, navigationBar);
aiLabPage = new AILabPage(page, webview);
await aiLabPage.navigationBar.waitForLoad();
},
);
@ -225,9 +160,8 @@ test.describe.serial(`AI Lab extension installation and verification`, () => {
playExpect(apiResponse.version).toBe(extensionVersion);
});
// This test is currently failing due to a known issue: https://github.com/containers/podman-desktop-extension-ai-lab/issues/2925
test.skip(`Download ${model} via API`, async ({ request }) => {
test.setTimeout(610_000);
test(`Download ${model} via API`, async ({ request }) => {
test.setTimeout(300_000);
const catalogPage = await aiLabPage.navigationBar.openCatalog();
await catalogPage.waitForLoad();
console.log(`Downloading ${model}...`);
@ -241,13 +175,17 @@ test.describe.serial(`AI Lab extension installation and verification`, () => {
insecure: false,
stream: true,
},
timeout: 600_000,
timeout: 300_000,
});
const body = await response.body();
const text = body.toString();
playExpect(text).toContain('success');
await aiLabPage.navigationBar.openCatalog();
});
// This test is currently failing due to a known issue: https://github.com/containers/podman-desktop-extension-ai-lab/issues/2925
test.fail(`Verify ${model} is available in AI Lab Catalog`, async () => {
const catalogPage = await aiLabPage.navigationBar.openCatalog();
await catalogPage.waitForLoad();
await playExpect
// eslint-disable-next-line sonarjs/no-nested-functions
@ -256,7 +194,7 @@ test.describe.serial(`AI Lab extension installation and verification`, () => {
});
// This test is currently failing due to a known issue: https://github.com/containers/podman-desktop-extension-ai-lab/issues/2925
test.skip(`Verify ${model} is listed in models fetched from API`, async ({ request }) => {
test.fail(`Verify ${model} is listed in models fetched from API`, async ({ request }) => {
const response = await request.get(`http://127.0.0.1:${localServerPort}/api/tags`, {
headers: {
Accept: 'application/json',
@ -270,7 +208,7 @@ test.describe.serial(`AI Lab extension installation and verification`, () => {
});
// This test is currently failing due to a known issue: https://github.com/containers/podman-desktop-extension-ai-lab/issues/2925
test.skip(`Delete ${model} model`, async () => {
test.fail(`Delete ${model} model`, async () => {
test.skip(isWindows, 'Model deletion is currently very buggy in azure cicd');
test.setTimeout(310_000);
const catalogPage = await aiLabPage.navigationBar.openCatalog();
@ -289,7 +227,8 @@ test.describe.serial(`AI Lab extension installation and verification`, () => {
let catalogPage: AILabCatalogPage;
test.beforeEach(`Open AI Lab Catalog`, async ({ runner, page, navigationBar }) => {
aiLabPage = await reopenAILabDashboard(runner, page, navigationBar);
[page, webview] = await handleWebview(runner, page, navigationBar);
aiLabPage = new AILabPage(page, webview);
await aiLabPage.navigationBar.waitForLoad();
catalogPage = await aiLabPage.navigationBar.openCatalog();
@ -297,24 +236,24 @@ test.describe.serial(`AI Lab extension installation and verification`, () => {
});
test(`Download ${modelName} model`, async () => {
test.setTimeout(610_000);
test.setTimeout(310_000);
if (!(await catalogPage.isModelDownloaded(modelName))) {
await catalogPage.downloadModel(modelName);
}
await playExpect
// eslint-disable-next-line sonarjs/no-nested-functions
.poll(async () => await waitForCatalogModel(modelName), { timeout: 600_000, intervals: [5_000] })
.poll(async () => await waitForCatalogModel(modelName), { timeout: 300_000, intervals: [5_000] })
.toBeTruthy();
});
test(`Delete ${modelName} model`, async () => {
test.skip(isWindows, 'Model deletion is currently very buggy in azure cicd');
test.setTimeout(610_000);
test.setTimeout(310_000);
playExpect(await catalogPage.isModelDownloaded(modelName)).toBeTruthy();
await catalogPage.deleteModel(modelName);
await playExpect
// eslint-disable-next-line sonarjs/no-nested-functions
.poll(async () => await waitForCatalogModel(modelName), { timeout: 600_000, intervals: [2_500] })
.poll(async () => await waitForCatalogModel(modelName), { timeout: 300_000, intervals: [2_500] })
.toBeFalsy();
});
});
@ -331,7 +270,8 @@ test.describe.serial(`AI Lab extension installation and verification`, () => {
);
test.beforeAll(`Open AI Lab Catalog`, async ({ runner, page, navigationBar }) => {
aiLabPage = await reopenAILabDashboard(runner, page, navigationBar);
[page, webview] = await handleWebview(runner, page, navigationBar);
aiLabPage = new AILabPage(page, webview);
await aiLabPage.navigationBar.waitForLoad();
catalogPage = await aiLabPage.navigationBar.openCatalog();
@ -359,7 +299,6 @@ test.describe.serial(`AI Lab extension installation and verification`, () => {
await playExpect(modelServiceDetailsPage.modelName).toContainText(modelName);
await playExpect(modelServiceDetailsPage.inferenceServerType).toContainText('Inference');
await playExpect(modelServiceDetailsPage.inferenceServerType).toContainText(/CPU|GPU/);
});
test(`Make GET request to the model service for ${modelName}`, async ({ request }) => {
@ -403,36 +342,16 @@ test.describe.serial(`AI Lab extension installation and verification`, () => {
}).toPass({ timeout: 600_000, intervals: [5_000] });
});
test(`Restart model service for ${modelName}`, async () => {
test.skip(modelName === 'ggerganov/whisper.cpp');
test.setTimeout(180_000);
await modelServiceDetailsPage.stopService();
await playExpect(modelServiceDetailsPage.startServiceButton).toBeEnabled({ timeout: 120_000 });
await playExpect
// eslint-disable-next-line sonarjs/no-nested-functions
.poll(async () => await modelServiceDetailsPage.getServiceState(), { timeout: 120_000 })
.toBe('');
await modelServiceDetailsPage.startService();
await playExpect
// eslint-disable-next-line sonarjs/no-nested-functions
.poll(async () => await modelServiceDetailsPage.getServiceState(), { timeout: 120_000 })
.toBe('RUNNING');
});
test(`Delete model service and model for ${modelName}`, async () => {
test(`Delete model service for ${modelName}`, async () => {
test.setTimeout(150_000);
await cleanupServices();
await deleteAllModels();
const modelServicePage = await modelServiceDetailsPage.deleteService();
await playExpect(modelServicePage.heading).toBeVisible({ timeout: 120_000 });
});
});
});
// Do not use non-instruct models in playground tests.
// They break out of guilderails and fail the tests.
['ibm-granite/granite-3.3-8b-instruct-GGUF', 'TheBloke/Mistral-7B-Instruct-v0.2-GGUF'].forEach(modelName => {
test.describe.serial(`AI Lab playground creation and deletion for ${modelName}`, { tag: '@smoke' }, () => {
['lmstudio-community/granite-3.0-8b-instruct-GGUF'].forEach(modelName => {
test.describe.serial(`AI Lab playground creation and deletion`, () => {
let catalogPage: AILabCatalogPage;
let playgroundsPage: AILabPlaygroundsPage;
let playgroundDetailsPage: AILabPlaygroundDetailsPage;
@ -442,7 +361,8 @@ test.describe.serial(`AI Lab extension installation and verification`, () => {
const systemPrompt = 'Always respond with: "Hello, I am Chat Bot"';
test.beforeAll(`Open AI Lab Catalog`, async ({ runner, page, navigationBar }) => {
aiLabPage = await reopenAILabDashboard(runner, page, navigationBar);
[page, webview] = await handleWebview(runner, page, navigationBar);
aiLabPage = new AILabPage(page, webview);
await aiLabPage.navigationBar.waitForLoad();
catalogPage = await aiLabPage.navigationBar.openCatalog();
@ -450,13 +370,13 @@ test.describe.serial(`AI Lab extension installation and verification`, () => {
});
test(`Download ${modelName} model if not available`, async () => {
test.setTimeout(610_000);
test.setTimeout(310_000);
if (!(await catalogPage.isModelDownloaded(modelName))) {
await catalogPage.downloadModel(modelName);
}
await playExpect
// eslint-disable-next-line sonarjs/no-nested-functions
.poll(async () => await waitForCatalogModel(modelName), { timeout: 600_000, intervals: [5_000] })
.poll(async () => await waitForCatalogModel(modelName), { timeout: 300_000, intervals: [5_000] })
.toBeTruthy();
});
@ -512,13 +432,12 @@ test.describe.serial(`AI Lab extension installation and verification`, () => {
test.afterAll(`Cleaning up service model`, async () => {
test.setTimeout(60_000);
await cleanupServices();
await deleteAllModels();
await cleanupServiceModels();
});
});
});
AI_APPS.forEach(({ appName, appModel }) => {
['Audio to Text', 'ChatBot', 'Summarizer', 'Code Generation', 'RAG Chatbot'].forEach(appName => {
test.describe.serial(`AI Recipe installation`, () => {
test.skip(
!process.env.EXT_TEST_RAG_CHATBOT && appName === 'RAG Chatbot',
@ -526,8 +445,9 @@ test.describe.serial(`AI Lab extension installation and verification`, () => {
);
let recipesCatalogPage: AILabRecipesCatalogPage;
test.beforeAll(`Open Recipes Catalog`, async ({ runner, page, navigationBar }) => {
aiLabPage = await reopenAILabDashboard(runner, page, navigationBar);
test.beforeEach(`Open Recipes Catalog`, async ({ runner, page, navigationBar }) => {
[page, webview] = await handleWebview(runner, page, navigationBar);
aiLabPage = new AILabPage(page, webview);
await aiLabPage.navigationBar.waitForLoad();
recipesCatalogPage = await aiLabPage.navigationBar.openRecipesCatalog();
@ -535,187 +455,23 @@ test.describe.serial(`AI Lab extension installation and verification`, () => {
});
test(`Install ${appName} example app`, async () => {
test.skip(
appName === 'Object Detection' && isCI && !isMac,
'Currently we are facing issues with the Object Detection app installation on Windows and Linux CI.',
);
test.setTimeout(1_500_000);
const demoApp = await recipesCatalogPage.openRecipesCatalogApp(appName);
await demoApp.waitForLoad();
await demoApp.startNewDeployment();
});
test(`Verify ${appName} app HTTP page is reachable`, async ({ request }) => {
test.setTimeout(60_000);
/// In the future, we could use this test for other AI applications
test.skip(
appName !== 'Object Detection' || (isCI && !isMac),
'Runs only for Object Detection app on macOS CI or any local platform',
);
const aiRunningAppsPage = await aiLabPage.navigationBar.openRunningApps();
const appPort = await aiRunningAppsPage.getAppPort(appName);
const response = await request.get(`http://localhost:${appPort}`, { timeout: 60_000 });
playExpect(response.ok()).toBeTruthy();
const body = await response.text();
playExpect(body).toContain('<title>Streamlit</title>');
});
test(`Verify that model service for the ${appName} is working`, async ({ request }) => {
test.skip(appName !== 'Function calling' && appName !== 'Audio to Text');
test.fail(
appName === 'Audio to Text',
'Expected failure due to issue #3111: https://github.com/containers/podman-desktop-extension-ai-lab/issues/3111',
);
test.setTimeout(600_000);
const modelServicePage = await aiLabPage.navigationBar.openServices();
const serviceDetailsPage = await modelServicePage.openServiceDetails(appModel);
await playExpect
// eslint-disable-next-line sonarjs/no-nested-functions
.poll(async () => await serviceDetailsPage.getServiceState(), { timeout: 60_000 })
.toBe('RUNNING');
const port = await serviceDetailsPage.getInferenceServerPort();
const baseUrl = `http://localhost:${port}`;
let response: APIResponse;
let expectedResponse: string;
switch (appModel) {
case 'ggerganov/whisper.cpp': {
expectedResponse =
'And so my fellow Americans, ask not what your country can do for you, ask what you can do for your country';
const audioFileContent = fs.readFileSync(TEST_AUDIO_FILE_PATH);
response = await request.post(`${baseUrl}/inference`, {
headers: {
Accept: 'application/json',
},
multipart: {
file: {
name: 'test.wav',
mimeType: 'audio/wav',
buffer: audioFileContent,
},
},
timeout: 600_000,
});
break;
}
case 'ibm-granite/granite-3.3-8b-instruct-GGUF': {
expectedResponse = 'Prague';
response = await request.post(`${baseUrl}/v1/chat/completions`, {
data: {
messages: [
{ role: 'system', content: 'You are a helpful assistant.' },
{ role: 'user', content: 'What is the capital of Czech Republic?' },
],
},
timeout: 600_000,
});
break;
}
default:
throw new Error(`Unhandled model type: ${appModel}`);
}
playExpect(response.ok()).toBeTruthy();
const body = await response.body();
const text = body.toString();
playExpect(text).toContain(expectedResponse);
});
test(`${appName}: Restart, Stop, Delete. Clean up model service`, async () => {
test.skip(
appName === 'Object Detection' && isCI && !isMac,
'Currently we are facing issues with the Object Detection app installation on Windows and Linux CI.',
);
test.afterEach(`Stop ${appName} app`, async ({ navigationBar }) => {
test.setTimeout(150_000);
await restartApp(appName);
await stopAndDeleteApp(appName);
await cleanupServices();
});
test.afterAll(`Ensure cleanup of "${appName}" app, related service, and images`, async ({ navigationBar }) => {
test.setTimeout(150_000);
await stopAndDeleteApp(appName);
await cleanupServices();
await deleteAllModels();
await cleanupServiceModels();
await deleteUnusedImages(navigationBar);
});
});
});
test.describe.serial('InstructLab container startup', { tag: '@instructlab' }, () => {
let instructLabPage: AILabTryInstructLabPage;
const instructLabContainerName = /^instructlab-\d+$/;
let exactInstructLabContainerName = '';
test.skip(!!process.env.GITHUB_ACTIONS && !!isLinux);
test.beforeAll('Open Try InstructLab page', async ({ runner, page, navigationBar }) => {
aiLabPage = await reopenAILabDashboard(runner, page, navigationBar);
await aiLabPage.navigationBar.waitForLoad();
instructLabPage = await aiLabPage.navigationBar.openTryInstructLab();
await instructLabPage.waitForLoad();
});
test('Start and verify InstructLab container', async ({ page }) => {
test.setTimeout(1_000_000);
await playExpect(instructLabPage.startInstructLabButton).toBeVisible();
await playExpect(instructLabPage.startInstructLabButton).toBeEnabled();
await instructLabPage.startInstructLabButton.click();
await playExpect(instructLabPage.openInstructLabButton).toBeVisible({ timeout: 900_000 });
await playExpect(instructLabPage.openInstructLabButton).toBeEnabled({ timeout: 10_000 });
await playExpect(instructLabPage.statusMessageBox).toContainText('Starting InstructLab container');
const checkMarkLocator = instructLabPage.statusMessageBox.locator('[class*="text-green"]');
await playExpect(checkMarkLocator).toHaveCount(3);
await instructLabPage.openInstructLabButton.click();
const containerName = await page
.getByRole('region', { name: 'Header' })
.getByLabel(instructLabContainerName)
.textContent();
if (typeof containerName === 'string') {
exactInstructLabContainerName = containerName;
}
const containerDetailsPage = new ContainerDetailsPage(page, exactInstructLabContainerName);
await playExpect(containerDetailsPage.heading).toBeVisible();
await playExpect(containerDetailsPage.heading).toContainText(exactInstructLabContainerName);
await playExpect
.poll(async () => containerDetailsPage.getState(), { timeout: 90_000, intervals: [1_000] })
.toContain(ContainerState.Running);
});
test('Cleanup the InstructLab container', async ({ runner, page, navigationBar }) => {
const containerDetailsPage = new ContainerDetailsPage(page, exactInstructLabContainerName);
await playExpect(containerDetailsPage.heading).toBeVisible();
await containerDetailsPage.deleteContainer();
const containersPage = await navigationBar.openContainers();
await playExpect(containersPage.heading).toBeVisible({ timeout: 30_000 });
await playExpect
.poll(async () => containersPage.containerExists(exactInstructLabContainerName), { timeout: 100_000 })
.toBeFalsy();
aiLabPage = await reopenAILabDashboard(runner, page, navigationBar);
await aiLabPage.navigationBar.waitForLoad();
instructLabPage = await aiLabPage.navigationBar.openTryInstructLab();
await instructLabPage.waitForLoad();
await playExpect(instructLabPage.startInstructLabButton).toBeEnabled();
});
});
});
async function cleanupServices(): Promise<void> {
async function cleanupServiceModels(): Promise<void> {
try {
const modelServicePage = await aiLabPage.navigationBar.openServices();
await modelServicePage.waitForLoad();
@ -726,36 +482,9 @@ async function cleanupServices(): Promise<void> {
}
}
async function deleteAllModels(): Promise<void> {
const modelCatalogPage = await aiLabPage.navigationBar.openCatalog();
await modelCatalogPage.waitForLoad();
await modelCatalogPage.deleteAllModels();
}
async function restartApp(appName: string): Promise<void> {
const aiRunningAppsPage = await aiLabPage.navigationBar.openRunningApps();
const aiApp = await aiRunningAppsPage.getRowForApp(appName);
await aiRunningAppsPage.waitForLoad();
await playExpect.poll(async () => await aiRunningAppsPage.appExists(appName), { timeout: 10_000 }).toBeTruthy();
await playExpect
.poll(async () => await aiRunningAppsPage.getCurrentStatusForApp(appName), { timeout: 60_000 })
.toBe('RUNNING');
await aiRunningAppsPage.restartApp(appName);
const appProgressBar = aiApp.getByRole('progressbar', { name: 'Loading' });
await playExpect(appProgressBar).toBeVisible({ timeout: 60_000 });
await playExpect
.poll(async () => await aiRunningAppsPage.getCurrentStatusForApp(appName), { timeout: 60_000 })
.toBe('RUNNING');
}
async function stopAndDeleteApp(appName: string): Promise<void> {
const aiRunningAppsPage = await aiLabPage.navigationBar.openRunningApps();
await aiRunningAppsPage.waitForLoad();
if (!(await aiRunningAppsPage.appExists(appName))) {
console.log(`"${appName}" is not present in the running apps list. Skipping stop and delete operations.`);
return;
}
await playExpect.poll(async () => await aiRunningAppsPage.appExists(appName), { timeout: 10_000 }).toBeTruthy();
await playExpect
.poll(async () => await aiRunningAppsPage.getCurrentStatusForApp(appName), { timeout: 60_000 })

View File

@ -17,30 +17,17 @@
***********************************************************************/
import type { Locator, Page } from '@playwright/test';
import { expect as playExpect } from '@playwright/test';
export abstract class AILabBasePage {
readonly page: Page;
readonly webview: Page;
readonly heading: Locator;
readonly gpuSupportBanner: Locator;
readonly enableGpuButton: Locator;
readonly dontDisplayButton: Locator;
constructor(page: Page, webview: Page, heading: string | undefined) {
this.page = page;
this.webview = webview;
this.heading = webview.getByRole('heading', { name: heading, exact: true }).first();
this.gpuSupportBanner = this.webview.getByLabel('GPU promotion banner');
this.enableGpuButton = this.gpuSupportBanner.getByRole('button', { name: 'Enable GPU support' });
this.dontDisplayButton = this.gpuSupportBanner.getByRole('button', { name: `Don't display anymore` });
}
abstract waitForLoad(): Promise<void>;
async enableGpuSupport(): Promise<void> {
await playExpect(this.gpuSupportBanner).toBeVisible();
await this.enableGpuButton.click();
await playExpect(this.gpuSupportBanner).not.toBeVisible();
}
}

View File

@ -19,7 +19,7 @@
import type { Locator, Page } from '@playwright/test';
import { expect as playExpect } from '@playwright/test';
import { AILabBasePage } from './ai-lab-base-page';
import { handleConfirmationDialog, podmanAILabExtension } from '@podman-desktop/tests-playwright';
import { handleConfirmationDialog } from '@podman-desktop/tests-playwright';
import { AILabCreatingModelServicePage } from './ai-lab-creating-model-service-page';
export class AILabCatalogPage extends AILabBasePage {
@ -50,12 +50,6 @@ export class AILabCatalogPage extends AILabBasePage {
return undefined;
}
async getModelNameByRow(row: Locator): Promise<string> {
const modelNameCell = row.getByLabel('Model Name');
const modelName = await modelNameCell.textContent();
return modelName?.trim() ?? '';
}
async downloadModel(modelName: string): Promise<void> {
const modelRow = await this.getModelRowByName(modelName);
if (!modelRow) {
@ -81,35 +75,16 @@ export class AILabCatalogPage extends AILabBasePage {
}
async deleteModel(modelName: string): Promise<void> {
if (!modelName || modelName.trim() === '') {
console.warn('Model name is empty, skipping deletion.');
return;
}
const modelRow = await this.getModelRowByName(modelName);
if (!modelRow) {
throw new Error(`Model ${modelName} not found`);
}
const deleteButton = modelRow.getByRole('button', { name: 'Delete Model' });
await playExpect.poll(async () => await deleteButton.isEnabled(), { timeout: 10_000 }).toBeTruthy();
await playExpect(deleteButton).toBeEnabled();
await deleteButton.focus();
await deleteButton.click();
await this.page.waitForTimeout(1_000);
await handleConfirmationDialog(this.page, podmanAILabExtension.extensionName, true, 'Confirm');
await playExpect.poll(async () => await this.isModelDownloaded(modelName), { timeout: 30_000 }).toBeFalsy();
}
async deleteAllModels(): Promise<void> {
const modelRows = await this.getAllModelRows();
if (modelRows.length === 0) {
return;
}
for (const modelRow of modelRows) {
const modelName = await this.getModelNameByRow(modelRow);
if (await this.isModelDownloaded(modelName)) {
await this.deleteModel(modelName);
}
}
await handleConfirmationDialog(this.page, 'Podman AI Lab', true, 'Confirm');
}
async isModelDownloaded(modelName: string): Promise<boolean> {

View File

@ -19,9 +19,8 @@
import { expect as playExpect } from '@playwright/test';
import type { Locator, Page } from '@playwright/test';
import { AILabBasePage } from './ai-lab-base-page';
import { handleConfirmationDialog, podmanAILabExtension } from '@podman-desktop/tests-playwright';
import { handleConfirmationDialog } from '@podman-desktop/tests-playwright';
import { AILabCreatingModelServicePage } from './ai-lab-creating-model-service-page';
import { AILabServiceDetailsPage } from './ai-lab-service-details-page';
export class AiModelServicePage extends AILabBasePage {
readonly additionalActions: Locator;
@ -60,35 +59,13 @@ export class AiModelServicePage extends AILabBasePage {
await playExpect(this.deleteSelectedItems).toBeEnabled();
await this.deleteSelectedItems.click();
await handleConfirmationDialog(this.page, podmanAILabExtension.extensionName, true, 'Confirm');
await handleConfirmationDialog(this.page, 'Podman AI Lab', true, 'Confirm');
}
async getCurrentModelCount(): Promise<number> {
return (await this.getAllTableRows()).length;
}
async openServiceDetails(modelName: string): Promise<AILabServiceDetailsPage> {
const serviceRow = await this.getServiceByModel(modelName);
if (serviceRow === undefined) {
throw new Error(`Model [${modelName}] service doesn't exist`);
}
const serviceRowName = serviceRow.getByRole('cell').nth(3);
await serviceRowName.click();
return new AILabServiceDetailsPage(this.page, this.webview);
}
async getServiceByModel(modelName: string): Promise<Locator | undefined> {
const rows = await this.getAllTableRows();
for (let rowNum = 1; rowNum < rows.length; rowNum++) {
//skip header
const serviceModel = rows[rowNum].getByRole('cell').nth(4);
if ((await serviceModel.textContent()) === modelName) {
return rows[rowNum];
}
}
return undefined;
}
private async getAllTableRows(): Promise<Locator[]> {
return await this.webview.getByRole('row').all();
}

View File

@ -25,12 +25,9 @@ import { AiModelServicePage } from './ai-lab-model-service-page';
import { AILabCatalogPage } from './ai-lab-catalog-page';
import { AILabPlaygroundsPage } from './ai-lab-playgrounds-page';
import { AILabLocalServerPage } from './ai-lab-local-server-page';
import { AILabDashboardPage } from './ai-lab-dashboard-page';
import { AILabTryInstructLabPage } from './ai-lab-try-instructlab-page';
export class AILabNavigationBar extends AILabBasePage {
readonly navigationBar: Locator;
readonly dashboardButton: Locator;
readonly recipesCatalogButton: Locator;
readonly runningAppsButton: Locator;
readonly catalogButton: Locator;
@ -38,12 +35,10 @@ export class AILabNavigationBar extends AILabBasePage {
readonly playgroundsButton: Locator;
readonly tuneButton: Locator;
readonly localServerButton: Locator;
readonly tryInstructLabButton: Locator;
constructor(page: Page, webview: Page) {
super(page, webview, undefined);
this.navigationBar = this.webview.getByRole('navigation', { name: 'PreferencesNavigation' });
this.dashboardButton = this.navigationBar.getByRole('link', { name: 'Dashboard', exact: true });
this.recipesCatalogButton = this.navigationBar.getByRole('link', { name: 'Recipe Catalog', exact: true });
this.runningAppsButton = this.navigationBar.getByRole('link', { name: 'Running' });
this.catalogButton = this.navigationBar.getByRole('link', { name: 'Catalog', exact: true });
@ -51,19 +46,12 @@ export class AILabNavigationBar extends AILabBasePage {
this.playgroundsButton = this.navigationBar.getByRole('link', { name: 'Playgrounds' });
this.tuneButton = this.navigationBar.getByRole('link', { name: 'Tune with InstructLab' });
this.localServerButton = this.navigationBar.getByRole('link', { name: 'Local Server' });
this.tryInstructLabButton = this.navigationBar.getByRole('link', { name: 'Try InstructLab' });
}
async waitForLoad(): Promise<void> {
await playExpect(this.navigationBar).toBeVisible();
}
async openDashboard(): Promise<AILabDashboardPage> {
await playExpect(this.dashboardButton).toBeEnabled();
await this.dashboardButton.click();
return new AILabDashboardPage(this.page, this.webview);
}
async openRecipesCatalog(): Promise<AILabRecipesCatalogPage> {
await playExpect(this.recipesCatalogButton).toBeEnabled();
await this.recipesCatalogButton.click();
@ -99,10 +87,4 @@ export class AILabNavigationBar extends AILabBasePage {
await this.localServerButton.click();
return new AILabLocalServerPage(this.page, this.webview);
}
async openTryInstructLab(): Promise<AILabTryInstructLabPage> {
await playExpect(this.tryInstructLabButton).toBeEnabled();
await this.tryInstructLabButton.click();
return new AILabTryInstructLabPage(this.page, this.webview);
}
}

View File

@ -21,7 +21,7 @@ import { expect as playExpect } from '@playwright/test';
import { AILabBasePage } from './ai-lab-base-page';
import { AILabNavigationBar } from './ai-lab-navigation-bar';
export class AILabDashboardPage extends AILabBasePage {
export class AILabPage extends AILabBasePage {
readonly navigationBar: AILabNavigationBar;
constructor(page: Page, webview: Page) {

View File

@ -20,7 +20,7 @@ import { expect as playExpect } from '@playwright/test';
import type { Locator, Page } from '@playwright/test';
import { AILabBasePage } from './ai-lab-base-page';
import { AILabPlaygroundsPage } from './ai-lab-playgrounds-page';
import { handleConfirmationDialog, podmanAILabExtension } from '@podman-desktop/tests-playwright';
import { handleConfirmationDialog } from '@podman-desktop/tests-playwright';
export class AILabPlaygroundDetailsPage extends AILabBasePage {
readonly name: string;
@ -73,14 +73,14 @@ export class AILabPlaygroundDetailsPage extends AILabBasePage {
async deletePlayground(): Promise<AILabPlaygroundsPage> {
await playExpect(this.deletePlaygroundButton).toBeEnabled();
await this.deletePlaygroundButton.click();
await handleConfirmationDialog(this.page, podmanAILabExtension.extensionName, true, 'Confirm');
await handleConfirmationDialog(this.page, 'Podman AI Lab', true, 'Confirm');
return new AILabPlaygroundsPage(this.page, this.webview);
}
async submitUserInput(prompt: string): Promise<void> {
await this.promptTextAreaLocator.fill(prompt);
await playExpect(this.promptTextAreaLocator).toHaveValue(prompt);
await playExpect(this.sendPromptButton).toBeEnabled({ timeout: 80_000 });
await playExpect(this.sendPromptButton).toBeEnabled({ timeout: 30_000 });
await this.sendPromptButton.click();
}

View File

@ -19,7 +19,7 @@
import type { Locator, Page } from '@playwright/test';
import { expect as playExpect } from '@playwright/test';
import { AILabBasePage } from './ai-lab-base-page';
import { handleConfirmationDialog, podmanAILabExtension } from '@podman-desktop/tests-playwright';
import { handleConfirmationDialog } from '@podman-desktop/tests-playwright';
import { AILabPlaygroundDetailsPage } from './ai-lab-playground-details-page';
export class AILabPlaygroundsPage extends AILabBasePage {
@ -60,7 +60,7 @@ export class AILabPlaygroundsPage extends AILabBasePage {
const deleteButton = playgroundRow.getByRole('button', { name: 'Delete conversation', exact: true });
await playExpect(deleteButton).toBeEnabled();
await deleteButton.click();
await handleConfirmationDialog(this.page, podmanAILabExtension.extensionName, true, 'Confirm');
await handleConfirmationDialog(this.page, 'Podman AI Lab', true, 'Confirm');
return this;
}

View File

@ -1,5 +1,5 @@
/**********************************************************************
* Copyright (C) 2024-2025 Red Hat, Inc.
* Copyright (C) 2024 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -19,7 +19,7 @@
import { expect as playExpect } from '@playwright/test';
import type { Locator, Page } from '@playwright/test';
import { AILabBasePage } from './ai-lab-base-page';
import { handleConfirmationDialog, podmanAILabExtension } from '@podman-desktop/tests-playwright';
import { handleConfirmationDialog } from '@podman-desktop/tests-playwright';
export class AiRunningAppsPage extends AILabBasePage {
constructor(page: Page, webview: Page) {
@ -46,15 +46,6 @@ export class AiRunningAppsPage extends AILabBasePage {
return `${await row.getByRole('cell').nth(1).getByRole('status').getAttribute('title', { timeout: 60_000 })}`;
}
async restartApp(appName: string): Promise<void> {
const dropDownMenu = await this.openKebabMenuForApp(appName);
const restartButton = dropDownMenu.getByTitle('Restart AI App');
await playExpect(restartButton).toBeVisible();
await restartButton.click();
await handleConfirmationDialog(this.page, 'Podman AI Lab', true, 'Confirm');
}
async stopApp(appName: string): Promise<void> {
const row = await this.getRowForApp(appName);
const stopButton = row.getByLabel('Stop AI App');
@ -62,21 +53,20 @@ export class AiRunningAppsPage extends AILabBasePage {
await stopButton.click();
}
async openKebabMenuForApp(appName: string): Promise<Locator> {
async openKebabMenuForApp(appName: string): Promise<void> {
const row = await this.getRowForApp(appName);
const kebabMenu = row.getByLabel('kebab menu');
await playExpect(kebabMenu).toBeEnabled();
await kebabMenu.click();
return this.webview.getByTitle('Drop Down Menu Items');
}
async deleteAIApp(appName: string): Promise<void> {
const dropDownMenu = await this.openKebabMenuForApp(appName);
const deleteButton = dropDownMenu.getByTitle('Delete AI App');
await this.openKebabMenuForApp(appName);
const deleteButton = this.webview.getByRole('none').nth(2);
await playExpect(deleteButton).toBeVisible();
await deleteButton.click();
await handleConfirmationDialog(this.page, podmanAILabExtension.extensionName, true, 'Confirm');
await handleConfirmationDialog(this.page, 'Podman AI Lab', true, 'Confirm');
}
async appExists(appName: string): Promise<boolean> {
@ -92,18 +82,6 @@ export class AiRunningAppsPage extends AILabBasePage {
}
}
async getAppPort(appName: string): Promise<string> {
const appRow = await this.getRowForApp(appName);
//Update this locator after issue https://github.com/containers/podman-desktop-extension-ai-lab/issues/3113 is resolved
const portCell = appRow.getByRole('cell').nth(3);
const rawPortText = await portCell.getByText(/PORT\s\d+/).textContent();
if (!rawPortText) {
throw new Error(`Failed to extract port for app: ${appName}.`);
}
const portNumber = rawPortText.replace(/[^\d]/g, '');
return portNumber;
}
private async getAllTableRows(): Promise<Locator[]> {
return await this.webview.getByRole('row').all();
}

View File

@ -20,7 +20,7 @@ import { expect as playExpect } from '@playwright/test';
import type { Locator, Page } from '@playwright/test';
import { AILabBasePage } from './ai-lab-base-page';
import { AiModelServicePage } from './ai-lab-model-service-page';
import { handleConfirmationDialog, podmanAILabExtension } from '@podman-desktop/tests-playwright';
import { handleConfirmationDialog } from '@podman-desktop/tests-playwright';
export class AILabServiceDetailsPage extends AILabBasePage {
readonly endpointURL: Locator;
@ -29,7 +29,6 @@ export class AILabServiceDetailsPage extends AILabBasePage {
readonly codeSnippet: Locator;
readonly deleteServiceButton: Locator;
readonly stopServiceButton: Locator;
readonly startServiceButton: Locator;
constructor(page: Page, webview: Page) {
super(page, webview, 'Service details');
@ -39,7 +38,6 @@ export class AILabServiceDetailsPage extends AILabBasePage {
this.codeSnippet = this.webview.getByLabel('Code Snippet', { exact: true });
this.deleteServiceButton = this.webview.getByRole('button', { name: 'Delete service' });
this.stopServiceButton = this.webview.getByRole('button', { name: 'Stop service' });
this.startServiceButton = this.webview.getByRole('button', { name: 'Start service' });
}
async waitForLoad(): Promise<void> {
@ -49,28 +47,13 @@ export class AILabServiceDetailsPage extends AILabBasePage {
async deleteService(): Promise<AiModelServicePage> {
await playExpect(this.deleteServiceButton).toBeEnabled();
await this.deleteServiceButton.click();
await handleConfirmationDialog(this.page, podmanAILabExtension.extensionName, true, 'Confirm');
await handleConfirmationDialog(this.page, 'Podman AI Lab', true, 'Confirm');
return new AiModelServicePage(this.page, this.webview);
}
async stopService(): Promise<void> {
await playExpect(this.stopServiceButton).toBeEnabled();
await this.stopServiceButton.click();
}
async startService(): Promise<void> {
await playExpect(this.startServiceButton).toBeEnabled();
await this.startServiceButton.click();
}
async getInferenceServerPort(): Promise<string> {
const split = (await this.endpointURL.textContent())?.split(':');
const port = split ? split[split.length - 1].split('/')[0] : '';
return port;
}
async getServiceState(): Promise<string> {
const serviceState = await this.webview.getByRole('status').getAttribute('title');
return serviceState ?? 'UNKNOWN';
}
}

View File

@ -19,7 +19,7 @@
import { expect as playExpect } from '@playwright/test';
import type { Locator, Page } from '@playwright/test';
import { AILabBasePage } from './ai-lab-base-page';
import { StatusBar, handleConfirmationDialog, podmanAILabExtension, waitUntil } from '@podman-desktop/tests-playwright';
import { StatusBar, handleConfirmationDialog, waitUntil } from '@podman-desktop/tests-playwright';
import { AILabNavigationBar } from './ai-lab-navigation-bar';
export class AILabStartRecipePage extends AILabBasePage {
@ -33,7 +33,7 @@ export class AILabStartRecipePage extends AILabBasePage {
super(page, webview, 'Start recipe');
this.recipeStatus = this.webview.getByRole('status');
this.applicationDetailsPanel = this.webview.getByLabel('application details panel');
this.startRecipeButton = this.webview.getByRole('button', { name: /^Start .+ recipe$/i });
this.startRecipeButton = this.webview.getByRole('button', { name: /Start(\s+([a-z]+\s+)+)recipe/i });
this.openAIAppButton = this.applicationDetailsPanel.getByRole('button', { name: 'Open AI App' });
this.deleteAIAppButton = this.applicationDetailsPanel.getByRole('button', { name: 'Delete AI App' });
}
@ -46,7 +46,7 @@ export class AILabStartRecipePage extends AILabBasePage {
await playExpect(this.startRecipeButton).toBeEnabled();
await this.startRecipeButton.click();
try {
await handleConfirmationDialog(this.page, podmanAILabExtension.extensionName, true, 'Reset');
await handleConfirmationDialog(this.page, 'Podman AI Lab', true, 'Reset');
} catch (error) {
console.warn(`Warning: Could not reset the app, repository probably clean.\n\t${error}`);
}

View File

@ -1,38 +0,0 @@
/**********************************************************************
* Copyright (C) 2025 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
 *
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
***********************************************************************/
import { expect as playExpect } from '@playwright/test';
import type { Locator, Page } from '@playwright/test';
import { AILabBasePage } from './ai-lab-base-page';
/**
 * Page object for the "Run InstructLab as a container" webview page.
 * Exposes the start/open action buttons and the status message area.
 */
export class AILabTryInstructLabPage extends AILabBasePage {
  readonly startInstructLabButton: Locator;
  readonly openInstructLabButton: Locator;
  readonly statusMessageBox: Locator;

  constructor(page: Page, webview: Page) {
    super(page, webview, 'Run InstructLab as a container');
    // Status area first, then the two container action buttons.
    this.statusMessageBox = this.webview.getByRole('status');
    this.openInstructLabButton = this.webview.getByRole('button', { name: 'Open InstructLab container' });
    this.startInstructLabButton = this.webview.getByRole('button', { name: 'Start InstructLab container' });
  }

  /** Waits until the page heading is rendered. */
  async waitForLoad(): Promise<void> {
    await playExpect(this.heading).toBeVisible();
  }
}

View File

@ -16,32 +16,11 @@
* SPDX-License-Identifier: Apache-2.0
***********************************************************************/
import type { Locator, Page } from '@playwright/test';
import { expect as playExpect, ExtensionDetailsPage } from '@podman-desktop/tests-playwright';
import type { Page } from '@playwright/test';
import { ExtensionDetailsPage } from '@podman-desktop/tests-playwright';
/**
 * Page object for the 'Podman AI Lab extension' details page.
 * Adds Error-tab handling on top of the generic extension details page.
 */
export class AILabExtensionDetailsPage extends ExtensionDetailsPage {
  readonly errorTab: Locator;

  constructor(page: Page) {
    super(page, 'Podman AI Lab extension');
    this.errorTab = this.tabs.getByRole('button', { name: 'Error' });
  }

  /** Waits until the page heading is visible. */
  async waitForLoad(): Promise<void> {
    await playExpect(this.heading).toBeVisible();
  }

  /** Asserts that the extension status text equals the expected value. */
  async checkIsActive(statusTest: string): Promise<void> {
    await playExpect(this.status).toHaveText(statusTest);
  }

  /**
   * Fails when the Error tab is present. If it is, the stack trace is read
   * first so it can be propagated into the test failure message.
   */
  async checkForErrors(): Promise<void> {
    let stackTrace = '';
    const errorTabPresent = (await this.errorTab.count()) > 0;
    if (errorTabPresent) {
      await this.activateTab('Error');
      stackTrace = await this.errorStackTrace.innerText();
    }
    await playExpect(this.errorTab, `Error Tab was present with stackTrace: ${stackTrace}`).not.toBeVisible();
  }
}

View File

@ -1,51 +0,0 @@
/**********************************************************************
* Copyright (C) 2025 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
***********************************************************************/
import type { Locator, Page } from '@playwright/test';
import { expect as playExpect, PreferencesPage } from '@podman-desktop/tests-playwright';
/**
 * Page object for the 'Extension: AI Lab' tab in the Preferences settings,
 * wrapping the experimental GPU support checkbox.
 */
export class ExtensionAILabPreferencesPage extends PreferencesPage {
  public static readonly tabName = 'Extension: AI Lab';
  readonly heading: Locator;
  readonly experimentalGPUCheckbox: Locator;

  constructor(page: Page) {
    super(page);
    this.experimentalGPUCheckbox = this.content.getByRole('checkbox', {
      name: 'Experimental GPU support for inference servers',
    });
    this.heading = this.content.getByText(ExtensionAILabPreferencesPage.tabName, { exact: true });
  }

  /** Waits until the preferences tab heading is visible. */
  async waitForLoad(): Promise<void> {
    await playExpect(this.heading).toBeVisible();
  }

  /** Unchecks the experimental GPU checkbox and verifies the new state. */
  public async disableGPUPreference(): Promise<void> {
    const gpuCheckbox = this.experimentalGPUCheckbox;
    await gpuCheckbox.uncheck({ force: true });
    await playExpect(gpuCheckbox).not.toBeChecked();
  }

  /** Checks the experimental GPU checkbox and verifies the new state. */
  public async enableGPUPreference(): Promise<void> {
    const gpuCheckbox = this.experimentalGPUCheckbox;
    await gpuCheckbox.check({ force: true });
    await playExpect(gpuCheckbox).toBeChecked();
  }

  /** Returns whether the experimental GPU checkbox is currently checked. */
  public async isGPUPreferenceEnabled(): Promise<boolean> {
    return this.experimentalGPUCheckbox.isChecked();
  }
}

View File

@ -1,91 +0,0 @@
/**********************************************************************
* Copyright (C) 2025 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
***********************************************************************/
import type { Page } from '@playwright/test';
import type { Runner, NavigationBar, ExtensionCardPage } from '@podman-desktop/tests-playwright';
import { expect as playExpect, podmanAILabExtension } from '@podman-desktop/tests-playwright';
import type { AILabDashboardPage } from 'src/model/ai-lab-dashboard-page';
import { handleWebview } from './webviewHandler';
import { ExtensionAILabPreferencesPage } from 'src/model/preferences-extension-ai-lab-page';
import { AILabExtensionDetailsPage } from 'src/model/podman-extension-ai-lab-details-page';
/**
 * Returns to the Podman Desktop dashboard, re-enters the AI Lab webview,
 * and opens the AI Lab dashboard page.
 */
export async function reopenAILabDashboard(
  runner: Runner,
  page: Page,
  navigationBar: NavigationBar,
): Promise<AILabDashboardPage> {
  const dashboard = await navigationBar.openDashboard();
  await playExpect(dashboard.mainPage).toBeVisible();
  // Only the navigation bar from the webview handle is needed here.
  // eslint-disable-next-line @typescript-eslint/no-unused-vars, sonarjs/no-unused-vars
  const [_locPage, _webview, aiLabNavBar] = await handleWebview(runner, page, navigationBar);
  const aiLabDashboard = await aiLabNavBar.openDashboard();
  await aiLabDashboard.waitForLoad();
  return aiLabDashboard;
}
/**
 * Navigates from the Dashboard through Settings > Preferences to the
 * 'Extension: AI Lab' preferences page and waits for it to load.
 */
export async function openAILabPreferences(
  navigationBar: NavigationBar,
  page: Page,
): Promise<ExtensionAILabPreferencesPage> {
  const dashboard = await navigationBar.openDashboard();
  await playExpect(dashboard.mainPage).toBeVisible();

  const settingsBar = await navigationBar.openSettings();
  await playExpect(settingsBar.preferencesTab).toBeVisible();
  await settingsBar.expandPreferencesTab();
  // The tab should remain visible after expanding.
  await playExpect(settingsBar.preferencesTab).toBeVisible();

  await settingsBar.getPreferencesLinkLocator(ExtensionAILabPreferencesPage.tabName).click();
  const preferencesPage = new ExtensionAILabPreferencesPage(page);
  await preferencesPage.waitForLoad();
  return preferencesPage;
}
export async function openAILabExtensionDetails(navigationBar: NavigationBar): Promise<AILabExtensionDetailsPage> {
const extensionCard = await getExtensionCard(navigationBar);
const extensionDetails = await extensionCard.openExtensionDetails(podmanAILabExtension.extensionFullName);
const aiLabExtensionDetails = new AILabExtensionDetailsPage(extensionDetails.page);
await aiLabExtensionDetails.waitForLoad();
return aiLabExtensionDetails;
}
/** Fetches the installed Podman AI Lab extension card from the Extensions page. */
export async function getExtensionCard(navigationBar: NavigationBar): Promise<ExtensionCardPage> {
  const extensionsPage = await navigationBar.openExtensions();
  return extensionsPage.getInstalledExtension(
    podmanAILabExtension.extensionLabel,
    podmanAILabExtension.extensionFullLabel,
  );
}
/** Polls (up to 30 s) until the Podman AI Lab extension reports as installed. */
export async function waitForExtensionToInitialize(navigationBar: NavigationBar): Promise<void> {
  const extensionsPage = await navigationBar.openExtensions();
  await playExpect
    .poll(() => extensionsPage.extensionIsInstalled(podmanAILabExtension.extensionFullLabel), {
      timeout: 30000,
    })
    .toBeTruthy();
}
/**
 * Reads the installed Podman AI Lab extension version from the Extensions
 * page; the assertion fails the test when no version can be retrieved.
 */
export async function getExtensionVersion(navigationBar: NavigationBar): Promise<string> {
  const extensionsPage = await navigationBar.openExtensions();
  const version = await extensionsPage.getInstalledExtensionVersion(
    podmanAILabExtension.extensionLabel,
    podmanAILabExtension.extensionFullLabel,
  );
  playExpect(version, `Extension version could not be retrieved.`).toBeDefined();
  return String(version);
}

View File

@ -19,13 +19,8 @@
import type { Page } from '@playwright/test';
import type { NavigationBar, Runner } from '@podman-desktop/tests-playwright';
import { expect as playExpect } from '@podman-desktop/tests-playwright';
import { AILabNavigationBar } from 'src/model/ai-lab-navigation-bar';
export async function handleWebview(
runner: Runner,
page: Page,
navigationBar: NavigationBar,
): Promise<[Page, Page, AILabNavigationBar]> {
export async function handleWebview(runner: Runner, page: Page, navigationBar: NavigationBar): Promise<[Page, Page]> {
const AI_LAB_NAVBAR_EXTENSION_LABEL: string = 'AI Lab';
const AI_LAB_PAGE_BODY_LABEL: string = 'Webview AI Lab';
@ -48,6 +43,6 @@ export async function handleWebview(
console.log(`element is null`);
}
});
const aiLabNavigationBar = new AILabNavigationBar(mainPage, webViewPage);
return [mainPage, webViewPage, aiLabNavigationBar];
return [mainPage, webViewPage];
}