
Commit dc8a5e7

Pick Model Name from the Annotated Catalog Resource provided by the Model Catalog Bridge (#105)
Signed-off-by: Maysun J Faisal <[email protected]>
1 parent ef4b1ef commit dc8a5e7
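
In short: whenever a user picks the choose-from-the-catalog option, each template now reads the model name from the rhdh.modelcatalog.io/model-name annotation on the catalog entity returned by the fetch-model-from-catalog step (via the scaffolder's pick filter) instead of using the entity's metadata.name. Below is a minimal, purely illustrative sketch of the kind of annotated catalog Resource the Model Catalog Bridge might register; the entity name, annotation value, and spec fields are assumptions, not taken from this commit:

apiVersion: backstage.io/v1alpha1
kind: Resource
metadata:
  name: granite-31-8b-instruct          # catalog-safe entity name (illustrative), not the model's real identifier
  annotations:
    # annotation assumed to be set by the Model Catalog Bridge; this is what the templates now read
    rhdh.modelcatalog.io/model-name: ibm-granite/granite-3.1-8b-instruct
spec:
  type: ai-model                        # illustrative
  owner: user:guest                     # illustrative

Catalog entity names are constrained by Backstage naming rules and may not match the upstream model identifier; the annotation is expected to carry the exact model name the application and model server need.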

7 files changed (+11, -11 lines changed)

skeleton/template.yaml

Lines changed: 2 additions & 2 deletions
@@ -553,7 +553,7 @@ spec:
  appRunCommand: "${APP_RUN_COMMAND}"
  modelServiceContainer: ${MODEL_SERVICE_CONTAINER}
  modelServicePort: ${MODEL_SERVICE_PORT}
- customModelName: ${{ steps['fetch-model-from-catalog'].output.entity.metadata.name if parameters.modelServer === 'choose-from-the-catalog' else parameters.modelName }}
+ customModelName: ${{ steps['fetch-model-from-catalog'].output.entity.metadata.annotations | pick('rhdh.modelcatalog.io/model-name') if parameters.modelServer === 'choose-from-the-catalog' else parameters.modelName }}
  modelName: ${MODEL_NAME}
  modelSrc: ${MODEL_SRC}
  modelServerName: ${{ parameters.modelServer }}

@@ -652,7 +652,7 @@ spec:
  # for vllm
  vllmSelected: ${{ parameters.modelServer === 'vLLM' }}
  vllmModelServiceContainer: ${VLLM_CONTAINER}
- modelName: ${{ parameters.modelName if parameters.modelServer === 'Bring you own model server' else (steps['fetch-model-from-catalog'].output.entity.metadata.name if parameters.modelServer === 'choose-from-the-catalog' else '${LLM_MODEL_NAME}') }}
+ modelName: ${{ parameters.modelName if parameters.modelServer === 'Bring you own model server' else (steps['fetch-model-from-catalog'].output.entity.metadata.annotations | pick('rhdh.modelcatalog.io/model-name') if parameters.modelServer === 'choose-from-the-catalog' else '${LLM_MODEL_NAME}') }}
  modelSrc: ${MODEL_SRC}
  maxModelLength: ${LLM_MAX_MODEL_LEN}
  # SED_LLM_SERVER_END
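
For reference, and assuming the illustrative annotated entity sketched above, the rewritten vLLM modelName expression in this file branches roughly like this (the resolved values in the comments are assumptions for illustration only):

  # parameters.modelServer === 'Bring you own model server'
  modelName: ${{ parameters.modelName }}            # whatever name the user typed in
  # parameters.modelServer === 'choose-from-the-catalog'
  modelName: ibm-granite/granite-3.1-8b-instruct    # value of the rhdh.modelcatalog.io/model-name annotation
  # any other selection
  modelName: ${LLM_MODEL_NAME}                      # skeleton placeholder, presumably substituted by the repo's sed-based tooling (note the SED_LLM_SERVER_END marker)

The customModelName expression above it follows the same pattern, except that its fallback for every non-catalog selection is parameters.modelName.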

templates/audio-to-text/template.yaml

Lines changed: 1 addition & 1 deletion
@@ -330,7 +330,7 @@ spec:
  appRunCommand: "streamlit run whisper_client.py"
  modelServiceContainer: quay.io/redhat-ai-dev/whispercpp:1.8.0
  modelServicePort: 8001
- customModelName: ${{ steps['fetch-model-from-catalog'].output.entity.metadata.name if parameters.modelServer === 'choose-from-the-catalog' else parameters.modelName }}
+ customModelName: ${{ steps['fetch-model-from-catalog'].output.entity.metadata.annotations | pick('rhdh.modelcatalog.io/model-name') if parameters.modelServer === 'choose-from-the-catalog' else parameters.modelName }}
  modelName: ggerganov/whisper.cpp
  modelSrc: https://huggingface.co/ggerganov/whisper.cpp
  modelServerName: ${{ parameters.modelServer }}

templates/chatbot/template.yaml

Lines changed: 2 additions & 2 deletions
@@ -362,7 +362,7 @@ spec:
  appRunCommand: "streamlit run chatbot_ui.py"
  modelServiceContainer: quay.io/redhat-ai-dev/llamacpp_python:0.3.16
  modelServicePort: 8001
- customModelName: ${{ steps['fetch-model-from-catalog'].output.entity.metadata.name if parameters.modelServer === 'choose-from-the-catalog' else parameters.modelName }}
+ customModelName: ${{ steps['fetch-model-from-catalog'].output.entity.metadata.annotations | pick('rhdh.modelcatalog.io/model-name') if parameters.modelServer === 'choose-from-the-catalog' else parameters.modelName }}
  modelName: ibm-granite/granite-3.1-8b-instruct
  modelSrc: https://huggingface.co/ibm-granite/granite-3.1-8b-instruct
  modelServerName: ${{ parameters.modelServer }}

@@ -461,7 +461,7 @@ spec:
  # for vllm
  vllmSelected: ${{ parameters.modelServer === 'vLLM' }}
  vllmModelServiceContainer: quay.io/redhat-ai-dev/vllm-openai-ubi9:v0.11.0
- modelName: ${{ parameters.modelName if parameters.modelServer === 'Bring you own model server' else (steps['fetch-model-from-catalog'].output.entity.metadata.name if parameters.modelServer === 'choose-from-the-catalog' else 'ibm-granite/granite-3.1-8b-instruct') }}
+ modelName: ${{ parameters.modelName if parameters.modelServer === 'Bring you own model server' else (steps['fetch-model-from-catalog'].output.entity.metadata.annotations | pick('rhdh.modelcatalog.io/model-name') if parameters.modelServer === 'choose-from-the-catalog' else 'ibm-granite/granite-3.1-8b-instruct') }}
  modelSrc: https://huggingface.co/ibm-granite/granite-3.1-8b-instruct
  maxModelLength: 4096
  # SED_LLM_SERVER_END

templates/codegen/template.yaml

Lines changed: 2 additions & 2 deletions
@@ -362,7 +362,7 @@ spec:
  appRunCommand: "streamlit run codegen-app.py"
  modelServiceContainer: quay.io/redhat-ai-dev/llamacpp_python:0.3.16
  modelServicePort: 8001
- customModelName: ${{ steps['fetch-model-from-catalog'].output.entity.metadata.name if parameters.modelServer === 'choose-from-the-catalog' else parameters.modelName }}
+ customModelName: ${{ steps['fetch-model-from-catalog'].output.entity.metadata.annotations | pick('rhdh.modelcatalog.io/model-name') if parameters.modelServer === 'choose-from-the-catalog' else parameters.modelName }}
  modelName: TheBloke/Mistral-7B-Instruct-v0.2-AWQ
  modelSrc: https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.2-AWQ
  modelServerName: ${{ parameters.modelServer }}

@@ -461,7 +461,7 @@ spec:
  # for vllm
  vllmSelected: ${{ parameters.modelServer === 'vLLM' }}
  vllmModelServiceContainer: quay.io/redhat-ai-dev/vllm-openai-ubi9:v0.11.0
- modelName: ${{ parameters.modelName if parameters.modelServer === 'Bring you own model server' else (steps['fetch-model-from-catalog'].output.entity.metadata.name if parameters.modelServer === 'choose-from-the-catalog' else 'TheBloke/Mistral-7B-Instruct-v0.2-AWQ') }}
+ modelName: ${{ parameters.modelName if parameters.modelServer === 'Bring you own model server' else (steps['fetch-model-from-catalog'].output.entity.metadata.annotations | pick('rhdh.modelcatalog.io/model-name') if parameters.modelServer === 'choose-from-the-catalog' else 'TheBloke/Mistral-7B-Instruct-v0.2-AWQ') }}
  modelSrc: https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.2-AWQ
  maxModelLength: 4096
  # SED_LLM_SERVER_END

templates/model-server/template.yaml

Lines changed: 1 addition & 1 deletion
@@ -248,7 +248,7 @@ spec:
  # for vllm
  vllmSelected: ${{ parameters.modelServer === 'vLLM' }}
  vllmModelServiceContainer: quay.io/redhat-ai-dev/vllm-openai-ubi9:v0.11.0
- modelName: ${{ parameters.modelName if parameters.modelServer === 'Bring you own model server' else (steps['fetch-model-from-catalog'].output.entity.metadata.name if parameters.modelServer === 'choose-from-the-catalog' else 'ibm-granite/granite-3.1-8b-instruct') }}
+ modelName: ${{ parameters.modelName if parameters.modelServer === 'Bring you own model server' else (steps['fetch-model-from-catalog'].output.entity.metadata.annotations | pick('rhdh.modelcatalog.io/model-name') if parameters.modelServer === 'choose-from-the-catalog' else 'ibm-granite/granite-3.1-8b-instruct') }}
  modelSrc:
  maxModelLength: 4096
  # SED_LLM_SERVER_END

templates/object-detection/template.yaml

Lines changed: 1 addition & 1 deletion
@@ -330,7 +330,7 @@ spec:
  appRunCommand: "streamlit run object_detection_client.py"
  modelServiceContainer: quay.io/redhat-ai-dev/object_detection_python:latest
  modelServicePort: 8000
- customModelName: ${{ steps['fetch-model-from-catalog'].output.entity.metadata.name if parameters.modelServer === 'choose-from-the-catalog' else parameters.modelName }}
+ customModelName: ${{ steps['fetch-model-from-catalog'].output.entity.metadata.annotations | pick('rhdh.modelcatalog.io/model-name') if parameters.modelServer === 'choose-from-the-catalog' else parameters.modelName }}
  modelName: facebook/detr-resnet-101
  modelSrc: https://huggingface.co/facebook/detr-resnet-101
  modelServerName: ${{ parameters.modelServer }}

templates/rag/template.yaml

Lines changed: 2 additions & 2 deletions
@@ -362,7 +362,7 @@ spec:
  appRunCommand: "streamlit run rag_app.py"
  modelServiceContainer: quay.io/redhat-ai-dev/llamacpp_python:0.3.16
  modelServicePort: 8001
- customModelName: ${{ steps['fetch-model-from-catalog'].output.entity.metadata.name if parameters.modelServer === 'choose-from-the-catalog' else parameters.modelName }}
+ customModelName: ${{ steps['fetch-model-from-catalog'].output.entity.metadata.annotations | pick('rhdh.modelcatalog.io/model-name') if parameters.modelServer === 'choose-from-the-catalog' else parameters.modelName }}
  modelName: ibm-granite/granite-3.1-8b-instruct
  modelSrc: https://huggingface.co/ibm-granite/granite-3.1-8b-instruct
  modelServerName: ${{ parameters.modelServer }}

@@ -461,7 +461,7 @@ spec:
  # for vllm
  vllmSelected: ${{ parameters.modelServer === 'vLLM' }}
  vllmModelServiceContainer: quay.io/redhat-ai-dev/vllm-openai-ubi9:v0.11.0
- modelName: ${{ parameters.modelName if parameters.modelServer === 'Bring you own model server' else (steps['fetch-model-from-catalog'].output.entity.metadata.name if parameters.modelServer === 'choose-from-the-catalog' else 'ibm-granite/granite-3.1-8b-instruct') }}
+ modelName: ${{ parameters.modelName if parameters.modelServer === 'Bring you own model server' else (steps['fetch-model-from-catalog'].output.entity.metadata.annotations | pick('rhdh.modelcatalog.io/model-name') if parameters.modelServer === 'choose-from-the-catalog' else 'ibm-granite/granite-3.1-8b-instruct') }}
  modelSrc: https://huggingface.co/ibm-granite/granite-3.1-8b-instruct
  maxModelLength: 4096
  # SED_LLM_SERVER_END
