Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 18 additions & 0 deletions .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,24 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v4
- name: set up docker-model-plugin (for llm-model)
  # Installs Docker's apt repository and the docker-model-plugin package so
  # the llm-model provisioners can talk to the Docker Model Runner in CI.
  run: |
    # Add Docker's official GPG key:
    sudo apt-get update
    # -y keeps apt-get non-interactive; without it the step aborts in CI
    # whenever apt asks for confirmation.
    sudo apt-get install -y ca-certificates curl
    sudo install -m 0755 -d /etc/apt/keyrings
    sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc
    sudo chmod a+r /etc/apt/keyrings/docker.asc

    # Add the repository to Apt sources:
    echo \
      "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \
      $(. /etc/os-release && echo "${UBUNTU_CODENAME:-$VERSION_CODENAME}") stable" | \
      sudo tee /etc/apt/sources.list.d/docker.list > /dev/null

    # Update and Install
    sudo apt-get update
    sudo apt-get install -y docker-model-plugin
- name: setup-kind-cluster
run: |
.scripts/setup-kind-cluster.sh
Expand Down
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@ compose.yaml
manifests.yaml
.score-compose/
.score-k8s/
score.yaml
# Ignore only the score.yaml at the repository root so committed examples
# such as llm-model/score.yaml stay tracked. A leading "./" is not valid
# gitignore syntax (the pattern would match nothing); a leading "/" anchors
# the pattern to this .gitignore's directory.
/score.yaml
2 changes: 1 addition & 1 deletion .scripts/test-provisioners.sh
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ do
for provisioner in $provisioners;
do
echo "###### With ${provisioner}:"
if [[ "$provisioner" = "score-compose/10-dns-in-codespace.provisioners.yaml" || "$provisioner" = "score-k8s/10-dns-in-codespace.provisioners.yaml" ]]; then
if [[ "$provisioner" = "score-compose/10-dns-in-codespace.provisioners.yaml" || "$provisioner" = "score-k8s/10-dns-in-codespace.provisioners.yaml" || "$provisioner" = "score-compose/10-dmr-llm-model-via-curl-cmd.provisioners.yaml" ]]; then
echo "Skipped."
else
${implementation%?} init --no-sample --provisioners $provisioner
Expand Down
19 changes: 11 additions & 8 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,17 @@ score-compose init --provisioners https://raw.githubusercontent.com/score-spec/c

| File | Type | Class | Params | Outputs | Description
| ---- | ---- | ----- | ------ | ------- | -----------
| 10-redis-dapr-pubsub.provisioners.yaml | `dapr-pubsub` | (any) | (none) | `name` | Generates a Dapr PubSub `Component` pointing to a Redis `Service`.
| 10-redis-dapr-state-store.provisioners.yaml | `dapr-state-store` | (any) | (none) | `name` | Generates a Dapr StateStore `Component` pointing to a Redis `Service`.
| 10-dapr-subscription.provisioners.yaml | `dapr-subscription` | (any) | `pubsub`, `topic` | `name`, `topic` | Generates a Dapr `Subscription` on a given Topic and `PubSub`.
| 10-dns-in-codespace.provisioners.yaml | `dns` | (any) | (none) | `host`, `url` | Get the forwarded port URL in current GitHub Codespace on port `8080`.
| 10-dns-with-url.provisioners.yaml | `dns` | (any) | (none) | `host`, `url` | Outputs a `*.localhost` domain as the hostname and associated URL in http on port `8080`.
| 10-env.provisioners.yaml | `environment` | (any) | (none) | (none) | Loads environment variables from a local `.env` file.
| 10-hpa.provisioners.yaml | `horizontal-pod-autoscaler` | (any) | (none) | (none) | Generates an empty object because HPA is not supported in Docker Compose.
| 10-service.provisioners.yaml | `service` | (any) | (none) | `name` | Outputs the name of the Workload dependency if it exists in the list of Workloads.
| 10-redis-dapr-pubsub.provisioners.yaml | `dapr-pubsub` | (any) | (none) | `name` | Generates a Dapr PubSub `Component` pointing to a Redis `Service`.
| 10-redis-dapr-state-store.provisioners.yaml | `dapr-state-store` | (any) | (none) | `name` | Generates a Dapr StateStore `Component` pointing to a Redis `Service`.
| 10-dapr-subscription.provisioners.yaml | `dapr-subscription` | (any) | `pubsub`, `topic` | `name`, `topic` | Generates a Dapr `Subscription` on a given Topic and `PubSub`.
| 10-dns-in-codespace.provisioners.yaml | `dns` | (any) | (none) | `host`, `url` | Gets the forwarded port URL in current GitHub Codespace on port `8080`.
| 10-dns-with-url.provisioners.yaml | `dns` | (any) | (none) | `host`, `url` | Outputs a `*.localhost` domain as the hostname and associated URL in http on port `8080`.
| 10-env.provisioners.yaml | `environment` | (any) | (none) | (none) | Loads environment variables from a local `.env` file.
| 10-hpa.provisioners.yaml | `horizontal-pod-autoscaler` | (any) | (none) | (none) | Generates an empty object because HPA is not supported in Docker Compose.
| 10-dmr-llm-model-via-curl-cmd.provisioners.yaml | `llm-model` | (any) | `model` | `model`, `url`, `api-key` | Runs `curl` to download the model with the Docker Model Runner (DMR).
| 10-dmr-llm-model-via-curl-service.provisioners.yaml | `llm-model` | (any) | `model` | `model`, `url`, `api-key` | Generates a `curl` service downloading the model with the Docker Model Runner (DMR).
| 10-dmr-llm-model-via-service-provider.provisioners.yaml | `llm-model` | (any) | `model` | `model`, `url`, `api-key` | Generates the LLM model service via the Docker Model Runner (DMR) provider.
| 10-service.provisioners.yaml | `service` | (any) | (none) | `name` | Outputs the name of the Workload dependency if it exists in the list of Workloads.

## For `score-k8s`

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# llm-model provisioner (cmd flavour): shells out to bash, asks the local
# Docker Model Runner (DMR) to pull the requested model via curl, then prints
# the resource outputs as JSON on stdout.
- uri: cmd://bash#dmr-llm-model-via-curl
  type: llm-model
  description: Runs curl to download the model with the Docker Model Runner (DMR).
  supported_params:
  - model
  expected_outputs:
  - model
  - url
  - api-key
  args:
  - -c
  - |
    # Fail fast from the very first command; the original placed this after
    # the yq pipeline, silently ignoring yq/cat failures.
    set -eu -o pipefail
    # The provisioner input arrives as JSON on stdin.
    STDIN=$(cat)
    MODEL=$(printf '%s' "$STDIN" | yq eval -p json '.resource_params.model')
    # yq prints the literal string "null" when .resource_params.model is
    # absent, so the default must match "null" as well as the empty string.
    if [ -z "$MODEL" ] || [ "$MODEL" = "null" ]; then
      MODEL="ai/smollm2:135M-Q4_0"
    fi
    # -f turns HTTP error responses into a non-zero exit (caught by set -e);
    # -v logging goes to stderr so stdout stays reserved for the outputs.
    curl -f -v -d '{"from":"'"$MODEL"'"}' "http://localhost:12434/models/create" >&2
    # NOTE(review): 172.17.0.1 is the default Docker bridge gateway on Linux;
    # confirm it is reachable in the environments this provisioner targets.
    OUTPUTS='{"resource_outputs":{"model":"%s","url":"http://172.17.0.1:12434/engines/v1/","api-key":"not-needed"}}'
    printf "$OUTPUTS" "$MODEL"
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
# llm-model provisioner (compose-service flavour): instead of running curl
# itself, it adds a one-shot curlimages/curl service to the generated Compose
# project that asks the Docker Model Runner (DMR) to pull the model.
- uri: template://community-provisioners/dmr-llm-model-via-curl
  type: llm-model
  description: Generates a curl service downloading the model with the Docker Model Runner (DMR).
  supported_params:
  - model
  # Outputs exposed to workloads via ${resources.<name>.*} placeholders.
  # NOTE(review): 172.17.0.1 is the default Docker bridge gateway on Linux
  # hosts; confirm reachability on other Docker setups (e.g. Docker Desktop).
  outputs: |
    model: {{ .Init.model }}
    url: "http://172.17.0.1:12434/engines/v1/"
    api-key: "not-needed"
  expected_outputs:
  - model
  - url
  - api-key
  # Resolve the model once here so outputs and services agree on the value.
  init: |
    model: {{ .Params.model | default "ai/smollm2:135M-Q4_0" }}
  # One-shot helper container; it exits after requesting the model pull.
  # NOTE(review): curl has no -f flag here, so an HTTP error from DMR still
  # leaves the container exiting 0 — verify that is the intended best-effort.
  services: |
    {{ .Id }}:
      image: curlimages/curl:latest
      command: ["curl", "-v", "-d", '{"from": "{{ .Init.model }}"}', "http://172.17.0.1:12434/models/create"]
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# llm-model provisioner (provider flavour): delegates the model pull to the
# Docker Compose "model" service provider instead of calling the DMR API
# directly. NOTE(review): the `provider:` service element requires a recent
# Docker Compose with Docker Model Runner support — confirm the minimum
# version in the README.
- uri: template://community-provisioners/dmr-llm-model-via-service-provider
  type: llm-model
  description: Generates the LLM model service via the Docker Model Runner (DMR) provider.
  supported_params:
  - model
  # Outputs exposed to workloads via ${resources.<name>.*} placeholders.
  # NOTE(review): 172.17.0.1 is the default Docker bridge gateway on Linux
  # hosts; confirm reachability on other Docker setups.
  outputs: |
    model: {{ .Init.model }}
    url: "http://172.17.0.1:12434/engines/v1/"
    api-key: "not-needed"
  expected_outputs:
  - model
  - url
  - api-key
  # Resolve the model once here so outputs and services agree on the value.
  init: |
    model: {{ .Params.model | default "ai/smollm2:135M-Q4_0" }}
  services: |
    {{ .Id }}:
      provider:
        type: model
        options:
          model: {{ .Init.model }}
16 changes: 16 additions & 0 deletions llm-model/score.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# Example Score workload exercising the llm-model resource type: a busybox
# container that just prints the resolved LLM base URL every 5 seconds.
apiVersion: score.dev/v1b1
metadata:
  name: my-workload
containers:
  my-container:
    image: busybox
    command: ["/bin/sh"]
    args: ["-c", "while true; do echo $LLM_BASE_URL; sleep 5; done"]
    # Populated from the llm-model provisioner outputs (model, url).
    variables:
      LLM_MODEL_NAME: "${resources.model.model}"
      LLM_BASE_URL: "${resources.model.url}"
resources:
  model:
    type: llm-model
    # Same default the provisioners use, made explicit here for the example.
    params:
      model: ai/smollm2:135M-Q4_0