diff --git a/fixes/cncf-generated/ollama/ollama-2006-rate-limit-download-speed-on-pulling-new-models.json b/fixes/cncf-generated/ollama/ollama-2006-rate-limit-download-speed-on-pulling-new-models.json new file mode 100644 index 00000000..6391d006 --- /dev/null +++ b/fixes/cncf-generated/ollama/ollama-2006-rate-limit-download-speed-on-pulling-new-models.json @@ -0,0 +1,75 @@ +{ + "version": "kc-mission-v1", + "name": "ollama-2006-rate-limit-download-speed-on-pulling-new-models", + "missionClass": "fixer", + "author": "KubeStellar Bot", + "authorGithub": "kubestellar", + "mission": { + "title": "ollama: Rate limit download speed on pulling new models", + "description": "Rate limit download speed on pulling new models. Requested by 96+ users.", + "type": "feature", + "status": "completed", + "steps": [ + { + "title": "Check current ollama setup", + "description": "Verify your ollama version and configuration:\n```bash\nollama --version\n```\nThis feature requires a working ollama installation." + }, + { + "title": "Review ollama configuration", + "description": "Review the relevant ollama configuration:\nIs there interest in implementing a rate limiter in the `pull` command? I'm open to working on this, this is the syntax I have in mind for now:\n\n`ollama pull modelname --someflagname 1024` <-- this would limit to 1024 kbps\n\nI took a look at the code" + }, + { + "title": "Apply the fix for Rate limit download speed on pulling new models", + "description": "My linux box (i5) got reliably stuck *every* *single* *time* I pulled a model... so +1 for the `--rate-limit` feature.\n\nTwo solutions, that did help me limp on for now:\n1. As soon as I started the fetch, I used `iotop` to change the `ionice` priority (using `i`) to `idle`. 
That made the issue completely go away:\n```bash\npid=`ps -ef | grep \"ollama run\" | awk '{print $2}'`\nsudo ionice -c3 -p `ps -T -p $pid | awk '{print $2}' | grep -v SPID | tr '\\r\\n' ' '`\n```" + }, + { + "title": "Verify the feature works", + "description": "Test that the new capability is working as expected.\nConfirm the feature described in \"Rate limit download speed on pulling new models\" is functioning correctly." + } + ], + "resolution": { + "summary": "My linux box (i5) got reliably stuck *every* *single* *time* I pulled a model... so +1 for the `--rate-limit` feature.\n\nTwo solutions, that did help me limp on for now:\n1. As soon as I started the fetch, I used `iotop` to change the `ionice` priority (using `i`) to `idle`. That made the issue completely go away in that, although the downloads were still fast, the linux system was quite usable.", + "codeSnippets": [ + "pid=`ps -ef | grep \"ollama run\" | awk '{print $2}'`\nsudo ionice -c3 -p `ps -T -p $pid | awk '{print $2}' | grep -v SPID | tr '\\r\\n' ' '`", + "$ sudo ethtool -s eth0 autoneg on speed 10 duplex full", + "version: '3'\nservices:\n  ollama:\n    image: ollama/ollama\n    container_name: ollama\n    ports:\n      - 11434:11434\n    restart: unless-stopped\n    labels:\n      - \"com.docker-tc.enabled=1\"\n      - \"com.docker-tc.limit=30mbit\"\n\n  docker-tc:\n    image: lukaszlach/docker-tc\n    cap_add:\n      - NET_ADMIN\n    network_mode: host\n    volumes:\n      - /var/run/docker.sock:/var/run/docker.sock\n      - /var/docker-tc:/var/docker-tc" + ] + } + }, + "metadata": { + "tags": [ + "ollama", + "community", + "llm-serving", + "feature" + ], + "cncfProjects": [ + "ollama" + ], + "targetResourceKinds": [], + "difficulty": "intermediate", + "issueTypes": [ + "feature" + ], + "maturity": "community", + "sourceUrls": { + "issue": "https://github.com/ollama/ollama/issues/2006", + "repo": "https://github.com/ollama/ollama" + }, + "reactions": 96, + "comments": 82, + "synthesizedBy": "copilot" + }, + "prerequisites": { + "tools": [ + "ollama" + ], + 
"description": "A working ollama installation or development environment." + }, + "security": { + "scannedAt": "2026-04-14T07:01:31.126Z", + "scannerVersion": "cncf-gen-3.0.0", + "sanitized": true, + "findings": [] + } +}