projects:zibaldone:vscode

Differences

This shows you the differences between two versions of the page.

Link to this comparison view

Both sides previous revision Previous revision
Next revision
Previous revision
projects:zibaldone:vscode [2025/04/15 17:53] sscipioni — projects:zibaldone:vscode [2025/09/13 21:44] (current) sscipioni
Line 155: Line 155:
  
  
-===== continue.dev =====+===== ollama ===== 
 + 
 +models: 
 +- https://ollama.com/hoangquan456/qwen3-nothink
  
 install ollama install ollama
Line 164: Line 167:
 sudo systemctl start ollama sudo systemctl start ollama
  
 +ollama pull hoangquan456/qwen3-nothink:1.7b
 ollama pull nomic-embed-text:latest ollama pull nomic-embed-text:latest
-ollama pull qwen2.5-coder:1.5b-base +ollama pull llama3.2 
-ollama pull llama3.1:8b+ 
 + 
 +# mimo and an alias 
 +ollama pull hf.co/jedisct1/MiMo-7B-RL-GGUF:Q4_K_M 
 +ollama cp hf.co/jedisct1/MiMo-7B-RL-GGUF:Q4_K_M mimo 
 + 
 +# vision 
 +ollama pull ZimaBlueAI/MiniCPM-o-2_6
 </code> </code>
  
-disable copilot github CTRL+SHIFT+p+===== void ===== 
 + 
 +<code> 
 +void --install-extension ms-python.python@2025.6.1 
 +</code> 
 + 
 +use pyright instead of pylance as python language server 
 + 
 +===== kilo code ===== 
 + 
 +disable copilot github: CTRL+SHIFT+p and search for "Chat: hide AI ..." 
 + 
 + 
 +qdrant vector store 
 +<code | docker-compose.yaml> 
 +services: 
 +  qdrant: 
 +    image: qdrant/qdrant 
 +    ports: 
 +      - '6333:6333' 
 +    volumes: 
 +      - qdrant_storage:/qdrant/storage 
 +    restart: unless-stopped 
 +volumes: 
 +  qdrant_storage: 
 +</code> 
 +===== continue.dev ===== 
 + 
 + 
 + 
 +disable copilot github: CTRL+SHIFT+p and search for "Chat: hide AI ..."
  
 install continue.dev extension install continue.dev extension
  
  
 +<code | ~/.continue/config.yaml>
 +name: Local Assistant
 +version: 1.0.0
 +schema: v1
 +models:
 +  - name: Qwen3-coder
 +    provider: ollama
 +    apiBase: http://10.1.109.10:11434
 +    model: hoangquan456/qwen3-nothink:1.7b
 +    roles:
 +      - edit
 +      - apply
 +      - autocomplete
 +      - chat
 +  - name: Nomic Embed
 +    provider: ollama
 +    apiBase: http://10.1.109.10:11434
 +    model: nomic-embed-text:latest
 +    roles:
 +      - embed
 +  - name: Autodetect
 +    provider: ollama
 +    model: AUTODETECT
 +system_message: "You are an AI assistant running locally on an 8GB GPU. Keep responses concise and efficient."
 +context_providers:
 +  - name: "file"  # Current file context
 +    params:
 +      max_chars: 2000  # Prevent overload
 +  - name: "terminal"  # Shell command output
 +    params:
 +      max_lines: 50
 +  - name: "diff"  # Git/svn changes
 +    params:
 +      max_chars: 1000
 +  - name: "github"  # PRs/issues (needs auth)
 +    params:
 +      repo: "your/repo"  # Optional filtering
 +  - name: "search"  # Codebase search
 +    params:
 +      max_results: 3
 +  - name: "url"  # Webpage context
 +    params:
 +      max_chars: 1500
 +  - name: "open"  # Recently opened files
 +    params:
 +      max_files: 5
 +</code>
  
  • projects/zibaldone/vscode.1744732431.txt.gz
  • Last modified: 2025/04/15 17:53
  • by sscipioni