Visual Studio Code Editor
sudo apt install fonts-firacode
paru -S ttf-fira-code
settings.json:
{
    "editor.fontSize": 16,
    "editor.fontFamily": "Fira Code",
    "editor.fontLigatures": true,
    "window.zoomLevel": 1.5,
    "editor.tabSize": 4,
    "editor.insertSpaces": true,
    "python.formatting.provider": "black"
}
code --install-extension Angular.ng-template
code --install-extension alefragnani.Bookmarks
code --install-extension dbaeumer.vscode-eslint
code --install-extension donjayamanne.githistory
code --install-extension doublefint.pgsql
code --install-extension eg2.tslint
code --install-extension ms-python.python
on VSCodium, enable the marketplace and install the Dev Containers extension:
yay -S vscodium-bin-marketplace
first time remote containers attach:
{
    "workspaceFolder": "/app/mev_query",
    "extensions": [
        "ms-python.python",
        "ms-python.vscode-pylance"
    ],
    "settings": {
        "python.defaultInterpreterPath": "/usr/local/bin/python",
        "python.formatting.provider": "black",
        "python.linting.enabled": true,
        "python.linting.pylintEnabled": true,
        "python.linting.mypyEnabled": true,
        "editor.insertSpaces": true,
        "editor.tabSize": 4,
        "editor.detectIndentation": false,
        "editor.formatOnSave": false,
        "[python]": {
            "editor.formatOnSave": true
        },
        "python.testing.pytestPath": "pytest",
        "python.testing.pytestEnabled": true
    }
}
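With pytest enabled in the settings above, test discovery needs at least one test inside the workspace; a minimal sketch, assuming a hypothetical tests/test_smoke.py under /app/mev_query:
# tests/test_smoke.py -- hypothetical placeholder so pytest discovery finds something
def test_smoke():
    # trivial assertion; replace with real project tests
    assert 1 + 1 == 2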
remote containers attach:
edit the created configuration:
{
    "workspaceFolder": "/app",
    "extensions": [
        "christian-kohler.npm-intellisense",
        "xabikos.JavaScriptSnippets",
        "dbaeumer.vscode-eslint"
    ],
    "settings": {
    }
}
For the debugger, create a .vscode/launch.json file in the app folder (for example dnnutils). In VS Code attached to the docker container, the debugger is then available from the 'Run and Debug' view:
{
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Python: Attach",
            "type": "python",
            "request": "attach",
            "connect": {
                "host": "localhost",
                "port": 5678
            }
        }
    ]
}
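For this attach configuration to connect, the process inside the container has to be listening for the debugger; a minimal sketch using debugpy (port 5678 matches the config above, the entry-point file name is an assumption):
# debug_entry.py -- hypothetical entry point started inside the container
import debugpy

# listen on all interfaces so the attach from VS Code can reach the container
debugpy.listen(("0.0.0.0", 5678))
print("Waiting for debugger attach on port 5678 ...")
debugpy.wait_for_client()  # block until VS Code attaches

# ... rest of the application starts here ...
The same can be achieved without code changes by starting the app with python -m debugpy --listen 0.0.0.0:5678 --wait-for-client <script>.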
keybindings.json:
[
{ "key": "ctrl+s", "command": "workbench.action.files.saveAll" },
{ "key": "alt+left", "command": "workbench.action.navigateBack" },
{ "key": "alt+right", "command": "workbench.action.navigateForward" }
]
models:
- https://ollama.com/hoangquan456/qwen3-nothink
install ollama
yay -S ollama
sudo systemctl enable ollama
sudo systemctl start ollama
ollama pull hoangquan456/qwen3-nothink:1.7b
ollama pull nomic-embed-text:latest
ollama pull llama3.2
# pull mimo and create an alias
ollama pull hf.co/jedisct1/MiMo-7B-RL-GGUF:Q4_K_M
ollama cp hf.co/jedisct1/MiMo-7B-RL-GGUF:Q4_K_M mimo
# vision
ollama pull ZimaBlueAI/MiniCPM-o-2_6
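To check that the pulled models answer, Ollama's HTTP API on the default port 11434 can be queried; a minimal sketch in Python (the mimo alias from above and the prompt are just examples):
# check_ollama.py -- quick sanity check against the local Ollama API
import json
import urllib.request

payload = {
    "model": "mimo",  # alias created with `ollama cp` above
    "prompt": "Say hello in one short sentence.",
    "stream": False,  # ask for a single JSON object instead of a stream
}
req = urllib.request.Request(
    "http://localhost:11434/api/generate",
    data=json.dumps(payload).encode(),
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as resp:
    print(json.loads(resp.read())["response"])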
Void editor:
void --install-extension ms-python.python@2025.6.1
use pyright instead of pylance as the Python language server
disable GitHub Copilot: CTRL+SHIFT+P and search for “Chat: hide AI …”
qdrant vector store
services:
  qdrant:
    image: qdrant/qdrant
    ports:
      - '6333:6333'
    volumes:
      - qdrant_storage:/qdrant/storage
    restart: unless-stopped
volumes:
  qdrant_storage:
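Once the container is up, a short Python check confirms Qdrant answers on port 6333; a sketch assuming the qdrant-client package is installed:
# check_qdrant.py -- verify the Qdrant container from the compose file above is reachable
from qdrant_client import QdrantClient

client = QdrantClient(url="http://localhost:6333")
# list existing collections; an empty list on a fresh instance is expected
print(client.get_collections())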
install the continue.dev extension and configure it (config.yaml):
name: Local Assistant
version: 1.0.0
schema: v1
models:
  - name: Qwen3-coder
    provider: ollama
    apiBase: http://10.1.109.10:11434
    model: hoangquan456/qwen3-nothink:1.7b
    roles:
      - edit
      - apply
      - autocomplete
      - chat
  - name: Nomic Embed
    provider: ollama
    apiBase: http://10.1.109.10:11434
    model: nomic-embed-text:latest
    roles:
      - embed
  - name: Autodetect
    provider: ollama
    model: AUTODETECT
system_message: "You are an AI assistant running locally on an 8GB GPU. Keep responses concise and efficient."
context_providers:
  - name: "file"        # Current file context
    params:
      max_chars: 2000   # Prevent overload
  - name: "terminal"    # Shell command output
    params:
      max_lines: 50
  - name: "diff"        # Git/svn changes
    params:
      max_chars: 1000
  - name: "github"      # PRs/issues (needs auth)
    params:
      repo: "your/repo" # Optional filtering
  - name: "search"      # Codebase search
    params:
      max_results: 3
  - name: "url"         # Webpage context
    params:
      max_chars: 1500
  - name: "open"        # Recently opened files
    params:
      max_files: 5