@@ -71,7 +71,7 @@ run() {
     echo_color " Serve granite via RamaLama run"
     exec_color " ramalama --dryrun run granite | grep --color podman"
     echo " "
-    exec_color " ramalama --dryrun run granite | grep --color quay.io.*latest "
+    exec_color " ramalama --dryrun run granite | grep --color \" quay.io[^ ]* \" "
     echo " "
     exec_color " ramalama --dryrun run granite | grep --color -- --cap-drop.*privileges"
     echo " "
@@ -88,7 +88,7 @@ run() {
 
 serve() {
     echo_color " Serve granite via RamaLama model service"
-    exec_color " ramalama serve --name granite-service -d granite"
+    exec_color " ramalama serve --port 8080 --name granite-service -d granite"
     echo " "
 
     echo_color " List RamaLama containers"
@@ -99,6 +99,23 @@ serve() {
     exec_color " podman ps "
     echo " "
 
+    echo_color " Use web browser to show interaction"
+    exec_color " firefox http://localhost:8080"
+
+    echo_color " Stop the ramalama container"
+    exec_color " ramalama stop granite-service"
+    echo " "
+
+    echo_color " Serve granite via RamaLama model service"
+    exec_color " ramalama serve --port 8085 --api llama-stack --name granite-service -d granite"
+    echo " "
+
+    # echo_color "Use web browser to show interaction"
+    # exec_color "firefox http://localhost:8085"
+
+    # echo_color "Use web browser to show interaction"
+    # exec_color "firefox http://localhost:8085/v1/openai"
+
     echo_color " Stop the ramalama container"
     exec_color " ramalama stop granite-service"
     echo " "
@@ -161,6 +178,8 @@
 
 run
 
+serve
+
 kubernetes
 
 quadlet