-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy path: 05-install-ai.sh
More file actions
executable file
·78 lines (71 loc) · 2.16 KB
/
05-install-ai.sh
File metadata and controls
executable file
·78 lines (71 loc) · 2.16 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
#!/bin/bash
# Configure the Lumin AI stack: dedicated ollama service user, the
# Modelfile defining the "lumin" persona, and (below) the systemd units
# that reassemble split models and run the Ollama server.
set -e
echo "--> Configuring AI..."
# Create the ollama system user only if it does not already exist, so the
# script is safe to re-run: a bare useradd would fail on an existing user
# and abort the whole script under set -e.
if ! id -u ollama >/dev/null 2>&1; then
  useradd -r -s /bin/false -m -d /usr/share/ollama ollama
fi
chown -R ollama:ollama /usr/share/ollama
mkdir -p /usr/local/share/lumin/ai
# Modelfile: layers the Lumin system prompt on top of the llama3 base model.
MODELFILE="/usr/local/share/lumin/ai/Modelfile"
echo "FROM llama3" > "${MODELFILE}"
echo 'SYSTEM """You are Lumin, the integrated assistant for the LuminOS operating system. You are calm, clear, kind, and respectful."""' >> "${MODELFILE}"
# World-readable but immutable to non-root: only root may replace the persona.
chown root:root "${MODELFILE}"
chmod 444 "${MODELFILE}"
# Reassemble Script
# Models too large for the distribution medium ship as <file>.partaa,
# <file>.partab, ... next to a <file>.is_split marker. This helper joins
# them back together at boot, before the ollama server loads them.
# (Heredoc delimiter is quoted, so nothing below expands at install time.)
cat > /usr/local/bin/luminos-reassemble.sh << "EOF"
#!/bin/bash
# Find files marked as split
while IFS= read -r -d '' marker; do
  ORIG_FILE="${marker%.is_split}"
  if [ ! -f "$ORIG_FILE" ]; then
    # Guard: if the marker exists but no parts do, skip with a warning.
    # Without this check, the redirection below would still create an
    # empty (corrupt) model file even though cat fails on the literal glob.
    if ! compgen -G "${ORIG_FILE}.part*" > /dev/null; then
      echo "WARNING: $marker present but no ${ORIG_FILE}.part* files found, skipping" >&2
      continue
    fi
    echo "Reassembling $ORIG_FILE..."
    # Combine parts .partaa, .partab... (glob expansion sorts
    # lexicographically, which matches split's suffix order)
    cat "${ORIG_FILE}.part"* > "$ORIG_FILE"
    chown ollama:ollama "$ORIG_FILE"
  fi
done < <(find /usr/share/ollama/.ollama -name "*.is_split" -print0)
EOF
chmod +x /usr/local/bin/luminos-reassemble.sh
# Reassemble Service
# Oneshot unit that runs luminos-reassemble.sh before ollama starts, so
# split model files are whole by the time the API server loads them.
# NOTE(review): the ordering is also declared from the other side
# (ollama.service carries After=lumin-reassemble.service) — redundant
# but harmless.
cat > /etc/systemd/system/lumin-reassemble.service << "SERVICE"
[Unit]
Description=Reassemble AI Models
Before=ollama.service
[Service]
Type=oneshot
ExecStart=/usr/local/bin/luminos-reassemble.sh
[Install]
WantedBy=multi-user.target
SERVICE
# Ollama Service
# Runs the Ollama API server as the unprivileged ollama user, listening
# on all interfaces with models under /usr/share/ollama/.ollama/models.
cat > /etc/systemd/system/ollama.service << "SERVICE"
[Unit]
Description=Ollama API Server
# After= only orders units that are already in the transaction; Wants= is
# required to actually pull network-online.target in (systemd.unit(5)).
Wants=network-online.target
After=network-online.target lumin-reassemble.service
[Service]
ExecStart=/usr/local/bin/ollama serve
User=ollama
Group=ollama
Restart=always
Environment="OLLAMA_HOST=0.0.0.0"
Environment="OLLAMA_MODELS=/usr/share/ollama/.ollama/models"
[Install]
WantedBy=default.target
SERVICE
# Lumin Setup Service
# One-time unit, guarded by the /var/lib/lumin-setup-done stamp file
# (ConditionPathExists=!...), that builds the "lumin" model from the
# Modelfile once ollama is running.
# NOTE(review): ExecStartPre sleeps a fixed 10s as a crude wait for the
# ollama API to come up — race-prone on slow hardware; consider polling
# the API endpoint instead. Confirm before changing.
cat > /etc/systemd/system/lumin-setup.service << "SERVICE"
[Unit]
Description=Init Lumin AI
After=ollama.service
Requires=ollama.service
ConditionPathExists=!/var/lib/lumin-setup-done
[Service]
Type=oneshot
ExecStartPre=/bin/sleep 10
ExecStart=/usr/local/bin/ollama create lumin -f /usr/local/share/lumin/ai/Modelfile
ExecStartPost=/usr/bin/touch /var/lib/lumin-setup-done
[Install]
WantedBy=multi-user.target
SERVICE
# Enable all three units, in dependency order, so they start on boot.
for unit in lumin-reassemble ollama lumin-setup; do
  systemctl enable "${unit}.service"
done