-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: start_backend.bat
More file actions
68 lines (59 loc) · 1.69 KB
/
start_backend.bat
File metadata and controls
68 lines (59 loc) · 1.69 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
@echo off
REM ============================================================
REM start_backend.bat - launches the AuraVest FastAPI backend.
REM Verifies Python and main.py are present, reports optional
REM NVIDIA GPU / Ollama availability, then runs "python main.py".
REM Exit code: 1 if Python or main.py is missing, otherwise the
REM server process's exit code.
REM ============================================================

REM Keep title/env changes local to this script's console session.
setlocal

REM Switch the console to UTF-8 so the emoji in the echo lines
REM render correctly (default code pages 437/850 show mojibake).
chcp 65001 >nul

REM Run from the script's own directory so "main.py" is found even
REM when launched via double-click or from another working directory.
cd /d "%~dp0"

title AuraVest Backend Server
echo.
echo ====================================
echo 🚀 Starting AuraVest Backend...
echo ====================================
echo.

REM Check if Python is available on PATH
python --version >nul 2>&1
if errorlevel 1 (
    echo ❌ Python not found. Please install Python 3.10+ from https://python.org/
    pause
    exit /b 1
)

REM Show Python version
echo 🐍 Python version:
python --version

REM Check if main.py exists (we already cd'd to the script directory)
if not exist "main.py" (
    echo ❌ main.py not found. Are you in the correct directory?
    pause
    exit /b 1
)

REM Check for NVIDIA GPU (optional - informational only)
nvidia-smi >nul 2>&1
if not errorlevel 1 (
    echo 🚀 NVIDIA GPU detected!
    nvidia-smi --query-gpu=name,memory.total --format=csv,noheader,nounits
) else (
    echo 💻 No NVIDIA GPU detected (CPU mode)
)

REM Check if Ollama is available (optional - AI features)
ollama --version >nul 2>&1
if not errorlevel 1 (
    echo 🤖 Ollama available
    REM "if errorlevel" reads the live errorlevel, so this nested
    REM check works inside the block without delayed expansion.
    ollama list | findstr "llama3.1:8b-instruct-q4_K_M" >nul
    if not errorlevel 1 (
        echo ✅ Llama 3.1 model available
    ) else (
        echo ⚠️ Llama 3.1 model not found. AI features may not work.
        echo    Run: ollama pull llama3.1:8b-instruct-q4_K_M
    )
) else (
    echo ⚠️ Ollama not installed. AI features will be disabled.
    echo    Download from: https://ollama.ai/download/windows
)

echo.
echo ====================================
echo 🌟 Starting AuraVest Backend Server...
echo 📊 API Documentation: http://localhost:8000/docs
echo 🔍 Health Check: http://localhost:8000/health
echo 🛑 Press Ctrl+C to stop
echo ====================================
echo.

REM Start the FastAPI server (blocks until the server exits)
python main.py

echo.
echo Backend server stopped.
pause