#!/usr/bin/env bash

# Copyright (c) Meta Platforms, Inc. and affiliates.
- # This software may be used and distributed according to the terms of the Llama 2 Community License Agreement.
+ # This software may be used and distributed according to the terms of the Llama 3.1 Community License Agreement.

set -e

read -p "Enter the URL from email: " PRESIGNED_URL
- echo ""
- read -p "Enter the list of models to download without spaces (7B,13B,70B,7B-chat,13B-chat,70B-chat), or press Enter for all: " MODEL_SIZE
+ ALL_MODELS_LIST="meta-llama-3.1-405b,meta-llama-3.1-70b,meta-llama-3.1-8b,meta-llama-guard-3-8b,prompt-guard"
+ printf "\n **** Model list ***\n"
+ for MODEL in ${ALL_MODELS_LIST//,/ }
+ do
+     printf " - ${MODEL}\n"
+ done
+ read -p "Choose the model to download: " SELECTED_MODEL
+ printf "\n Selected model: ${SELECTED_MODEL}\n"
+
+ SELECTED_MODELS=""
+ if [[ $SELECTED_MODEL == "meta-llama-3.1-405b" ]]; then
+     MODEL_LIST="meta-llama-3.1-405b-instruct-mp16,meta-llama-3.1-405b-instruct-mp8,meta-llama-3.1-405b-instruct-fp8,meta-llama-3.1-405b-mp16,meta-llama-3.1-405b-mp8,meta-llama-3.1-405b-fp8"
+ elif [[ $SELECTED_MODEL == "meta-llama-3.1-70b" ]]; then
+     MODEL_LIST="meta-llama-3.1-70b-instruct,meta-llama-3.1-70b"
+ elif [[ $SELECTED_MODEL == "meta-llama-3.1-8b" ]]; then
+     MODEL_LIST="meta-llama-3.1-8b-instruct,meta-llama-3.1-8b"
+ elif [[ $SELECTED_MODEL == "meta-llama-guard-3-8b" ]]; then
+     MODEL_LIST="meta-llama-guard-3-8b-int8-hf,meta-llama-guard-3-8b"
+ elif [[ $SELECTED_MODEL == "prompt-guard" ]]; then
+     SELECTED_MODELS="prompt-guard"
+     MODEL_LIST=""
+ fi
+
+ if [[ -z "$SELECTED_MODELS" ]]; then
+     printf "\n **** Available models to download: ***\n"
+     for MODEL in ${MODEL_LIST//,/ }
+     do
+         printf " - ${MODEL}\n"
+     done
+     read -p "Enter the list of models to download without spaces or press Enter for all: " SELECTED_MODELS
+ fi
+
TARGET_FOLDER="."             # where all files should end up
mkdir -p ${TARGET_FOLDER}

- if [[ $MODEL_SIZE == "" ]]; then
-     MODEL_SIZE="7B,13B,70B,7B-chat,13B-chat,70B-chat"
+ if [[ $SELECTED_MODELS == "" ]]; then
+     SELECTED_MODELS=${MODEL_LIST}
fi

- echo "Downloading LICENSE and Acceptable Usage Policy"
+ if [[ $SELECTED_MODEL == "meta-llama-3.1-405b" ]]; then
+     printf "\nModel requires significant storage and computational resources, occupying approximately 750GB of disk storage space and necessitating two nodes on MP16 for inferencing.\n"
+     read -p "Enter Y to continue: " ACK
+     if [[ $ACK != 'Y' ]]; then
+         printf "Exiting..."
+         exit 1
+     fi
+ fi
+
+ printf "Downloading LICENSE and Acceptable Usage Policy\n"
wget --continue ${PRESIGNED_URL/'*'/"LICENSE"} -O ${TARGET_FOLDER}"/LICENSE"
wget --continue ${PRESIGNED_URL/'*'/"USE_POLICY.md"} -O ${TARGET_FOLDER}"/USE_POLICY.md"

- echo "Downloading tokenizer"
- wget --continue ${PRESIGNED_URL/'*'/"tokenizer.model"} -O ${TARGET_FOLDER}"/tokenizer.model"
- wget --continue ${PRESIGNED_URL/'*'/"tokenizer_checklist.chk"} -O ${TARGET_FOLDER}"/tokenizer_checklist.chk"
- CPU_ARCH=$(uname -m)
- if [ "$CPU_ARCH" = "arm64" ]; then
-     (cd ${TARGET_FOLDER} && md5 tokenizer_checklist.chk)
- else
-     (cd ${TARGET_FOLDER} && md5sum -c tokenizer_checklist.chk)
- fi
-
- for m in ${MODEL_SIZE//,/ }
+ for m in ${SELECTED_MODELS//,/ }
do
-     if [[ $m == "7B" ]]; then
-         SHARD=0
-         MODEL_PATH="llama-2-7b"
-     elif [[ $m == "7B-chat" ]]; then
-         SHARD=0
-         MODEL_PATH="llama-2-7b-chat"
-     elif [[ $m == "13B" ]]; then
-         SHARD=1
-         MODEL_PATH="llama-2-13b"
-     elif [[ $m == "13B-chat" ]]; then
-         SHARD=1
-         MODEL_PATH="llama-2-13b-chat"
-     elif [[ $m == "70B" ]]; then
-         SHARD=7
-         MODEL_PATH="llama-2-70b"
-     elif [[ $m == "70B-chat" ]]; then
-         SHARD=7
-         MODEL_PATH="llama-2-70b-chat"
+
+     ADDITIONAL_FILES=""
+     TOKENIZER_MODEL=1
+     if [[ $m == "meta-llama-3.1-405b-instruct-mp16" ]]; then
+         PTH_FILE_COUNT=15
+         MODEL_PATH="Meta-Llama-3.1-405B-Instruct-MP16"
+     elif [[ $m == "meta-llama-3.1-405b-instruct-mp8" ]]; then
+         PTH_FILE_COUNT=7
+         MODEL_PATH="Meta-Llama-3.1-405B-Instruct-MP8"
+     elif [[ $m == "meta-llama-3.1-405b-instruct-fp8" ]]; then
+         PTH_FILE_COUNT=7
+         MODEL_PATH="Meta-Llama-3.1-405B-Instruct"
+         ADDITIONAL_FILES="fp8_scales_0.pt,fp8_scales_1.pt,fp8_scales_2.pt,fp8_scales_3.pt,fp8_scales_4.pt,fp8_scales_5.pt,fp8_scales_6.pt,fp8_scales_7.pt"
+     elif [[ $m == "meta-llama-3.1-405b-mp16" ]]; then
+         PTH_FILE_COUNT=15
+         MODEL_PATH="Meta-Llama-3.1-405B-MP16"
+     elif [[ $m == "meta-llama-3.1-405b-mp8" ]]; then
+         PTH_FILE_COUNT=7
+         MODEL_PATH="Meta-Llama-3.1-405B-MP8"
+     elif [[ $m == "meta-llama-3.1-405b-fp8" ]]; then
+         PTH_FILE_COUNT=7
+         MODEL_PATH="Meta-Llama-3.1-405B"
+     elif [[ $m == "meta-llama-3.1-70b-instruct" ]]; then
+         PTH_FILE_COUNT=7
+         MODEL_PATH="Meta-Llama-3.1-70B-Instruct"
+     elif [[ $m == "meta-llama-3.1-70b" ]]; then
+         PTH_FILE_COUNT=7
+         MODEL_PATH="Meta-Llama-3.1-70B"
+     elif [[ $m == "meta-llama-3.1-8b-instruct" ]]; then
+         PTH_FILE_COUNT=0
+         MODEL_PATH="Meta-Llama-3.1-8B-Instruct"
+     elif [[ $m == "meta-llama-3.1-8b" ]]; then
+         PTH_FILE_COUNT=0
+         MODEL_PATH="Meta-Llama-3.1-8B"
+     elif [[ $m == "meta-llama-guard-3-8b-int8-hf" ]]; then
+         PTH_FILE_COUNT=-1
+         MODEL_PATH="Meta-Llama-Guard-3-8B-INT8-HF"
+         ADDITIONAL_FILES="generation_config.json,model-00001-of-00002.safetensors,model-00002-of-00002.safetensors,model.safetensors.index.json,special_tokens_map.json,tokenizer_config.json,tokenizer.json"
+         TOKENIZER_MODEL=0
+     elif [[ $m == "meta-llama-guard-3-8b" ]]; then
+         PTH_FILE_COUNT=0
+         MODEL_PATH="Meta-Llama-Guard-3-8B"
+     elif [[ $m == "prompt-guard" ]]; then
+         PTH_FILE_COUNT=-1
+         MODEL_PATH="Prompt-Guard"
+         ADDITIONAL_FILES="model.safetensors,special_tokens_map.json,tokenizer_config.json,tokenizer.json"
+         TOKENIZER_MODEL=0
    fi

-     echo "Downloading ${MODEL_PATH}"
+     printf "\n*** Downloading ${MODEL_PATH} ***\n"
    mkdir -p ${TARGET_FOLDER}"/${MODEL_PATH}"

-     for s in $(seq -f "0%g" 0 ${SHARD})
+     if [[ $TOKENIZER_MODEL == 1 ]]; then
+         printf "Downloading tokenizer\n"
+         wget --continue ${PRESIGNED_URL/'*'/"${MODEL_PATH}/tokenizer.model"} -O ${TARGET_FOLDER}"/${MODEL_PATH}/tokenizer.model"
+     fi
+
+
+     if [[ $PTH_FILE_COUNT -ge 0 ]]; then
+         for s in $(seq -f "0%g" 0 ${PTH_FILE_COUNT})
+         do
+             printf "Downloading consolidated.${s}.pth\n"
+             wget --continue ${PRESIGNED_URL/'*'/"${MODEL_PATH}/consolidated.${s}.pth"} -O ${TARGET_FOLDER}"/${MODEL_PATH}/consolidated.${s}.pth"
+         done
+     fi
+
+     for ADDITIONAL_FILE in ${ADDITIONAL_FILES//,/ }
    do
-         wget --continue ${PRESIGNED_URL/'*'/"${MODEL_PATH}/consolidated.${s}.pth"} -O ${TARGET_FOLDER}"/${MODEL_PATH}/consolidated.${s}.pth"
+         printf "Downloading $ADDITIONAL_FILE...\n"
+         wget --continue ${PRESIGNED_URL/'*'/"${MODEL_PATH}/${ADDITIONAL_FILE}"} -O ${TARGET_FOLDER}"/${MODEL_PATH}/${ADDITIONAL_FILE}"
    done

-     wget --continue ${PRESIGNED_URL/'*'/"${MODEL_PATH}/params.json"} -O ${TARGET_FOLDER}"/${MODEL_PATH}/params.json"
-     wget --continue ${PRESIGNED_URL/'*'/"${MODEL_PATH}/checklist.chk"} -O ${TARGET_FOLDER}"/${MODEL_PATH}/checklist.chk"
-     echo "Checking checksums"
-     if [ "$CPU_ARCH" = "arm64" ]; then
-         (cd ${TARGET_FOLDER}"/${MODEL_PATH}" && md5 checklist.chk)
-     else
-         (cd ${TARGET_FOLDER}"/${MODEL_PATH}" && md5sum -c checklist.chk)
+     if [[ $m != "prompt-guard" && $m != "meta-llama-guard-3-8b-int8-hf" ]]; then
+         printf "Downloading params.json...\n"
+         wget --continue ${PRESIGNED_URL/'*'/"${MODEL_PATH}/params.json"} -O ${TARGET_FOLDER}"/${MODEL_PATH}/params.json"
    fi
- done
+ done
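
Usage sketch, assuming the updated script is saved locally as download.sh and wget is available; the prompts shown are the script's own:

    bash download.sh
    # Enter the URL from email:                 -> paste the presigned URL from Meta's download email
    # Choose the model to download:             -> e.g. meta-llama-3.1-8b
    # Enter the list of models to download
    # without spaces or press Enter for all:    -> e.g. meta-llama-3.1-8b-instruct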