pandaall committed on
Commit
9f18d55
·
verified ·
1 Parent(s): 10a1fd2

Upload hfd.sh

Browse files
Files changed (1) hide show
  1. hfd.sh +328 -0
hfd.sh ADDED
@@ -0,0 +1,328 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env bash
2
+ # Color definitions
3
+ RED='\033[0;31m'; GREEN='\033[0;32m'; YELLOW='\033[1;33m'; NC='\033[0m' # No Color
4
+
5
+ trap 'printf "${YELLOW}\nDownload interrupted. You can resume by re-running the command.\n${NC}"; exit 1' INT
6
+
7
# Print usage information and exit with status 1.
# Invoked for -h/--help, a missing REPO_ID, or any unrecognized option.
# Fix: removed duplicated word in the --local-dir description ("is is").
display_help() {
    cat << EOF
Usage:
hfd <REPO_ID> [--include include_pattern1 include_pattern2 ...] [--exclude exclude_pattern1 exclude_pattern2 ...] [--hf_username username] [--hf_token token] [--tool aria2c|wget] [-x threads] [-j jobs] [--dataset] [--local-dir path] [--revision rev]

Description:
Downloads a model or dataset from Hugging Face using the provided repo ID.

Arguments:
REPO_ID The Hugging Face repo ID (Required)
Format: 'org_name/repo_name' or legacy format (e.g., gpt2)
Options:
include/exclude_pattern The patterns to match against file path, supports wildcard characters.
e.g., '--exclude *.safetensor *.md', '--include vae/*'.
--include (Optional) Patterns to include files for downloading (supports multiple patterns).
--exclude (Optional) Patterns to exclude files from downloading (supports multiple patterns).
--hf_username (Optional) Hugging Face username for authentication (not email).
--hf_token (Optional) Hugging Face token for authentication.
--tool (Optional) Download tool to use: aria2c (default) or wget.
-x (Optional) Number of download threads for aria2c (default: 4).
-j (Optional) Number of concurrent downloads for aria2c (default: 5).
--dataset (Optional) Flag to indicate downloading a dataset.
--local-dir (Optional) Directory path to store the downloaded data.
Defaults to the current directory with a subdirectory named 'repo_name'
if REPO_ID is composed of 'org_name/repo_name'.
--revision (Optional) Model/Dataset revision to download (default: main).

Example:
hfd gpt2
hfd bigscience/bloom-560m --exclude *.safetensors
hfd meta-llama/Llama-2-7b --hf_username myuser --hf_token mytoken -x 4
hfd lavita/medical-qa-shared-task-v1-toy --dataset
hfd bartowski/Phi-3.5-mini-instruct-exl2 --revision 5_0
EOF
    exit 1
}
43
+
44
# Show help and exit when no repo is given or -h/--help is requested.
[[ -z "$1" || "$1" =~ ^-h || "$1" =~ ^--help ]] && display_help

# First positional argument is the repo ID; the rest are options.
REPO_ID=$1
shift

# Default values
TOOL="aria2c"        # download backend: aria2c or wget
THREADS=4            # aria2c connections per file (-x)
CONCURRENT=5         # aria2c files downloaded in parallel (-j)
HF_ENDPOINT=${HF_ENDPOINT:-"https://huggingface.co"}  # overridable mirror endpoint
INCLUDE_PATTERNS=()
EXCLUDE_PATTERNS=()
REVISION="main"
57
+
58
# Validate that a numeric option value is a positive integer within range.
# Arguments: $1 - option label used in the error message
#            $2 - value to validate
#            $3 - maximum allowed value
# Exits 1 with a colored error message when validation fails.
# Fix: the label/limit are passed as printf arguments (%s), not interpolated
# into the format string, so a stray '%' can no longer break the output.
validate_number() {
    [[ "$2" =~ ^[1-9][0-9]*$ && "$2" -le "$3" ]] || { printf "%b[Error] %s must be 1-%s%b\n" "$RED" "$1" "$3" "$NC"; exit 1; }
}
61
+
62
# Argument parsing
while [[ $# -gt 0 ]]; do
    case $1 in
        # --include/--exclude greedily consume following words until the next
        # option-looking token (anything starting with "--" or a short flag).
        --include) shift; while [[ $# -gt 0 && ! ($1 =~ ^--) && ! ($1 =~ ^-[^-]) ]]; do INCLUDE_PATTERNS+=("$1"); shift; done ;;
        --exclude) shift; while [[ $# -gt 0 && ! ($1 =~ ^--) && ! ($1 =~ ^-[^-]) ]]; do EXCLUDE_PATTERNS+=("$1"); shift; done ;;
        --hf_username) HF_USERNAME="$2"; shift 2 ;;
        --hf_token) HF_TOKEN="$2"; shift 2 ;;
        --tool)
            # Only aria2c and wget are supported backends.
            case $2 in
                aria2c|wget)
                    TOOL="$2"
                    ;;
                *)
                    printf "%b[Error] Invalid tool. Use 'aria2c' or 'wget'.%b\n" "$RED" "$NC"
                    exit 1
                    ;;
            esac
            shift 2
            ;;
        # Numeric options are validated to the range 1-10 (exits on failure).
        -x) validate_number "threads (-x)" "$2" 10; THREADS="$2"; shift 2 ;;
        -j) validate_number "concurrent downloads (-j)" "$2" 10; CONCURRENT="$2"; shift 2 ;;
        --dataset) DATASET=1; shift ;;
        --local-dir) LOCAL_DIR="$2"; shift 2 ;;
        --revision) REVISION="$2"; shift 2 ;;
        # Any unrecognized token (including stray positionals) prints help.
        *) display_help ;;
    esac
done
89
+
90
# Generate current command string
# Builds a canonical one-line snapshot of every download-relevant setting.
# It is compared against the previous run's snapshot to decide whether the
# cached URL list must be regenerated.
generate_command_string() {
    local cmd_string="REPO_ID=$REPO_ID"
    cmd_string+=" TOOL=$TOOL"
    cmd_string+=" INCLUDE_PATTERNS=${INCLUDE_PATTERNS[*]}"
    cmd_string+=" EXCLUDE_PATTERNS=${EXCLUDE_PATTERNS[*]}"
    cmd_string+=" DATASET=${DATASET:-0}"
    cmd_string+=" HF_USERNAME=${HF_USERNAME:-}"
    cmd_string+=" HF_TOKEN=${HF_TOKEN:-}"
    # Fix: this line previously emitted a duplicate "HF_TOKEN=" key, so a
    # changed HF_ENDPOINT never invalidated the cached file list.
    cmd_string+=" HF_ENDPOINT=${HF_ENDPOINT:-}"
    cmd_string+=" REVISION=$REVISION"
    echo "$cmd_string"
}
103
+
104
+ # Check if aria2, wget, curl are installed
105
# Check if aria2, wget, curl are installed
# Verify a required external tool exists on PATH; exit 1 otherwise.
# Arguments: $1 - command name to look up
# Fix: "$1" is now quoted so a command name containing spaces or glob
# characters cannot be word-split before reaching `command -v`.
check_command() {
    if ! command -v "$1" &>/dev/null; then
        printf "%b%s is not installed. Please install it first.%b\n" "$RED" "$1" "$NC"
        exit 1
    fi
}
111
+
112
+ check_command curl; check_command "$TOOL"
113
+
114
+ LOCAL_DIR="${LOCAL_DIR:-${REPO_ID#*/}}"
115
+ mkdir -p "$LOCAL_DIR/.hfd"
116
+
117
# Pick API paths and the wget --cut-dirs depth by repo type.  Dataset URLs
# carry an extra "datasets/" prefix, hence one more path component to strip
# when mirroring files locally.
case "$DATASET" in
    1)
        METADATA_API_PATH="datasets/$REPO_ID"
        DOWNLOAD_API_PATH="datasets/$REPO_ID"
        CUT_DIRS=5
        ;;
    *)
        METADATA_API_PATH="models/$REPO_ID"
        DOWNLOAD_API_PATH="$REPO_ID"
        CUT_DIRS=4
        ;;
esac
126
+
127
# Build the metadata API URL; non-default revisions are addressed through
# the /revision/<rev> path segment.
[[ "$REVISION" == "main" ]] || METADATA_API_PATH+="/revision/$REVISION"
API_URL="$HF_ENDPOINT/api/$METADATA_API_PATH"

# Cached copy of the repo metadata JSON, reused on resumed runs.
METADATA_FILE="$LOCAL_DIR/.hfd/repo_metadata.json"
134
+
135
# Fetch and save metadata
# Downloads repo metadata from $API_URL into $METADATA_FILE (adding a bearer
# header when HF_TOKEN is set).  On HTTP 200 the JSON body is echoed to
# stdout; otherwise the partial cache file is removed and the script exits 1.
# Fixes: the rm target is quoted (and -f -- guards against odd names), and
# the URL/status/body are passed as printf arguments instead of being
# interpolated into the format string.
fetch_and_save_metadata() {
    status_code=$(curl -L -s -w "%{http_code}" -o "$METADATA_FILE" ${HF_TOKEN:+-H "Authorization: Bearer $HF_TOKEN"} "$API_URL")
    RESPONSE=$(cat "$METADATA_FILE")
    if [ "$status_code" -eq 200 ]; then
        printf "%s\n" "$RESPONSE"
    else
        printf "%b[Error] Failed to fetch metadata from %s. HTTP status code: %s.%b\n%s\n" "$RED" "$API_URL" "$status_code" "$NC" "$RESPONSE" >&2
        rm -f -- "$METADATA_FILE"
        exit 1
    fi
}
147
+
148
# Abort when the repository is gated but no credentials were supplied.
# Arguments: $1 - repo metadata JSON.
# Detection uses jq when available ('gated' is anything other than false);
# otherwise a grep heuristic matches any "gated" value not starting with 'f'.
check_authentication() {
    local meta="$1"
    local gated_repo=0
    if command -v jq &>/dev/null; then
        [[ "$(echo "$meta" | jq -r '.gated // false')" != "false" ]] && gated_repo=1
    else
        echo "$meta" | grep -q '"gated":[^f]' && gated_repo=1
    fi
    if [[ "$gated_repo" == 1 && ( -z "$HF_TOKEN" || -z "$HF_USERNAME" ) ]]; then
        printf "${RED}The repository requires authentication, but --hf_username and --hf_token is not passed. Please get token from https://huggingface.co/settings/tokens.\nExiting.\n${NC}"
        exit 1
    fi
}
164
+
165
# Use the cached metadata when present; otherwise fetch and cache it.
# Either way, refuse to continue if the repo is gated and no credentials
# were provided.
if [[ ! -f "$METADATA_FILE" ]]; then
    printf "%bFetching repo metadata...%b\n" "$YELLOW" "$NC"
    RESPONSE=$(fetch_and_save_metadata) || exit 1
    check_authentication "$RESPONSE"
else
    printf "%bUsing cached metadata: $METADATA_FILE%b\n" "$GREEN" "$NC"
    RESPONSE=$(cat "$METADATA_FILE")
    check_authentication "$RESPONSE"
fi
174
+
175
# Decide whether the cached URL list must be rebuilt.
# Returns 0 (regenerate) when the list file or the last-command record is
# missing, or when the effective command line differs from the recorded one;
# in every such case the current command string is (re)recorded first.
# Returns 1 when the cached list can be reused as-is.
# Globals read: LOCAL_DIR, fileslist_file; calls generate_command_string.
should_regenerate_filelist() {
    local cmd_file="$LOCAL_DIR/.hfd/last_download_command"
    local current
    current=$(generate_command_string)

    if [[ ! -f "$LOCAL_DIR/$fileslist_file" || ! -f "$cmd_file" || "$current" != "$(cat "$cmd_file")" ]]; then
        echo "$current" > "$cmd_file"
        return 0
    fi

    return 1
}
200
+
201
+ fileslist_file=".hfd/${TOOL}_urls.txt"
202
+
203
if should_regenerate_filelist; then
    # Remove existing file list if it exists
    [[ -f "$LOCAL_DIR/$fileslist_file" ]] && rm "$LOCAL_DIR/$fileslist_file"

    printf "%bGenerating file list...%b\n" "$YELLOW" "$NC"

    # Convert include and exclude patterns to regex:
    # '.' is escaped, '*' becomes '.*', and multiple patterns are OR-ed with '|'.
    INCLUDE_REGEX=""
    EXCLUDE_REGEX=""
    if ((${#INCLUDE_PATTERNS[@]})); then
        INCLUDE_REGEX=$(printf '%s\n' "${INCLUDE_PATTERNS[@]}" | sed 's/\./\\./g; s/\*/.*/g' | paste -sd '|' -)
    fi
    if ((${#EXCLUDE_PATTERNS[@]})); then
        EXCLUDE_REGEX=$(printf '%s\n' "${EXCLUDE_PATTERNS[@]}" | sed 's/\./\\./g; s/\*/.*/g' | paste -sd '|' -)
    fi

    # Check if jq is available
    if command -v jq &>/dev/null; then
        # Emit download entries from the metadata's .siblings list after
        # applying the include/exclude filters.  aria2c gets its input-file
        # format (URL followed by indented dir=/out=/header= option lines,
        # blank-line separated); wget gets one URL per line.
        process_with_jq() {
            if [[ "$TOOL" == "aria2c" ]]; then
                printf "%s" "$RESPONSE" | jq -r \
                    --arg endpoint "$HF_ENDPOINT" \
                    --arg repo_id "$DOWNLOAD_API_PATH" \
                    --arg token "$HF_TOKEN" \
                    --arg include_regex "$INCLUDE_REGEX" \
                    --arg exclude_regex "$EXCLUDE_REGEX" \
                    --arg revision "$REVISION" \
                    '
                    .siblings[]
                    | select(
                        .rfilename != null
                        and ($include_regex == "" or (.rfilename | test($include_regex)))
                        and ($exclude_regex == "" or (.rfilename | test($exclude_regex) | not))
                    )
                    | [
                        ($endpoint + "/" + $repo_id + "/resolve/" + $revision + "/" + .rfilename),
                        " dir=" + (.rfilename | split("/")[:-1] | join("/")),
                        " out=" + (.rfilename | split("/")[-1]),
                        if $token != "" then " header=Authorization: Bearer " + $token else empty end,
                        ""
                    ]
                    | join("\n")
                    '
            else
                printf "%s" "$RESPONSE" | jq -r \
                    --arg endpoint "$HF_ENDPOINT" \
                    --arg repo_id "$DOWNLOAD_API_PATH" \
                    --arg include_regex "$INCLUDE_REGEX" \
                    --arg exclude_regex "$EXCLUDE_REGEX" \
                    --arg revision "$REVISION" \
                    '
                    .siblings[]
                    | select(
                        .rfilename != null
                        and ($include_regex == "" or (.rfilename | test($include_regex)))
                        and ($exclude_regex == "" or (.rfilename | test($exclude_regex) | not))
                    )
                    | ($endpoint + "/" + $repo_id + "/resolve/" + $revision + "/" + .rfilename)
                    '
            fi
        }
        result=$(process_with_jq)
        printf "%s\n" "$result" > "$LOCAL_DIR/$fileslist_file"
    else
        printf "%b[Warning] jq not installed, using grep/awk for metadata json parsing (slower). Consider installing jq for better parsing performance.%b\n" "$YELLOW" "$NC"
        # Fallback parser: extract "rfilename" values with grep/awk, filter
        # with the same regexes, and build the tool-specific list.
        # NOTE(review): for top-level files the jq path emits dir="" while this
        # path emits dir="." (dirname) — presumably equivalent for aria2c; confirm.
        process_with_grep_awk() {
            local include_pattern=""
            local exclude_pattern=""
            local output=""

            if ((${#INCLUDE_PATTERNS[@]})); then
                include_pattern=$(printf '%s\n' "${INCLUDE_PATTERNS[@]}" | sed 's/\./\\./g; s/\*/.*/g' | paste -sd '|' -)
            fi
            if ((${#EXCLUDE_PATTERNS[@]})); then
                exclude_pattern=$(printf '%s\n' "${EXCLUDE_PATTERNS[@]}" | sed 's/\./\\./g; s/\*/.*/g' | paste -sd '|' -)
            fi

            local files=$(printf '%s' "$RESPONSE" | grep -o '"rfilename":"[^"]*"' | awk -F'"' '{print $4}')

            if [[ -n "$include_pattern" ]]; then
                files=$(printf '%s\n' "$files" | grep -E "$include_pattern")
            fi
            if [[ -n "$exclude_pattern" ]]; then
                files=$(printf '%s\n' "$files" | grep -vE "$exclude_pattern")
            fi

            while IFS= read -r file; do
                if [[ -n "$file" ]]; then
                    if [[ "$TOOL" == "aria2c" ]]; then
                        output+="$HF_ENDPOINT/$DOWNLOAD_API_PATH/resolve/$REVISION/$file"$'\n'
                        output+=" dir=$(dirname "$file")"$'\n'
                        output+=" out=$(basename "$file")"$'\n'
                        [[ -n "$HF_TOKEN" ]] && output+=" header=Authorization: Bearer $HF_TOKEN"$'\n'
                        output+=$'\n'
                    else
                        output+="$HF_ENDPOINT/$DOWNLOAD_API_PATH/resolve/$REVISION/$file"$'\n'
                    fi
                fi
            done <<< "$files"

            printf '%s' "$output"
        }

        result=$(process_with_grep_awk)
        printf "%s\n" "$result" > "$LOCAL_DIR/$fileslist_file"
    fi
else
    printf "%bResume from file list: $LOCAL_DIR/$fileslist_file%b\n" "$GREEN" "$NC"
fi
312
+
313
# Perform download
# Fixes: cd is checked (previously a failed cd downloaded into the wrong
# directory), and the downloader's exit status is captured explicitly instead
# of testing bare $? after the fi.  Variables are printf arguments, not format.
printf "%bStarting download with %s to %s...\n%b" "$YELLOW" "$TOOL" "$LOCAL_DIR" "$NC"

cd "$LOCAL_DIR" || { printf "%b[Error] Cannot enter directory: %s%b\n" "$RED" "$LOCAL_DIR" "$NC"; exit 1; }

download_status=0
if [[ "$TOOL" == "aria2c" ]]; then
    # -c resumes partial files; --save-session rewrites the URL list so
    # completed entries are dropped for the next resume.
    aria2c --console-log-level=error --file-allocation=none -x "$THREADS" -j "$CONCURRENT" -s "$THREADS" -k 1M -c -i "$fileslist_file" --save-session="$fileslist_file" || download_status=$?
elif [[ "$TOOL" == "wget" ]]; then
    # --cut-dirs strips the repo/resolve/<revision> URL prefix from local paths.
    wget -x -nH --cut-dirs="$CUT_DIRS" ${HF_TOKEN:+--header="Authorization: Bearer $HF_TOKEN"} --input-file="$fileslist_file" --continue || download_status=$?
fi

if [[ $download_status -eq 0 ]]; then
    printf "%bDownload completed successfully. Repo directory: %s\n%b" "$GREEN" "$PWD" "$NC"
else
    printf "%bDownload encountered errors.\n%b" "$RED" "$NC"
    exit 1
fi