| skipped 1 lines |
2 | 2 | | # -*- coding: utf-8 -*- |
3 | 3 | | |
4 | 4 | | # 测试 |
5 | | - | # set -xeuo pipefail |
| 5 | + | set -xeuo pipefail |
6 | 6 | | |
7 | 7 | | # ANSI颜色代码 |
8 | 8 | | RED='\033[91m' |
| skipped 31 lines |
40 | 40 | | if ! command -v go &> /dev/null; then |
41 | 41 | | echo "未找到 Go,请先安装 Go 并设置相应的环境变量。" |
42 | 42 | | echo "安装完成后请手动设置以下环境变量:" |
43 | | - | echo " - GOROOT: 指向 Go 的安装目录 export GOROOT=$HOME/go export PATH=$GOROOT/bin:$PATH" |
44 | | - | echo " - GOPATH: 指向您的 Go 工作目录 export GOPATH=$HOME/gopath/go export PATH=$GOPATH/bin:$PATH" |
45 | | - | echo " - GOMODCACHE: 指向您的 Go 工作目录 export GOMODCACHE=$HOME/go/pkg/mod" |
| 43 | + | echo " - GOROOT: 指向 Go 的安装目录 export GOROOT=\$HOME/go export PATH=\$GOROOT/bin:\$PATH" |
| 44 | + | echo " - GOPATH: 指向您的 Go 工作目录 export GOPATH=\$HOME/gopath/go export PATH=\$GOPATH/bin:\$PATH" |
| 45 | + | echo " - GOMODCACHE: 指向您的 Go 工作目录 export GOMODCACHE=\$HOME/go/pkg/mod" |
46 | 46 | | exit 1 |
47 | 47 | | fi |
48 | 48 | | } |
| skipped 8 lines |
57 | 57 | | |
# Probe collected URLs with httpx and keep only the live ones.
# Globals (read): proxy_arg, httpx_args — extra httpx options assembled earlier.
# Arguments:
#   $1 - input file of candidate URLs (gau output)
#   $2 - output file; overwritten with the URLs httpx reports as alive
run_httpx() {
    local gau_result="$1"
    local url_file="$2"
    local line_count
    # Intentionally unquoted on use below so proxy_arg/httpx_args word-split into options.
    local httpx_command="httpx $proxy_arg $httpx_args"
    echo "正在对收集到的 URL 进行 httpx 探活..."
    # Input redirection instead of `cat file | cmd` (UUOC); a missing input file
    # fails the redirection itself and still triggers the exit path.
    $httpx_command -o "$url_file" < "$gau_result" || exit 1
    line_count=$(wc -l < "$url_file" | awk '{print $1}')
    echo -e "${GREEN}httpx 执行完成。找到 $line_count 个活跃的 URL。${RESET}"
}
| skipped 17 lines |
85 | 86 | | echo -e "${RED}警告:$subfinder_alive_urls_file 文件为空。跳过执行 katana 命令。${RESET}" |
86 | 87 | | return 1 |
87 | 88 | | fi |
88 | | - | katana -silent -list "$subfinder_alive_urls_file" -headless -no-incognito -xhr -d 5 -jc -aff -ef $excluded_extentions | anew "$url_file" |
| 89 | + | katana -silent -list "$subfinder_alive_urls_file" -headless -no-incognito -xhr -d 5 -jc -aff -ef $excluded_extentions -o "$katana_result" |
| 90 | + | cat "$katana_result" | anew "$url_file" |
89 | 91 | | line_count=$(wc -l < "$url_file" | awk '{print $1}') |
90 | 92 | | echo -e "${GREEN}katana 执行完成。总共找到 $line_count 个活跃的 URL。${RESET}" |
91 | 93 | | } |
| skipped 1 lines |
# Run nuclei fuzzing against the collected URLs.
# Globals (read): nuclei_command — full nuclei invocation built by the caller;
#                 RED/RESET — ANSI color codes.
# Arguments:
#   $1 - file of URLs to scan (fed to nuclei on stdin)
run_nuclei() {
    local url_file="$1"
    echo "更新Nuclei templates"
    # A failed template update must not abort the whole scan: `set -e` is active
    # at the top of the script, so the bare `nuclei -ut` previously killed it.
    nuclei -ut || echo -e "${RED}警告:Nuclei templates 更新失败,继续使用本地模板。${RESET}"
    echo "正在对收集到的 URL 运行Nuclei"
    echo -e "Nuclei_command : ${GREEN}cat $url_file | $nuclei_command ${RESET}"
    # Redirect instead of `cat | ...` (UUOC); nuclei_command stays unquoted so
    # its embedded options word-split.
    $nuclei_command < "$url_file" || exit 1
}
100 | 104 | | |
| skipped 59 lines |
160 | 164 | | fi |
161 | 165 | | output_domain_file="$project/$domain.txt" |
162 | 166 | | else |
163 | | - | output_all_file="$project/allurls.txt" |
164 | 167 | | # 检查是否提供了项目名称 |
165 | 168 | | if [ -z "$project" ]; then |
166 | 169 | | project="output" |
167 | 170 | | fi |
| 171 | + | output_all_file="$project/allurls.txt" |
| 172 | + | |
| 173 | + | |
168 | 174 | | fi |
169 | 175 | | |
# Per-project output paths. $project is validated/derived just above; every
# stage caches its result in one of these files so interrupted runs can resume.
gau_result="$project/gau_result.txt"            # raw gau URLs (after uro dedupe)
katana_result="$project/katana_result.txt"      # katana crawl output
# Timestamped so repeated fuzzing runs never overwrite earlier findings.
nuclei_fuzzing_output_file="$project/nuclei_fuzzing_results_$(date +%Y%m%d%H%M%S).txt"
subfinder_domains_file="$project/subfinder_urls.txt"         # subfinder subdomains
subfinder_alive_urls_file="$project/subfinder_alive_urls.txt" # httpx-alive subdomains
| skipped 6 lines |
179 | 187 | | |
# Collect URLs with gau, skipping when cached output already exists so reruns
# resume cheaply. The duplicated guard + gau_args construction that previously
# appeared in both branches is hoisted out of the domain/file split.
if [ ! -f "$gau_result" ]; then
    # Common gau options; the proxy flag is appended only when configured.
    gau_args="--blacklist $excluded_extentions --subs"
    [ -n "$proxy" ] && gau_args+=" --proxy $proxy"
    if [ -n "$domain" ]; then
        echo "正在对 $domain 运行gau"
        gau $gau_args $domain | uro > "$gau_result" || exit 1
    else
        echo "正在对 $filename 中的 URL 运行gau"
        # Redirect instead of `cat file | gau` (UUOC).
        gau $gau_args < "$filename" | uro > "$gau_result" || exit 1
    fi
fi
196 | 204 | | |
| skipped 4 lines |
201 | 209 | | url_file="$output_all_file" |
202 | 210 | | fi |
203 | 211 | | |
# Probe the collected URLs with httpx unless the result file already exists.
if [ ! -f "$url_file" ]; then
    run_httpx "$gau_result" "$url_file"
else
    # Fix: the skip message previously named $gau_result, but the existence
    # check above is on $url_file — report the file actually being skipped.
    echo "$url_file 已存在,跳过httpx探活"
fi
205 | 217 | | |
206 | 218 | | # 运行subfinder 函数 |
207 | 219 | | # 定义 subfinder_command 变量,根据提供的域名或文件进行选择 |
| skipped 3 lines |
211 | 223 | | subfinder_command="subfinder -dL $filename -all -silent -o $subfinder_domains_file" |
212 | 224 | | fi |
213 | 225 | | |
# Enumerate subdomains with subfinder, reusing on-disk results when present.
if [ -f "$subfinder_domains_file" ]; then
    echo "$subfinder_domains_file 已存在,跳过subfinder子域名收集"
else
    run_subfinder
fi
215 | 231 | | |
# Probe the collected subdomains with httpx. Skipped entirely when the
# subfinder output is missing, and short-circuited when a previous run's
# alive-URL file is already on disk. Flattened to a single if/elif/else
# chain instead of nesting.
if [ ! -f "$subfinder_domains_file" ]; then
    echo -e "${RED} 警告:$subfinder_domains_file 文件不存在。跳过运行httpx 命令。${RESET}"
elif [ ! -f "$subfinder_alive_urls_file" ]; then
    echo "正在对收集到的子域运行httpx"
    httpx -l "$subfinder_domains_file" -ports=80,443,8080,8443,8000,8888 $httpx_args -o "$subfinder_alive_urls_file" || exit 1
    line_count=$(wc -l < "$subfinder_alive_urls_file" | awk '{print $1}')
    echo -e "${GREEN}Httpx 探活子域执行完成。找到 $line_count 个活跃的 URL。${RESET}"
else
    echo "$subfinder_alive_urls_file 已存在,跳过子域名httpx探活。"
fi
225 | 245 | | |
# Crawl the live subdomains with katana unless cached results already exist.
if [ -f "$katana_result" ]; then
    echo "$katana_result 已存在,跳过katana爬虫。"
else
    run_katana "$subfinder_alive_urls_file" "$url_file"
fi
# Extract the unique scheme://host[:port] roots from every collected URL so
# other scanners can reuse the live-website list; cached on disk across runs.
if [ ! -f "$project/websites.txt" ]; then
    # Keep only the origin part of each URL, dedupe, and both display the
    # list and persist it via tee.
    sed -E 's#^(https?://[^/]+).*#\1#' "$url_file" | sort -u | tee "$project/websites.txt"
    line_count=$(wc -l < "$project/websites.txt" | awk '{print $1}')
    echo -e "${GREEN}所有存活websites已提取成功。共 $line_count 个website。\nnuclei完整扫描命令:${RED}nuclei -l $project/websites.txt -nh -es info -et ssl,dns -p $proxy -o $project/nuclei_full_results_$(date +%Y%m%d%H%M%S).txt ${RESET} ${RESET}"
fi
# Build the nuclei fuzzing invocation consumed by run_nuclei (read as a
# global there). proxy_arg and nuclei_fuzzing_args are assembled earlier
# in the script; the output file is the timestamped path defined above.
nuclei_command="nuclei $proxy_arg $nuclei_fuzzing_args -o $nuclei_fuzzing_output_file"
| skipped 4 lines |