#!/bin/bash
### CREATED BY KARMAZ
#
## FUNCTIONS:
# 1. PARSING robots.txt AND sitemap.xml
# 2. ENUMERATING WAF
# 3. ENUMERATING FRAMEWORKS & CMS
# 4. VULNERABILITY SCANNING
# 5. DOMAIN CRAWLING
# 6. DIRECTORY BRUTEFORCING
# 7. GATHERING SOURCE CODE OF JS FILES
# 8. EXTRACTING NEW ENDPOINTS FROM GATHERED SOURCE CODE
# 9. MERGING PATHS WITH DOMAIN AND PROBING FOR NEW ENDPOINTS
# 10. PROXYING LIVE RESULTS TO BURP SUITE
# 11. PREPARING params.txt && dirs.txt FOR EXPLOIT MODULE
# 12. CHECKING POTENTIAL BACKUP FILES
# 13. TESTING HOP-BY-HOP DELETION
#
## LISTS:
#
# 1) recon.txt - FILE WITH RECON OUTPUT
# 2) urls.txt - FILE WITH GATHERED URLs
# 3) status_params.txt - STATUS CODES OF urls.txt
# 4) ffuf.txt - DIR BRUTEFORCING OUTPUT
# 5) status_dir.txt - STATUS CODES OF ffuf.txt
# 6) exp/params.txt - FILE PREPARED FOR crimson_exploit WITH PARAMS
# 7) exp/dirs.txt - FILE PREPARED FOR crimson_exploit WITH DIRECTORIES
# 8) backups.txt - POTENTIAL BACKUP FILES
# 9) arjun.txt - FILE WITH BRUTEFORCED PARAMETERS
# 10) all_source_code/ - DIRECTORY WITH JS SOURCE CODES
# 11) testssl.txt - OUTPUT FROM testssl
# 12) jwt.txt - OUTPUT FROM jwt_tool
# 13) wp/ - DIRECTORY WITH OUTPUT FROM WordPress TOOLS
#
## WORKFLOW
#
# 0. Start Burp - optional step
# - Create new project - www.example.tld
# - Turn off interception
# - Enable active scanning for in-scope proxied URLs only
# 1. Start the script
# - If you did not choose the -a flag, go to $HOME/bounty/tested.domain.tld/temp and manually remove false-positive entries from ferox.txt
# 2. Check the output listed above (LISTS)
# 3. Manually browse the application and click through all of its functionality
# 4. Copy the whole target scope from Burp after manually browsing the target
# 5. Paste it to exp/all.txt and run crimson_exploit
#
###
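#
## EXAMPLE RUN (values are placeholders - see the usage message in MAIN for all options):
#
# crimson_target -d "www.example.tld" -c "Cookie: auth1=123;" -a -y
#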
### Catch crashes in a trap and save the current function name to anchor.log
### (SIGKILL cannot be trapped, so only INT, HUP and TERM are handled)
trap 'echo $anchor > anchor.log && exit 1' SIGINT SIGHUP SIGTERM
anchor_check() {
### RESUME HELPER: SKIP FUNCTIONS UNTIL THE ONE SAVED IN anchor.log IS REACHED, THEN RUN NORMALLY
anchor="$1"
[ ! -f "$HOME/bounty/$DOMAIN/$domain/anchor.log" ] && return 0
if grep -q "$1" "$HOME/bounty/$DOMAIN/$domain/anchor.log"; then
rm "$HOME/bounty/$DOMAIN/$domain/anchor.log"; return 0
else
return 1
fi
}
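### RESUME EXAMPLE: IF A RUN DIES DURING e.g. nuclei_scan_domain, THE TRAP WRITES "nuclei_scan_domain"
### TO anchor.log; ON THE NEXT RUN EVERY EARLIER FUNCTION RETURNS EARLY AND EXECUTION PICKS UP
### FROM THE SAVED FUNCTION (THE LOG IS REMOVED ONCE IT IS REACHED)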
clear_log() {
rm files.txt >/dev/null 2>&1
rm status_ffuf.txt >/dev/null 2>&1
rm status_new_endpoints.txt >/dev/null 2>&1
### REMOVE EMPTY FILES AND DIRECTORIES
find . -type d -empty -print -delete -o -type f -empty -print -delete >/dev/null 2>&1
### REMOVE THE LOG IF THE PROGRAM EXITS NORMALLY
if [ -f "$HOME/bounty/$DOMAIN/$domain/anchor.log" ]; then
rm "$HOME/bounty/$DOMAIN/$domain/anchor.log"
fi
}
### ---
### ALL FUNCTIONS:
get_the_sitemap() {
### SITEMAP-URLS.SH
anchor_check "${FUNCNAME[0]}" || return 0
echo "[SITEMAP 80] ----------------------------------------------------------" | tee -a recon.txt
"$HOME"/tools/crimson/scripts/sitemap-urls.sh http://"$domain"/sitemap.xml >> recon.txt
echo >> recon.txt
echo "[SITEMAP 443] ---------------------------------------------------------" | tee -a recon.txt
"$HOME"/tools/crimson/scripts/sitemap-urls.sh https://"$domain"/sitemap.xml >> recon.txt
echo >> recon.txt
}
get_the_robots(){
### CURL
anchor_check "${FUNCNAME[0]}" || return 0
echo "[ROBOTS 80] -----------------------------------------------------------" | tee -a recon.txt
curl -H "$cookie" -s -k --max-time 30 http://"$domain"/robots.txt >> recon.txt
echo >> recon.txt
echo "[ROBOTS 443] ----------------------------------------------------------" | tee -a recon.txt
curl -H "$cookie" -s -k --max-time 30 https://"$domain"/robots.txt >> recon.txt
echo >> recon.txt
}
check_waf() {
### WAFW00F
anchor_check "${FUNCNAME[0]}" || return 0
echo "[WAFW00F] -------------------------------------------------------------" | tee -a recon.txt
wafw00f "$domain" | tail -n +16 | tee -a recon.txt
}
whatweb_enum() {
### WHATWEB
anchor_check "${FUNCNAME[0]}" || return 0
echo "[WHATWEB 80] ---------------------------------------------------------" | tee -a recon.txt
whatweb -a 3 "http://$domain" -H "$cookie" | tee -a recon.txt
echo "[WHATWEB 443] ---------------------------------------------------------" | tee -a recon.txt
whatweb -a 3 "https://$domain" -H "$cookie" | tee -a recon.txt
}
cmseek_enum() {
### CMSEEK
anchor_check "${FUNCNAME[0]}" || return 0
echo "[CMSEEK] ---------------------------------------------------------" | tee -a recon.txt
python3 "$HOME"/tools/CMSeeK/cmseek.py -u "$domain" --follow-redirect
cat "$HOME"/tools/CMSeeK/Result/"$domain"/cms.json | jq . >> recon.txt
rm -rf "$HOME"/tools/CMSeeK/Result/"$domain"/
}
test_wordpress_if_exists() {
anchor_check "${FUNCNAME[0]}" || return 0
if [[ $(cat recon.txt | grep cms_name | cut -d "\"" -f 4) == "WordPress" ]]
then
mkdir wp
echo -e "\033[0;31m [+]\033[0mURL STARTING WP ENUMERATION"
### ENUMERATION USING wpscan > wpscan.txt
wpscan --no-banner --url "$domain" -o wp/wpscan.txt --cookie-string "$cookie" --no-update
### SSRF CHECK
echo -e "\033[0;31m [+]\033[0mURL LOOKING FOR SSRF IN WORDPRESS"
quickpress -server http://"$collaborator_domain" -target https://"$domain"
quickpress -server http://"$collaborator_domain" -target http://"$domain"
### PLUGIN ENUMERATION > plugins.txt && ssl_plugins.txt
echo -e "\033[0;31m [+]\033[0mURL BRUTEFORCING PLUGINS"
python3 "$HOME"/tools/WPluginScanner/wpluginscanner.py http://"$domain" -o wp/plugins.txt -p popular.txt > /dev/null 2>&1
python3 "$HOME"/tools/WPluginScanner/wpluginscanner.py https://"$domain" -o wp/ssl_plugins.txt -p popular.txt > /dev/null 2>&1
echo -e "\033[0;31m [+]\033[0mURL CHECK WP/ DIRECTORY AND RUN python3 /root/tools/wpbullet/wpbullet.py --path DOWNLOADED_PLUGIN"
fi
}
test_jwt_if_exists() {
anchor_check "${FUNCNAME[0]}" || return 0
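### EXAMPLE: FOR cookie="Cookie: session=eyAAA.eyBBB.CCC;" THE grep BELOW EXTRACTS "eyAAA.eyBBB.CCC"
### (ANY "ey"-PREFIXED, DOT-SEPARATED TOKEN, STOPPING BEFORE THE NEXT ";")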
jwt=$(echo "$cookie" | cut -d ":" -f 2 | grep "ey.*\..*\.[^;]*" -o)
if [ -n "$jwt" ]
then
python3 "$HOME"/tools/jwt_tool/jwt_tool.py "$jwt" | tail -n+14 | tee -a jwt.txt
python3 "$HOME"/tools/jwt_tool/jwt_tool.py -t https://"$domain"/ -rc "$cookie" -M pb | tee -a jwt.txt
fi
}
nuclei_scan_domain(){
### NUCLEI
anchor_check "${FUNCNAME[0]}" || return 0
nuclei -ut -silent
echo "[NUCLEI] ---------------------------------------------------------" | tee -a recon.txt
nuclei -u http://"$domain" -fr -mhe 300 -headless -silent -et "token-spray,misconfiguration/http-missing-security-headers.yaml" -H "$cookie" | tee -a nuclei.txt
mkdir screenshots/
mv ./*.png screenshots/
cat nuclei.txt | grep ^http | grep ".*/.*\.$DOMAIN/" | httpx -silent -fc 404 >> urls.txt
}
nikto_enum() {
### NIKTO
anchor_check "${FUNCNAME[0]}" || return 0
echo "[NIKTO] ---------------------------------------------------------" | tee -a recon.txt
nikto -host "$domain" -useragent "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36${cr}${nl}$cookie" --maxtime 1500 | tee -a recon.txt
}
gospider_enum() {
### GOSPIDER
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m SPIDER [1]"
echo "$domain" | httpx -silent | gospider -c 10 -q -w -a --sitemap --robots --subs -H "$cookie" | grep http | sed "s/^.*http/http/g" >> urls.txt
}
paramspider_enum() {
### PARAMSPIDER
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m SPIDER [2]"
python3 "$HOME"/tools/ParamSpider/paramspider.py -d "$domain" --output ./paramspider.txt --level high > /dev/null 2>&1
cat paramspider.txt | grep http | sort -u | grep "$domain" >> urls.txt 2>/dev/null
cat ../paramspider.txt | grep http | sort -u | grep "$domain" | anew urls.txt > /dev/null
}
gau_enum() {
### GAU
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m SPIDER [3]"
gau "$domain" -subs -t 10 >> urls.txt
}
waybackurls_enum() {
### WAYBACKURLS
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m SPIDER [4]"
waybackurls "$domain" >> urls.txt
}
hakrawler_enum() {
### HAKRAWLER
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m SPIDER [5]"
echo "$domain" | httpx -silent | hakrawler -insecure -h "$cookie" -subs -t 10 -u >> urls.txt
}
cariddi_enum(){
### CARIDDI
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m SPIDER [6]"
echo "$domain" | cariddi -headers "$cookie" -intensive -plain >> urls.txt
}
galer_enum_and_merging() {
### GALER
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m SPIDER [7]"
echo "$domain" | httpx -silent | galer -s >> urls.txt
### MERGE SPIDERS AND DELETE DUPLICATES >> urls.txt
echo -e "\033[0;31m [+]\033[0m MERGING SPIDERS RESULTS"
grep -Eo "(http|https)://[a-zA-Z0-9./?=_%:-]*" ../urls.txt | grep "$domain" | anew urls.txt 2>/dev/null
cat urls.txt | qsreplace -a > temp1.txt 2>/dev/null
}
check_live_urls_and_make_custom_wordlist() {
anchor_check "${FUNCNAME[0]}" || return 0
httpx -silent -l temp1.txt -H "$cookie" -fc 404 > urls.txt
### GET NEW ENDPOINTS FROM SPIDERS AND ADD THEM TO THE WORDLIST FOR DIRECTORY BRUTEFORCING > custom_dir.txt
echo -e "\033[0;31m [+]\033[0m GATHERING NEW PATHS FROM SPIDERS RESULTS"
cat urls.txt | unfurl paths > temp1.txt
sort -u "$HOME"/tools/crimson/words/dir > custom_dir.txt
sort -u temp1.txt | anew custom_dir.txt > /dev/null
rm temp1.txt
}
ffuf_bruteforce() {
### FFUF
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m STARTING DIRECTORY BRUTEFORCING"
if [ $https_on == 1 ]
then
ffuf -ignore-body -r -w custom_dir.txt -u https://"$domain"/FUZZ -mc all -fc 400 -H "$cookie" -o ffuf.json > /dev/null
else
ffuf -ignore-body -r -w custom_dir.txt -u http://"$domain"/FUZZ -mc all -fc 400 -H "$cookie" -o ffuf.json > /dev/null
fi
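### EACH LINE OF status_ffuf.txt PRODUCED BELOW LOOKS LIKE e.g. {"url":"https://host/path","status":200}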
cat ffuf.json | jq -c '.results[] | {url:.url,status: .status}' > status_ffuf.txt
rm ffuf.json
### REMOVE TRASH RESPONSES FROM PREVIOUS SCAN > ffuf.txt
echo -e "\033[0;31m [+]\033[0m REMOVING TRASH RESPONSES FROM status_ffuf.txt"
python3 "$HOME"/tools/crimson/scripts/clever_ffuf.py
sort -u temp_ffuf.txt > ffuf.txt
rm temp_ffuf.txt
}
ferox_bruteforce() {
### FEROX
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m STARTING DIRECTORY BRUTEFORCING"
if [ $https_on == 1 ]
then
feroxbuster -C 400,404 --auto-tune -nEgBekr --wordlist custom_dir.txt -u "https://$domain" -o temp/ferox.txt -I 3gp,aac,apng,avif,bmp,class,com,css,cur,doc,flac,gif,gz,ico,jar,jfif,jpeg,jpg,m4a,m4p,m4v,mov,mp3,mp4,mpeg,mpg,oga,ogg,ogv,pdf,pif,pjp,pjpeg,png,ram,scr,snp,svg,swf,tgz,tif,tiff,wav,webm,webp,woff,xls -H "$cookie"
else
feroxbuster -C 400,404 --auto-tune -nEgBekr --wordlist custom_dir.txt -u "http://$domain" -o temp/ferox.txt -I 3gp,aac,apng,avif,bmp,class,com,css,cur,doc,flac,gif,gz,ico,jar,jfif,jpeg,jpg,m4a,m4p,m4v,mov,mp3,mp4,mpeg,mpg,oga,ogg,ogv,pdf,pif,pjp,pjpeg,png,ram,scr,snp,svg,swf,tgz,tif,tiff,wav,webm,webp,woff,xls -H "$cookie"
fi
### MANUAL CHECK - DELETE TRASH FROM FEROXBUSTER OUTPUT
echo -e "\033[0;31m [+]\033[0m CHECK $HOME/bounty/$DOMAIN/$domain/temp/ferox.txt"
echo -e "\033[0;31m [+][+]\033[0m REMOVE FALSE-POSITIVES MANUALLY"
echo -e "\033[0;31m [+][+][+]\033[0m RE-RUN CRIMSON_TARGET"
echo "ferox_manual" > "$HOME/bounty/$DOMAIN/$domain/anchor.log"
exit 1
}
ferox_manual() {
# CONTINUE THE EXECUTION...
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m FEROXBUSTER SUCCESSFULLY EDITED - CONTINUING THE SCRIPT EXECUTION"
### CONVERT THE OUTPUT TO PROPER FORMAT AND DELETE DUPLICATES
cat temp/ferox.txt | cut -s -d "h" -f 2-1000 | sed "s/^/h/" | qsreplace -a > ffuf.txt
}
gather_js_links() {
# GETJS
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m GATHERING .js LINKS"
sort -u urls.txt ffuf.txt | getJS --timeout 2 --insecure --complete --nocolors -H "$cookie" | grep "^http" | grep "$DOMAIN" | sed "s/\?.*//" | anew exp/jsfiles.txt
### GET LINKS TO JS FILES FROM urls.txt > exp/jsfiles.txt
cat urls.txt | grep "\.js$" | grep "^http" | grep "$DOMAIN" | sed "s/\?.*//" | anew exp/jsfiles.txt > /dev/null
}
check_js_live_and_gather_js_code() {
# HTTPX
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m CHECKING FOR LIVE .js LINKS"
httpx -silent -l exp/jsfiles.txt -H "$cookie" -fc 404 -srd all_source_code/ >> exp/temp_jsfiles.txt
mv exp/temp_jsfiles.txt exp/jsfiles.txt
}
beautify_source_code() {
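# JS-BEAUTIFY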
anchor_check "${FUNCNAME[0]}" || return 0
for file in all_source_code/*
do
cat "$file" | js-beautify > temp.txt 2>/dev/null
mv temp.txt "$file"
done
}
zile_enum() {
# ZILE
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m GATHERING API KEYS && NEW PATHS"
cd all_source_code || exit
python3 "$HOME"/tools/crimson/scripts/zile/zile.py --file >> ../temp/temp_zile.txt
cd ..
awk '!seen[$0]++' temp/temp_zile.txt | grep -v "[+]" > zile.txt 2>/dev/null
### GET ENDPOINTS FROM ZILE > extracted.txt
cat zile.txt | cut -s -d " " -f 2 | sed "s/^..//g" | sed "s/..$//g" | sed "s/\"//g" | unfurl path | sed "s/'$//" > temp/temp_zile_endpoints.txt
awk '!seen[$0]++' temp/temp_zile_endpoints.txt >> extracted.txt
}
mutate_urls_check_duplicates() {
# UNIQ
anchor_check "${FUNCNAME[0]}" || return 0
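### EXAMPLE: A PATH LIKE "/api/v1/users" FROM extracted.txt BECOMES "https://$domain/api/v1/users"
### (OR http://) AND ENDS UP IN new_endpoints.txt ONLY IF IT IS NOT ALREADY PRESENT IN ffuf.txt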
if [ $https_on == 1 ]
then
while IFS= read -r line; do echo "https://$domain$line" >> temp/temp_new.txt; done < extracted.txt
awk '!seen[$0]++' temp/temp_new.txt > temp/temp_new_endpoints.txt
sort ffuf.txt temp/temp_new_endpoints.txt | uniq -d > temp/temp_duplicates.txt
grep -v -x -f temp/temp_duplicates.txt temp/temp_new_endpoints.txt > new_endpoints.txt
else
while IFS= read -r line; do echo http://"$domain""$line" >> temp/temp_new.txt; done < extracted.txt
awk '!seen[$0]++' temp/temp_new.txt > temp/temp_new_endpoints.txt
sort ffuf.txt temp/temp_new_endpoints.txt | uniq -d > temp/temp_duplicates.txt
grep -v -x -f temp/temp_duplicates.txt temp/temp_new_endpoints.txt > new_endpoints.txt
fi
rm extracted.txt
}
check_new_endpoints_status() {
# WFUZZ
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m CHECKING STATUS CODE OF NEW PATHS"
wfuzz --conn-delay 2 --req-delay 3 -f status_new_endpoints.txt,raw -L -Z -z file,new_endpoints.txt -z file,"$HOME"/tools/crimson/words/blank -H "$cookie" FUZZFUZ2Z
rm new_endpoints.txt
### REMOVE 400 && 404 RESPONSES
cat status_new_endpoints.txt | grep "http" | grep -E "C=400 |C=404 " -v | grep -v "Pycurl" | cut -s -d "\"" -f2 > filtered_new_endpoints.txt
### MERGE ffuf.txt WITH NEW ENDPOINTS >> ffuf.txt
echo -e "\033[0;31m [+]\033[0m ADDING LIVE PATHS TO ffuf.txt"
cat filtered_new_endpoints.txt | anew ffuf.txt > /dev/null
rm filtered_new_endpoints.txt
### ADD http://$domain | https://$domain EVEN IF THEY ARE 404/400/X status code >> ffuf.txt
if [ $https_on == 1 ]
then
echo -e "https://$domain" | anew ffuf.txt > /dev/null
else
echo -e "http://$domain" | anew ffuf.txt > /dev/null
fi
}
proxy_directories_to_burp() {
# WFUZZ
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m PROXING ALL DIRECTORIES && FILES TO BURP SUITE"
wfuzz -f status_dir.txt,raw -L -Z -z file,ffuf.txt -z file,"$HOME"/tools/crimson/words/blank -p host.docker.internal:8080 -H "$cookie" FUZZFUZ2Z > /dev/null 2>&1
}
proxy_urls_to_burp() {
# WFUZZ
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m PROXING urls.txt TO BURP"
cat urls.txt | grep "?" > temp/temp_params.txt
wfuzz -f status_params.txt,raw -L -Z -z file,temp/temp_params.txt -z file,"$HOME"/tools/crimson/words/blank -p host.docker.internal:8080 -H "$cookie" FUZZFUZ2Z > /dev/null 2>&1
}
check_directories_status(){
# WFUZZ
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m CHECKING STATUS CODE OF (ffuf.txt) BRUTEFORCED DIRECTORIES"
wfuzz -f status_dir.txt,raw -L -Z -z file,ffuf.txt -z file,"$HOME"/tools/crimson/words/blank -H "$cookie" FUZZFUZ2Z > /dev/null 2>&1
}
check_urls_status(){
# WFUZZ
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m CHECKING STATUS CODE OF (urls.txt) CRAWLED QUERIES"
cat urls.txt | grep "?" > temp/temp_params.txt
wfuzz -f status_params.txt,raw -L -Z -z file,temp/temp_params.txt -z file,"$HOME"/tools/crimson/words/blank -H "$cookie" FUZZFUZ2Z > /dev/null 2>&1
}
prepare_params_and_files() {
# QSREPLACE
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m PREPARING FILES FOR crimson_exploit MODULE"
cat status_params.txt | grep -v "C=404 " | grep http | grep -v "Pycurl" | cut -s -d "\"" -f2 | sort -u | qsreplace -a > exp/params.txt
cat status_dir.txt | grep -v "C=400\|C=429\|C=404" | grep http | cut -s -d "\"" -f2 | grep -v -e "Pycurl\|\.3gp\|\.aac\|\.apng\|\.avif\|\.bmp\|\.class\|\.com\|\.css\|\.cur\|\.doc\|\.flac\|\.gif\|\.gz\|\.ico\|\.jar\|\.jfif\|\.jpeg\|\.jpg\|\.m4a\|\.m4p\|\.m4v\|\.mov\|\.mp3\|\.mp4\|\.mpeg\|\.mpg\|\.oga\|\.ogg\|\.ogv\|\.pdf\|\.pif\|\.pjp\|\.pjpeg\|\.png\|\.ram\|\.scr\|\.snp\|\.svg\|\.swf\|\.tgz\|\.tif\|\.tiff\|\.wav\|\.webm\|\.webp\|\.woff\|\.xls\|\.js" | sed "s/\/$//g" | sort -u > files.txt
cat files.txt > exp/dirs.txt
}
check_for_backups() {
# CRIMSON_BACKUPER
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m CHECKING EXISTANCE OF BRUTEFORCED FILES BACKUPS"
python "$HOME"/tools/crimson/scripts/crimson_backuper.py -w urls.txt -e "$HOME"/tools/crimson/words/BCK_EXT -c "$cookie" -o backups.txt
}
check_for_cors() {
# CORSME
anchor_check "${FUNCNAME[0]}" || return 0
echo "[CORS] ---------------------------------------------------------" | tee -a recon.txt
cat ffuf.txt | CorsMe -header "$cookie" -output corsme.txt > /dev/null 2>&1
cat corsme.txt >> recon.txt
rm corsme.txt
rm error_requests.txt
}
check_ssl() {
anchor_check "${FUNCNAME[0]}" || return 0
echo "[SSL] ---------------------------------------------------------" | tee -a recon.txt
testssl --warnings off "$domain" | tee -a testssl.txt
}
arjun_enum() {
# ARJUN
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m STARTING ARJUN ON exp/files.txt - IT WILL TAKE A WHILE..."
arjun -i files.txt -oT arjun.txt -q --headers "$cookie"
}
vhost_brute() {
# FFUF
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m BRUTEFORCING VIRTUAL HOSTNAMES "
ip=$(dig +short "$domain")
cat "$HOME"/tools/crimson/words/vhosts.txt | sed "s/$/.$DOMAIN/" > vhosts.txt
cat ../subdomains.txt >> vhosts.txt 2>/dev/null
sed -i '/_/d' ./vhosts.txt
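### BASELINE FILTER: blength/glength BELOW HOLD THE RESPONSE LINE COUNTS FOR A KNOWN-BAD HOST HEADER
### AND FOR THE REAL HOST; wfuzz --hl HIDES CANDIDATES WHOSE RESPONSE MATCHES EITHER LINE COUNT,
### SO ONLY VHOSTS THAT RETURN DIFFERENT CONTENT ARE REPORTED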
echo "[VHOSTS] ---------------------------------------------------------" | tee -a recon.txt
if [ $https_on == 1 ]
then
blength=$(curl -H "$cookie" -k -s -H "Host: bbaddhost.$domain" "https://$ip/" |wc -l)
glength=$(curl -H "$cookie" -k -s -H "Host: $domain" "https://$ip/" |wc -l)
wfuzz -f v_temp.txt,raw -H "$cookie" -c -w vhosts.txt -H "Host: FUZZ" -u "https://$ip/" -t 100 --hl "$glength","$blength" | tail -n +5
else
blength=$(curl -H "$cookie" -k -s -H "Host: bbaddhost.$domain" "http://$ip/" |wc -l)
glength=$(curl -H "$cookie" -k -s -H "Host: $domain" "http://$ip/" |wc -l)
wfuzz -f v_temp.txt,raw -H "$cookie" -c -w vhosts.txt -H "Host: FUZZ" -u "http://$ip/" -t 100 --hl "$glength","$blength" | tail -n +5
fi
cat v_temp.txt >> recon.txt
rm v_temp.txt
}
hop_by_hop_test() {
# hop-by-hop/hbh-header-abuse-test.py
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m TESTING HOP-BY-HOP DELETION"
echo "[HBH] ---------------------------------------------------------" | tee -a recon.txt
if [ $https_on == 1 ]
then
cat "$HOME"/tools/crimson/words/exp/hbh-headers | while IFS= read -r HEADER; do python "$HOME"/tools/hop-by-hop/hbh-header-abuse-test.py -u "https://$domain" -x "$HEADER"; sleep 1; done | tee -a recon.txt
else
cat "$HOME"/tools/crimson/words/exp/hbh-headers | while IFS= read -r HEADER; do python "$HOME"/tools/hop-by-hop/hbh-header-abuse-test.py -u "http://$domain" -x "$HEADER"; sleep 1; done
fi
}
#---
### MAIN()
fully_automated=0
proxy_on=0
arjun_on=0
vhost_on=0
backup_on=0
no_brute_on=0
hop_on=0
test_http=0
while getopts "c:j:vd:ayphbnk" OPTION; do
case $OPTION in
c)
cookie=$OPTARG
;;
j)
collaborator_domain=$OPTARG
;;
v)
vhost_on=1
;;
d)
domain=$OPTARG
;;
a)
fully_automated=1
;;
y)
proxy_on=1
;;
p)
arjun_on=1
;;
h)
hop_on=1
;;
b)
backup_on=1
;;
n)
no_brute_on=1
;;
k)
test_http=1
;;
*)
echo "Incorrect options provided"
exit 1
;;
esac
done
if [ -z "$domain" ]
then
echo "crimson_target -d \"example.domain.com\"
# Optional flags are shown below:
-c \"Cookie: auth1=123;\"
-j \"burp.collaborator.domain\" # SSRF check with quickpress
-v # Virtual host discovery
-a # Without this flag, you have to manually check for false positives after bruteforcing
-y # Proxy urls.txt and ffuf.txt to Burp (host.docker.internal:8080)
-p # Parameter bruteforcing with Arjun (WARNING - it takes a lot of time, better use Burp Param Miner)
-h # Test HOP-BY-HOP header deletion at the end
-b # Check backup files (can take a few hours)
-n # Skip the directory bruteforcing phase
-k # Test HTTP instead of HTTPS"
exit 1
else
### PREPARE DIRECTORIES AND VARIABLES
echo -e "\033[0;31m [+]\033[0m PREPARING DIRECTORIES AND VARIABLES"
DOMAIN=$(tldextract "$domain" | cut -s -d " " -f 2-3 | sed "s/\ /\./")
TARGET=$(dig +short "$domain" | tr "\n" ",")
mkdir "$HOME"/bounty/"$DOMAIN"/"$domain"/temp -p 2>/dev/null
mkdir "$HOME"/bounty/"$DOMAIN"/"$domain"/exp 2>/dev/null
mkdir "$HOME"/bounty/"$DOMAIN"/"$domain"/all_source_code 2>/dev/null
cd "$HOME"/bounty/"$DOMAIN"/"$domain" || exit
if [ -z "$cookie" ]
then
export cookie="Cookie: a=1;";
else
export cookie=$cookie;
fi
### CHECK IF THERE IS SSL (THE -k FLAG FORCES PLAIN HTTP)
if [ "$test_http" == 1 ]
then
https_on=0
else
ssl_check=$(echo "$domain" | httpx -silent | grep https -o)
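### e.g. httpx PRINTS "https://example.tld" WHEN THE TARGET ANSWERS OVER TLS, SO ssl_check
### BECOMES "https" AND https_on IS SET TO 1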
if [ "$ssl_check" == "https" ]
then
https_on=1
else
https_on=0
fi
fi
### CMS SCAN >> recon.txt
cmseek_enum
echo "----------------------------------------------------------------" >> recon.txt
echo "SCANNING START: $(date +'[%m-%d %H:%M:%S]')" >> recon.txt
echo "----------------------------------------------------------------" >> recon.txt
### GET THE CONTENT OF SITEMAP IF EXISTS >> recon.txt
get_the_sitemap
### GET THE CONTENT OF ROBOTS IF EXISTS >> recon.txt
get_the_robots
### CHECKING WAF >> recon.txt
check_waf
### IDENTIFY TECHNOLOGY >> recon.txt
whatweb_enum
### SSL CHECK
check_ssl
### TEST WP IF FOUND >>
test_wordpress_if_exists
### TEST JWT IF EXISTS > jwt.txt
test_jwt_if_exists
### NUCLEI SCAN
nuclei_scan_domain
### PREPARE COOKIE HEADER FOR NIKTO ENUM
nl=$'\n'
cr=$'\r'
### PERFORM NIKTO VULNERABILITY SCAN
nikto_enum
### SPIDER 1 > urls.txt
gospider_enum
### SPIDER 2 >> urls.txt
paramspider_enum
### SPIDER 3 >> urls.txt
gau_enum
### SPIDER 4 >> urls.txt
waybackurls_enum
### SPIDER 5 >> urls.txt
hakrawler_enum
### SPIDER 6 >> urls.txt
cariddi_enum
### SPIDER 7 >> urls.txt
galer_enum_and_merging
### MERGE ALL SPIDERS, CHECK LIVE, MAKE CUSTOM WORDLIST FOR DIR BRUTE >> custom_dir.txt
check_live_urls_and_make_custom_wordlist
### --- AUTOMATED SECTION --- ### -a
if [ $fully_automated == 1 ] && [ $no_brute_on == 0 ]
then
### DIRECTORY BRUTEFORCING USING FFUF > status_ffuf.txt
ffuf_bruteforce
elif [ $fully_automated == 0 ] && [ $no_brute_on == 0 ]
then
### DIRECTORY BRUTEFORCING USING FEROXBUSTER (output is called ffuf.txt for compatibility reasons)
ferox_bruteforce
ferox_manual
else
### SKIPPING DIRECTORY BRUTEFORCING -n
if [ $https_on == 1 ]
then
echo "https://$domain" >> ffuf.txt
else
echo "http://$domain" >> ffuf.txt
fi
fi
### GET LINKS TO JS FILES AND PREPARE IT FOR EXPLOIT MODULE > exp/jsfiles.txt
gather_js_links
### CHECK FOR LIVE JS LINKS AND STORE THE SOURCE CODE > all_source_code/
check_js_live_and_gather_js_code
### BEAUTIFY JS SOURCE CODE IN all_source_code/
beautify_source_code
### DIG API KEYS / ENDPOINTS ETC. > zile.txt
zile_enum
### MUTATE URLS && CHECK IF THERE ARE NO DUPLICATES WITH ffuf.txt > new_endpoints.txt
mutate_urls_check_duplicates
### CHECK STATUS OF NEW URLS > status_new_endpoints.txt
check_new_endpoints_status
### --- PROXY SECTION --- ### -y
if [ $proxy_on == 1 ]
then
### PROXY ALL BRUTEFORCED FILES AND DIRECTORIES TO BURP > status_dir.txt
proxy_directories_to_burp
### CHECK STATUS OF URLS WITH QUERIES && PROXY TO BURP > status_params.txt
proxy_urls_to_burp
else
### IF THERE IS NO PROXY FLAG - JUST CHECK THE STATUS CODE AND SAVE THE RESULTS FOR FURTHER PROCESSING
check_directories_status
check_urls_status
fi
### EXTRACT UNIQUE QUERIES > exp/params.txt && PREPARE FILES WORDLIST FOR BACKUPER AND ARJUN AND DIRECTORIES FOR EXPLOIT MODULE (filtering static content) > files.txt && exp/dirs.txt
prepare_params_and_files
### CORS MISCONFIGURATION SCAN >> recon.txt
check_for_cors
if [ $arjun_on == 1 ]
then
### GET PARAMETER NAMES BY BRUTEFORCING THEM > arjun.txt
arjun_enum
fi
### --- VHOST SECTION --- ### -v
if [ $vhost_on == 1 ]
then
### VHOST BRUTEFORCING >> recon.txt
vhost_brute
fi
if [ "$hop_on" == 1 ]
then
hop_by_hop_test
fi
### CHECK FOR BACKUP FILES > backups.txt
if [ $backup_on == 1 ]
then
### crimson_backuper CAN TAKE A FEW HOURS TO FINISH
check_for_backups
fi
### CLEAR LOGS && TRASH
clear_log
### MARK THE END
echo "----------------------------------------------------------------"
echo "SCANNING END: $(date +'[%m-%d %H:%M:%S]')" >> recon.txt
echo "----------------------------------------------------------------"
### PRINT THE RESULTS
echo -e "\033[0;31m [+]\033[0m 1. recon.txt :"
cat recon.txt
#echo -e "\033[0;31m [+]\033[0m 2. zile.txt :"
# cat zile.txt | cut -s -d " " -f 2 | sed "s/^..//g" | sed "s/..$//g" | sort -u | sed "s/^.//" | sed "s/.$//"
echo -e "\033[0;31m [+]\033[0m 3. CHECK :"
echo " - status_dir.txt"
echo " - status_params.txt"
echo " - arjun.txt"
echo " - backups.txt"
echo " - use pywhat && ciphey for 'strange' findings"
echo " - robots.txt and sitemap.xml at : https://httpstatus.io/"
echo " - Use trufflehog if there is a git repository"
fi
### TO DO
# If there is /graphql endpoint use INQL automatically.
# s3scanner