CyberTriage.Collector

Runs the Cyber Triage (http://cybertriage.com ) Collector to copy forensic artifacts for automated analysis. The Dynamic collection will include EXEs, DLLs, and other files referenced by artifacts. The output of the collection can be saved to a Velociraptor server, a Cyber Triage server, or cloud storage (S3 or Azure). Configuration information is available at - https://docs.cybertriage.com/en/latest/chapters/integrations/velociraptor_collect.html It requires the Velociraptor server to have copies of the Cyber Triage Deployer script and optional configuration files. History. 1.0 - Initial artifact creation Contact support@cybertriage.com with any questions. Copyright (c) 2025 Sleuth Kit Labs LLC. All rights reserved.


name: CyberTriage.Collector
description: 
     Runs the Cyber Triage (http://cybertriage.com) Collector to copy
     forensic artifacts for automated analysis. The Dynamic collection
     will include EXEs, DLLs, and other files referenced by artifacts. 

     The output of the collection can be saved to a Velociraptor server, 
     a Cyber Triage server, or cloud storage (S3 or Azure).

     Configuration information is available at - 
         https://docs.cybertriage.com/en/latest/chapters/integrations/velociraptor_collect.html
     
     It requires the Velociraptor server to have copies of the Cyber Triage Deployer script and 
     optional configuration files. 
     
     History.
     1.0 - Initial artifact creation

     Contact support@cybertriage.com with any questions.

     Copyright (c) 2025 Sleuth Kit Labs LLC. All rights reserved. 
     
type: CLIENT

resources:
   timeout: 14400
   max_upload_bytes: 4294967296

parameters:
   - name: PowerShellExe
     default: "C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe"
     description: Where the PowerShell executable is located, this should not be changed unless you are using 
       a newer verison of PowerShell
     
   - name: script_temp_location
     default: "C:\\Windows\\temp\\deploy_cyber_triage_collector.ps1"
     description: Where the Deployer script will be downloaded to

   - name: CollectorConfig_temp_location
     default: "C:\\Windows\\temp\\CollectorConfig.yaml"
     description: Where the (optional) cloud storage configuration file will be copied to
     
   - name: filesets_temp_location
     default: "C:\\Windows\\temp\\filesets.yaml"
     description: Where the custom file rules configuration file will be copied to

     # This needs to be updated if the Deployer script writes data to a different location
   - name: outputFilesSpec
     description: List of files to copy back after the Collector runs. 
     type: csv
     default: |
       Glob
       Windows\Temp\file.json.gz*

   - name: Root
     description: |
       On Windows, this is the device to apply all the glob on
       (e.g. `C:`). On *NIX, this should be a path to a subdirectory or
       /.
     default: "C:"

   - name: Accessor
     default: auto
     description: |
       On Windows, this can be changed to `ntfs`.

tools:
  - name: deploy_cyber_triage_collector
    serve_locally: false

# TODO: Uncomment this if using cloud storage
#  - name: CollectorConfig.yaml
#    serve_locally: false

# TODO: Uncomment this if using custom file rules
#  - name: filesets.yaml
#    serve_locally: false

    
precondition: SELECT OS From info() where OS = 'windows'

sources:
   - name: Run Deployer Script
     query: |
       // Get the path to the Deployer script on the server.
       LET collector <= SELECT * FROM Artifact.Generic.Utils.FetchBinary(
                 ToolName= "deploy_cyber_triage_collector", IsExecutable=FALSE,
                 TemporaryOnly=FALSE)

       // Copy powershell script from Tools to c:\Windows\Temp. We had escaping issues when running from the download location.
       LET cp <= SELECT copy(filename=collector[0].OSPath, accessor="file", 
                             dest=script_temp_location)
                   FROM scope()

                 
       // TODO: Uncomment below if using cloud storage config file. This copies the file from the server to temp folder. 
       //LET CollectorConfig <= SELECT * FROM Artifact.Generic.Utils.FetchBinary(
       //                            ToolName= "CollectorConfig.yaml", IsExecutable=FALSE,
       //                            TemporaryOnly=FALSE)
       //LET cp1 <= SELECT copy(filename=CollectorConfig[0].OSPath, accessor="file", 
       //                          dest=CollectorConfig_temp_location)
       //                FROM scope()


       
       // TODO: Uncomment below if using custom file collection rules. This copies the file from the server to temp folder. 
       //LET filesets <= SELECT * FROM Artifact.Generic.Utils.FetchBinary(
       //             ToolName= "filesets.yaml", IsExecutable=FALSE,
       //             TemporaryOnly=FALSE)
       //LET cp2 <= SELECT copy(filename=filesets[0].OSPath, accessor="file", 
       //                          dest=filesets_temp_location)
       //                FROM scope()

       
       // Launch the script - it will download and run the Collector
       SELECT * FROM execve(argv=[PowerShellExe,
                    "-ExecutionPolicy", "Unrestricted", "-encodedCommand",
                 base64encode(string=utf16_encode(string=script_temp_location))])

       // TODO: Remove the rest of this file if the Deployer script sends data to cloud or CT Server. 
       // This section will send the JSON file back to the server. 
   - name: Get Collector File Metadata and Upload
     query: |
       LET RootPath <= pathspec(Path=Root, accessor=Accessor)

       // Generate the collection globs for each device
       LET specs = SELECT RootPath + Glob AS Glob
            FROM outputFilesSpec
            WHERE log(message=format(
               format="Processing Device %v with %v: glob is %v",
               args=[Root, Accessor, Glob]))

       // Join all the collection rules into a single Glob plugin. This ensure we
       // only make one pass over the filesystem. We only want LFNs.
       LET hits = SELECT OSPath AS SourceFile, Size
        FROM glob(globs=specs.Glob, accessor=Accessor)
        WHERE NOT IsDir AND log(message="Found " + SourceFile)

       // Pass all the results to the next query. This will serialize
       // to disk if there are too many results.
       LET all_results <=
         SELECT Size, SourceFile
         FROM hits
       // Upload the files
       LET uploaded_files = SELECT * FROM foreach(row={
          SELECT * FROM all_results
        },
        workers=30,
        query={
          SELECT SourceFile, Size,
               upload(file=SourceFile, accessor=Accessor) AS Upload
            FROM scope()
        })

       // Separate the hashes into their own column.
       SELECT now() AS CopiedOnTimestamp, SourceFile,
             Upload.Path AS DestinationFile,
               Size AS FileSize, Upload.sha256 AS SourceFileSha256
        FROM uploaded_files