diff --git a/.github/workflows/actions.yml b/.github/workflows/actions.yml
index b128c5624..66ab1d290 100644
--- a/.github/workflows/actions.yml
+++ b/.github/workflows/actions.yml
@@ -8,15 +8,12 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        include:
-          - docker_base_image: "ubuntu:22.04"
-            gift_ppa_track: "staging"
-          - docker_base_image: "ubuntu:22.04"
-            gift_ppa_track: "stable"
-
+        os:
+          - ubuntu-22.04
+        gift_ppa_track: ["staging", "stable"]
     steps:
       - name: checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v3
      - name: Build Turbinia Unit Tests Docker image
diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml
index 36c813145..7e0576c35 100644
--- a/.github/workflows/e2e.yml
+++ b/.github/workflows/e2e.yml
@@ -5,18 +5,15 @@ on: [push, pull_request]
 jobs:
   e2e-test:
     name: Run local stack e2e test
-    runs-on: ubuntu-latest
     strategy:
       matrix:
-        include:
-          - docker_base_image: "ubuntu:22.04"
-            gift_ppa_track: "staging"
-          - docker_base_image: "ubuntu:22.04"
-            gift_ppa_track: "stable"
-
+        os:
+          - ubuntu-22.04
+        gift_ppa_track: ["staging", "stable"]
+    runs-on: ${{ matrix.os }}
     steps:
       - name: checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v3
       - name: Build Turbinia API server Docker image
@@ -58,6 +55,10 @@ jobs:
       - name: Patch docker-compose config to use locally built images
        run: |
          sed -i -e 's/#image: "t/image: "t/g' -e 's/image: "u/#image: "u/g' ./docker/local/docker-compose.yml
+      - name: Update pip
+        run: python -m pip install --upgrade pip
+      - name: Install turbinia-client
+        run: pip install turbinia-client
      - name: Run E2E test
        run: |
          chmod +x ./turbinia/e2e/e2e-local.sh
diff --git a/README.md b/README.md
index ecbec15dc..995406630 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,5 @@
 # Turbinia
+![Unit tests](https://github.com/google/turbinia/actions/workflows/actions.yml/badge.svg) ![e2e tests](https://github.com/google/turbinia/actions/workflows/e2e.yml/badge.svg)
 
 ## Summary
diff --git a/docker/local/docker-compose.yml b/docker/local/docker-compose.yml
index 3dddaf03c..658c3ff60 100644
--- a/docker/local/docker-compose.yml
+++ b/docker/local/docker-compose.yml
@@ -7,10 +7,8 @@ services:
     # See https://docs.docker.com/compose/compose-file/compose-file-v3/#expose
     expose:
       - "6379"
-
     volumes:
       - $PWD/redis-data:/var/lib/redis
-
     environment:
       - REDIS_REPLICATION_MODE=master
@@ -18,11 +16,11 @@ services:
     #image: "turbinia-server-dev" # Use this for local development and comment out below line
     image: "us-docker.pkg.dev/osdfir-registry/turbinia/release/turbinia-server:latest" # Latest stable
     container_name: turbinia-server
-
+    depends_on:
+      - redis
     volumes:
       - $PWD/evidence:/evidence
       - $PWD/conf/turbinia.conf:/etc/turbinia/turbinia.conf
-
     environment:
       - LC_ALL=C.UTF-8
       - LANG=C.UTF-8
@@ -32,30 +30,33 @@ services:
     #image: "turbinia-api-server-dev" # Use this for local development and comment out below line
     image: "us-docker.pkg.dev/osdfir-registry/turbinia/release/turbinia-api-server:latest" # Latest stable
     container_name: turbinia-api-server
-
+    depends_on:
+      - redis
     volumes:
       - $PWD/evidence:/evidence
       - $PWD/conf/turbinia.conf:/etc/turbinia/turbinia.conf
-
     environment:
       - LC_ALL=C.UTF-8
       - LANG=C.UTF-8
       - TURBINIA_EXTRA_ARGS=${TURBINIA_EXTRA_ARGS}
+    expose:
+      - "8000"
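+    # NOTE: "expose" opens port 8000 on the compose network only (no host
+    # port mapping), and depends_on waits for the redis container to start,
+    # not for redis itself to be ready to accept connections.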
"us-docker.pkg.dev/osdfir-registry/turbinia/release/turbinia-worker:latest" # Latest stable container_name: turbinia-worker privileged: true - + depends_on: + - redis volumes: - $PWD/evidence:/evidence - $PWD/conf/turbinia.conf:/etc/turbinia/turbinia.conf - environment: - LC_ALL=C.UTF-8 - LANG=C.UTF-8 - TURBINIA_EXTRA_ARGS=${TURBINIA_EXTRA_ARGS} + # Uncomment below in case you want to run a second worker on the same host. # turbinia-worker2: # image: "turbinia-worker-dev" # Use this for local development and comment out below line diff --git a/docker/tests/Dockerfile b/docker/tests/Dockerfile index 549651089..2bf9dec4c 100644 --- a/docker/tests/Dockerfile +++ b/docker/tests/Dockerfile @@ -12,6 +12,8 @@ RUN apt-get update && apt-get -y upgrade && apt-get -y install \ gpg \ john \ john-data \ + libleveldb1d \ + libleveldb-dev \ libssl-dev \ libterm-readline-gnu-perl \ libtool \ @@ -41,6 +43,9 @@ RUN add-apt-repository -y ppa:gift/$PPA_TRACK RUN apt-get update && apt-get -y install \ bulk-extractor \ docker-explorer-tools \ + libbde-tools \ + libfsapfs-tools \ + libluksde-tools \ sleuthkit \ libewf-tools \ && apt-get clean && rm -rf /var/cache/apt/* /var/lib/apt/lists/* diff --git a/turbinia/api/api_server.py b/turbinia/api/api_server.py index f01c375c0..6f87c89af 100644 --- a/turbinia/api/api_server.py +++ b/turbinia/api/api_server.py @@ -28,9 +28,6 @@ from turbinia.api.routes.router import api_router from turbinia.api.routes.ui import ui_router -from turbinia.config import logger - -logger.setup() log = logging.getLogger('turbinia') log.setLevel(logging.INFO) diff --git a/turbinia/e2e/e2e-local.sh b/turbinia/e2e/e2e-local.sh index 9b2334fd1..25888d97e 100755 --- a/turbinia/e2e/e2e-local.sh +++ b/turbinia/e2e/e2e-local.sh @@ -4,7 +4,8 @@ # The evidence processed is a prepared raw disk image. # Set default return value -RET=0 +RET=1 +set -o posix echo "Create evidence folder" mkdir -p ./evidence @@ -18,8 +19,8 @@ echo "==> Startup local turbinia docker-compose stack" export TURBINIA_EXTRA_ARGS="-d" docker-compose -f ./docker/local/docker-compose.yml up -d -echo "==> Sleep for 10s" -sleep 10s +echo "==> Sleep for 10 seconds to let containers start" +sleep 10 echo "==> Show and check running containers" containers=( turbinia-server turbinia-worker turbinia-api-server redis ) @@ -36,6 +37,23 @@ do done echo "All containers up and running!" 
     sleuthkit \
     libewf-tools \
     && apt-get clean && rm -rf /var/cache/apt/* /var/lib/apt/lists/*
diff --git a/turbinia/api/api_server.py b/turbinia/api/api_server.py
index f01c375c0..6f87c89af 100644
--- a/turbinia/api/api_server.py
+++ b/turbinia/api/api_server.py
@@ -28,9 +28,6 @@
 from turbinia.api.routes.router import api_router
 from turbinia.api.routes.ui import ui_router
 
-from turbinia.config import logger
-
-logger.setup()
 log = logging.getLogger('turbinia')
 log.setLevel(logging.INFO)
diff --git a/turbinia/e2e/e2e-local.sh b/turbinia/e2e/e2e-local.sh
index 9b2334fd1..25888d97e 100755
--- a/turbinia/e2e/e2e-local.sh
+++ b/turbinia/e2e/e2e-local.sh
@@ -4,7 +4,8 @@
 # The evidence processed is a prepared raw disk image.
 
 # Set default return value
-RET=0
+RET=1
+set -o posix
 
 echo "Create evidence folder"
 mkdir -p ./evidence
@@ -18,8 +19,8 @@ echo "==> Startup local turbinia docker-compose stack"
 export TURBINIA_EXTRA_ARGS="-d"
 docker-compose -f ./docker/local/docker-compose.yml up -d
 
-echo "==> Sleep for 10s"
-sleep 10s
+echo "==> Sleep for 10 seconds to let containers start"
+sleep 10
 
 echo "==> Show and check running containers"
 containers=( turbinia-server turbinia-worker turbinia-api-server redis )
@@ -36,6 +37,23 @@ do
 done
 echo "All containers up and running!"
 
+echo "==> Getting the turbinia-api-server container IP address"
+API_SERVER=`docker inspect -f '{{range.NetworkSettings.Networks}}{{.IPAddress}}{{end}}' turbinia-api-server`
+echo "==> Got IP address: $API_SERVER"
+
+echo "==> Generating turbinia-client configuration"
+echo '{
+  "default": {
+    "description": "Local e2e test environment",
+    "API_SERVER_ADDRESS": "http://turbinia-api-server",
+    "API_SERVER_PORT": 8000,
+    "API_AUTHENTICATION_ENABLED": false,
+    "CLIENT_SECRETS_FILENAME": ".client_secrets.json",
+    "CREDENTIALS_FILENAME": ".credentials.json"
+  }
+}' | sed "s/turbinia-api-server/$API_SERVER/" > ./evidence/.turbinia_api_config.json
+cat ./evidence/.turbinia_api_config.json
+
 echo "==> Show loop device availability in worker"
 docker exec -t turbinia-worker /sbin/losetup -a
 docker exec -t turbinia-worker ls -al /dev/loop*
@@ -43,39 +61,52 @@ docker exec -t turbinia-worker ls -al /dev/loop*
 
 echo "==> Show evidence volume contents in worker"
 docker exec -t turbinia-worker ls -al /evidence/
 
-echo "==> Show container logs"
-docker logs turbinia-server
-docker logs turbinia-worker
-docker logs turbinia-api-server
-
 echo "==> Create Turbinia request"
-docker exec -t turbinia-server turbiniactl -r 123456789 -P /evidence/e2e-recipe.yaml rawdisk -l /evidence/artifact_disk.dd
+RECIPE_DATA=`cat ./evidence/e2e-recipe.yaml | base64 -w0`
+turbinia-client -p ./evidence submit rawdisk --source_path /evidence/artifact_disk.dd --request_id 123456789 --recipe_data ${RECIPE_DATA}
 
-echo "==> Sleep for 150 seconds to let Turbinia process evidence"
-sleep 150s
+echo "==> Waiting 5 seconds before polling request status"
+sleep 5
 
-echo "==> Display Turbinia request status"
-docker exec turbinia-server turbiniactl -a status -r 123456789
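+# Poll until the request leaves the "running" state; once all tasks have
+# finished, the status field is expected to read "successful".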
+echo "==> Polling the API server for request status"
+# Wait until request is complete
+req_status=$(turbinia-client -p ./evidence status request 123456789 -j | jq -r '.status')
+while [[ $req_status = "running" ]]
+do
+  req_status=$(turbinia-client -p ./evidence status request 123456789 -j | jq -r '.status')
+  if [[ $req_status = "running" ]]
+  then
+    echo "Turbinia request 123456789 is still running. Sleeping for 10 seconds..."
+    sleep 10
+  fi
+done
 
-echo "==> See if any tasks failed"
-FAILED=`docker exec turbinia-server turbiniactl -a status -r 123456789 | awk '/Failed Tasks/,/\* None/' | wc -l`
-if [ "$FAILED" != "2" ]; then
-  echo 'Tasks failed!'
-  RET=1
+echo "==> Check the status of the request"
+if [ "$req_status" != "successful" ]
+then
+  echo "Request is no longer running, but its status is not successful!"
+else
+  echo "Request successfully completed"
+  RET=0
 fi
 
+echo "==> Displaying request status"
+turbinia-client -p ./evidence status request 123456789 -j
+
 echo "==> Show Turbinia server logs"
 docker logs turbinia-server
 
 echo "==> Show Turbinia worker logs"
 docker logs turbinia-worker
 
+echo "==> Show Turbinia API server logs"
+docker logs turbinia-api-server
+
 echo "==> Show evidence volume contents in worker"
 docker exec -t turbinia-worker ls -al /evidence/
 docker exec -t turbinia-worker find /evidence -ls
 
 echo "==> Show PlasoParserTask logs"
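+# saved_paths lists every file a task recorded; keep only the .txt logs and
+# cat each one from inside the worker container.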
-for i in cat `docker exec turbinia-server turbiniactl -a status -r 123456789|grep -Eo '*/evidence/123456789/.*PlasoParserTask.*txt'`; do docker exec turbinia-worker cat $i; done
-
+for i in `turbinia-client -p ./evidence status request 123456789 -j | jq '.tasks[] | select(.name == "PlasoParserTask") | .saved_paths[]' | grep \.txt | tr -d '"'`; do docker exec turbinia-worker cat $i; done
 
 exit $RET
diff --git a/turbinia/turbiniactl.py b/turbinia/turbiniactl.py
index 0425f426e..6ae55e130 100644
--- a/turbinia/turbiniactl.py
+++ b/turbinia/turbiniactl.py
@@ -14,24 +14,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """Command line interface for Turbinia."""
-# pylint: disable=bad-indentation
-
-from __future__ import print_function
-from __future__ import unicode_literals
 
 import argparse
-import getpass
 import logging
-import os
 import sys
-import uuid
 
 from turbinia import config
 from turbinia import TurbiniaException
 from turbinia.config import logger
 from turbinia import __version__
-from turbinia.processors import archive
-from turbinia.output_manager import OutputManager
 
 # We set up the logger first without the file handler, and we will set up the
 # file handler later once we have read the log path from the config.
@@ -66,7 +57,7 @@ def check_args(source_path, args):
   Returns:
     list(str): List of arg or None
   """
-  ret = list()
+  ret = []
   if not args[0]:
     args[0] = source_path
   for arg in args:
@@ -74,9 +65,8 @@ def check_args(source_path, args):
       arg = [None]
     if len(arg) > 1 and len(arg) != len(source_path):
       raise TurbiniaException(
-          'Number of passed in args ({0:d}) must equal to one or '
-          'number of source_paths/disks ({1:d}).'.format(
-              len(arg), len(source_path)))
+          f'Number of passed in args ({len(arg):d}) must be equal to one or '
+          f'to the number of source_paths/disks ({len(source_path):d}).')
     if len(arg) == 1:
       arg = [arg[0] for _ in source_path]
     ret.append(arg)
@@ -93,63 +83,24 @@ def process_args(args):
     TurbiniaException: If there's an error processing args.
   """
   parser = argparse.ArgumentParser(
-      description='Turbinia can bulk process multiple evidence of same type '
-      '(i.e. rawdisk, google cloud disk). For bulk processing, pass in csv '
-      'list of args to be processed. If all pieces of evidence share the same '
-      'property, such as project or source, there is no need for repeating '
-      'those values in the command.')
+      description=(
+          'turbiniactl is used to start the different Turbinia '
+          'components (e.g. API server, workers, Turbinia server).'))
   parser.add_argument(
       '-q', '--quiet', action='store_true', help='Show minimal output')
-  parser.add_argument(
-      '-v', '--verbose', action='store_true', help='Show verbose output',
-      default=True)
   parser.add_argument(
       '-d', '--debug', action='store_true', help='Show debug output',
       default=False)
-  parser.add_argument(
-      '-a', '--all_fields', action='store_true',
-      help='Show all task status fields in output', required=False)
   parser.add_argument(
       '-c', '--config_file', help='Load explicit config file. If specified it '
       'will ignore config files in other default locations '
      '(/etc/turbinia.conf, ~/.turbiniarc, or in paths referenced in '
      'environment variable TURBINIA_CONFIG_PATH)', required=False)
-  parser.add_argument(
-      '-I', '--recipe', help='Name of Recipe to be employed on evidence',
-      required=False)
-  parser.add_argument(
-      '-P', '--recipe_path', help='Recipe file path to load and use.',
-      required=False)
-  parser.add_argument(
-      '-X', '--skip_recipe_validation', action='store_true', help='Do not '
-      'perform recipe validation on the client.', required=False, default=False)
-  parser.add_argument(
-      '-f', '--force_evidence', action='store_true',
-      help='Force evidence processing request in potentially unsafe conditions',
-      required=False)
-  parser.add_argument(
-      '-k', '--decryption_keys', help='Decryption keys to be passed in as '
-      ' comma separated list. Each entry should be in the form type=key. (e.g. '
-      '"-k password=123456,recovery_password=XXXX-XXXX-XXXX-XXXX-XXXX-XXXX")',
-      default=[], type=csv_list)
   parser.add_argument('-o', '--output_dir', help='Directory path for output')
   parser.add_argument('-L', '--log_file', help='Log file')
-  parser.add_argument(
-      '-r', '--request_id', help='Create new requests with this Request ID',
-      required=False)
   parser.add_argument(
       '-V', '--version', action='version', version=__version__,
       help='Show the version')
-  parser.add_argument(
-      '-D', '--dump_json', action='store_true',
-      help='Dump JSON output of Turbinia Request instead of sending it')
-  parser.add_argument(
-      '-F', '--filter_patterns_file',
-      help='A file containing newline separated string patterns to filter '
-      'text based evidence files with (in extended grep regex format). '
-      'This filtered output will be in addition to the complete output')
-  parser.add_argument(
-      '-Y', '--yara_rules_file', help='A file containing Yara rules.')
   parser.add_argument(
       '-j', '--jobs_allowlist', default=[], type=csv_list,
       help='An allowlist for Jobs that will be allowed to run (in CSV format, '
@@ -162,297 +113,15 @@ def process_args(args):
       '-J', '--jobs_denylist', default=[], type=csv_list,
       help='A denylist for Jobs we will not allow to run. See '
       '--jobs_allowlist help for details on format and when it is applied.')
-  parser.add_argument(
-      '-p', '--poll_interval', default=60, type=int,
-      help='Number of seconds to wait between polling for task state info')
-
-  parser.add_argument(
-      '-T', '--debug_tasks', action='store_true',
-      help='Show debug output for all supported tasks', default=False)
-  parser.add_argument(
-      '-w', '--wait', action='store_true',
-      help='Wait to exit until all tasks for the given request have completed')
-  parser.add_argument(
-      '-g', '--group_name', help='Grouping name for evidence', required=False)
-  parser.add_argument(
-      '-R', '--reason', help='Related ticket/incident ID for the evidence',
-      required=False)
 
   subparsers = parser.add_subparsers(
       dest='command', title='Commands', metavar='')
-  # Action for printing config
   parser_config = subparsers.add_parser('config', help='Print out config file')
   parser_config.add_argument(
       '-f', '--file_only', action='store_true', help='Print out file path only')
-
-  #Sends Test Notification
-  parser_testnotify = subparsers.add_parser(
-      'testnotify', help='Sends test notification')
-
-  # TODO(aarontp): Find better way to specify these that allows for multiple
-  # pieces of evidence to be submitted. Maybe automagically create different
-  # commands based on introspection of evidence objects?
-  # RawDisk
-  parser_rawdisk = subparsers.add_parser(
-      'rawdisk', help='Process RawDisk as Evidence (bulk processable)')
-  parser_rawdisk.add_argument(
-      '-l', '--source_path', help='Local path to the evidence', required=True,
-      type=csv_list)
-  parser_rawdisk.add_argument(
-      '-s', '--source', help='Description of the source of the evidence',
-      required=False, type=csv_list, default=[None])
-  parser_rawdisk.add_argument(
-      '-n', '--name', help='Descriptive name of the evidence', required=False,
-      type=csv_list)
-
-  # Parser options for Ewf Disk Evidence type
-  parser_ewfdisk = subparsers.add_parser(
-      'ewfdisk', help='Process EwfDisk as Evidence')
-  parser_ewfdisk.add_argument(
-      '-l', '--source_path', help='Local path to the evidence', required=True,
-      type=csv_list)
-  parser_ewfdisk.add_argument(
-      '-s', '--source', help='Description of the source of the evidence',
-      required=False, type=csv_list, default=[None])
-  parser_ewfdisk.add_argument(
-      '-n', '--name', help='Descriptive name of the evidence', required=False,
-      type=csv_list)
-
-  # Parser options for Google Cloud Disk Evidence type
-  parser_googleclouddisk = subparsers.add_parser(
-      'googleclouddisk',
-      help='Process Google Cloud Persistent Disk as Evidence '
-      '(bulk processable)')
-  parser_googleclouddisk.add_argument(
-      '-C', '--copy_only', action='store_true', help='Only copy disk and do '
-      'not process with Turbinia. This only takes effect when a source '
-      '--project is defined and can be run without any Turbinia server or '
-      'workers configured.')
-  parser_googleclouddisk.add_argument(
-      '-d', '--disk_name', help='Google Cloud name for disk', required=True,
-      type=csv_list)
-  parser_googleclouddisk.add_argument(
-      '-p', '--project', help='Project that the disk to process is associated '
-      'with. If this is different from the project that Turbinia is running '
-      'in, it will be copied to the Turbinia project.', type=csv_list)
-  parser_googleclouddisk.add_argument(
-      '-z', '--zone', help='Geographic zone the disk exists in', type=csv_list)
-  parser_googleclouddisk.add_argument(
-      '-s', '--source', help='Description of the source of the evidence',
-      required=False, type=csv_list, default=[None])
-  parser_googleclouddisk.add_argument(
-      '-n', '--name', help='Descriptive name of the evidence', required=False,
-      type=csv_list)
-
-  # Parser options for Google Cloud Persistent Disk Embedded Raw Image
-  parser_googleclouddiskembedded = subparsers.add_parser(
-      'googleclouddiskembedded',
-      help='Process Google Cloud Persistent Disk with an embedded raw disk '
-      'image as Evidence (bulk processable)')
-  parser_googleclouddiskembedded.add_argument(
-      '-C', '--copy_only', action='store_true', help='Only copy disk and do '
-      'not process with Turbinia. This only takes effect when a source '
-      '--project is defined and can be run without any Turbinia server or '
-      'workers configured.')
-  parser_googleclouddiskembedded.add_argument(
-      '-e', '--embedded_path',
-      help='Path within the Persistent Disk that points to the raw image file',
-      required=True, type=csv_list)
-  parser_googleclouddiskembedded.add_argument(
-      '-d', '--disk_name', help='Google Cloud name for disk', required=True,
-      type=csv_list)
-  parser_googleclouddiskembedded.add_argument(
-      '-p', '--project', help='Project that the disk to process is associated '
-      'with. If this is different from the project that Turbinia is running '
-      'in, it will be copied to the Turbinia project.', type=csv_list)
-
-  parser_googleclouddiskembedded.add_argument(
-      '-P', '--mount_partition', type=csv_list, default=[1],
-      help='The partition number as an integer to use when mounting the '
-      'parent disk. Defaults to the first partition. Only affects mounting, and '
-      'not what gets processed.')
-  parser_googleclouddiskembedded.add_argument(
-      '-z', '--zone', help='Geographic zone the disk exists in', type=csv_list)
-  parser_googleclouddiskembedded.add_argument(
-      '-s', '--source', help='Description of the source of the evidence',
-      required=False, type=csv_list, default=[None])
-  parser_googleclouddiskembedded.add_argument(
-      '-n', '--name', help='Descriptive name of the evidence', required=False,
-      type=csv_list)
-
-  # RawMemory
-  parser_rawmemory = subparsers.add_parser(
-      'rawmemory', help='Process RawMemory as Evidence (bulk processable)')
-  parser_rawmemory.add_argument(
-      '-l', '--source_path', help='Local path to the evidence', required=True,
-      type=csv_list)
-  parser_rawmemory.add_argument(
-      '-P', '--profile', help='Profile to use with Volatility', required=True,
-      type=csv_list)
-  parser_rawmemory.add_argument(
-      '-n', '--name', help='Descriptive name of the evidence', required=False,
-      type=csv_list)
-  parser_rawmemory.add_argument(
-      '-m', '--module_list', type=csv_list,
-      help='Volatility module(s) to execute', required=True)
-
-  # Parser options for Directory evidence type
-  parser_directory = subparsers.add_parser(
-      'directory', help='Process a directory as Evidence (bulk processable)')
-  parser_directory.add_argument(
-      '-l', '--source_path', help='Local path to the evidence', required=True,
-      type=csv_list)
-  parser_directory.add_argument(
-      '-s', '--source', help='Description of the source of the evidence',
-      required=False, type=csv_list, default=[None])
-  parser_directory.add_argument(
-      '-n', '--name', help='Descriptive name of the evidence', required=False,
-      type=csv_list)
-
-  # Parser options for CompressedDirectory evidence type
-  parser_directory = subparsers.add_parser(
-      'compresseddirectory', help='Process a compressed tar file as Evidence '
-      '(bulk processable)')
-  parser_directory.add_argument(
-      '-l', '--source_path', help='Local path to the evidence', required=True,
-      type=csv_list)
-  parser_directory.add_argument(
-      '-s', '--source', help='Description of the source of the evidence',
-      required=False, type=csv_list, default=[None])
-  parser_directory.add_argument(
-      '-n', '--name', help='Descriptive name of the evidence', required=False,
-      type=csv_list)
-
-  # Parser options for ChromiumProfile evidence type
-  parser_hindsight = subparsers.add_parser(
-      'hindsight', help='Process ChromiumProfile as Evidence '
-      '(bulk processable)')
-  parser_hindsight.add_argument(
-      '-l', '--source_path', help='Local path to the evidence', required=True,
-      type=csv_list)
-  parser_hindsight.add_argument(
-      '-f', '--format', help='Output format (supported types are '
-      'xlsx, sqlite, jsonl)', type=csv_list, default=['sqlite'])
-  parser_hindsight.add_argument(
-      '-b', '--browser_type', help='The type of browser the input files belong'
-      'to (supported types are Chrome, Brave)', type=csv_list,
-      default=['Chrome'])
-  parser_hindsight.add_argument(
-      '-n', '--name', help='Descriptive name of the evidence', required=False,
-      type=csv_list)
-
-  # List Jobs
-  subparsers.add_parser(
-      'listjobs',
-      help='List all available Jobs. These Job names can be used by '
-      '--jobs_allowlist and --jobs_denylist')
 
   # Celery Worker
   subparsers.add_parser('celeryworker', help='Run Celery worker')
-
-  # Parser options for Turbinia status command
-  parser_status = subparsers.add_parser(
-      'status', help='Get Turbinia Task status')
-  parser_status.add_argument(
-      '-c', '--close_tasks', action='store_true',
-      help='Close tasks based on Request ID or Task ID', required=False)
-  parser_status.add_argument(
-      '-C', '--csv', action='store_true',
-      help='When used with --statistics, the output will be in CSV format',
-      required=False)
-  parser_status.add_argument(
-      '-d', '--days_history', default=0, type=int,
-      help='Number of days of history to show', required=False)
-  parser_status.add_argument(
-      '-D', '--dump_json', action='store_true',
-      help='Dump JSON status output instead text. Compatible with -d, -u, '
-      '-r and -t flags, but not others')
-  parser_status.add_argument(
-      '-f', '--force', help='Gatekeeper for --close_tasks', action='store_true',
-      required=False)
-  parser_status.add_argument(
-      '-r', '--request_id',
-      help='Show all tasks for this Request ID. A request to process Evidence will '
-      'generate a unique request ID and this option will show all Tasks associated '
-      'with this request.', required=False)
-  # 20 == Priority.High. We are setting this manually here because we don't want
-  # to load the worker module yet in order to access this Enum.
-  parser_status.add_argument(
-      '-p', '--priority_filter', default=20, type=int, required=False,
-      help='This sets what report sections are shown in full detail in '
-      'report output. Any tasks that have set a report_priority value '
-      'equal to or lower than this setting will be shown in full detail, and '
-      'tasks with a higher value will only have a summary shown. To see all '
-      'tasks report output in full detail, set --priority_filter=100')
-  parser_status.add_argument(
-      '-R', '--full_report',
-      help='Generate full markdown report instead of just a summary',
-      action='store_true', required=False)
-  parser_status.add_argument(
-      '-s', '--statistics', help='Generate statistics only',
-      action='store_true', required=False)
-  parser_status.add_argument(
-      '-t', '--task_id', help='Show task data for the given Task ID. A '
-      'processing request can generate multiple Tasks as part of the request '
-      'and this will filter to only the specified Task.', required=False)
-  parser_status.add_argument(
-      '-u', '--user', help='Show task for given user', required=False)
-  parser_status.add_argument(
-      '-i', '--requests', required=False, action='store_true',
-      help='Show all requests from a specified timeframe. The default '
-      'timeframe is 7 days. Please use the -d flag to extend this.')
-  parser_status.add_argument(
-      '-g', '--group_id', help='Show Requests for given group ID. This command'
-      ' only shows the related requests and overview of their task status. Run '
-      '--full_report for the full list of requests and their tasks.',
-      required=False)
-  parser_status.add_argument(
-      '-w', '--workers', required=False, action='store_true',
-      help='Show Worker status information from a specified timeframe. The '
-      'default timeframe is 7 days. Please use the -d flag to extend this. '
-      'Additionaly, you can use the -a or --all_fields flag to retrieve the '
-      'full output containing finished and unassigned worker tasks.')
-  parser_log_collector = subparsers.add_parser(
-      'gcplogs', help='Collects Turbinia logs from Stackdriver.')
-  parser_log_collector.add_argument(
-      '-o', '--output_dir', help='Directory path for output', required=False)
-  parser_log_collector.add_argument(
-      '-q', '--query',
-      help='Filter expression to use to query Stackdriver logs.')
-  parser_log_collector.add_argument(
-      '-d', '--days_history', default=1, type=int,
-      help='Number of days of history to show', required=False)
-  parser_log_collector.add_argument(
-      '-s', '--server_logs', action='store_true',
-      help='Collects all server related logs.')
-  parser_log_collector.add_argument(
-      '-w', '--worker_logs', action='store_true',
-      help='Collects all worker related logs.')
-
-  # Add GCS logs collector
-  parser_gcs_logs = subparsers.add_parser(
-      'dumpgcs', help='Get Turbinia results from Google Cloud Storage.')
-  parser_gcs_logs.add_argument(
-      '-o', '--output_dir', help='Directory path for output.', required=True)
-  parser_gcs_logs.add_argument(
-      '-t', '--task_id', help='Download all the results for given task_id.')
-  parser_gcs_logs.add_argument(
-      '-r', '--request_id',
-      help='Download the results for all Tasks for the given request_id.')
-  parser_gcs_logs.add_argument(
-      '-b', '--bucket',
-      help='Alternate GCS bucket to download from. Must be in the following '
-      'format gs://{BUCKET_NAME}/. Defaults to the BUCKET_NAME as specified '
-      'in the config')
-  parser_gcs_logs.add_argument(
-      '-d', '--days_history', default=0, type=int,
-      help='Number of days of history to to query results for', required=False)
-  parser_gcs_logs.add_argument(
-      '-i', '--instance_id',
-      help='Instance ID used to run tasks/requests. You must provide an '
-      'instance ID if the task/request was not processed on the same instance '
-      'as your config file.')
 
   # Server
   subparsers.add_parser('server', help='Run Turbinia Server')
 
   # API server
@@ -473,8 +142,6 @@ def process_args(args):
 
   if args.log_file:
     user_specified_log = args.log_file
-  if args.output_dir:
-    config.OUTPUT_DIR = args.output_dir
 
   config.TURBINIA_COMMAND = args.command
   flags_set = args.command in ('api_server', 'server', 'celeryworker')
@@ -493,441 +160,43 @@ def process_args(args):
   else:
     log.setLevel(logging.INFO)
 
-  # Enable tasks debugging for supported tasks
-  if args.debug_tasks:
-    config.DEBUG_TASKS = True
-
-  if config.CLOUD_PROVIDER.lower() == 'gcp':
-    from turbinia.lib import google_cloud
-    from libcloudforensics.providers.gcp import forensics as gcp_forensics
-
   log.info(f'Turbinia version: {__version__:s}')
 
-  # Do late import of other needed Turbinia modules. This is needed because the
-  # config is loaded by these modules at load time, and we want to wait to load
-  # the config until after we parse the args so that we can use those arguments
-  # to point to config paths.
-  from turbinia import notify
-  from turbinia import client as TurbiniaClientProvider
-  from turbinia.worker import TurbiniaCeleryWorker
-  from turbinia.server import TurbiniaServer
-
   # Print out config if requested
   if args.command == 'config':
     if args.file_only:
       log.info(f'Config file path is {config.configSource:s}\n')
       sys.exit(0)
     try:
-      with open(config.configSource, "r", encoding='utf-8') as f:
+      with open(config.configSource, 'r', encoding='utf-8') as f:
         print(f.read())
         sys.exit(0)
     except IOError as exception:
       msg = (
           f'Failed to read config file {config.configSource:s}: {exception!s}')
-      raise TurbiniaException(msg)
-
-  #sends test notification
-  if args.command == 'testnotify':
-    notify.sendmail(
-        config.EMAIL_ADDRESS, 'Turbinia test notification',
-        'This is a test notification')
-    sys.exit(0)
-
-  args.jobs_allowlist = [j.lower() for j in args.jobs_allowlist]
-  args.jobs_denylist = [j.lower() for j in args.jobs_denylist]
-
-  # Read set set filter_patterns
-  filter_patterns = []
-  if (args.filter_patterns_file and
-      not os.path.exists(args.filter_patterns_file)):
-    msg = f'Filter patterns file {args.filter_patterns_file:s} does not exist.'
-    raise TurbiniaException(msg)
-  elif args.filter_patterns_file:
-    try:
-      filter_patterns = open(args.filter_patterns_file,
-                             encoding='utf-8').read().splitlines()
-    except IOError as exception:
-      log.warning(
-          f'Cannot open file {args.filter_patterns_file:s} [{exception!s}]')
-
-  # Read yara rules
-  yara_rules = ''
-  if (args.yara_rules_file and not os.path.exists(args.yara_rules_file)):
-    msg = f'Filter patterns file {args.yara_rules_file:s} does not exist.'
-    raise TurbiniaException(msg)
-  elif args.yara_rules_file:
-    try:
-      yara_rules = open(args.yara_rules_file, encoding='utf-8').read()
-    except IOError as exception:
-      msg = (f'Cannot open file {args.yara_rules_file:s} [{exception!s}]')
-      raise TurbiniaException(msg)
-
-  # Create Client object
-  client = None
-  if args.command not in ('server'):
-    client = TurbiniaClientProvider.get_turbinia_client()
+      raise TurbiniaException(msg) from exception
 
-  # Set group id
-  group_id = uuid.uuid4().hex
-
-  # Set all_args from list of commandline arguments to string
-  all_args = ' '.join(sys.argv)
-
-  group_name = args.group_name
-  reason = args.reason
-
-  # Checks for bulk processing
-  if args.command in ('ewfdisk', 'rawdisk', 'directory', 'compresseddirectory'):
-    args.name, args.source = check_args(
-        args.source_path, [args.name, args.source])
-    # Iterate through evidence and call process_evidence
-    for i, source_path in enumerate(args.source_path):
-      name = args.name[i]
-      source = args.source[i]
-      process_evidence(
-          args=args, source_path=source_path, name=name, source=source,
-          group_id=group_id, filter_patterns=filter_patterns, client=client,
-          yara_rules=yara_rules, group_name=group_name, reason=reason,
-          all_args=all_args)
-  elif args.command in ('googleclouddisk', 'googleclouddiskembedded'):
-    # Fail if this is a local instance
-    if config.CLOUD_PROVIDER.lower() == 'local' and not args.force_evidence:
-      msg = (
-          'The evidence type {0:s} is Cloud only, and this instance of '
-          'Turbinia is not a cloud instance.'.format(args.command))
-      raise TurbiniaException(msg)
-    # Check cloud zones
-    if not args.zone and config.TURBINIA_ZONE:
-      args.zone = [config.TURBINIA_ZONE]
-    elif not args.zone and not config.TURBINIA_ZONE:
-      msg = 'Turbinia zone must be set by --zone or in config.'
-      raise TurbiniaException(msg)
-    # Checks for cloud project
-    if not args.project and config.TURBINIA_PROJECT:
-      args.project = [config.TURBINIA_PROJECT]
-    elif not args.project and not config.TURBINIA_PROJECT:
-      msg = 'Turbinia project must be set by --project or in config'
-      raise TurbiniaException(msg)
-    # Since mount_partition and embedded_path are not in cloud disk namespace,
-    # Setting them to None here
-    if args.command == 'googleclouddisk':
-      args.mount_partition = None
-      args.embedded_path = None
-    (
-        args.name, args.source, args.project, args.zone, args.mount_partition,
-        args.embedded_path) = check_args(
-            args.disk_name, [
-                args.name, args.source, args.project, args.zone,
-                args.mount_partition, args.embedded_path
-            ])
-    mount_partition = None
-    embedded_path = None
-    for i, disk_name in enumerate(args.disk_name):
-      project = args.project[i]
-      zone = args.zone[i]
-      name = args.name[i]
-      source = args.source[i]
-      if args.command == 'googleclouddiskembedded':
-        embedded_path = args.embedded_path[i]
-        if not name:
-          name = ':'.join((disk_name, embedded_path))
-        mount_partition = args.mount_partition[i]
-      if ((project and project != config.TURBINIA_PROJECT) or
-          (zone and zone != config.TURBINIA_ZONE)):
-        new_disk = gcp_forensics.CreateDiskCopy(
-            project, config.TURBINIA_PROJECT, None, config.TURBINIA_ZONE,
-            disk_name=disk_name)
-        disk_name = new_disk.name
-        if args.copy_only:
-          log.info(
-              f'--copy_only specified, so not processing {disk_name:s} with Turbinia'
-          )
-          continue
-      process_evidence(
-          args=args, disk_name=disk_name, name=name, source=source,
-          project=project, zone=zone, embedded_path=embedded_path,
-          mount_partition=mount_partition, group_id=group_id,
-          filter_patterns=filter_patterns, client=client, yara_rules=yara_rules,
-          group_name=group_name, reason=reason, all_args=all_args)
-  if args.command == 'rawmemory':
-    # Checks if length of args match
-    args.name, args.profile = check_args(
-        args.source_path, [args.name, args.profile])
-    for i, source_path in enumerate(args.source_path):
-      profile = args.profile[i]
-      name = args.name[i]
-      process_evidence(
-          args=args, source_path=source_path, name=name, profile=profile,
-          group_id=group_id, filter_patterns=filter_patterns, client=client,
-          yara_rules=yara_rules, group_name=group_name, reason=reason,
-          all_args=all_args)
-  elif args.command == 'hindsight':
-    args.name, args.browser_type, args.format = check_args(
-        args.source_path, [args.name, args.browser_type, args.format])
-    for i, source_path in enumerate(args.source_path):
-      name = args.name[i]
-      browser_type = args.browser_type[i]
-      format = args.format[i]
-      process_evidence(
-          args=args, source_path=source_path, name=name, format=format,
-          group_id=group_id, client=client, filter_patterns=filter_patterns,
-          yara_rules=yara_rules, browser_type=browser_type,
-          group_name=group_name, reason=reason, all_args=all_args)
+  # Do late import of other needed Turbinia modules. This is needed because the
+  # config is loaded by these modules at load time, and we want to wait to load
+  # the config until after we parse the args so that we can use those arguments
+  # to point to config paths.
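+  # Only the component subcommands remain below; evidence submission and
+  # status queries are now handled by turbinia-client through the API server.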
   elif args.command == 'celeryworker':
-    logger.setup()
+    # pylint: disable=import-outside-toplevel
+    from turbinia.worker import TurbiniaCeleryWorker
     worker = TurbiniaCeleryWorker(
         jobs_denylist=args.jobs_denylist, jobs_allowlist=args.jobs_allowlist)
     worker.start()
   elif args.command == 'server':
+    # pylint: disable=import-outside-toplevel
+    from turbinia.server import TurbiniaServer
     server = TurbiniaServer(
         jobs_denylist=args.jobs_denylist, jobs_allowlist=args.jobs_allowlist)
     server.start()
   elif args.command == 'api_server':
+    # pylint: disable=import-outside-toplevel
     from turbinia.api.api_server import TurbiniaAPIServer
     api_server = TurbiniaAPIServer()
     api_server.start('turbinia.api.api_server:app')
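+    # 'turbinia.api.api_server:app' is the 'module:attribute' path of the
+    # ASGI application that the underlying web server loads.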
-  elif args.command == 'status':
-    region = config.TURBINIA_REGION
-    if args.request_id and args.group_id:
-      msg = (
-          'Cannot run status command with request ID and group ID. Please '
-          'only specify one.')
-      raise TurbiniaException(msg)
-
-    if args.dump_json and (args.statistics or args.requests or args.workers):
-      log.info(
-          'The --dump_json flag is not compatible with --statistics, '
-          '--reqeusts, or --workers flags')
-      sys.exit(1)
-
-    if args.statistics:
-      print(
-          client.format_task_statistics(
-              instance=config.INSTANCE_ID, project=config.TURBINIA_PROJECT,
-              region=region, days=args.days_history, task_id=args.task_id,
-              request_id=args.request_id, user=args.user, csv=args.csv))
-      sys.exit(0)
-
-    if args.wait and args.request_id:
-      client.wait_for_request(
-          instance=config.INSTANCE_ID, project=config.TURBINIA_PROJECT,
-          region=region, request_id=args.request_id, user=args.user,
-          poll_interval=args.poll_interval)
-    elif args.wait and not args.request_id:
-      log.info(
-          '--wait requires --request_id, which is not specified. '
-          'turbiniactl will exit without waiting.')
-
-    if args.requests:
-      print(
-          client.format_request_status(
-              instance=config.INSTANCE_ID, project=config.TURBINIA_PROJECT,
-              region=region, days=args.days_history,
-              all_fields=args.all_fields))
-      sys.exit(0)
-
-    if args.workers:
-      print(
-          client.format_worker_status(
-              instance=config.INSTANCE_ID, project=config.TURBINIA_PROJECT,
-              region=region, days=args.days_history,
-              all_fields=args.all_fields))
-      sys.exit(0)
-
-    if args.dump_json:
-      output_json = True
-    else:
-      output_json = False
-    print(
-        client.format_task_status(
-            instance=config.INSTANCE_ID, project=config.TURBINIA_PROJECT,
-            region=region, days=args.days_history, task_id=args.task_id,
-            request_id=args.request_id, group_id=args.group_id, user=args.user,
-            all_fields=args.all_fields, full_report=args.full_report,
-            priority_filter=args.priority_filter, output_json=output_json))
-    sys.exit(0)
-  elif args.command == 'listjobs':
-    log.info('Available Jobs:')
-    client.list_jobs()
-  else:
-    log.warning(f'Command {args.command!s} not implemented.')
-
-
-# TODO: shard this function and move some of its functionalities to other files
-# (move some of this to evidence.py to run the checks etc)
-def process_evidence(
-    client, group_id, args=None, browser_type=None, disk_name=None,
-    embedded_path=None, filter_patterns=None, format=None, mount_partition=None,
-    name=None, profile=None, project=None, source=None, source_path=None,
-    yara_rules=None, zone=None, group_name=None, reason=None, all_args=None):
-  """Creates evidence and turbinia request.
-
-  Args:
-    client(TurbiniaClient): TurbiniaClient used for creating requests.
-    group_id(str): Group ID used for bulk processing.
-    args(Namespace): commandline args.
-    browser_type(str): Browser type used for hindsight.
-    disk_name(str): Disk name used for processing cloud evidence.
-    embedded_path(str): Embedded path for clouddiskembedded.
-    filter_patterns(str): Filter patterns used for processing evidence.
-    format(str): Output format for hindsight.
-    mount_partition(int): Mount partition for clouddiskembedded.
-    name(str): Evidence name.
-    profile(list(str)): List of volatility profiles used for rawmemory.
-    project(str): Project for cloud related evidence.
-    source(str): Source for evidence.
-    source_path(str): Source path used for host evidence.
-    yara_rules(str): Yara rule for processing evidence.
-    zone(str): Could zone used for cloud evidence.
-    group_name (str): Name for grouping evidence.
-    reason (str): Reason or justification to Turbinia requests.
-    all_args (str): a string of commandline arguments provided to run client.
-  """
-  from turbinia import evidence
-
-  # Set request id
-  request_id = args.request_id if args.request_id else uuid.uuid4().hex
-
-  # Start Evidence configuration
-  evidence_ = None
-
-  if args.command == 'rawdisk':
-    evidence_ = evidence.RawDisk(
-        name=name, source_path=os.path.abspath(source_path), source=source)
-  elif args.command == 'ewfdisk':
-    evidence_ = evidence.EwfDisk(
-        name=name, source_path=os.path.abspath(source_path), source=source)
-  elif args.command == 'directory':
-    source_path = os.path.abspath(source_path)
-    if not config.SHARED_FILESYSTEM:
-      log.info(
-          'A Cloud Only Architecture has been detected. '
-          'Compressing the directory for GCS upload.')
-      source_path = archive.CompressDirectory(
-          source_path, output_path=config.TMP_DIR)
-      evidence_ = evidence.CompressedDirectory(
-          name=name, source_path=source_path, source=source)
-    else:
-      evidence_ = evidence.Directory(
-          name=name, source_path=source_path, source=source)
-  elif args.command == 'compresseddirectory':
-    archive.ValidateTarFile(source_path)
-    evidence_ = evidence.CompressedDirectory(
-        name=name, source_path=os.path.abspath(source_path), source=source)
-  elif args.command == 'googleclouddisk':
-    evidence_ = evidence.GoogleCloudDisk(
-        name=name, disk_name=disk_name, project=project, zone=zone,
-        source=source)
-  elif args.command == 'googleclouddiskembedded':
-    parent_evidence_ = evidence.GoogleCloudDisk(
-        name=name, disk_name=disk_name, project=project, source=source,
-        mount_partition=mount_partition, zone=zone)
-    evidence_ = evidence.GoogleCloudDiskRawEmbedded(
-        name=name, disk_name=disk_name, project=project, zone=zone,
-        embedded_path=embedded_path)
-    evidence_.set_parent(parent_evidence_)
-  elif args.command == 'hindsight':
-    if format not in ['xlsx', 'sqlite', 'jsonl']:
-      msg = 'Invalid output format.'
-      raise TurbiniaException(msg)
-    if browser_type not in ['Chrome', 'Brave']:
-      msg = 'Browser type not supported.'
-      raise TurbiniaException(msg)
-    source_path = os.path.abspath(source_path)
-    evidence_ = evidence.ChromiumProfile(
-        name=name, source_path=source_path, output_format=format,
-        browser_type=browser_type)
-  elif args.command == 'rawmemory':
-    source_path = os.path.abspath(source_path)
-    evidence_ = evidence.RawMemory(
-        name=name, source_path=source_path, profile=profile,
-        module_list=args.module_list)
-
-  if evidence_ and not args.force_evidence:
-    if not config.SHARED_FILESYSTEM and evidence_.copyable:
-      # This is created so we can auto-upload files when they are copyable.
-      if os.path.exists(evidence_.local_path):
-        output_manager = OutputManager()
-        # Passing in request_id as the uid because we don't have an
-        # associated Task ID in this case.
-        output_manager.setup(
-            evidence_.type, request_id, request_id, remote_only=True)
-        output_manager.save_evidence(evidence_)
-      else:
-        msg = (
-            'The evidence local path does not exist: {0:s}. Please submit '
-            'a new Request with a valid path.'.format(evidence_.local_path))
-        raise TurbiniaException(msg)
-    elif not config.SHARED_FILESYSTEM and not evidence_.cloud_only:
-      msg = (
-          'The evidence type {0:s} cannot run on Cloud instances of '
-          'Turbinia. Consider wrapping it in a '
-          'GoogleCloudDiskRawEmbedded or other Cloud compatible '
-          'object'.format(evidence_.type))
-      raise TurbiniaException(msg)
-
-  request = None
-  if evidence_:
-    request = client.create_request(
-        request_id=request_id, group_id=group_id, requester=getpass.getuser(),
-        group_name=group_name, reason=reason, all_args=all_args)
-    request.evidence.append(evidence_)
-
-    if args.decryption_keys:
-      for credential in args.decryption_keys:
-        try:
-          credential_type, credential_data = credential.split('=')
-        except ValueError as exception:
-          msg = (
-              'Could not parse credential [{0:s}] from decryption keys '
-              '{1!s}: {2!s}'.format(
-                  credential, args.decryption_keys, exception))
-          raise TurbiniaException(msg)
-        evidence_.credentials.append((credential_type, credential_data))
-
-  # Recipe pre-condition checks.
-  if args.recipe and args.recipe_path:
-    msg = ('Expected a recipe name (-I) or path (-P) but found both.')
-    raise TurbiniaException(msg)
-
-  # Set the recipe name/path or None if not set.
-  # If no recipe name or path is given, the create_recipe method will
-  # generate a default recipe but still honor any of other parameters
-  # such as jobs_allowlist/jobs_denylist.
-  recipe = args.recipe if args.recipe else args.recipe_path
-
-  recipe_dict = client.create_recipe(
-      debug_tasks=args.debug_tasks, filter_patterns=filter_patterns,
-      group_id=group_id, jobs_allowlist=args.jobs_allowlist,
-      jobs_denylist=args.jobs_denylist, recipe_name=recipe, sketch_id=None,
-      skip_recipe_validation=args.skip_recipe_validation,
-      yara_rules=yara_rules, group_name=group_name, reason=reason,
-      all_args=all_args)
-  request.recipe = recipe_dict
-
-  if args.dump_json:
-    print(request.to_json().encode('utf-8'))
-    sys.exit(0)
-  else:
-    log.info(
-        'Creating request {0:s} with group id {1:s} and evidence '
-        '{2:s}'.format(request.request_id, request.group_id, evidence_.name))
-    # TODO add a new log line when group status is implemented
-    log.info(
-        'Run command "turbiniactl status -r {0:s}" to see the status of'
-        ' this request and associated tasks'.format(request.request_id))
-    client.send_request(request)
-
-  if args.wait:
-    log.info(f'Waiting for request {request.request_id:s} to complete')
-    region = config.TURBINIA_REGION
-    client.wait_for_request(
-        instance=config.INSTANCE_ID, project=config.TURBINIA_PROJECT,
-        region=region, request_id=request.request_id,
-        poll_interval=args.poll_interval)
-    print(
-        client.format_task_status(
-            instance=config.INSTANCE_ID, project=config.TURBINIA_PROJECT,
-            region=region, request_id=request.request_id,
-            all_fields=args.all_fields))
 
 
 def main():
diff --git a/turbinia/turbiniactl_test.py b/turbinia/turbiniactl_test.py
index 608d0fe5b..c550dd0cd 100644
--- a/turbinia/turbiniactl_test.py
+++ b/turbinia/turbiniactl_test.py
@@ -14,42 +14,13 @@
 # limitations under the License.
"""Tests for Turbinia task_manager module.""" -from __future__ import unicode_literals - -import argparse import unittest -import tempfile +import argparse from unittest import mock -from libcloudforensics.providers.gcp.internal import compute + from turbinia import config -from turbinia import TurbiniaException from turbinia import turbiniactl -from turbinia.lib import recipe_helpers -from turbinia.message import TurbiniaRequest -from turbinia.processors import archive - - -class FakeEvidence(): - """Class to represent a fake Evidence object. """ - - def __init__( - self, name='My Evidence', type=None, source_path=None, cloud_only=False, - copyable=False, disk_name=None, project=None, zone=None): - self.source = 'testSource' - self.name = name - self.type = type - self.source_path = source_path - self.cloud_only = cloud_only - self.copyable = copyable - self.type = type - self.project = project - self.disk_name = disk_name - self.zone = zone - - def set_parent(self, _): - """Set evidence parent.""" - return class TestTurbiniactl(unittest.TestCase): @@ -61,447 +32,53 @@ class TestTurbiniactl(unittest.TestCase): def setUp(self, _, __): super(TestTurbiniactl, self).setUp() config.TASK_MANAGER = 'celery' - self.output_manager = mock.MagicMock() - self.base_dir = tempfile.mkdtemp() - self.source_path = tempfile.mkstemp(dir=self.base_dir)[1] - - @mock.patch('turbinia.client.get_turbinia_client') - @mock.patch('turbinia.evidence.RawDisk') - def testRawDiskEvidence(self, mockEvidence, mockClient): - """Test RawDisk evidence.""" - mockClient.create_request.return_value = TurbiniaRequest() - args = argparse.Namespace( - request_id=None, command='rawdisk', force_evidence=False, - decryption_keys=None, recipe=None, recipe_path=None, - skip_recipe_validation=False, dump_json=None, debug_tasks=None, - jobs_denylist=None, jobs_allowlist=None, run_local=False, wait=False) - mockEvidence.return_value = FakeEvidence( - type='rawdisk', source_path=self.source_path) - config.SHARED_FILESYSTEM = True - turbiniactl.process_evidence( - name='My Evidence', source_path='/tmp/foo.img', args=args, - source='case', client=mockClient, group_id='FakeGroupID') - mockEvidence.assert_called_with( - name='My Evidence', source_path='/tmp/foo.img', source='case') - - @mock.patch('turbinia.client.get_turbinia_client') - @mock.patch('turbinia.evidence.CompressedDirectory') - @mock.patch('turbinia.evidence.Directory') - def testDirectoryDiskEvidence( - self, mockDirectory, mockCompressedEvidence, mockClient): - """Test directory evidence.""" - mockClient.create_request.return_value = TurbiniaRequest() - args = argparse.Namespace( - request_id=None, command='directory', force_evidence=False, - decryption_keys=None, recipe=None, recipe_path=None, - skip_recipe_validation=False, dump_json=None, debug_tasks=None, - jobs_denylist=None, jobs_allowlist=None, run_local=False, wait=False) - # Test not shared filesystem - archive.CompressDirectory = mock.MagicMock() - config.SHARED_FILESYSTEM = False - mockCompressedEvidence.return_value = FakeEvidence( - type='compresseddirectory', source_path=self.source_path, - cloud_only=True) - mockClient.send_request = mock.MagicMock() - turbiniactl.process_evidence( - name='My Evidence', source_path=self.source_path, args=args, - source='case', client=mockClient, group_id='FakeGroupID') - self.assertTrue(archive.CompressDirectory.called) - mockCompressedEvidence.assert_called_with( - name='My Evidence', source_path=mock.ANY, source='case') - # Test Directory evidence for shared filesystem - 
-    mockDirectory.return_value = FakeEvidence(
-        type='directory', source_path=self.source_path)
-    mockDirectory.cloud_only = False
-    config.SHARED_FILESYSTEM = True
-    turbiniactl.process_evidence(
-        name='My Evidence', source_path=self.source_path, args=args,
-        source='case', client=mockClient, group_id='FakeGroupID')
-    mockDirectory.assert_called_with(
-        name='My Evidence', source_path=mock.ANY, source='case')
-
-  @mock.patch('turbinia.client.get_turbinia_client')
-  @mock.patch('turbinia.evidence.CompressedDirectory')
-  def testCompressedDirectory(self, mockEvidence, mockClient):
-    """Test compressed directory evidence"""
-    mockClient.create_request.return_value = TurbiniaRequest()
-    args = argparse.Namespace(
-        request_id=None, command='compresseddirectory', force_evidence=False,
-        decryption_keys=None, recipe=None, recipe_path=None,
-        skip_recipe_validation=False, dump_json=None, debug_tasks=None,
-        jobs_denylist=None, jobs_allowlist=None, run_local=False, wait=False)
-    archive.ValidateTarFile = mock.MagicMock()
-    mockEvidence.return_value = FakeEvidence(
-        type='compresseddirectory', source_path=self.source_path,
-        cloud_only=True)
-    mockClient.send_request = mock.MagicMock()
-    turbiniactl.process_evidence(
-        name='My Evidence', source_path=self.source_path, args=args,
-        source='case', client=mockClient, group_id='FakeGroupID')
-    self.assertTrue(archive.ValidateTarFile.called)
-    mockEvidence.assert_called_with(
-        name='My Evidence', source_path=mock.ANY, source='case')
-
-  @mock.patch('turbinia.client.get_turbinia_client')
-  @mock.patch('turbinia.evidence.GoogleCloudDisk')
-  def testCloudDisk(self, mockEvidence, mockClient):
-    """Test Google Cloud Disk evidence."""
-    mockClient.create_request.return_value = TurbiniaRequest()
-    args = argparse.Namespace(
-        request_id=None, command='googleclouddisk', force_evidence=False,
-        decryption_keys=None, recipe=None, recipe_path=None,
-        skip_recipe_validation=False, dump_json=None, debug_tasks=None,
-        jobs_denylist=None, jobs_allowlist=None, run_local=False, wait=False)
-    mockEvidence.return_value = FakeEvidence(
-        type='googleclouddisk', project='testProject', disk_name='testDisk',
-        cloud_only=True)
-    turbiniactl.process_evidence(
-        name='My Evidence', disk_name='testDisk', zone='testZone',
-        project='testProject', args=args, source='case', client=mockClient,
-        group_id='FakeGroupID')
-    mockEvidence.assert_called_with(
-        name='My Evidence', disk_name='testDisk', project='testProject',
-        source='case', zone='testZone')
-
-  @mock.patch('turbinia.output_manager.OutputManager.setup')
-  @mock.patch('turbinia.output_manager.OutputManager.save_evidence')
-  @mock.patch('turbinia.client.get_turbinia_client')
-  @mock.patch('turbinia.evidence.GoogleCloudDiskRawEmbedded')
-  @mock.patch('turbinia.evidence.GoogleCloudDisk')
-  def testCloudEmbedded(
-      self, mockCloudEvidence, mockEmbeddedEvidence, mockClient, _, __):
-    """Test Google Cloud Disk Embedded evidence."""
-    mockClient.create_request.return_value = TurbiniaRequest()
-    args = argparse.Namespace(
-        request_id=None, command='googleclouddiskembedded',
-        force_evidence=False, decryption_keys=None, recipe=None,
-        recipe_path=None, dump_json=None, debug_tasks=None,
-        skip_recipe_validation=False, jobs_denylist=None, jobs_allowlist=None,
-        run_local=False, wait=False)
-    mockCloudEvidence.return_value = FakeEvidence(
-        type='googleclouddisk', project='testProject', disk_name='testDisk',
-        cloud_only=True)
-    mockEmbeddedEvidence.return_value = FakeEvidence(
-        type='googleclouddiskembedded', project='testProject',
-        disk_name='testDisk', cloud_only=True)
-    mockClient.send_request = mock.MagicMock()
-    mockEmbeddedEvidence.set_parent = mock.MagicMock()
-    turbiniactl.process_evidence(
-        name='My Evidence', disk_name='testDisk', zone='testZone',
-        project='testProject', args=args, source='case', client=mockClient,
-        group_id='FakeGroupID', mount_partition='testMount')
-    mockCloudEvidence.assert_called_with(
-        name='My Evidence', disk_name='testDisk', project='testProject',
-        source='case', zone='testZone', mount_partition='testMount')
-    mockEmbeddedEvidence.assert_called_with(
-        name='My Evidence', disk_name='testDisk', project='testProject',
-        zone='testZone', embedded_path=mock.ANY)
-    self.assertTrue(mockEmbeddedEvidence.called)
-
-  @mock.patch('turbinia.client.get_turbinia_client')
-  @mock.patch('turbinia.evidence.ChromiumProfile')
-  def testHindsight(self, mockEvidence, mockClient):
-    """Test hindsight evidence"""
-    mockClient.create_request.return_value = TurbiniaRequest()
-    args = argparse.Namespace(
-        request_id=None, command='hindsight', force_evidence=False,
-        decryption_keys=None, recipe=None, recipe_path=None,
-        skip_recipe_validation=False, dump_json=None, debug_tasks=None,
-        jobs_denylist=None, jobs_allowlist=None, run_local=False, wait=False)
-    with self.assertRaisesRegex(TurbiniaException, 'Invalid output format.'):
-      turbiniactl.process_evidence(
-          name='My Evidence', source_path=self.source_path, args=args,
-          client=mockClient, group_id='FakeGroupID', format='invalid')
-
-    with self.assertRaisesRegex(TurbiniaException, 'Browser type'):
-      turbiniactl.process_evidence(
-          name='My Evidence', source_path=self.source_path, args=args,
-          client=mockClient, group_id='FakeGroupID', format='sqlite',
-          browser_type='firefox')
-
-    mockEvidence.return_value = FakeEvidence(
-        type='chromiumProfile', source_path=self.source_path)
-    turbiniactl.process_evidence(
-        name='My Evidence', source_path=self.source_path, args=args,
-        client=mockClient, group_id='FakeGroupID', format='sqlite',
-        browser_type='Chrome')
-    mockEvidence.assert_called_with(
-        name='My Evidence', output_format='sqlite', browser_type='Chrome',
-        source_path=mock.ANY)
-
-  @mock.patch('turbinia.client.get_turbinia_client')
-  @mock.patch('turbinia.evidence.RawMemory')
-  def testRawMemory(self, mockEvidence, mockClient):
-    """Test raw memory evidence"""
-    mockClient.create_request.return_value = TurbiniaRequest()
-    args = argparse.Namespace(
-        request_id=None, command='rawmemory', force_evidence=False,
-        decryption_keys=None, recipe=None, recipe_path=None,
-        skip_recipe_validation=False, dump_json=None, debug_tasks=None,
-        jobs_denylist=None, jobs_allowlist=None, run_local=False, wait=False,
-        module_list=['mod1', 'mod2'])
-    mockEvidence.return_value = FakeEvidence(
-        type='rawmemory', source_path=self.source_path)
-    turbiniactl.process_evidence(
-        name='My Evidence', source_path=self.source_path, args=args,
-        client=mockClient, group_id='FakeGroupID', profile='testProfile')
-
-    mockEvidence.assert_called_with(
-        name='My Evidence', source_path=mock.ANY, profile='testProfile',
-        module_list=['mod1', 'mod2'])
-
-  @mock.patch('turbinia.client.get_turbinia_client')
-  def testUnequalDirectoryArgs(self, _):
-    """Test unequal number of args for directory evidence type."""
-    self.assertRaises(
-        TurbiniaException, turbiniactl.process_args, [
-            'directory', '--source_path', 'img1,img2', '--source',
-            'source,source2,source3'
-        ])
-    self.assertRaises(
-        TurbiniaException, turbiniactl.process_args, [
-            'directory',
-            '--source_path', 'img1,img2', '--name',
-            'name1,name2,name3'
-        ])
-    turbiniactl.process_evidence = mock.MagicMock(return_value=None)
-    turbiniactl.process_args([
-        'directory', '--source_path', 'img1,img2', '--source', 'source,source2'
-    ])
-    self.assertTrue(turbiniactl.process_evidence.called)
-
-  @mock.patch('turbinia.client.get_turbinia_client')
-  def testUnequalRawdiskArgs(self, mockClient):
-    """Test unequal number of args for rawdisk evidence type."""
-    self.assertRaises(
-        TurbiniaException, turbiniactl.process_args, [
-            'rawdisk', '--source_path', 'img1,img2', '--source',
-            'source,source2,source3'
-        ])
-    self.assertRaises(
-        TurbiniaException, turbiniactl.process_args, [
-            'rawdisk', '--source_path', 'img1,img2', '--name',
-            'name1,name2,name3'
-        ])
-    turbiniactl.process_evidence = mock.MagicMock(return_value=None)
-    turbiniactl.process_args(
-        ['rawdisk', '--source_path', 'img1,img2', '--name', 'name1,name2'])
-    self.assertTrue(turbiniactl.process_evidence.called)
-
-  @mock.patch('turbinia.client.get_turbinia_client')
-  def testUnequalCompresseddirectoryArgs(self, _):
-    """Test unequal number of args for compresseddirectory evidence type."""
-    self.assertRaises(
-        TurbiniaException, turbiniactl.process_args, [
-            'compresseddirectory', '--source_path', 'img1,img2,img3',
-            '--source', 'source1,source2'
-        ])
-
-    self.assertRaises(
-        TurbiniaException, turbiniactl.process_args, [
-            'compresseddirectory', '--source_path', 'img1,img2', '--name',
-            'name1,name2,name3'
-        ])
-
-    turbiniactl.process_evidence = mock.MagicMock(return_value=None)
-    turbiniactl.process_args([
-        'compresseddirectory', '--source_path', 'img1,img2', '--name',
-        'name1,name2'
-    ])
-    self.assertTrue(turbiniactl.process_evidence.called)
-
-  @mock.patch('turbinia.client.get_turbinia_client')
-  @mock.patch('libcloudforensics.providers.gcp.forensics.CreateDiskCopy')
-  @mock.patch('argparse.ArgumentParser.parse_args')
-  def testUnequalCloudDiskArgs(self, mockParser, mock_copyDisk, _):
-    """Test unequal number of args for cloud disk evidence type."""
-    config.SHARED_FILESYSTEM = False
-    config.CLOUD_PROVIDER = 'GCP'
-    mockArgs = argparse.Namespace(
-        all_fields=False, command='googleclouddisk', config_file=None,
-        copy_only=False, debug=False, debug_tasks=False, decryption_keys=[],
-        disk_name=['disk1', 'disk2', 'disk3'], dump_json=False, embedded_path=[
-            'path1', 'path2', 'path3'
-        ], filter_patterns_file=None, force_evidence=False, jobs_allowlist=[],
-        jobs_denylist=[], log_file=None, mount_partition=None,
-        name=None, output_dir=None, poll_interval=60, project=[
-            'proj1', 'proj2', 'proj3'
-        ], quiet=False, recipe=None, recipe_path=None,
-        request_id=None, server=False, skip_recipe_validation=False, source=[
-            None
-        ], verbose=True, wait=False, yara_rules_file=None, zone=[
-            'zone1', 'zone2'
-        ], group_name=None, reason=None, all_args=None)
-    mockParser.return_value = mockArgs
-
-    # Fail when zones dont match
-    self.assertRaises(
-        TurbiniaException, turbiniactl.process_args, [
-            'googleclouddisk', '--disk_name', 'disk1,disk2,disk3', '--zone',
-            'zone1,zone2', '--project', 'proj1,proj2,proj3'
-        ])
-
-    # Fail when projects don't match
-    mockArgs.zone = ['zone1', 'zone2', 'zone3']
-    mockArgs.project = ['proj1', 'proj2']
-    mockParser.return_value = mockArgs
-    self.assertRaises(
-        TurbiniaException, turbiniactl.process_args, [
-            'googleclouddisk', '--disk_name', 'disk1,disk2,disk3', '--zone',
-            'zone1,zone2,zone3', '--project', 'proj1,proj2'
-        ])
-
-    #Fail when names dont match
-    mockArgs.project = ['proj1', 'proj2', 'proj3']
'proj3'] - mockArgs.name = ['name1', 'name2'] - mockParser.return_value = mockArgs - self.assertRaises( - TurbiniaException, turbiniactl.process_args, [ - 'googleclouddisk', '--disk_name', 'disk1,disk2,disk3', '--zone', - 'zone1,zone2,zone3', '--project', 'proj1,proj2,proj3', '--name', - 'name1,name2' - ]) - mockArgs.name = ['name1', 'name2', 'name3'] - mockArgs.source = ['source1', 'source2'] - self.assertRaises( - TurbiniaException, turbiniactl.process_args, [ - 'googleclouddisk', '--disk_name', 'disk1,disk2,disk3', '--zone', - 'zone1,zone2,zone3', '--project', 'proj1,proj2,proj3', '--source', - 'source1,source2' - ]) - - mockArgs.source = ['source1', 'source2', 'source3'] - turbiniactl.process_evidence = mock.MagicMock(return_value=None) - mock_copyDisk.return_value = compute.GoogleComputeDisk( - 'fake-proj', 'fake-zone', 'fake-disk-copy') - turbiniactl.process_args([ - 'googleclouddisk', '--disk_name', 'disk1,disk2,disk3', '--zone', - 'zone1,zone2,zone3', '--project', 'proj1,proj2,proj3' - ]) - self.assertTrue(turbiniactl.process_evidence.called) - - @mock.patch('turbinia.client.get_turbinia_client') - @mock.patch('libcloudforensics.providers.gcp.forensics.CreateDiskCopy') - def testUnequalCloudDiskEmbeddedArgs(self, mock_copyDisk, _): - """Test unequal number of args for cloud embedded disk evidence type.""" - # Fail when zones don't match - config.SHARED_FILESYSTEM = False - config.TASK_MANAGER = 'Celery' - config.CLOUD_PROVIDER = 'GCP' - self.assertRaises( - TurbiniaException, turbiniactl.process_args, [ - 'googleclouddiskembedded', '--disk_name', 'disk1,disk2,disk3', - '--zone', 'zone1,zone2', '--project', 'proj1,proj2,proj3', - '--embedded_path', 'path1,path2,path3' - ]) - - # Fail when embedded path don't match - self.assertRaises( - TurbiniaException, turbiniactl.process_args, [ - 'googleclouddiskembedded', '--disk_name', 'disk1,disk2,disk3', - '--zone', 'zone1,zone2,zone3', '--project', 'proj1,proj2,proj3', - '--embedded_path', 'path1,path2' - ]) - - # Fail when name don't match - self.assertRaises( - TurbiniaException, turbiniactl.process_args, [ - 'googleclouddiskembedded', '--disk_name', 'disk1,disk2', '--zone', - 'zone1,zone2,zone3', '--project', 'proj1,proj2,proj3', '--name', - 'name1,name2', '--embedded_path', 'path1,path2,path3' - ]) - # Fail when mount source don't match - self.assertRaises( - TurbiniaException, turbiniactl.process_args, [ - 'googleclouddiskembedded', '--disk_name', 'disk1,disk2,disk3', - '--zone', 'zone1,zone2,zone3', '--project', 'proj1,proj2,proj3', - '--source', 'source1,source2', '--embedded_path', - 'path1,path2,path3' - ]) - - # Fail when project don't match - self.assertRaises( - TurbiniaException, turbiniactl.process_args, [ - 'googleclouddiskembedded', '--disk_name', 'disk1,disk2,disk3', - '--zone', 'zone1,zone2,zone3', '--project', 'proj1,proj2', - '--source', 'source1,source2', '--embedded_path', - 'path1,path2,path3' - ]) - - # Pass when all the args match - turbiniactl.process_evidence = mock.MagicMock(return_value=None) - mock_copyDisk.return_value = compute.GoogleComputeDisk( - 'fake-proj', 'fake-zone', 'fake-disk-copy') - turbiniactl.process_args([ - 'googleclouddiskembedded', '--disk_name', 'disk1,disk2,disk3', '--zone', - 'zone1,zone2,zone3', '--project', 'proj1,proj2,proj3', - '--embedded_path', 'path1,path2,path3' - ]) - self.assertTrue(turbiniactl.process_evidence.called) - - # Raise error when running locally - config.CLOUD_PROVIDER = 'local' - with self.assertRaisesRegex(TurbiniaException, 'Cloud only'): - 
turbiniactl.process_args([ - 'googleclouddiskembedded', '--disk_name', 'disk1,disk2,disk3', - '--zone', 'zone1,zone2,zone3', '--project', 'proj1,proj2,proj3', - '--embedded_path', 'path1,path2,path3' - ]) - - @mock.patch('turbinia.client.get_turbinia_client') - def testUnequalRawMemoryArgs(self, _): - """Test unequal number of args for rawmemory evidence type.""" - self.assertRaises( - TurbiniaException, turbiniactl.process_args, [ - 'rawmemory', '--source_path', 'disk1,disk2,disk3', '--profile', - 'prof1,prof2,prof3,prof4', '--module_list', 'mock' - ]) - self.assertRaises( - TurbiniaException, turbiniactl.process_args, [ - 'rawmemory', '--source_path', 'disk1,disk2,disk3', '--profile', - 'prof1,prof2,prof3', '--module_list', 'mock', '--name', - 'name1,name2' - ]) - - turbiniactl.process_evidence = mock.MagicMock(return_value=None) - turbiniactl.process_args([ - 'rawmemory', '--source_path', 'disk1,disk2,disk3', '--profile', - 'prof1,prof2,prof3', '--module_list', 'mock' - ]) - self.assertTrue(turbiniactl.process_evidence.called) - - @mock.patch('turbinia.client.get_turbinia_client') - def testUnequalHindsightArgs(self, _): - """Test unequal number of args for hindsight evidence type.""" - self.assertRaises( - TurbiniaException, turbiniactl.process_args, [ - 'hindsight', '--source_path', 'disk1,disk2,disk3', '--format', - 'prof1,prof2,prof3', '--name', 'name1,name2' - ]) - self.assertRaises( - TurbiniaException, turbiniactl.process_args, [ - 'hindsight', '--source_path', 'disk1,disk2,disk3', '--format', - 'sqlite,sqlite,sqlite,sqlite' - ]) - self.assertRaises( - TurbiniaException, turbiniactl.process_args, [ - 'hindsight', '--source_path', 'disk1,disk2,disk3', '--format', - 'sqlite,sqlite,sqlite', '--browser_type', 'type1,type2' - ]) - - turbiniactl.process_evidence = mock.MagicMock(return_value=None) - turbiniactl.process_args( - ['hindsight', '--source_path', 'disk1,disk2,disk3']) - self.assertTrue(turbiniactl.process_evidence.called) - - @mock.patch('turbinia.client.get_turbinia_client') - def testTurbiniaClientRequest(self, mockClient): - """Test Turbinia client request creation.""" - config.TASK_MANAGER = 'celery' - mockClient.create_request = mock.MagicMock() - mockClient.create_request.return_value = TurbiniaRequest( - recipe=recipe_helpers.DEFAULT_RECIPE) - test_request = mockClient.create_request() - self.assertIsNotNone(test_request) - test_default_recipe = recipe_helpers.DEFAULT_RECIPE - self.assertEqual(test_request.recipe, test_default_recipe) + def testInvalidCommand(self): + """Test an invalid command.""" + args = argparse.Namespace(command='badCommand') + self.assertRaises((argparse.ArgumentError, SystemExit), + turbiniactl.process_args, [args.command]) + + @mock.patch('turbinia.worker.TurbiniaCeleryWorker') + def testCeleryWorkerCommand(self, mock_worker): + """Test CeleryWorker command.""" + args = argparse.Namespace(command='celeryworker') + turbiniactl.process_args([args.command]) + mock_worker.assert_called_once_with(jobs_denylist=[], jobs_allowlist=[]) + + @mock.patch('turbinia.config.ParseDependencies') + @mock.patch('turbinia.worker.TurbiniaCeleryWorker.start') + def testCeleryWorkerCommandStart(self, mock_worker, _): + """Test CeleryWorker start.""" + args = argparse.Namespace(command='celeryworker') + turbiniactl.process_args([args.command]) + mock_worker.assert_called_once_with() + + @mock.patch('turbinia.server.TurbiniaServer') + def testServerCommand(self, mock_server): + """Test Server command.""" + args = argparse.Namespace(command='server') + 
turbiniactl.process_args([args.command]) + mock_server.assert_called_once_with(jobs_denylist=[], jobs_allowlist=[]) + + @mock.patch('turbinia.task_manager.CeleryTaskManager._backend_setup') + @mock.patch('turbinia.server.TurbiniaServer.start') + def testServerCommandStart(self, mock_server, _): + """Test Server start.""" + args = argparse.Namespace(command='server') + turbiniactl.process_args([args.command]) + mock_server.assert_called_once_with() + + @mock.patch('turbinia.api.api_server.TurbiniaAPIServer') + def testAPIServerCommand(self, mock_api_server): + """Test API server command.""" + args = argparse.Namespace(command='api_server') + turbiniactl.process_args([args.command]) + mock_api_server.assert_called_once_with() + + @mock.patch('turbinia.api.api_server.TurbiniaAPIServer.start') + def testAPIServerCommandStart(self, mock_api_server): + """Test API server start.""" + args = argparse.Namespace(command='api_server') + turbiniactl.process_args([args.command]) + mock_api_server.assert_called_once_with('turbinia.api.api_server:app')