diff --git a/.gitignore b/.gitignore index 2cc4ab83..8ca24a8a 100644 --- a/.gitignore +++ b/.gitignore @@ -145,6 +145,9 @@ dmypy.json #Takajo binaries takajo takajo*.exe +takajo-* #Results files -*.txt \ No newline at end of file +*.txt +output/* +*.xlsx \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 58403f8c..c2b1c975 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,24 @@ # Changes +## v2.0.0 [2023/08/03] + +- Major update released at the [SANS DFIR Summit in Austin](https://www.sans.org/cyber-security-training-events/digital-forensics-summit-2023/). + +**New Features:** + +- `list-domains`: create a list of unique domains (input: JSONL, profile: standard) (@YamatoSecurity) +- `list-hashes`: create a list of process hashes to be used with vt-hash-lookup (input: JSONL, profile: standard) (@YamatoSecurity) +- `list-ip-addresses`: create a list of unique target and/or source IP addresses (input: JSONL, profile: standard) (@YamatoSecurity) +- `split-csv-timeline`: split up a large CSV file into smaller ones based on the computer name (input: non-multiline CSV, profile: any) (@YamatoSecurity) +- `split-json-timeline`: split up a large JSONL timeline into smaller ones based on the computer name (input: JSONL, profile: any) (@fukusuket) +- `stack-logons`: stack logons by target user, target computer, source IP address and source computer (input: JSONL, profile: standard) (@YamatoSecurity) +- `sysmon-process-tree`: output the process tree of a certain process (input: JSONL, profile: standard) (@hitenkoku) +- `timeline-logon`: create a CSV timeline of logon events (input: JSONL, profile: standard) (@YamatoSecurity) +- `timeline-suspicious-processes`: create a CSV timeline of suspicious processes (input: JSONL, profile: standard) (@YamatoSecurity) +- `vt-domain-lookup`: look up a list of domains on VirusTotal (input: text file) (@YamatoSecurity) +- `vt-hash-lookup`: look up a list of hashes on VirusTotal (input: text file) 
(@YamatoSecurity) +- `vt-ip-lookup`: look up a list of IP addresses on VirusTotal (input: text file) (@YamatoSecurity) + ## v1.0.0 [2022/10/28] - Official release at [Code Blue 2022 Bluebox](https://codeblue.jp/2022/en/talks/?content=talks_24). diff --git a/README-Japanese.md b/README-Japanese.md index d0e481a0..6a066426 100644 --- a/README-Japanese.md +++ b/README-Japanese.md @@ -7,18 +7,21 @@ --- -[tag-1]: https://img.shields.io/github/downloads/Yamato-Secuirty/takajo/total?label=GitHub%F0%9F%8E%AFDownloads&style=plastic -[tag-2]: https://img.shields.io/github/stars/Yamato-Security/takajo?style=plastic&label=GitHub%F0%9F%8E%AFStars -[tag-3]: https://img.shields.io/github/v/release/Yamato-Security/takajo?display_name=tag&label=latest-version&style=plastic -[tag-4]: https://img.shields.io/badge/Maintenance%20Level-Actively%20Developed-brightgreen.svg -[tag-5]: https://img.shields.io/badge/Twitter-00acee?logo=twitter&logoColor=white - -![tag-1] ![tag-2] ![tag-3] ![tag-4] ![tag-5] +

+ + + + + + + + +

## Takajoについて -Takajō (鷹匠)は 日本の[Yamato Security](https://yamatosecurity.connpass.com/)グループによって作られた [Hayabusa](https://github.com/Yamato-Security/hayabusa)から得られた結果を解析するツールです。Takajōは[Nim](https://nim-lang.org/)で作られました。 -Takajōは、日本語で["鷹狩りのスキルに優れた人"](https://en.wikipedia.org/wiki/Falconry)を意味し、ハヤブサが得た`結果をさらに活かすことから選ばれました。 +Takajō (鷹匠)は 日本の[Yamato Security](https://yamatosecurity.connpass.com/)グループによって作られた [Hayabusa](https://github.com/Yamato-Security/hayabusa)から得られた結果を解析するツールです。Takajōは[Nim](https://nim-lang.org/)で作られました。 +Takajōは、日本語で["鷹狩りのスキルに優れた人"](https://en.wikipedia.org/wiki/Falconry)を意味し、ハヤブサが得た結果をさらに活かすことから選ばれました。 # 関連プロジェクト @@ -29,97 +32,623 @@ Takajōは、日本語で["鷹狩りのスキルに優れた人"](https://en.wik * [WELA (Windows Event Log Analyzer)](https://github.com/Yamato-Security/WELA/blob/main/README-Japanese.md) - PowerShellで書かれたWindowsイベントログの解析ツール。 ## 目次 - - [関連プロジェクト](#関連プロジェクト) - [目次](#目次) - [機能](#機能) - - [作成予定機能](#作成予定機能) +- [ダウンロード](#ダウンロード) - [Gitクローン](#gitクローン) - - [アドバンス: ソースコードからのコンパイル(任意)](#アドバンス-ソースコードからのコンパイル任意) - - [使用方法](#使用方法) + - [アドバンス: ソースコードからのコンパイル(任意)](#アドバンス-ソースコードからのコンパイル任意) +- [コマンド一覧](#コマンド一覧) + - [Listコマンド](#listコマンド) + - [Splitコマンド](#splitコマンド) + - [Stackコマンド](#stackコマンド) + - [Sysmonコマンド](#sysmonコマンド) + - [Timelineコマンド](#timelineコマンド) + - [VirusTotalコマンド](#virustotalコマンド) +- [コマンド使用方法](#コマンド使用方法) + - [Listコマンド](#listコマンド-1) + - [`list-domains`コマンド](#list-domainsコマンド) + - [`list-domains`コマンドの使用例](#list-domainsコマンドの使用例) + - [`list-hashes`コマンド](#list-hashesコマンド) + - [`list-hashes`コマンドの使用例](#list-hashesコマンドの使用例) + - [`list-ip-addresses`コマンド](#list-ip-addressesコマンド) + - [`list-ip-addresses`コマンドの使用例](#list-ip-addressesコマンドの使用例) + - [`list-undetected-evtx`コマンド](#list-undetected-evtxコマンド) + - [`list-undetected-evtx`コマンドの使用例](#list-undetected-evtxコマンドの使用例) + - [`list-unused-rules`コマンド](#list-unused-rulesコマンド) + - [`list-unused-rules`コマンドの使用例](#list-unused-rulesコマンドの使用例) + - [Splitコマンド](#splitコマンド-1) + - [`split-csv-timeline`コマンド](#split-csv-timelineコマンド) + - 
[`split-csv-timeline`コマンドの使用例](#split-csv-timelineコマンドの使用例) + - [`split-json-timeline`コマンド](#split-json-timelineコマンド) + - [`split-json-timeline`コマンドの使用例](#split-json-timelineコマンドの使用例) + - [Stackコマンド](#stackコマンド-1) + - [`stack-logons`コマンド](#stack-logonsコマンド) + - [`stack-logons`コマンドの使用例](#stack-logonsコマンドの使用例) + - [Sysmonコマンド](#sysmonコマンド-1) + - [`sysmon-process-tree`コマンド](#sysmon-process-treeコマンド) + - [`sysmon-process-tree`コマンドの使用例](#sysmon-process-treeコマンドの使用例) + - [Timelineコマンド](#timelineコマンド-1) + - [`timeline-logon`コマンド](#timeline-logonコマンド) + - [`timeline-logon`コマンドの使用例](#timeline-logonコマンドの使用例) + - [`timeline-suspicious-processes`コマンド](#timeline-suspicious-processesコマンド) + - [`timeline-suspicious-processes`コマンドの使用例](#timeline-suspicious-processesコマンドの使用例) + - [VirusTotalコマンド](#virustotalコマンド-1) + - [`vt-domain-lookup`コマンド](#vt-domain-lookupコマンド) + - [`vt-domain-lookup`コマンドの使用例](#vt-domain-lookupコマンドの使用例) + - [`vt-hash-lookup`コマンド](#vt-hash-lookupコマンド) + - [`vt-hash-lookup`コマンドの使用例](#vt-hash-lookupコマンドの使用例) + - [`vt-ip-lookup`コマンド](#vt-ip-lookupコマンド) + - [`vt-ip-lookup`コマンドの使用例](#vt-ip-lookupコマンドの使用例) - [貢献](#貢献) - [バグの報告](#バグの報告) - [ライセンス](#ライセンス) - [Twitter](#twitter) ## 機能 +- Nimで開発され、プログラミングが簡単、メモリ安全、ネイティブCコードと同じくらい高速で、単一のスタンドアロンバイナリとして動作します。 +- ログオンイベント、疑わしいプロセスなどさまざまなタイムラインを作成します。 +- 不審なプロセスのプロセスツリーを出力します。 +- さまざまなスタッキング分析ができます。 +- CSVとJSONLのタイムラインを分割します。 +- VirusTotalの検索で使用するIPアドレス、ドメイン、ハッシュなどをリストアップします。 +- ドメイン、ハッシュ、IPアドレスをVirusTotalで検索します。 +- 検知されていない`.evtx` ファイルをリストアップします。 -- メモリセーフかつ、プログラムしやすいNimで作成することで、C言語と同じくらい早く、バイナリのクロスコンパイルが可能 -- `undetected-evtx`: Hayabusaのcsvの結果を比較して検知していない`.evtx`ファイルを一覧化 -- `unused-rules`: Hayabusaのcsvの結果(例: output.csv)を比較して検知していない`yml`ルールを一覧化 - -## 作成予定機能 +# ダウンロード -- 行動分析機能 -- 不審なプロセスツリーの表示機能 +[Releases](https://github.com/Yamato-Security/takajo/releases)ページからTakajōの安定したバージョンでコンパイルされたバイナリが含まれている最新版もしくはソースコードをダウンロードできます。 ## Gitクローン -以下のgit cloneコマンドでレポジトリをダウンロードし、ソースコードからコンパイルして使用することも可能です: +以下の`git 
clone`コマンドでレポジトリをダウンロードし、ソースコードからコンパイルして使用することも可能です: ->> 注意: mainブランチは開発中のバージョンです。まだ正式にリリースされていない新機能が使えるかもしれないが、バグがある可能性もあるので、テスト版だと思って下さい。 +>> 注意: mainブランチは開発中のバージョンです。まだ正式にリリースされていない新機能が使えるかもしれませんが、バグがある可能性もあるので、テスト版だと思って下さい。 `git clone https://github.com/Yamato-Security/takajo.git` -## アドバンス: ソースコードからのコンパイル(任意) +## アドバンス: ソースコードからのコンパイル(任意) -Nimがインストールされている場合、以下のコマンドでソースコードからコンパイルすることができます。 +Nimがインストールされている場合、以下のコマンドでソースコードからコンパイルできます: -```bash +``` > nimble update -> nimble build -d:release +> nimble build -d:release -d:ssl +``` + +# コマンド一覧 + +## Listコマンド +* `list-domains`: `vt-domain-lookup`コマンドで使用する、重複のないドメインのリストを作成する +* `list-hashes`: `vt-hash-lookup` で使用するプロセスのハッシュ値のリストを作成する +* `list-ip-addresses`: `vt-ip-lookup`コマンドで使用する、重複のない送信元/送信先のIPリストを作成する +* `list-undetected-evtx`: 検知されなかったevtxファイルのリストを作成する +* `list-unused-rules`: 検知されなかったルールのリストを作成する + +## Splitコマンド +* `split-csv-timeline`: コンピューター名に基づき、大きなCSVタイムラインを小さなCSVタイムラインに分割する +* `split-json-timeline`: コンピューター名に基づき、大きなJSONLタイムラインを小さなJSONLタイムラインに分割する + +## Stackコマンド +* `stack-logons`: ユーザー名、コンピューター名、送信元IPアドレス、送信元コンピューター名など、項目ごとの上位ログオンを出力する + +## Sysmonコマンド +* `sysmon-process-tree`: プロセスツリーを出力する + +## Timelineコマンド +* `timeline-logon`: ログオンイベントのCSVタイムラインを作成する +* `timeline-suspicious-processes`: 不審なプロセスのCSVタイムラインを作成する + +## VirusTotalコマンド +* `vt-domain-lookup`: VirusTotalでドメインのリストを検索し、悪意のあるドメインをレポートする +* `vt-hash-lookup`: VirusTotalでハッシュのリストを検索し、悪意のあるハッシュ値をレポートする +* `vt-ip-lookup`: VirusTotalでIPアドレスのリストを検索し、悪意のあるIPアドレスをレポートする + +# コマンド使用方法 + +## Listコマンド + +### `list-domains`コマンド + + +`vt-domain-lookup` で使用する重複のないドメインのリストを作成します。 +現在は、Sysmon EID 22ログでクエリが記録されたドメインのみをチェックしますが、ビルトインのWindows DNSクライアント・サーバーログも今後サポート予定です。 + +* 入力: `JSONL` +* プロファイル: `all-field-info` と`all-field-info-verbose` 以外すべて +* 出力: `テキストファイル` + +必須オプション: + +- `-o, --output `: 結果を保存するテキストファイル +- `-t, --timeline `: HayabusaのJSONLタイムライン + +任意オプション: + +- `-s, --includeSubdomains`: サブドメインを含めるか (デフォルト: `false`) +- `-w, --includeWorkstations`: 
ローカルワークステーション名を含めるか (デフォルト: `false`) +- `-q, --quiet`: ロゴを出力しない (デフォルト: `false`) + +#### `list-domains`コマンドの使用例 + +HayabusaでJSONLタイムラインを出力する: + +``` +hayabusa.exe json-timeline -d -L -o timeline.jsonl +``` + +結果をテキストファイルに保存する: + +``` +takajo.exe list-domains -t ../hayabusa/timeline.jsonl -o domains.txt +``` + +サブドメインを含める場合: + +``` +takajo.exe list-domains -t ../hayabusa/timeline.jsonl -o domains.txt -s ``` -コンパイルされたバイナリはtakajoフォルダ配下で作成されます。 +### `list-hashes`コマンド -### 使用方法 +`vt-hash-lookup`で使用するプロセスハッシュ値のリストを作成します (入力: JSONL, プロファイル: standard) -1. `help`: 各コマンドのヘルプメニューを表示する。 -2. `undetected-evtxes`: Hayabusaのルールで検知しなかったevtxファイルを一覧化する。 -Hayabusa実行時に`%EvtxFile%`の情報が含まれたプロファイルを使ってcsvを出力して下さい。プロファイルごとにHayabusaのcsvに出力される情報は異なります。詳細は[こちら](https://github.com/Yamato-Security/hayabusa#profiles)を確認して下さい。 +* 入力: `JSONL` +* プロファイル: `all-field-info` と `all-field-info-verbose`以外すべて +* 出力: `テキストファイル` 必須オプション: -- `-t, --timeline ../hayabusa/timeline.csv`: Hayabusaで作成されたCSVタイムライン。 -- `-e --evtx-dir ../hayabusa-sample-evtx`: Hayabusaでスキャンした`.evtx`ファイルが存在するディレクトリ。 +- `-t, --timeline `: HayabusaのJSONLタイムライン +- `-o, --output `: 結果を保存するベースファイル名 任意オプション: -- `-c, --column-name EvtxColumn`: カスタムなカラム名を指定する。デフォルトではHayabusaのデフォルトの`EvtxFile`が使用される。 -- `-o, --output result.txt`: 結果をテキストファイルに保存する。デフォルトは画面出力になる。 -- `-q, --quiet`: ロゴを表示しない。 +- `-l, --level`: 最小のアラートレベルを指定 (デフォルト: `high`) +- `-q, --quiet`: ロゴを出力しない (デフォルト: `false`) -例: +#### `list-hashes`コマンドの使用例 -```bash -takajo.exe undetected-evtx -t ../hayabusa/timeline.csv -e ../hayabusa-sample-evtx +HayabusaでJSONLタイムラインを作成する: + +``` +hayabusa.exe json-timeline -d -L -o timeline.jsonl +``` + +ハッシュタイプ毎に異なるファイルに結果を保存する: + +``` +takajo.exe list-hashes -t ../hayabusa/timeline.jsonl -o case-1 ``` +たとえば、`MD5`、`SHA1` 、`IMPHASH` がSysmonログに保存されている場合、 次のファルが作成されます: +`case-1-MD5-hashes.txt`, `case-1-SHA1-hashes.txt`, `case-1-ImportHashes.txt` -1. 
`unused-rules`: Hayabusaのスキャンで1件も検知しなかった`.yml`ファイルを一覧化する。 -Hayabusa実行時に`%RuleFile%`の情報が含まれたプロファイルを使ってcsvを出力して下さい。プロファイルごとにHayabusaのcsvに出力される情報は異なります。プロファイルごとにHayabusaのcsvに出力される情報は異なります。詳細は[こちら](https://github.com/Yamato-Security/hayabusa#profiles)を確認して下さい。 +### `list-ip-addresses`コマンド + + +`vt-ip-lookup`で使用する重複のない送信先/送信先IPアドレスのリストを作成します。すべての結果から送信先IPアドレスの`TgtIP`フィールドと送信元IPアドレスの `SrcIP`フィールドが抽出され、重複のないIPアドレスをテキストファイルに出力します。 + +* 入力: `JSONL` +* プロファイル: `all-field-info` と `all-field-info-verbose` 以外すべて +* 出力: `テキストファイル` 必須オプション: -- -t, --timeline timeline.csv: Hayabusaで作成されたCSVタイムライン。 -- -r --rules-dir ../hayabusa/rules: Hayabusaでスキャンした`.yml`ファイルが存在するディレクトリ。 +- `-o, --output `: 結果を保存するテキストファイル +- `-t, --timeline `: HayabusaのJSONLタイムライン 任意オプション: -- `-c, --column-name CustomRuleFileColumn`: カスタムなカラム名を指定する。デフォルトではHayabusaのデフォルトの`RuleFile`が使用される。 -- `-o, --output result.txt`: 結果をテキストファイルに保存する。デフォルトは画面出力になる。 -- `-q, --quiet`: ロゴを表示しない。 +- `-i, --inbound`: インバウンドトラフィックを含めるか (デフォルト: `true`) +- `-O, --outbound`: アウトバウンドトラフィックを含めるか (デフォルト: `true`) +- `-p, --privateIp`: プライベートIPアドレスを含めるか (デフォルト: `false`) +- `-q, --quiet`: ロゴを出力しない (デフォルト: `false`) + +#### `list-ip-addresses`コマンドの使用例 + +HayabusaでJSONLタイムラインを作成する: + +``` +hayabusa.exe json-timeline -d -L -o timeline.jsonl +``` + +結果をテキストファイルに保存する: + +``` +takajo.exe list-ip-addresses -t ../hayabusa/timeline.jsonl -o ipAddresses.txt +``` + +インバウンドトラフィックを除外する: + +``` +takajo.exe list-ip-addresses -t ../hayabusa/timeline.jsonl -o ipAddresses.txt -i=false +``` + +プライベートIPアドレスを含める: + +``` +takajo.exe list-ip-addresses -t ../hayabusa/timeline.jsonl -o ipAddresses.txt -p +``` + +### `list-undetected-evtx`コマンド +Hayabusaで検知するルールがなかったすべての`.evtx`ファイルをリストアップします。これは、[hayabusa-sample-evtx](https://github.com/Yamato-Security/hayabusa-evtx)リポジトリ内のevtxファイルなど、悪意のあるアクティビティの証拠を含むすべてのevtxファイルをリストアップすることを目的としています + +* 入力: `CSV` +* プロファイル: `verbose`, `all-field-info-verbose`, `super-verbose`, `timesketch-verbose` + > 
まず、`%EvtxFile%`を出力するプロファイルを使用し、Hayabusaを実行、結果をCSVタイムラインに保存する必要があります + > [こちら](https://github.com/Yamato-Security/hayabusa#profiles)でHayabusaがプロファイルに従って、どのカラムを保存するかを確認できます。 +* 出力: `標準出力 または テキストファイル` + +必須オプション: -例: +- `-e, --evtx-dir `: Hayabusaでスキャンした`.evtx` ファイルのディレクトリ +- `-t, --timeline `: HayabusaのCSVタイムライン -```bash -takajo.exe unused-rules -t ../hayabusa/timeline.csv -r ../hayabusa/rules +任意オプション: + +- `-c, --column-name `: evtxのカラム名を指定 (デフォルト: Hayabusaの規定値の`EvtxFile`) +- `-o, --output `: 結果を保存するテキストファイル (デフォルト: 標準出力) +- `-q, --quiet`: ロゴを出力しない (デフォルト: `false`) + +#### `list-undetected-evtx`コマンドの使用例 + +HayabusaでCSVタイムラインを出力する: + +``` +hayabusa.exe -d -p verbose -o timeline.csv +``` + +結果を標準出力に表示する: + +``` +takajo.exe list-undetected-evtx -t ../hayabusa/timeline.csv -e +``` + +結果をテキストファイルに保存する: + +``` +takajo.exe list-undetected-evtx -t ../hayabusa/timeline.csv -e -o undetected-evtx.txt +``` + +### `list-unused-rules`コマンド + +何も検出されなかったすべての`.yml`ルールをリストアップします。これは、ルールの信頼性を判断するのに役立ちます。つまり、どのルールが悪意のあるアクティビティを検出するか、またどのルールがまだテストされておらずサンプル`.evtx`ファイルが必要かの判断に使えます。 + +* 入力: `CSV` +* プロファイル: `verbose`, `all-field-info-verbose`, `super-verbose`, `timesketch-verbose` + > まず、`%RuleFile%`を出力するプロファイルを使用し、Hayabusaを実行、結果をCSVタイムラインに保存する必要があります + > [こちら](https://github.com/Yamato-Security/hayabusa#profiles)でHayabusaがプロファイルに従って、どのカラムを保存するかを確認できます。 +* 出力: `標準出力 または テキストファイル` + +必須オプション: + +- `-r, --rules-dir `: Hayabusaで使用した `.yml` ルールファイルのディレクトリ +- `-t, --timeline `: HayabusaのCSVタイムライン + +任意オプション: + +- `-c, --column-name `: ルールファイルのカラム名を指定 (デフォルト: Hayabusaの規定値の`RuleFile`) +- `-o, --output `: 結果を保存するテキストファイル (デフォルト: 標準出力) +- `-q, --quiet`: ロゴを出力しない (デフォルト: `false`) + +#### `list-unused-rules`コマンドの使用例 + +HayabusaでCSVタイムラインを出力する: + +``` +hayabusa.exe csv-timeline -d -p verbose -o timeline.csv +``` + +結果を標準出力に表示する: + +``` +takajo.exe list-unused-rules -t ../hayabusa/timeline.csv -r ../hayabusa/rules +``` + +結果をテキストファイルに保存する: + +``` +takajo.exe list-unused-rules -t 
../hayabusa/timeline.csv -r ../hayabusa/rules -o unused-rules.txt +``` + +## Splitコマンド + +### `split-csv-timeline`コマンド + +コンピューター名に基づき、大きなCSVタイムラインを小さなCSVタイムラインに分割します。 + +* 入力: `複数行モード(-M)でないCSV` +* プロファイル: `すべて` +* 出力: `複数のCSV` + +必須オプション: + +- `-t, --timeline `: HayabusaのCSVタイムライン + +任意オプション: + +- `-m, --makeMultiline`: フィールドを複数行で出力する (デフォルト: `false`) +- `-o, --output `: CSVを保存するディレクトリ (デフォルト: `output`) +- `-q, --quiet`: ロゴを出力しない (デフォルト: `false`) + +#### `split-csv-timeline`コマンドの使用例 + +HayabusaでCSVタイムラインを出力する: + +``` +hayabusa.exe csv-timeline -d -o timeline.csv +``` + +1つのCSVタイムラインを複数のCSVタイムラインに分割して `output` ディレクトリに出力: + +``` +takajo.exe split-csv-timeline -t ../hayabusa/timeline.csv +``` + +フィールドを改行文字で区切って複数行のエントリを作成し、`case-1-csv`ディレクトリに保存: + +``` +takajo.exe split-csv-timeline -t ../hayabusa/timeline.csv -m -o case-1-csv +``` + +### `split-json-timeline`コマンド + +コンピューター名に基づき、大きなJSONLタイムラインを小さなJSONLタイムラインに分割します。 + +* 入力: `JSONL` +* プロファイル: `すべて` +* 出力: `複数のJSONL` + +必須オプション: + +- `-t, --timeline `: HayabusaのJSONLタイムライン + +任意オプション: + +- `-o, --output `: JSONLを保存するディレクトリ (デフォルト: `output`) +- `-q, --quiet`: ロゴを出力しない (デフォルト: `false`) + +#### `split-json-timeline`コマンドの使用例 + +HayabusaでJSONLタイムラインを作成する: + +``` +hayabusa.exe json-timeline -d -L -o timeline.jsonl +``` + +1つのJSONLタイムラインを複数のJSONLタイムラインに分割して `output` ディレクトリに出力: + +``` +takajo.exe split-json-timeline -t ../hayabusa/timeline.jsonl +``` + +`case-1-jsonl` ディレクトリに保存: + +``` +takajo.exe split-json-timeline -t ../hayabusa/timeline.jsonl -o case-1-jsonl +``` + +## Stackコマンド + +### `stack-logons`コマンド + +ログインしている上位アカウントのリストを作成します (入力: JSONL, プロファイル: standard) +まだ実装されていません。 + +#### `stack-logons`コマンドの使用例 + +``` +takajo.exe stack-remote-logons -t ../hayabusa/timeline.jsonl +``` + +## Sysmonコマンド + +### `sysmon-process-tree`コマンド + +不審なプロセスや悪意のあるプロセスなど、特定のプロセスのプロセスツリーを出力します。 + +* 入力: `JSONL` +* プロファイル: `all-field-info` と `all-field-info-verbose` 以外すべて +* 出力: `テキストファイル` + +必須オプション: + +- `-o, --output `: 結果を保存するテキストファイル +- 
`-p, --processGuid `: SysmonのプロセスGUID +- `-t, --timeline `: HayabusaのJSONLタイムライン + +任意オプション: + +- `-q, --quiet`: ロゴを出力しない (デフォルト: `false`) + +#### `sysmon-process-tree`コマンドの使用例 + +HayabusaでJSONLタイムラインを作成する: + +``` +hayabusa.exe json-timeline -d -L -o timeline.jsonl +``` + +結果をテキストファイルに保存する: + +``` +takajo.exe sysmon-process-tree -t ../hayabusa/timeline.jsonl -p "365ABB72-3D4A-5CEB-0000-0010FA93FD00" -o process-tree.txt +``` + +## Timelineコマンド + +### `timeline-logon`コマンド + +このコマンドは、次のログオンイベントから情報を抽出し、フィールドを正規化し、結果をCSVファイルに保存します: + +- `4624` - ログオン成功 +- `4625` - ログオン失敗 +- `4634` - アカウントログオフ +- `4647` - ユーザーが開始したログオフ +- `4648` - 明示的なログオン +- `4672` - 特権ログオン + +これにより、ラテラルムーブメント、パスワードスプレー、権限昇格などを検出しやすくなります。 + +* 入力: `JSONL` +* プロファイル: `all-field-info` と `all-field-info-verbose` 以外すべて +* 出力: `CSV` + +必須オプション: + +- `-o, --output `: 結果を保存するCSVファイル +- `-t, --timeline `: HayabusaのJSONLタイムライン + +任意オプション: + +- `-c, --calculateElapsedTime`: 成功ログオンの経過時間を計算する (デフォルト: `true`) +- `-l, --outputLogoffEvents`: ログオフイベントを別のエントリとして出力する (デフォルト: `false`) +- `-a, --outputAdminLogonEvents`: 管理者ログオン イベントを別のエントリとして出力する (デフォルト: `false`) +- `-q, --quiet`: ロゴを出力しない (デフォルト: `false`) + +#### `timeline-logon`コマンドの使用例 + +HayabusaでJSONLタイムラインを作成する: + +``` +hayabusa.exe json-timeline -d -L -o timeline.jsonl +``` + +ログオンタイムラインをCSVに保存する: + +``` +takajo.exe timeline-logon -t ../hayabusa/timeline.jsonl -o logon-timeline.csv +``` + +### `timeline-suspicious-processes`コマンド + +不審なプロセスのCSVタイムラインを作成する + +* 入力: `JSONL` +* プロファイル: `all-field-info` と `all-field-info-verbose` 以外すべて +* 出力: `CSV` + +必須オプション: + +- `-t, --timeline `: HayabusaのJSONLタイムライン + +任意オプション: + +- `-l, --level `: 最小のアラートレベルを指定 (デフォルト: `high`) +- `-o, --output `: 結果を保存するCSVファイル +- `-q, --quiet`: ロゴを出力しない (デフォルト: `false`) + +#### `timeline-suspicious-processes`コマンドの使用例 + +HayabusaでJSONLタイムラインを作成する: + +``` +hayabusa.exe json-timeline -d -L -o timeline.jsonl +``` + +アラートレベルが`high`以上のプロセスを検索し、結果を標準出力に表示: + +``` +takajo.exe timeline-suspicious-process 
-t ../hayabusa/timeline.jsonl +``` + +アラートレベルが`low`以上のプロセスを検索し、結果を標準出力に表示: + +``` +takajo.exe timeline-suspicious-process -t ../hayabusa/timeline.jsonl -l low +``` + +結果をCSVに保存: + +``` +takajo.exe timeline-suspicious-process -t ../hayabusa/timeline.jsonl -o suspicous-processes.csv +``` + +## VirusTotalコマンド + +### `vt-domain-lookup`コマンド + +VirusTotalでドメインのリストを検索します。 + +* 入力: `テキストファイル` +* 出力: `CSV` + +必須オプション: + +- `-a, --apiKey `: VirusTotalのAPIキー +- `-d, --domainList `: ドメイン一覧のテキストファイル +- `-o, --output `: 結果を保存するCSV + +任意オプション: + +- `-j, --jsonOutput `: VirusTotalからのすべてのJSONレスポンスを出力するJSONファイル +- `-r, --rateLimit `: 1分間に送るリクエスト数レート制限 (デフォルト: `4`) +- `-q, --quiet`: ロゴを出力しない (デフォルト: `false`) + +#### `vt-domain-lookup`コマンドの使用例 + +はじめに、`list-domains`コマンドでドメイン一覧を作成し、その後 +次のコマンドを使用してそれらのドメインを検索します: + +``` +takajo.exe vt-domain-lookup -a -d domains.txt -o vt-domain-lookup.csv -r 1000 -j vt-domain-lookup.json +``` + +### `vt-hash-lookup`コマンド + +VirusTotalでハッシュのリストを検索します。 + +* 入力: `テキストファイル` +* 出力: `CSV` + +必須オプション: + +- `-a, --apiKey `: VirusTotalのAPIキー +- `-H, --hashList `: ハッシュ値一覧のテキスト +- `-o, --output `: 結果を保存するCSV + +任意オプション: + +- `-j, --jsonOutput `: VirusTotalからのすべてのJSONレスポンスを出力するJSONファイル +- `-r, --rateLimit `: 1分間に送るリクエスト数レート制限 (デフォルト: `4`) +- `-q, --quiet`: ロゴを出力しない (デフォルト: `false`) + +#### `vt-hash-lookup`コマンドの使用例 + +``` +takajo.exe vt-hash-lookup -a -H MD5-hashes.txt -o vt-hash-lookup.csv -r 1000 -j vt-hash-lookup.json +``` + +### `vt-ip-lookup`コマンド + +VirusTotalでIPアドレスのリストを検索します。 + +* 入力: `テキストファイル` +* 出力: `CSV` + +必須オプション: + +- `-a, --apiKey `: VirusTotalのAPIキー +- `-i, --ipList `: IPアドレスのテキストファイル +- `-o, --output `: 結果を保存するCSV + +任意オプション: + +- `-j, --jsonOutput `: VirusTotalからのすべてのJSONレスポンスを出力するJSONファイル +- `-r, --rateLimit `: 1分間に送るリクエスト数レート制限 (デフォルト: `4`) +- `-q, --quiet`: ロゴを表示しない (デフォルト: `false`) + +#### `vt-ip-lookup`コマンドの使用例 + +``` +takajo.exe vt-ip-lookup -a -i ipAddresses.txt -o vt-ip-lookup.csv -r 1000 -j vt-ip-lookup.json ``` ## 貢献 
どのような形でも構いませんので、ご協力をお願いします。プルリクエスト、ルール作成、evtxログのサンプルなどがベストですが、機能リクエスト、バグの通知なども大歓迎です。 -少なくとも、私たちのツールを気に入っていただけたなら、Githubで星を付けて、あなたのサポートを表明してください。 +少なくとも、私たちのツールを気に入っていただけたなら、GitHubで星を付けて、あなたのサポートを表明してください。 ## バグの報告 diff --git a/README.md b/README.md index 3eed17bc..d62cef4b 100644 --- a/README.md +++ b/README.md @@ -7,17 +7,20 @@ --- -[tag-1]: https://img.shields.io/github/downloads/Yamato-Secuirty/takajo/total?label=GitHub%F0%9F%8E%AFDownloads&style=plastic -[tag-2]: https://img.shields.io/github/stars/Yamato-Security/takajo?style=plastic&label=GitHub%F0%9F%8E%AFStars -[tag-3]: https://img.shields.io/github/v/release/Yamato-Security/takajo?display_name=tag&label=latest-version&style=plastic -[tag-4]: https://img.shields.io/badge/Maintenance%20Level-Actively%20Developed-brightgreen.svg -[tag-5]: https://img.shields.io/badge/Twitter-00acee?logo=twitter&logoColor=white - -![tag-1] ![tag-2] ![tag-3] ![tag-4] ![tag-5] +

+ + + + + + + + +

## About Takajō -Takajō (鷹匠), created by [Yamato Security](https://github.com/Yamato-Security), is an analyzer for [Hayabusa](https://github.com/Yamato-Security/hayabusa) results written in [Nim](https://nim-lang.org/). +Takajō (鷹匠), created by [Yamato Security](https://github.com/Yamato-Security), is a fast forensics analyzer for [Hayabusa](https://github.com/Yamato-Security/hayabusa) results written in [Nim](https://nim-lang.org/). Takajō means ["Falconer"](https://en.wikipedia.org/wiki/Falconry) in Japanese and was chosen as Hayabusa's catches (results) can be put to even better use. # Companion Projects @@ -33,10 +36,51 @@ Takajō means ["Falconer"](https://en.wikipedia.org/wiki/Falconry) in Japanese a - [Companion Projects](#companion-projects) - [Table of Contents](#table-of-contents) - [Features](#features) - - [Planned features](#planned-features) +- [Downloads](#downloads) - [Git cloning](#git-cloning) - [Advanced: Compiling From Source (Optional)](#advanced-compiling-from-source-optional) - - [Usage](#usage) +- [Command List](#command-list) + - [List Commands](#list-commands) + - [Split Commands](#split-commands) + - [Stack Commands](#stack-commands) + - [Sysmon Commands](#sysmon-commands) + - [Timeline Commands](#timeline-commands) + - [VirusTotal Commands](#virustotal-commands) +- [Command Usage](#command-usage) + - [List Commands](#list-commands-1) + - [`list-domains` command](#list-domains-command) + - [`list-domains` command examples](#list-domains-command-examples) + - [`list-hashes` command](#list-hashes-command) + - [`list-hashes` command examples](#list-hashes-command-examples) + - [`list-ip-addresses` command](#list-ip-addresses-command) + - [`list-ip-addresses` command examples](#list-ip-addresses-command-examples) + - [`list-undetected-evtx` command](#list-undetected-evtx-command) + - [`list-undetected-evtx` command examples](#list-undetected-evtx-command-examples) + - [`list-unused-rules` command](#list-unused-rules-command) + - 
[`list-unused-rules` command examples](#list-unused-rules-command-examples) + - [Split Commands](#split-commands-1) + - [`split-csv-timeline` command](#split-csv-timeline-command) + - [`split-csv-timeline` command examples](#split-csv-timeline-command-examples) + - [`split-json-timeline` command](#split-json-timeline-command) + - [`split-json-timeline` command examples](#split-json-timeline-command-examples) + - [Stack Commands](#stack-commands-1) + - [`stack-logons` command](#stack-logons-command) + - [`stack-logons` command examples](#stack-logons-command-examples) + - [Sysmon Commands](#sysmon-commands-1) + - [`sysmon-process-tree` command](#sysmon-process-tree-command) + - [`sysmon-process-tree` command examples](#sysmon-process-tree-command-examples) + - [Timeline Commands](#timeline-commands-1) + - [`timeline-logon` command](#timeline-logon-command) + - [`timeline-logon` command examples](#timeline-logon-command-examples) + - [`timeline-suspicious-processes` command](#timeline-suspicious-processes-command) + - [`timeline-suspicious-processes` command examples](#timeline-suspicious-processes-command-examples) + - [VirusTotal Commands](#virustotal-commands-1) + - [`vt-domain-lookup` command](#vt-domain-lookup-command) + - [`vt-domain-lookup` command examples](#vt-domain-lookup-command-examples) + - [`vt-hash-lookup` command](#vt-hash-lookup-command) + - [`vt-hash-lookup` command examples](#vt-hash-lookup-command-examples) + - [`vt-ip-lookup` command](#vt-ip-lookup-command) + - [`vt-ip-lookup` command examples](#vt-ip-lookup-command-examples) - [Contribution](#contribution) - [Bug Submission](#bug-submission) - [License](#license) @@ -44,14 +88,18 @@ Takajō means ["Falconer"](https://en.wikipedia.org/wiki/Falconry) in Japanese a ## Features -- Written in Nim so it is very easy to program, memory safe, almost as fast as native C code and works as a single standalone binary on any OS. 
-- `undetected-evtx`: List up all of the `.evtx` files that Hayabusa didn't have a detection rule for. This is meant to be used on sample evtx files that all contain evidence of malicious activity such as the sample evtx files in the [hayabusa-sample-evtx](https://github.com/Yamato-Security/hayabusa-evtx) repository. -- `unused-rules`: List up all of the `.yml` detection rules that were not used. This is useful for finding out which rules are currently not proven to work and that need sample evtx files. +- Written in Nim so it is very easy to program, memory safe, as fast as native C code and works as a single standalone binary on any OS. +- Create a timeline of all of the various logon events, suspicious processes, etc... +- Print the process trees of a malicious processes. +- Various stacking analysis. +- Split up CSV and JSONL timelines. +- List up IP addresses, domains, hashes etc... to be used with VirusTotal lookups +- VirusTotal lookups of domains, hashes and IP addresses. +- List up `.evtx` files that cannot be detected yet. -## Planned features +# Downloads -- Behavior analysis -- Malicious process tree visualization +Please download the latest stable version of Takajo with compiled binaries or compile the source code from the [Releases](https://github.com/Yamato-Security/takajo/releases) page. 
## Git cloning @@ -65,54 +113,562 @@ You can git clone the repository with the following command and compile binary f If you have Nim installed, you can compile from source with the following command: -```bash +``` > nimble update -> nimble build -d:release +> nimble build -d:release -d:ssl +``` + +# Command List + +## List Commands +* `list-domains`: create a list of unique domains to be used with `vt-domain-lookup` +* `list-hashes`: create a list of process hashes to be used with `vt-hash-lookup` +* `list-ip-addresses`: create a list of unique target and/or source IP addresses to be used with `vt-ip-lookup` +* `list-undetected-evtx`: create a list of undetected evtx files +* `list-unused-rules`: create a list of unused detection rules + +## Split Commands +* `split-csv-timeline`: split up a large CSV timeline into smaller ones based on the computer name +* `split-json-timeline`: split up a large JSONL timeline into smaller ones based on the computer name + +## Stack Commands +* `stack-logons`: stack logons by target user, target computer, source IP address and source computer + +## Sysmon Commands +* `sysmon-process-tree`: output the process tree of a certain process + +## Timeline Commands +* `timeline-logon`: create a CSV timeline of logon events +* `timeline-suspicious-processes`: create a CSV timeline of suspicious processes + +## VirusTotal Commands +* `vt-domain-lookup`: look up a list of domains on VirusTotal and report on malicious ones +* `vt-hash-lookup`: look up a list of hashes on VirusTotal and report on malicious ones +* `vt-ip-lookup`: look up a list of IP addresses on VirusTotal and report on malicious ones + +# Command Usage + +## List Commands + +### `list-domains` command + +Creates a list of unique domains to be used with `vt-domain-lookup`. +Currently it will only check queried domains in Sysmon EID 22 logs but will be updated to support built-in Windows DNS Client and Server logs. 
+ +* Input: `JSONL` +* Profile: Any besides `all-field-info` and `all-field-info-verbose` +* Output: `Text file` + +Required options: + +- `-o, --output `: save results to a text file. +- `-t, --timeline `: Hayabusa JSONL timeline. + +Options: + +- `-s, --includeSubdomains`: include subdomains. (default: `false`) +- `-w, --includeWorkstations`: include local workstation names. (default: `false`) +- `-q, --quiet`: do not display logo. (default: `false`) + +#### `list-domains` command examples + +Prepare the JSONL timeline with Hayabusa: + +``` +hayabusa.exe json-timeline -d -L -o timeline.jsonl +``` + +Save the results to a text file: + +``` +takajo.exe list-domains -t ../hayabusa/timeline.jsonl -o domains.txt +``` + +Include subdomains: + +``` +takajo.exe list-domains -t ../hayabusa/timeline.jsonl -o domains.txt -s +``` + +### `list-hashes` command + +Create a list of process hashes to be used with vt-hash-lookup (input: JSONL, profile: standard) + +* Input: `JSONL` +* Profile: Any besides `all-field-info` and `all-field-info-verbose` +* Output: `Text file` + +Required options: + +- `-t, --timeline `: JSONL timeline created by Hayabusa. +- `-o, --output `: specify the base name to save the text results to. + +Options: + +- `-l, --level`: specify the minimum level. (default: `high`) +- `-q, --quiet`: do not display logo. (default: `false`) + +#### `list-hashes` command examples + +Prepare JSONL timeline with Hayabusa: + +``` +hayabusa.exe json-timeline -d -L -o timeline.jsonl +``` + +Save the results to a different text file for each hash type: + +``` +takajo.exe list-hashes -t ../hayabusa/timeline.jsonl -o case-1 +``` + +For example, if `MD5`, `SHA1` and `IMPHASH` hashes are stored in the sysmon logs, then the following files will be created: `case-1-MD5-hashes.txt`, `case-1-SHA1-hashes.txt`, `case-1-ImportHashes.txt` + +### `list-ip-addresses` command + +Creates a list of unique target and/or source IP addresses to be used with `vt-ip-lookup`. 
+It will extract the `TgtIP` fields for target IP addresses and `SrcIP` fields for source IP addresses in all results and output just the unique IP addresses to a text file. + +* Input: `JSONL` +* Profile: Any besides `all-field-info` and `all-field-info-verbose` +* Output: `Text file` + +Required options: + +- `-o, --output `: save results to a text file. +- `-t, --timeline `: Hayabusa JSONL timeline. + +Options: + +- `-i, --inbound`: include inbound traffic. (default: `true`) +- `-O, --outbound`: include outbound traffic. (default: `true`) +- `-p, --privateIp`: include private IP addresses (default: `false`) +- `-q, --quiet`: do not display logo. (default: `false`) + +#### `list-ip-addresses` command examples + +Prepare the JSONL timeline with Hayabusa: + +``` +hayabusa.exe json-timeline -d -L -o timeline.jsonl +``` + +Save the results to a text file: + +``` +takajo.exe list-ip-addresses -t ../hayabusa/timeline.jsonl -o ipAddresses.txt +``` + +Exclude inbound traffic: + +``` +takajo.exe list-ip-addresses -t ../hayabusa/timeline.jsonl -o ipAddresses.txt -i=false +``` + +Include private IP addresses: + +``` +takajo.exe list-ip-addresses -t ../hayabusa/timeline.jsonl -o ipAddresses.txt -p +``` + +### `list-undetected-evtx` command + +List up all of the `.evtx` files that Hayabusa didn't have a detection rule for. +This is meant to be used on sample evtx files that all contain evidence of malicious activity such as the sample evtx files in the [hayabusa-sample-evtx](https://github.com/Yamato-Security/hayabusa-evtx) repository. + +* Input: `CSV` +* Profile: `verbose`, `all-field-info-verbose`, `super-verbose`, `timesketch-verbose` + > You first need to run Hayabusa with a profile that saves the `%EvtxFile%` column information and save the results to a CSV timeline. + > You can see which columns Hayabusa saves according to the different profiles [here](https://github.com/Yamato-Security/hayabusa#profiles). 
+* Output: `Standard output or text file` + +Required options: + +- `-e, --evtx-dir `: The directory of `.evtx` files you scanned with Hayabusa. +- `-t, --timeline `: Hayabusa CSV timeline. + +Options: + +- `-c, --column-name `: specify a custom column name for the evtx column. (default: Hayabusa's default of `EvtxFile`) +- `-o, --output `: save the results to a text file. (default: output to screen) +- `-q, --quiet`: do not display logo. (default: `false`) + +#### `list-undetected-evtx` command examples + +Prepare the CSV timeline with Hayabusa: + +``` +hayabusa.exe -d -p verbose -o timeline.csv +``` + +Output the results to screen: + +``` +takajo.exe list-undetected-evtx -t ../hayabusa/timeline.csv -e +``` + +Save the results to a text file: + +``` +takajo.exe list-undetected-evtx -t ../hayabusa/timeline.csv -e -o undetected-evtx.txt +``` + +### `list-unused-rules` command + +List up all of the `.yml` detection rules that did not detect anything. +This is useful to help determine the reliability of rules. +That is, which rules are known to find malicious activity and which are still untested and need sample `.evtx` files. + +* Input: `CSV` +* Profile: `verbose`, `all-field-info-verbose`, `super-verbose`, `timesketch-verbose` + > You first need to run Hayabusa with a profile that saves the `%RuleFile%` column information and save the results to a CSV timeline. + > You can see which columns Hayabusa saves according to the different profiles [here](https://github.com/Yamato-Security/hayabusa#profiles). +* Output: `Standard output or text file` + +Required options: + +- `-r, --rules-dir `: the directory of `.yml` rules files you used with Hayabusa. +- `-t, --timeline `: CSV timeline created by Hayabusa. + +Options: + +- `-c, --column-name `: specify a custom column name for the rule file column. (default: Hayabusa's default of `RuleFile`) +- `-o, --output `: save the results to a text file. (default: output to screen) +- `-q, --quiet`: do not display logo. 
(default: `false`) + +#### `list-unused-rules` command examples + +Prepare the CSV timeline with Hayabusa: + +``` +hayabusa.exe csv-timeline -d -p verbose -o timeline.csv +``` + +Output the results to screen: + +``` +takajo.exe list-unused-rules -t ../hayabusa/timeline.csv -r ../hayabusa/rules +``` + +Save the results to a text file: + +``` +takajo.exe list-unused-rules -t ../hayabusa/timeline.csv -r ../hayabusa/rules -o unused-rules.txt +``` + +## Split Commands + +### `split-csv-timeline` command + +Split up a large CSV timeline into smaller ones based on the computer name. + +* Input: `non-multiline CSV` +* Profile: `Any` +* Output: `Multiple CSV files` + +Required options: + +- `-t, --timeline `: CSV timeline created by Hayabusa. + +Options: + +- `-m, --makeMultiline`: output fields in multiple lines. (default: `false`) +- `-o, --output `: directory to save the CSV files to. (default: `output`) +- `-q, --quiet`: do not display logo. (default: `false`) + +#### `split-csv-timeline` command examples + +Prepare the CSV timeline with Hayabusa: + ``` +hayabusa.exe csv-timeline -d -o timeline.csv +``` + +Split the single CSV timeline into multiple CSV timelines in the default `output` directory: + +``` +takajo.exe split-csv-timeline -t ../hayabusa/timeline.csv +``` + +Separate field information with newline characters to make multi-line entries and save to the `case-1-csv` directory: + +``` +takajo.exe split-csv-timeline -t ../hayabusa/timeline.csv -m -o case-1-csv +``` + +### `split-json-timeline` command + +Split up a large JSONL timeline into smaller ones based on the computer name. + +* Input: `JSONL` +* Profile: `Any` +* Output: `Multiple JSONL files` + +Required options: + +- `-t, --timeline `: Hayabusa JSONL timeline. + +Options: + +- `-o, --output `: directory to save the JSONL files to. (default: `output`) +- `-q, --quiet`: do not display logo. 
+ (default: `false`) + +#### `split-json-timeline` command examples + +Prepare the JSONL timeline with Hayabusa: + +``` +hayabusa.exe json-timeline -d -L -o timeline.jsonl +``` + +Split the single JSONL timeline into multiple JSONL timelines in the default `output` directory: + +``` +takajo.exe split-json-timeline -t ../hayabusa/timeline.jsonl +``` + +Save to the `case-1-jsonl` directory: + +``` +takajo.exe split-json-timeline -t ../hayabusa/timeline.jsonl -o case-1-jsonl +``` + +## Stack Commands + +### `stack-logons` command + +Creates a list of logons according to `Target User`, `Target Computer`, `Logon Type`, `Source IP Address`, `Source Computer`. +Results are filtered out when the source IP address is a local IP address by default. + +* Input: `JSONL` +* Profile: Any besides `all-field-info` and `all-field-info-verbose` +* Output: `CSV` + +Required options: + +- `-t, --timeline `: JSONL timeline created by Hayabusa. + +Options: + +- `-l, --localSrcIpAddresses`: include results when the source IP address is local. +- `-o, --output `: specify the base name to save the text results to. +- `-q, --quiet`: do not display logo. (default: `false`) + +#### `stack-logons` command examples + +Run with default settings: + +``` +takajo.exe stack-logons -t ../hayabusa/timeline.jsonl +``` + +Include local logons: + +``` +takajo.exe stack-logons -t ../hayabusa/timeline.jsonl -l +``` + +## Sysmon Commands + +### `sysmon-process-tree` command + +Output the process tree of a certain process, such as a suspicious or malicious process. + +* Input: `JSONL` +* Profile: Any besides `all-field-info` and `all-field-info-verbose` +* Output: `Text file` + +Required options: + +- `-o, --output `: a text file to save the results to. +- `-p, --processGuid `: sysmon process GUID +- `-t, --timeline `: JSONL timeline created by Hayabusa. + +Options: + +- `-q, --quiet`: do not display logo. 
(default: `false`) + +#### `sysmon-process-tree` command examples + +Prepare JSONL timeline with Hayabusa: + +``` +hayabusa.exe json-timeline -d -L -o timeline.jsonl +``` + +Save the results to a text file: + +``` +takajo.exe sysmon-process-tree -t ../hayabusa/timeline.jsonl -p "365ABB72-3D4A-5CEB-0000-0010FA93FD00" -o process-tree.txt +``` + +## Timeline Commands + +### `timeline-logon` command + +This command extracts information from the following logon events, normalizes the fields and saves the results to a CSV file: + +- `4624` - Successful Logon +- `4625` - Failed Logon +- `4634` - Account Logoff +- `4647` - User Initiated Logoff +- `4648` - Explicit Logon +- `4672` - Admin Logon + +This makes it easier to detect lateral movement, password guessing/spraying, privilege escalation, etc... + +* Input: `JSONL` +* Profile: Any besides `all-field-info` and `all-field-info-verbose` +* Output: `CSV` + +Required options: + +- `-o, --output `: the CSV file to save the results to. +- `-t, --timeline `: JSONL timeline created by Hayabusa. + +Options: + +- `-c, --calculateElapsedTime`: calculate the elapsed time for successful logons. (default: `true`) +- `-l, --outputLogoffEvents`: output logoff events as separate entries. (default: `false`) +- `-a, --outputAdminLogonEvents`: output admin logon events as separate entries. (default: `false`) +- `-q, --quiet`: do not display logo. (default: `false`) + +#### `timeline-logon` command examples + +Prepare JSONL timeline with Hayabusa: + +``` +hayabusa.exe json-timeline -d -L -o timeline.jsonl +``` + +Save logon timeline to a CSV file: + +``` +takajo.exe timeline-logon -t ../hayabusa/timeline.jsonl -o logon-timeline.csv +``` + +### `timeline-suspicious-processes` command + +Create a CSV timeline of suspicious processes. + +* Input: `JSONL` +* Profile: Any besides `all-field-info` and `all-field-info-verbose` +* Output: `CSV` + +Required options: + +- `-t, --timeline `: JSONL timeline created by Hayabusa. 
+ +Options: + +- `-l, --level `: specify the minimum alert level. (default: `high`) +- `-o, --output `: the CSV file to save the results to. +- `-q, --quiet`: do not display logo. (default: `false`) + +#### `timeline-suspicious-processes` command examples + +Prepare JSONL timeline with Hayabusa: + +``` +hayabusa.exe json-timeline -d -L -o timeline.jsonl +``` + +Search for processes that had an alert level of `high` or above and output results to screen: + +``` +takajo.exe timeline-suspicious-processes -t ../hayabusa/timeline.jsonl +``` + +Search for processes that had an alert level of `low` or above and output results to screen: + +``` +takajo.exe timeline-suspicious-processes -t ../hayabusa/timeline.jsonl -l low +``` + +Save the results to a CSV file: + +``` +takajo.exe timeline-suspicious-processes -t ../hayabusa/timeline.jsonl -o suspicious-processes.csv +``` + +## VirusTotal Commands + +### `vt-domain-lookup` command + +Look up a list of domains on VirusTotal -### Usage +* Input: `Text file` +* Output: `CSV` -1. `help`: Print help menu for all commands. -2. `undetected-evtx`: List up all of the `.evtx` files that Hayabusa didn't have a detection rule for. -You first need to run Hayabusa with a profile that saves the `%EvtxFile%` column information and save the results to a csv timeline. Example: `hayabusa.exe -d -P verbose -o timeline.csv`. -You can see which columns Hayabusa saves according to the different profiles [here](https://github.com/Yamato-Security/hayabusa#profiles). +Required options: + +- `-a, --apiKey `: your VirusTotal API key. +- `-d, --domainList `: a text file list of domains. +- `-o, --output `: save the results to a CSV file. + +Options: + +- `-j, --jsonOutput `: output all of the JSON responses from VirusTotal to a JSON file. +- `-r, --rateLimit `: the rate per minute to send requests. (default: `4`) +- `-q, --quiet`: do not display logo. 
(default: `false`) + +#### `vt-domain-lookup` command examples + +First create a list of domains with the `list-domains` command. +Then lookup those domains with the following: + +``` +takajo.exe vt-domain-lookup -a -d domains.txt -o vt-domain-lookup.csv -r 1000 -j vt-domain-lookup.json +``` + +### `vt-hash-lookup` command + +Look up a list of hashes on VirusTotal. + +* Input: `Text file` +* Output: `CSV` Required options: -- `-t, --timeline ../hayabusa/timeline.csv`: CSV timeline created by Hayabusa. -- `-e, --evtx-dir ../hayabusa-sample-evtx`: The directory of `.evtx` files you scanned with Hayabusa. +- `-a, --apiKey `: your VirusTotal API key. +- `-H, --hashList `: a text file of hashes. +- `-o, --output `: save the results to a CSV file. Options: -- `-c, --column-name CustomEvtxColumn`: Optional: Specify a custom column name for the evtx column. Default is Hayabusa's default of `EvtxFile`. -- `-o, --output result.txt`: Save the results to a text file. The default is to print to screen. -- `-q, --quiet`: Do not display logo. +- `-j, --jsonOutput `: output all of the JSON responses from VirusTotal to a JSON file. +- `-r, --rateLimit `: the rate per minute to send requests. (default: `4`) +- `-q, --quiet`: do not display logo. (default: `false`) -Example: +#### `vt-hash-lookup` command examples -```bash -takajo.exe undetected-evtx -t ../hayabusa/timeline.csv -e ../hayabusa-sample-evtx ``` +takajo.exe vt-hash-lookup -a -H MD5-hashes.txt -o vt-hash-lookup.csv -r 1000 -j vt-hash-lookup.json +``` + +### `vt-ip-lookup` command -3. `unused-rules`: List up all of the `.yml` detection rules that did not detect anything. This is useful to help determine the reliablity of rules. That is, which rules are known to find malicious activity and which are still untested. -You first need to run Hayabusa with a profile that saves the `%RuleFile%` column information and save the results to a csv timeline. Example: `hayabusa.exe -d -P verbose -o timeline.csv`. 
-You can see which columns Hayabusa saves according to the different profiles [here](https://github.com/Yamato-Security/hayabusa#profiles). +Look up a list of IP addresses on VirusTotal. + +* Input: `Text file` +* Output: `CSV` Required options: -- `-t, --timeline ../hayabusa/timeline.csv`: CSV timeline created by Hayabusa. -- `-r, --rules-dir ../hayabusa/rules`: The directory of `.yml` rules files you used with Hayabusa. +- `-a, --apiKey `: your VirusTotal API key. +- `-i, --ipList `: a text file of IP addresses. +- `-o, --output `: save the results to a CSV file. Options: -- `-c, --column-name CustomRuleFileColumn`: Specify a custom column name for the rule file column. Default is Hayabusa's default of `RuleFile`. -- `-o, --output result.txt`: Save the results to a text file. The default is to print to screen. -- `-q, --quiet`: Do not display logo. +- `-j, --jsonOutput `: output all of the JSON responses from VirusTotal to a JSON file. +- `-r, --rateLimit `: the rate per minute to send requests. (default: `4`) +- `-q, --quiet`: do not display logo. 
(default: `false`) -Example: +#### `vt-ip-lookup` command examples -```bash -takajo.exe unused-rules -t ../hayabusa/timeline.csv -r ../hayabusa/rules +``` +takajo.exe vt-ip-lookup -a -i ipAddresses.txt -o vt-ip-lookup.csv -r 1000 -j vt-ip-lookup.json ``` ## Contribution diff --git a/src/takajo.nim b/src/takajo.nim index b19efbb4..9bd09f5c 100644 --- a/src/takajo.nim +++ b/src/takajo.nim @@ -1,130 +1,226 @@ +import algorithm import cligen -import os +import json +import httpclient +import sets +import sequtils +import strformat +import strutils +import tables import terminal -import std/sequtils -import std/strformat -import std/strutils -import std/tables -import takajopkg/submodule - -proc listUndetectedEvtxFiles(timeline: string, evtxDir: string, - columnName: system.string = "EvtxFile", quiet: bool = false, - output: string = ""): int = - - if not quiet: - styledEcho(fgGreen, outputLogo()) - - let csvData: TableRef[string, seq[string]] = getHayabusaCsvData(timeline, columnName) - var fileLists: seq[string] = getTargetExtFileLists(evtxDir, ".evtx") - - var detectedPaths: seq[string] = csvData[columnName].map(getFileNameWithExt) - detectedPaths = deduplicate(detectedPaths) - - let checkResult = getUnlistedSeq(fileLists, detectedPaths) - var outputStock: seq[string] = @[] - - echo "Finished. " - echo "---------------" - - if checkResult.len == 0: - echo "Great! No undetected evtx files were found." - echo "" - else: - echo "Undetected evtx file identified." - echo "" - var numberOfEvtxFiles = 0 - for undetectedFile in checkResult: - outputStock.add(undetectedFile) - inc numberOfEvtxFiles - outputStock.add("") - let undetectedPercentage = (checkResult.len() / fileLists.len()) * 100 - echo fmt"{ undetectedPercentage :.4}% of the evtx files did not have any detections." 
- echo fmt"Number of evtx files not detected: {numberOfEvtxFiles}" - echo "" - if output != "": - let f = open(output, fmWrite) - defer: f.close() - for line in outputStock: - f.writeLine(line) - echo fmt"Saved File {output}" - echo "" - else: - echo outputstock.join("\n") - discard - - -proc listUnusedRules(timeline: string, rulesDir: string, - columnName: string = "RuleFile", quiet: bool = false, - output: string = ""): int = - - if not quiet: - styledEcho(fgGreen, outputLogo()) - - let csvData: TableRef[string, seq[string]] = getHayabusaCsvData(timeline, columnName) - var fileLists: seq[string] = getTargetExtFileLists(rulesDir, ".yml") - var detectedPaths: seq[string] = csvData[columnName].map(getFileNameWithExt) - detectedPaths = deduplicate(detectedPaths) - var outputStock: seq[string] = @[] - - echo "Finished. " - echo "---------------" - - let checkResult = getUnlistedSeq(fileLists, detectedPaths) - if checkResult.len == 0: - echo "Great! No unused rule files were found." - echo "" - else: - echo "Unused rule file identified." - echo "" - var numberOfUnusedRules = 0 - for undetectedFile in checkResult: - outputStock.add(undetectedFile) - inc numberOfUnusedRules - let undetectedPercentage = (checkResult.len() / fileLists.len()) * 100 - outputStock.add("") - echo fmt"{ undetectedPercentage :.4}% of the yml rules were not used." 
- echo fmt"Number of unused rule files: {numberOfUnusedRules}" - echo "" - if output != "": - let f = open(output, fmWrite) - defer: f.close() - for line in outputStock: - f.writeLine(line) - echo fmt"Saved File {output}" - echo "" - else: - echo outputstock.join("\n") - discard +import times +import uri +import os +import std/enumerate +import suru +import takajopkg/general +include takajopkg/listDomains +include takajopkg/listIpAddresses +include takajopkg/listUndetectedEvtxFiles +include takajopkg/listUnusedRules +include takajopkg/splitCsvTimeline +include takajopkg/splitJsonTimeline +include takajopkg/stackLogons +include takajopkg/listHashes +include takajopkg/sysmonProcessTree +include takajopkg/timelineLogon +include takajopkg/timelineSuspiciousProcesses +include takajopkg/vtDomainLookup +include takajopkg/vtIpLookup +include takajopkg/vtHashLookup when isMainModule: - clCfg.version = "1.0.0-dev" - const examples = "Examples:\n undetected-evtx -t ../hayabusa/timeline.csv -e ../hayabusa-sample-evtx\n unused-rules -t ../hayabusa/timeline.csv -r ../hayabusa/rules\n" - clCfg.useMulti = "Usage: takajo.exe \n\nCommands:\n$subcmds\nCommand help: $command help \n\n" & examples - - if paramCount() == 0: - styledEcho(fgGreen, outputLogo()) - dispatchMulti( - [ - listUndetectedEvtxFiles, cmdName = "undetected-evtx", - doc = "List up undetected evtx files", - help = { - "timeline": "CSV timeline created by Hayabusa with verbose profile", - "evtxDir": "The directory of .evtx files you scanned with Hayabusa", - "columnName": "Specify custom column header name", - "quiet": "Do not display the launch banner", - "output": "Save the results to a text file. 
The default is to print to screen.", - } - ], - [ - listUnusedRules, cmdName = "unused-rules", - doc = "List up unused rules", - help = { - "timeline": "CSV timeline created by Hayabusa with verbose profile", - "rulesDir": "Hayabusa rules directory", - "columnName": "Specify custom column header name", - "quiet": "Do not display the launch banner", - "output": "Save the results to a text file. The default is to print to screen.", - } - ] - ) - + clCfg.version = "2.0.0" + const examples = "Examples:\p" + const example_list_domains = " list-domains -t ../hayabusa/timeline.jsonl -o domains.txt\p" + const example_list_ip_addresses = " list-ip-addresses -t ../hayabusa/timeline.jsonl -o ipAddresses.txt\p" + const example_list_undetected_evtx = " list-undetected-evtx -t ../hayabusa/timeline.csv -e ../hayabusa-sample-evtx\p" + const example_list_unused_rules = " list-unused-rules -t ../hayabusa/timeline.csv -r ../hayabusa/rules\p" + const example_split_csv_timeline = " split-csv-timeline -t ../hayabusa/timeline.csv [--makeMultiline] -o case-1-csv\p" + const example_split_json_timeline = " split-json-timeline -t ../hayabusa/timeline.jsonl -o case-1-json\p" + const example_stack_logons = " stack-logons -t ../hayabusa/timeline.jsonl -o logons.csv\p" + const example_list_hashes = " list-hashes -t ../hayabusa/case-1.jsonl -o case-1\p" + const example_sysmon_process_tree = " sysmon-process-tree -t ../hayabusa/timeline.jsonl -p [-o process-tree.txt]\p" + const example_timeline_logon = " timeline-logon -t ../hayabusa/timeline.jsonl -o logon-timeline.csv\p" + const example_timeline_suspicious_processes = " timeline-suspicious-processes -t ../hayabusa/timeline.jsonl [--level medium] [-o suspicious-processes.csv]\p" + const example_vt_domain_lookup = " vt-domain-lookup -a --domainList domains.txt -r 1000 -o results.csv --jsonOutput responses.json\p" + const example_vt_hash_lookup = " vt-hash-lookup -a --hashList case-1-MD5-hashes.txt -r 1000 -o results.csv --jsonOutput 
responses.json\p" + const example_vt_ip_lookup = " vt-ip-lookup -a --ipList ipAddresses.txt -r 1000 -o results.csv --jsonOutput responses.json\p" + + clCfg.useMulti = "Version: 2.0.0\pUsage: takajo.exe \p\pCommands:\p$subcmds\pCommand help: $command help \p\p" & + examples & example_list_domains & example_list_hashes & example_list_ip_addresses & example_list_undetected_evtx & example_list_unused_rules & + example_split_csv_timeline & example_split_json_timeline & example_stack_logons & example_sysmon_process_tree & + example_timeline_logon & example_timeline_suspicious_processes & + example_vt_domain_lookup & example_vt_hash_lookup & example_vt_ip_lookup + + if paramCount() == 0: + styledEcho(fgGreen, outputLogo()) + dispatchMulti( + [ + listDomains, cmdName = "list-domains", + doc = "create a list of unique domains to be used with vt-domain-lookup", + help = { + "includeSubdomains": "include subdomains", + "includeWorkstations": "include local workstation names", + "output": "save results to a text file", + "quiet": "do not display the launch banner", + "timeline": "Hayabusa JSONL timeline (profile: any besides all-field-info*)", + }, + short = { + "includeSubdomains": 's', + "includeWorkstations": 'w' + } + ], + [ + listHashes, cmdName = "list-hashes", + doc = "create a list of process hashes to be used with vt-hash-lookup", + help = { + "level": "specify the minimum alert level", + "output": "specify the base name to save results to text files (ex: -o case-1)", + "quiet": "do not display the launch banner", + "timeline": "Hayabusa JSONL timeline (profile: any besides all-field-info*)", + } + ], + [ + listIpAddresses, cmdName = "list-ip-addresses", + doc = "create a list of unique target and/or source IP addresses to be used with vt-ip-lookup", + help = { + "inbound": "include inbound traffic", + "outbound": "include outbound traffic", + "output": "save results to a text file", + "privateIp": "include private IP addresses", + "quiet": "do not display the launch 
banner", + "timeline": "Hayabusa JSONL timeline (profile: any besides all-field-info*)", + }, + short = { + "output": 'o', + "outbound": 'O' + } + ], + [ + listUndetectedEvtxFiles, cmdName = "list-undetected-evtx", + doc = "create a list of undetected evtx files", + help = { + "columnName": "specify a custom column header name", + "evtxDir": "directory of .evtx files you scanned with Hayabusa", + "output": "save the results to a text file (default: stdout)", + "quiet": "do not display the launch banner", + "timeline": "Hayabusa CSV timeline (profile: any verbose profile)", + } + ], + [ + listUnusedRules, cmdName = "list-unused-rules", + doc = "create a list of unused sigma rules", + help = { + "columnName": "specify a custom column header name", + "output": "save the results to a text file (default: stdout)", + "quiet": "do not display the launch banner", + "rulesDir": "Hayabusa rules directory", + "timeline": "Hayabusa CSV timeline (profile: any verbose profile)", + } + ], + [ + splitCsvTimeline, cmdName = "split-csv-timeline", + doc = "split up a large CSV file into smaller ones based on the computer name", + help = { + "makeMultiline": "output fields in multiple lines", + "output": "output directory (default: output)", + "quiet": "do not display the launch banner", + "timeline": "Hayabusa non-multiline CSV timeline (profile: any)", + } + ], + [ + splitJsonTimeline, cmdName = "split-json-timeline", + doc = "split up a large JSONL timeline into smaller ones based on the computer name", + help = { + "output": "output directory (default: output)", + "quiet": "do not display the launch banner", + "timeline": "Hayabusa JSONL timeline (profile: any)", + } + ], + [ + stackLogons, cmdName = "stack-logons", + doc = "stack logons by target user, target computer, source IP address and source computer", + help = { + "localSrcIpAddresses": "include results when the source IP address is local", + "output": "save results to a CSV file", + "quiet": "do not display the launch 
banner", + "timeline": "Hayabusa JSONL timeline (profile: any besides all-field-info*)", + } + ], + [ + sysmonProcessTree, cmdName = "sysmon-process-tree", + doc = "output the process tree of a certain process", + help = { + "output": "save results to a text file", + "processGuid": "sysmon process GUID", + "quiet": "do not display the launch banner", + "timeline": "Hayabusa JSONL timeline (profile: any besides all-field-info*)", + } + ], + [ + timelineLogon, cmdName = "timeline-logon", + doc = "create a CSV timeline of logon events", + help = { + "calculateElapsedTime": "calculate the elapsed time for successful logons", + "output": "save results to a CSV file", + "outputAdminLogonEvents": "output admin logon events as separate entries", + "outputLogoffEvents": "output logoff events as separate entries", + "quiet": "do not display the launch banner", + "timeline": "Hayabusa JSONL timeline (profile: any besides all-field-info*)", + }, + short = { + "outputLogoffEvents": 'l', + "outputAdminLogonEvents": 'a' + } + ], + [ + timelineSuspiciousProcesses, cmdName = "timeline-suspicious-processes", + doc = "create a CSV timeline of suspicious processes", + help = { + "level": "specify the minimum alert level", + "output": "save results to a CSV file", + "quiet": "do not display the launch banner", + "timeline": "Hayabusa JSONL timeline (profile: any besides all-field-info*)", + } + ], + [ + vtDomainLookup, cmdName = "vt-domain-lookup", + doc = "look up a list of domains on VirusTotal", + help = { + "apiKey": "your VirusTotal API key", + "domainList": "a text file list of domains", + "jsonOutput": "save all responses to a JSON file", + "output": "save results to a CSV file", + "rateLimit": "set the rate per minute for requests", + "quiet": "do not display the launch banner", + } + ], + [ + vtHashLookup, cmdName = "vt-hash-lookup", + doc = "look up a list of hashes on VirusTotal", + help = { + "apiKey": "your VirusTotal API key", + "hashList": "a text file list of hashes", + 
"jsonOutput": "save all responses to a JSON file", + "output": "save results to a text file", + "rateLimit": "set the rate per minute for requests", + "quiet": "do not display the launch banner", + }, + short = { + "hashList": 'H' + } + ], + [ + vtIpLookup, cmdName = "vt-ip-lookup", + doc = "look up a list of IP addresses on VirusTotal", + help = { + "apiKey": "your VirusTotal API key", + "ipList": "a text file list of IP addresses", + "jsonOutput": "save all responses to a JSON file", + "output": "save results to a CSV file", + "rateLimit": "set the rate per minute for requests", + "quiet": "do not display the launch banner", + } + ] + ) \ No newline at end of file diff --git a/src/takajopkg/general.nim b/src/takajopkg/general.nim new file mode 100644 index 00000000..96bc7f80 --- /dev/null +++ b/src/takajopkg/general.nim @@ -0,0 +1,325 @@ +import json +import re +import std/os +import std/parsecsv +import std/sequtils +import std/strformat +import std/strutils +import std/tables +import times +from std/streams import newFileStream + +proc outputLogo*(): string = + let logo = """ +╔════╦═══╦╗╔═╦═══╗ ╔╦═══╗ +║╔╗╔╗║╔═╗║║║╔╣╔═╗║ ║║╔═╗║ +╚╝║║╚╣║ ║║╚╝╝║║ ║║ ║║║ ║║ + ║║ ║╚═╝║╔╗╖║╚═╝╠╗║║║ ║║ + ╔╝╚╗║╔═╗║║║╚╣╔═╗║╚╝║╚═╝║ + ╚══╝╚╝ ╚╩╝╚═╩╝ ╚╩══╩═══╝ + by Yamato Security +""" + return logo + +proc getJsonValue*(jsonResponse: JsonNode, keys: seq[string], default: string = "Unknown"): string = + var value = jsonResponse + for key in keys: + if value.kind == JObject and value.hasKey(key): + value = value[key] + else: + return default + #[ + try: + value = value[key] + except KeyError: + return default]# + # Check if the value is an integer or a string + if value.kind == JInt: + return $value.getInt() # Convert to string + elif value.kind == JString: + return value.getStr() + else: + return default # If it's neither a string nor an integer, return default + + +proc getJsonDate*(jsonResponse: JsonNode, keys: seq[string]): string = + try: + var node = jsonResponse + for key in keys: 
+ node = node[key] + let epochDate = fromUnix(node.getInt()).utc + return epochDate.format("yyyy-MM-dd HH:mm:ss") + except KeyError: + return "Unknown" + +proc getUnlistedSeq*(targetSeq: seq[string], compareSeq: seq[string]): seq[string] = + ## get element not in compareSeq + var output: seq[string] = @[] + for target in targetSeq: + if not(target in compareSeq): + output.add(target) + return output + +proc getFileNameWithExt*(targetPath: string): string = + ## get file name with ext from path + var (_, file, ext) = splitFile(targetPath) + file &= ext + return file + +proc getTargetExtFileLists*(targetDirPath: string, targetExt: string): seq[string] = + ## extract yml file name seq to specified directory path + var r: seq[string] = @[] + for f in walkDirRec(targetDirPath): + if f.endsWith(targetExt): + r.insert(getFileNameWithExt(f)) + # removed duplicated file name from seq + r = deduplicate(r) + if r.len() == 0: + quit("Target file does not exist in specified directory. Please check your option parameters.") + else: + r + +proc getHayabusaCsvData*(csvPath: string, columnName: string): Tableref[string, seq[string]] = + ## procedure for Hayabusa output csv read data. + + var s = newFileStream(csvPath, fmRead) + # if csvPath is not valid, error output and quit. + if s == nil: + quit("Cannot open the file. Please check that the file format is CSV. FilePath: " & csvPath) + + # parse csv + var p: CsvParser + open(p, s, csvPath) + p.readHeaderRow() + + let r = newTable[string, seq[string]]() + # initialize table + for h in items(p.headers): + r[h] = @[] + + # insert csv data to table + while p.readRow(): + for h in items(p.headers): + r[h].add(p.rowEntry(h)) + if not r.contains(columnName): + quit(fmt"Coud not find the {column_name} column. 
Please run hayabusa with the verbose profile (-p option).") + + return r + +proc formatDuration*(d: Duration): string = + let + days = d.inDays + hours = d.inHours mod 24 + minutes = d.inMinutes mod 60 + seconds = d.inSeconds mod 60 + milliseconds = d.inMilliseconds mod 1000 + return $days & "d " & $hours & "h " & $minutes & "m " & $seconds & "s " & $milliseconds & "ms" + +proc extractStr*(jsonObj: JsonNode, key: string): string = + let value = jsonObj.hasKey(key) + if value and jsonObj[key].kind == JString: + return jsonObj[key].getStr() + else: + return "" + +proc extractInt*(jsonObj: JsonNode, key: string): int = + if jsonObj.hasKey(key) and jsonObj[key].kind == JInt: + return jsonObj[key].getInt() + else: + return -1 + +proc isEID_4624*(msgLogonRule: string): bool = + case msgLogonRule + of + "Logon (System) - Bootup", "Logon (Interactive) *Creds in memory*", + "Logon (Network)", "Logon (Batch)", "Logon (Service)", + "Logon (Unlock)", "Logon (NetworkCleartext)", + "Logon (NewCredentials) *Creds in memory*", + "Logon (RemoteInteractive (RDP)) *Creds in memory*", + "Logon (CachedInteractive) *Creds in memory*", + "Logon (CachedRemoteInteractive) *Creds in memory*", + "Logon (CachedUnlock) *Creds in memory*": + result = true + else: + result = false + +proc logonNumberToString*(msgLogonType: int): string = + case msgLogonType: + of 0: result = "0 - System" + of 2: result = "2 - Interactive" + of 3: result = "3 - Network" + of 4: result = "4 - Batch" + of 5: result = "5 - Service" + of 7: result = "7 - Unlock" + of 8: result = "8 - NetworkCleartext" + of 9: result = "9 - NewCredentials" + of 10: result = "10 - RemoteInteractive" + of 11: result = "11 - CachedInteractive" + of 12: result = "12 - CachedRemoteInteractive" + of 13: result = "13 - CachedUnlock" + else: result = "Unknown - " & $msgLogonType + return result + +proc isEID_4625*(msgLogonRule: string): bool = + case msgLogonRule + of + "Logon Failure (User Does Not Exist)", + "Logon Failure (Unknown 
Reason)", + "Logon Failure (Wrong Password)": + result = true + else: + result = false + + +proc escapeCsvField*(s: string): string = + # If the field contains a quote, comma, or newline, enclose it in quotes + # and replace any internal quotes with double quotes. + if '"' in s or ',' in s or '\n' in s: + result = "\"" & s.replace("\"", "\"\"") & "\"" + else: + result = s + +proc impersonationLevelIdToName*(impersonationLevelId: string): string = + case impersonationLevelId: + of "%%1832": result = "Identification" + of "%%1833": result = "Impersonation" + of "%%1840": result = "Delegation" + of "%%1841": result = "Denied by Process Trust Label ACE" + of "%%1842": result = "Yes" + of "%%1843": result = "No" + of "%%1844": result = "System" + of "%%1845": result = "Not Available" + of "%%1846": result = "Default" + of "%%1847": result = "DisallowMmConfig" + of "%%1848": result = "Off" + of "%%1849": result = "Auto" + of "": result = "" + else: result = "Unknown - " & impersonationLevelId + return result + +proc logonFailureReason*(subStatus: string): string = + case subStatus: + of "0xc0000064": result = "Non-existant User" + of "0xc000006a": result = "Wrong Password" + of "": result = "" + else: result = "Unknown - " & subStatus + return result + +proc elevatedTokenIdToName*(elevatedTokenId: string): string = + case elevatedTokenId: + of "%%1842": result = "Yes" + of "%%1843": result = "No" + of "": result = "" + else: result = "Unknown - " & elevatedTokenId + return result + +proc countLinesInTimeline*(filePath: string): int = + #[ + var count = 0 + for _ in filePath.lines(): + inc count + return count + ]# + const BufferSize = 4 * 1024 * 1024 # 4 MiB + var buffer = newString(BufferSize) + var file = open(filePath) + var count = 0 + + while true: + let bytesRead = file.readChars(buffer.toOpenArray(0, BufferSize - 1)) + if bytesRead == 0: + break + for i in 0 ..< bytesRead: + if buffer[i] == '\n': + inc(count) + + file.close() + return count + +proc 
formatFileSize*(fileSize: BiggestInt): string = + let + kilo = 1024 + mega = kilo * kilo + giga = kilo * mega + var fileSizeStr = "" + + if fileSize >= giga: + var gb = fileSize.float / giga.float + fileSizeStr = $gb.formatFloat(ffDecimal, 2) & " GB" + elif fileSize >= mega: + var mb = fileSize.float / mega.float + fileSizeStr = $mb.formatFloat(ffDecimal, 2) & " MB" + elif fileSize >= kilo: + var kb = fileSize.float / kilo.float + fileSizeStr = $kb.formatFloat(ffDecimal, 2) & " KB" + else: + fileSizeStr = $fileSize & " Bytes" + return fileSizeStr + +proc isMinLevel*(levelInLog: string, userSetLevel: string): bool = + case userSetLevel + of "critical": + return levelInLog == "crit" + of "high": + return levelInLog == "crit" or levelInLog == "high" + of "medium": + return levelInLog == "crit" or levelInLog == "high" or levelInLog == "med" + of "low": + return levelInLog == "crit" or levelInLog == "high" or levelInLog == "med" or levelInLog == "low" + of "informational": + return levelInLog == "crit" or levelInLog == "high" or levelInLog == "med" or levelInLog == "low" or levelInLog == "info" + else: + return false + +proc isPrivateIP*(ip: string): bool = + let + ipv4Private = re"^(10\.\d{1,3}\.\d{1,3}\.\d{1,3})$|^(192\.168\.\d{1,3}\.\d{1,3})$|^(172\.(1[6-9]|2\d|3[01])\.\d{1,3}\.\d{1,3})$|^(127\.\d{1,3}\.\d{1,3}\.\d{1,3})$" + ipv4MappedIPv6Private = re"^::ffff:(10\.\d{1,3}\.\d{1,3}\.\d{1,3})$|^::ffff:(192\.168\.\d{1,3}\.\d{1,3})$|^::ffff:(172\.(1[6-9]|2\d|3[01])\.\d{1,3}\.\d{1,3})$|^::ffff:(127\.\d{1,3}\.\d{1,3}\.\d{1,3})$" + ipv6Private = re"^fd[0-9a-f]{2}:|^fe80:" + + if ip =~ ipv4Private or ip =~ ipv4MappedIPv6Private or ip =~ ipv6Private: + return true + else: + return false + +proc isMulticast*(address: string): bool = + # Define regex for IPv4 multicast addresses + let ipv4MulticastPattern = re"^(224\.0\.0\.0|22[5-9]\.|23[0-9]\.)" + + # Define regex for IPv6 multicast addresses + let ipv6MulticastPattern = re"(?i)^ff[0-9a-f]{2}:" + # check if the address matches 
either of the multicast patterns + if address.find(ipv4MulticastPattern) >= 0 or address.find(ipv6MulticastPattern) >= 0: + return true + return false + +proc isLoopback*(address: string): bool = + # Define regex for IPv4 loopback addresses + let ipv4LoopbackPattern = re"^127\.0\.0\.1$" + + # Define regex for IPv6 loopback addresses + let ipv6LoopbackPattern = re"^(?:0*:)*?:?0*1$" + + # Check if the address matches either of the loopback patterns + if address.find(ipv4LoopbackPattern) >= 0 or address.find(ipv6LoopbackPattern) >= 0: + return true + return false + +proc isIpAddress*(s: string): bool = + let ipRegex = re(r"\b((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.|$)){4}") + return s.find(ipRegex) != -1 + +proc extractDomain*(domain: string): string = + let doubleTLDs = ["co.jp", "co.uk", "com.au", "org.au", "net.au", "com.br", "net.br", "com.cn", "net.cn", + "com.mx", "net.mx", "ac.nz", "co.nz", "net.nz", "co.za", "net.za", "co.in", "net.in", "ac.uk", "gov.uk"] + let parts = domain.split('.') + if parts.len >= 3: + let lastTwo = parts[^2] & '.' & parts[^1] + if doubleTLDs.contains(lastTwo): + return parts[^3] & '.' & lastTwo + if parts.len >= 2: + return parts[^2] & '.' 
& parts[^1] + return domain + +proc isLocalIP*(ip: string): bool = + return ip == "127.0.0.1" or ip == "-" or ip == "::1" \ No newline at end of file diff --git a/src/takajopkg/listDomains.nim b/src/takajopkg/listDomains.nim new file mode 100644 index 00000000..ceb88b74 --- /dev/null +++ b/src/takajopkg/listDomains.nim @@ -0,0 +1,78 @@ +# TODO: List up domain info from DNS Server and Client events +# Graceful error when no domains loaded +proc listDomains(includeSubdomains: bool = false, includeWorkstations: bool = false, output: string, quiet: bool = false, timeline: string) = + + + let startTime = epochTime() + if not quiet: + styledEcho(fgGreen, outputLogo()) + + echo "Started the List Domains command" + echo "" + echo "Local queries to workstations are filtered out by default, but can be included with -w, --includeWorkstations." + echo "Sub-domains are also filtered out by default, but can be included with -s, --includeSubdomains." + echo "Domains ending with .lan, .LAN or .local are filtered out." + echo "" + + echo "Counting total lines. Please wait." + echo "" + let totalLines = countLinesInTimeline(timeline) + echo "Total lines: ", totalLines + echo "" + + echo "Extracting domain queries from Sysmon 22 events. Please wait." + echo "" + + var + channel, domain = "" + eventId = 0 + domainHashSet = initHashSet[string]() + jsonLine: JsonNode + bar: SuruBar = initSuruBar() + + bar[0].total = totalLines + bar.setup() + + for line in lines(timeline): + inc bar + bar.update(1000000000) + jsonLine = parseJson(line) + channel = jsonLine["Channel"].getStr() + eventId = jsonLine["EventID"].getInt() + let details = jsonLine["Details"] + # Found a Sysmon 22 DNS Query event + if channel == "Sysmon" and eventId == 22: + domain = details.extractStr("Query") + + # If includeWorkstations is false, only add domain if it contains a period + # Filter out ".", "*.lan" and "*.LAN" + if includeWorkstations or (domain.contains('.') and domain != "." 
and not domain.endsWith(".lan") and not + domain.endsWith(".LAN") and not domain.endsWith(".local") and not isIpAddress(domain) and not domain.endsWith('.')): + + # Do not include subdomains by default so strip the subdomains + if not includeSubdomains: + domain = extractDomain(domain) + domainHashSet.incl(domain) + + bar.finish() + + # Save results + var outputFile = open(output, fmWrite) + for domain in domainHashSet: + outputFile.write(domain & "\p") + let outputFileSize = getFileSize(outputFile) + outputFile.close() + + echo "" + echo "Domains: ", len(domainHashSet) + echo "Saved file: " & output & " (" & formatFileSize(outputFileSize) & ")" + echo "" + + let endTime = epochTime() + let elapsedTime = int(endTime - startTime) + let hours = elapsedTime div 3600 + let minutes = (elapsedTime mod 3600) div 60 + let seconds = elapsedTime mod 60 + + echo "Elapsed time: ", $hours & " hours, " & $minutes & " minutes, " & $seconds & " seconds" + echo "" \ No newline at end of file diff --git a/src/takajopkg/listHashes.nim b/src/takajopkg/listHashes.nim new file mode 100644 index 00000000..db58d5db --- /dev/null +++ b/src/takajopkg/listHashes.nim @@ -0,0 +1,127 @@ +# TODO +# Get hashes from sysmon ID 7 (Image Loaded) +proc listHashes(level: string = "high", output: string, quiet: bool = false, timeline: string) = + let startTime = epochTime() + if not quiet: + styledEcho(fgGreen, outputLogo()) + + if level != "critical" and level != "high" and level != "medium" and level != "low" and level != "informational": + echo "You must specify a minimum level of critical, high, medium, low or informational. (default: high)" + echo "" + return + + echo "Started the List Hashes command" + echo "" + echo "This command will extract out unique MD5, SHA1, SHA256 and Import hashes from Sysmon 1 process creation events." + echo "By default, a minimum level of high will be used to extract only hashes of processes with a high likelihood of being malicious." 
+ echo "You can change the minimum level of alerts with -l, --level (ex: -l low)." + echo "For example, -l=informational for a minimum level of informational, which will extract out all hashes." + echo "" + + echo "Counting total lines. Please wait." + echo "" + let totalLines = countLinesInTimeline(timeline) + echo "Total lines: ", totalLines + echo "" + + if level == "critical": + echo "Scanning for process hashes with an alert level of critical" + else: + echo "Scanning for process hashes with a minimal alert level of " & level + echo "" + + var + md5hashes, sha1hashes, sha256hashes, impHashes = initHashSet[string]() + channel, eventLevel, hashes = "" + eventId, md5hashCount, sha1hashCount, sha256hashCount, impHashCount = 0 + jsonLine: JsonNode + bar: SuruBar = initSuruBar() + + bar[0].total = totalLines + bar.setup() + + for line in lines(timeline): + inc bar + bar.update(1000000000) # refresh every second + jsonLine = parseJson(line) + channel = jsonLine["Channel"].getStr() + eventId = jsonLine["EventID"].getInt() + eventLevel = jsonLine["Level"].getStr() + + # Found a Sysmon 1 process creation event + if channel == "Sysmon" and eventId == 1 and isMinLevel(eventLevel, level) == true: + try: + hashes = jsonLine["Details"]["Hashes"].getStr() # Hashes are not enabled by default so this field may not exist. + let pairs = hashes.split(",") # Split the string into key-value pairs. 
Ex: MD5=DE9C75F34F47B60A71BBA03760F0579E,SHA256=12F06D3B1601004DB3F7F1A07E7D3AF4CC838E890E0FF50C51E4A0C9366719ED,IMPHASH=336674CB3C8337BDE2C22255345BFF43 + for pair in pairs: + let keyVal = pair.split("=") + case keyVal[0]: + of "MD5": + md5hashes.incl(keyVal[1]) + inc md5hashCount + of "SHA1": + sha1hashes.incl(keyVal[1]) + inc sha1hashCount + of "SHA256": + sha256hashes.incl(keyVal[1]) + inc sha256hashCount + of "IMPHASH": + impHashes.incl(keyVal[1]) + inc impHashCount + except KeyError: + discard + bar.finish() + + # Save MD5 results + let md5outputFilename = output & "-MD5-hashes.txt" + var md5outputFile = open(md5outputFilename, fmWrite) + for hash in md5hashes: + md5outputFile.write(hash & "\p") + md5outputFile.close() + let md5FileSize = getFileSize(md5outputFilename) + + # Save SHA1 results + let sha1outputFilename = output & "-SHA1-hashes.txt" + var sha1outputFile = open(sha1outputFilename, fmWrite) + for hash in sha1hashes: + sha1outputFile.write(hash & "\p") + sha1outputFile.close() + let sha1FileSize = getFileSize(sha1outputFilename) + + # Save SHA256 results + let sha256outputFilename = output & "-SHA256-hashes.txt" + var sha256outputFile = open(sha256outputFilename, fmWrite) + for hash in sha256hashes: + sha256outputFile.write(hash & "\p") + sha256outputFile.close() + let sha256FileSize = getFileSize(sha256outputFilename) + + # Save IMPHASH results + let impHashOutputFilename = output & "-ImportHashes.txt" + var impHashOutputFile = open(impHashOutputFilename, fmWrite) + for hash in impHashes: + impHashOutputFile.write(hash & "\p") + impHashOutputFile.close() + let impHashFileSize = getFileSize(impHashOutputFilename) + + echo "" + echo "Saved files:" + echo md5outputFilename & " (" & formatFileSize(md5FileSize) & ")" + echo sha1outputFilename & " (" & formatFileSize(sha1FileSize) & ")" + echo sha256outputFilename & " (" & formatFileSize(sha256FileSize) & ")" + echo impHashOutputFilename & " (" & formatFileSize(impHashFileSize) & ")" + echo "" + echo 
"Hashes:" + echo "MD5: " & $md5hashCount + echo "SHA1: ", $sha1hashCount + echo "SHA256: ", $sha256hashCount + echo "Import: ", $impHashCount + echo "" + let endTime = epochTime() + let elapsedTime = int(endTime - startTime) + let hours = elapsedTime div 3600 + let minutes = (elapsedTime mod 3600) div 60 + let seconds = elapsedTime mod 60 + + echo "Elapsed time: ", $hours & " hours, " & $minutes & " minutes, " & $seconds & " seconds" + echo "" \ No newline at end of file diff --git a/src/takajopkg/listIpAddresses.nim b/src/takajopkg/listIpAddresses.nim new file mode 100644 index 00000000..27591f82 --- /dev/null +++ b/src/takajopkg/listIpAddresses.nim @@ -0,0 +1,79 @@ +proc listIpAddresses(inbound: bool = true, outbound: bool = true, output: string, privateIp: bool = false, quiet: bool = false, timeline: string) = + let startTime = epochTime() + if not quiet: + styledEcho(fgGreen, outputLogo()) + + # Error if both inbound and outbound are set to false as there is nothing to search for. + if inbound == false and outbound == false: + echo "You must enable inbound and/or outbound searching." + echo "" + return + + echo "Started the List IP Addresses command" + echo "" + echo "Inbound traffic is included by default but can be disabled with -i=false." + echo "Outbound traffic is included by default but can be disabled with -O=false." + echo "Private IP addresses are not included by default but can be enabled with -p." + echo "" + + echo "Counting total lines. Please wait." + echo "" + let totalLines = countLinesInTimeline(timeline) + echo "Total lines: ", totalLines + echo "" + + echo "Extracting IP addresses from various logs. Please wait." 
+ echo "" + + var + channel, ipAddress = "" + eventId = 0 + ipHashSet = initHashSet[string]() + jsonLine: JsonNode + bar: SuruBar = initSuruBar() + + bar[0].total = totalLines + bar.setup() + + for line in lines(timeline): + inc bar + bar.update(1000000000) + jsonLine = parseJson(line) + channel = jsonLine["Channel"].getStr() + eventId = jsonLine["EventID"].getInt() + + # Search for events with a SrcIP field if inbound == true + if inbound == true: + ipAddress = getJsonValue(jsonLine, @["Details", "SrcIP"]) + if (not isPrivateIP(ipAddress) or privateIp) and + isMulticast(ipAddress) == false and isLoopback(ipAddress) == false and ipAddress != "Unknown" and ipAddress != "-": + ipHashSet.incl(ipAddress) + + # Search for events with a TgtIP field if outbound == true + if outbound == true: + ipAddress = getJsonValue(jsonLine, @["Details", "TgtIP"]) + if (not isPrivateIP(ipAddress) or privateIp) and + isMulticast(ipAddress) == false and isLoopback(ipAddress) == false and ipAddress != "Unknown" and ipAddress != "-": + ipHashSet.incl(ipAddress) + bar.finish() + + # Save results + var outputFile = open(output, fmWrite) + for ipAddress in ipHashSet: + outputFile.write(ipAddress & "\p") + let outputFileSize = getFileSize(outputFile) + outputFile.close() + + echo "" + echo "IP Addresss: ", len(ipHashSet) + echo "Saved file: " & output & " (" & formatFileSize(outputFileSize) & ")" + echo "" + + # Print elapsed time + let endTime = epochTime() + let elapsedTime = int(endTime - startTime) + let hours = elapsedTime div 3600 + let minutes = (elapsedTime mod 3600) div 60 + let seconds = elapsedTime mod 60 + echo "Elapsed time: ", $hours & " hours, " & $minutes & " minutes, " & $seconds & " seconds" + echo "" \ No newline at end of file diff --git a/src/takajopkg/listUndetectedEvtxFiles.nim b/src/takajopkg/listUndetectedEvtxFiles.nim new file mode 100644 index 00000000..586c5eb8 --- /dev/null +++ b/src/takajopkg/listUndetectedEvtxFiles.nim @@ -0,0 +1,42 @@ +proc 
listUndetectedEvtxFiles(columnName: system.string = "EvtxFile", evtxDir: string, output: string = "", quiet: bool = false, timeline: string) = + + if not quiet: + styledEcho(fgGreen, outputLogo()) + + let csvData: TableRef[string, seq[string]] = getHayabusaCsvData(timeline, columnName) + var fileLists: seq[string] = getTargetExtFileLists(evtxDir, ".evtx") + + var detectedPaths: seq[string] = csvData[columnName].map(getFileNameWithExt) + detectedPaths = deduplicate(detectedPaths) + + let checkResult = getUnlistedSeq(fileLists, detectedPaths) + var outputStock: seq[string] = @[] + + echo "Finished. " + echo "---------------" + + if checkResult.len == 0: + echo "Great! No undetected evtx files were found." + echo "" + else: + echo "Undetected evtx file identified." + echo "" + var numberOfEvtxFiles = 0 + for undetectedFile in checkResult: + outputStock.add(undetectedFile) + inc numberOfEvtxFiles + outputStock.add("") + let undetectedPercentage = (checkResult.len() / fileLists.len()) * 100 + echo fmt"{ undetectedPercentage :.4}% of the evtx files did not have any detections." 
+ echo fmt"Number of evtx files not detected: {numberOfEvtxFiles}" + echo "" + if output != "": + let f = open(output, fmWrite) + defer: f.close() + for line in outputStock: + f.writeLine(line) + echo fmt"Saved File {output}" + echo "" + else: + echo outputstock.join("\n") + discard \ No newline at end of file diff --git a/src/takajopkg/listUnusedRules.nim b/src/takajopkg/listUnusedRules.nim new file mode 100644 index 00000000..cf3327d3 --- /dev/null +++ b/src/takajopkg/listUnusedRules.nim @@ -0,0 +1,39 @@ +proc listUnusedRules(columnName: string = "RuleFile", output: string = "", quiet: bool = false, rulesDir: string, timeline: string) = + if not quiet: + styledEcho(fgGreen, outputLogo()) + + let csvData: TableRef[string, seq[string]] = getHayabusaCsvData(timeline, columnName) + var fileLists: seq[string] = getTargetExtFileLists(rulesDir, ".yml") + var detectedPaths: seq[string] = csvData[columnName].map(getFileNameWithExt) + detectedPaths = deduplicate(detectedPaths) + var outputStock: seq[string] = @[] + + echo "Finished. " + echo "---------------" + + let checkResult = getUnlistedSeq(fileLists, detectedPaths) + if checkResult.len == 0: + echo "Great! No unused rule files were found." + echo "" + else: + echo "Unused rule file identified." + echo "" + var numberOfUnusedRules = 0 + for undetectedFile in checkResult: + outputStock.add(undetectedFile) + inc numberOfUnusedRules + let undetectedPercentage = (checkResult.len() / fileLists.len()) * 100 + outputStock.add("") + echo fmt"{ undetectedPercentage :.4}% of the yml rules were not used." 
+ echo fmt"Number of unused rule files: {numberOfUnusedRules}" + echo "" + if output != "": + let f = open(output, fmWrite) + defer: f.close() + for line in outputStock: + f.writeLine(line) + echo fmt"Saved File {output}" + echo "" + else: + echo outputstock.join("\n") + discard \ No newline at end of file diff --git a/src/takajopkg/splitCsvTimeline.nim b/src/takajopkg/splitCsvTimeline.nim new file mode 100644 index 00000000..43455d0b --- /dev/null +++ b/src/takajopkg/splitCsvTimeline.nim @@ -0,0 +1,87 @@ +proc splitCsvTimeline(makeMultiline: bool = false, output: string = "output", quiet: bool = false, timeline: string) = + let startTime = epochTime() + if not quiet: + styledEcho(fgGreen, outputLogo()) + + echo "Started the Split CSV Timeline command" + echo "" + echo "This command will split a large CSV timeline into many multiple ones based on computer name." + echo "If you want to separate the field data by newlines, add the -m option." + echo "" + + echo "Counting total lines. Please wait." + echo "" + let totalLines = countLinesInTimeline(timeline) + echo "Total lines: ", totalLines + echo "" + echo "Splitting the Hayabusa CSV timeline. Please wait." + + if not dirExists(output): + echo "" + echo "The directory '" & output & "' does not exist so will be created." + createDir(output) + echo "" + + var + inputFile = open(timeline, FileMode.fmRead) + line = "" + filenameSequence: seq[string] = @[] + filesTable = initTable[string, File]() + bar: SuruBar = initSuruBar() + + bar[0].total = totalLines + bar.setup() + + # Read in the CSV header + let csvHeader = inputFile.readLine() + while inputFile.endOfFile == false: + inc bar + bar.update(1000000000) # refresh every second + + var currentLine = inputFile.readLine() + let splitFields = currentLine.split(',') + var computerName = splitFields[1] + computerName = computerName[1 .. 
computerName.len - 2] # Remove surrounding double quotes + + # If it is the first time we see this computer name, then record it in a str sequence, create a file, + # write the CSV headers and current row. + if not filesTable.hasKey(computerName): + let filename = output & "/" & computerName & "-HayabusaResults.csv" + filenameSequence.add(filename) + var outputFile = open(filename, fmWrite) + filesTable[computerName] = outputFile + outputFile.write(csvHeader) + outputFile.write("\p") + flushFile(outputFile) # Flush buffer after writing to file + + # Use the file from the table and write the line. + var outputFile = filesTable[computerName] + if makeMultiline == true: + currentLine = currentLine.replace("¦", "\n") + outputFile.write(currentLine) + else: + outputFile.write(currentLine) + outputFile.write("\p") + flushFile(outputFile) + bar.finish() + + # Close all opened files + for file in filesTable.values: + close(file) + + close(inputFile) + + echo "" + for fn in filenameSequence: + let fileSize = getFileSize(fn) + echo "Saved file: " & fn & " (" & formatFileSize(fileSize) & ")" + + echo "" + + let endTime = epochTime() + let elapsedTime2 = int(endTime - startTime) + let hours = elapsedTime2 div 3600 + let minutes = (elapsedTime2 mod 3600) div 60 + let seconds = elapsedTime2 mod 60 + echo "Elapsed time: ", $hours & " hours, " & $minutes & " minutes, " & $seconds & " seconds" + echo "" \ No newline at end of file diff --git a/src/takajopkg/splitJsonTimeline.nim b/src/takajopkg/splitJsonTimeline.nim new file mode 100644 index 00000000..324eb941 --- /dev/null +++ b/src/takajopkg/splitJsonTimeline.nim @@ -0,0 +1,75 @@ +proc splitJsonTimeline(output: string = "output", quiet: bool = false, timeline: string) = + let startTime = epochTime() + if not quiet: + styledEcho(fgGreen, outputLogo()) + if not os.fileExists(timeline): + echo "The file '" & timeline & "' does not exist. Please specify a valid file path." 
+ quit(1) + + echo "Started the Split JSONL Timeline command" + echo "" + echo "This command will split a large JSONL timeline into many multiple ones based on computer name." + echo "" + + echo "Counting total lines. Please wait." + echo "" + let totalLines = countLinesInTimeline(timeline) + echo "Total lines: ", totalLines + echo "" + echo "Splitting the Hayabusa JSONL timeline. Please wait." + + if not dirExists(output): + echo "" + echo "The directory '" & output & "' does not exist so will be created." + createDir(output) + echo "" + + var + inputFile = open(timeline, FileMode.fmRead) + filenameSequence: seq[string] = @[] + filesTable = initTable[string, File]() + bar: SuruBar = initSuruBar() + + bar[0].total = totalLines + bar.setup() + + for line in lines(timeline): + inc bar + bar.update(1000000000) # refresh every second + + let jsonLine = parseJson(line) + let computerName = jsonLine["Computer"].getStr() + + if not filesTable.hasKey(computerName): + let filename = output & "/" & computerName & "-HayabusaResults.jsonl" + filenameSequence.add(filename) + var outputFile = open(filename, fmWrite) + filesTable[computerName] = outputFile + outputFile.write(line) + outputFile.write("\p") + flushFile(outputFile) + else: + var outputFile = filesTable[computerName] + outputFile.write(line) + outputFile.write("\p") + flushFile(outputFile) + bar.finish() + + # Close all opened files + for file in filesTable.values: + close(file) + + echo "" + for fn in filenameSequence: + let fileSize = getFileSize(fn) + echo "Saved file: " & fn & " (" & formatFileSize(fileSize) & ")" + + echo "" + + let endTime = epochTime() + let elapsedTime2 = int(endTime - startTime) + let hours = elapsedTime2 div 3600 + let minutes = (elapsedTime2 mod 3600) div 60 + let seconds = elapsedTime2 mod 60 + echo "Elapsed time: ", $hours & " hours, " & $minutes & " minutes, " & $seconds & " seconds" + echo "" \ No newline at end of file diff --git a/src/takajopkg/stackLogons.nim 
b/src/takajopkg/stackLogons.nim new file mode 100644 index 00000000..4b2ba65c --- /dev/null +++ b/src/takajopkg/stackLogons.nim @@ -0,0 +1,108 @@ +# TODO +# Output to stdout in tables (Target User, Target Computer, Logon Type, Source Computer) +# Remove local logins + +proc stackLogons(localSrcIpAddresses = false, output: string = "", quiet: bool = false, timeline: string) = + let startTime = epochTime() + if not quiet: + styledEcho(fgGreen, outputLogo()) + + echo "Started the Stack Logons command" + echo "" + echo "This command will stack logons based on target user, target computer, source IP address and source computer." + echo "Local source IP addresses are not included by default but can be enabled with -l, --localSrcIpAddresses." + echo "" + + echo "Counting total lines. Please wait." + echo "" + let totalLines = countLinesInTimeline(timeline) + echo "Total lines: ", totalLines + echo "" + echo "Scanning the Hayabusa timeline. Please wait." + echo "" + + var + EID_4624_count = 0 + tgtUser, tgtComp, logonType, srcIP, srcComp = "" + seqOfStrings: seq[string] + bar: SuruBar = initSuruBar() + uniqueLogons = 0 + outputFileSize: int64 + + bar[0].total = totalLines + bar.setup() + + # Loop through JSON lines + for line in lines(timeline): + inc bar + bar.update(1000000000) # refresh every second + let jsonLine = parseJson(line) + let ruleTitle = jsonLine["RuleTitle"].getStr() + + # EID 4624 Successful Logon + if isEID_4624(ruleTitle) == true: + inc EID_4624_count + + tgtUser = getJsonValue(jsonLine, @["Details", "TgtUser"]) + tgtComp = getJsonValue(jsonLine, @["Computer"]) + logonType = getJsonValue(jsonLine, @["Details", "Type"]) + srcIP = getJsonValue(jsonLine, @["Details", "SrcIP"]) + srcComp = getJsonValue(jsonLine, @["Details", "SrcComp"]) + + if not localSrcIpAddresses and isLocalIP(srcIP): + discard + else: + seqOfStrings.add(tgtUser & "," & tgtComp & "," & logonType & "," & srcIP & "," & srcComp) + bar.finish() + echo "" + + var countsTable: Table[string, 
int] = initTable[string, int]() + + # Add a count for each time the unique string was found + for string in seqOfStrings: + if not countsTable.hasKey(string): + countsTable[string] = 0 + countsTable[string] += 1 + + # Create a sequence of pairs from the Table + var seqOfPairs: seq[(string, int)] = @[] + for key, val in countsTable: + seqOfPairs.add((key, val)) + + # Sort the sequence in descending order based on the count + seqOfPairs.sort(proc (x, y: (string, int)): int = y[1] - x[1]) + + # Print results to screen + if output == "": + # Print the sorted counts with unique strings + for (string, count) in seqOfPairs: + inc uniqueLogons + var commaDelimitedStr = $count & "," & string + commaDelimitedStr = replace(commaDelimitedStr, ",", " | ") + echo commaDelimitedStr + # Save to CSV file + else: + let outputFile = open(output, fmWrite) + # Write headers + writeLine(outputFile, "Count,TgtUser,TgtComp,LogonType,SrcIP,SrcComp") + + # Write results + for (string, count) in seqOfPairs: + inc uniqueLogons + writeLine(outputFile, $count & "," & string) + outputFileSize = getFileSize(outputFile) + close(outputFile) + + echo "" + echo "Unique logons: " & $uniqueLogons + echo "Saved file: " & output & " (" & formatFileSize(outputFileSize) & ")" + echo "" + + let endTime = epochTime() + let elapsedTime2 = int(endTime - startTime) + let hours = elapsedTime2 div 3600 + let minutes = (elapsedTime2 mod 3600) div 60 + let seconds = elapsedTime2 mod 60 + echo "" + echo "Elapsed time: ", $hours & " hours, " & $minutes & " minutes, " & $seconds & " seconds" + echo "" diff --git a/src/takajopkg/submodule.nim b/src/takajopkg/submodule.nim deleted file mode 100644 index b7b8b6a0..00000000 --- a/src/takajopkg/submodule.nim +++ /dev/null @@ -1,75 +0,0 @@ -import std/os -import std/parsecsv -import std/sequtils -import std/strformat -import std/strutils -import std/tables -from std/streams import newFileStream - -proc outputLogo*(): string = - let logo = """ -╔════╦═══╦╗╔═╦═══╗ ╔╦═══╗ 
-║╔╗╔╗║╔═╗║║║╔╣╔═╗║ ║║╔═╗║ -╚╝║║╚╣║ ║║╚╝╝║║ ║║ ║║║ ║║ - ║║ ║╚═╝║╔╗╖║╚═╝╠╗║║║ ║║ - ╔╝╚╗║╔═╗║║║╚╣╔═╗║╚╝║╚═╝║ - ╚══╝╚╝ ╚╩╝╚═╩╝ ╚╩══╩═══╝ - by Yamato Security -""" - return logo - -proc getUnlistedSeq*(targetSeq: seq[string], compareSeq: seq[string]): seq[string] = - ## get element not in compareSeq - var output: seq[string] = @[] - for target in targetSeq: - if not(target in compareSeq): - output.add(target) - return output - -proc getFileNameWithExt*(targetPath: string): string = - ## get file name with ext from path - var (_, file, ext) = splitFile(targetPath) - file &= ext - return file - -proc getTargetExtFileLists*(targetDirPath: string, targetExt: string): seq[string] = - ## extract yml file name seq to specified directory path - var r: seq[string] = @[] - for f in walkDirRec(targetDirPath): - if f.endsWith(targetExt): - r.insert(getFileNameWithExt(f)) - # removed duplicated file name from seq - r = deduplicate(r) - if r.len() == 0: - quit("Target file does not exist in specified directory. Please check your option parameters.") - else: - r - - -proc getHayabusaCsvData*(csvPath: string, columnName: string): Tableref[string, - seq[string]] = - ## procedure for Hayabusa output csv read data. - - var s = newFileStream(csvPath, fmRead) - # if csvPath is not valid, error output and quit. - if s == nil: - quit("Cannot open the file. Please check that the file format is CSV. FilePath: " & csvPath) - - # parse csv - var p: CsvParser - open(p, s, csvPath) - p.readHeaderRow() - - let r = newTable[string, seq[string]]() - # initialize table - for h in items(p.headers): - r[h] = @[] - - # insert csv data to table - while p.readRow(): - for h in items(p.headers): - r[h].add(p.rowEntry(h)) - if not r.contains(columnName): - quit(fmt"Coud not find the {column_name} column. 
Please run hayabusa with the verbose profile (-P option).") - - return r diff --git a/src/takajopkg/sysmonProcessTree.nim b/src/takajopkg/sysmonProcessTree.nim new file mode 100644 index 00000000..4cacfef7 --- /dev/null +++ b/src/takajopkg/sysmonProcessTree.nim @@ -0,0 +1,287 @@ +type + processObject = object + timeStamp: string + procName: string + processID: string + processGUID: string + parentProcessGUID: string + children: seq[processObject] + +proc printIndentedProcessTree(p: processObject, indent: string = "", + stairNum: int = 0, need_sameStair: seq[bool], parentsStair: bool): seq[string] = + ## プロセスオブジェクトからプロセスツリーを画面上に表示するためのプロシージャ + + var ret: seq[string] = @[] + ret = @[indent & p.procName & " (" & p.timeStamp & " / " & + p.processID & + " / " & p.processGUID & + " / " & p.parentProcessGUID & + ")"] + + var childStairNum = stairNum + 1 + var childPreStairStr = "" + var cnt = 1 + + # プロセスの階層数に応じて、親階層がつながっていることを表現するための` |`を付与する + while childStairNum > cnt: + if need_sameStair[cnt-1] and not parentsStair: + childPreStairStr &= " │" + else: + childPreStairStr &= " " + inc cnt + for childNum, children in enumerate(p.children): + # Display chile process + var childIndentStr = childPreStairStr + if len(p.children) == childNum + 1: + childIndentStr &= " └ " + else: + childIndentStr &= " ├ " + var next_need_sameStair = need_sameStair + next_need_sameStair.add(len(children.children) > 1) + ret = concat(ret, printIndentedProcessTree(children, childIndentStr, + childStairNum, next_need_sameStair, childNum + 1 == len(p.children))) + return ret + +proc containsTargetGUIDInChild(p: processObject, targetGUID: string): bool = + for child in p.children: + if child.processGUID == targetGUID: + return true + return false + + +proc moveProcessObjectToChild(mvSourceProcess: processObject, + searchProcess: var processObject, + outputProcess: var processObject) = + ## Procedure for moving a process object to a child process + + var searchChildrenProcess = searchProcess.children 
+ for idx, childProcess in searchChildrenProcess: + if childProcess.processGUID == mvSourceProcess.parentProcessGUID: + # Added to a separate table because assertion errors occur when the number of elements changes during iteration + outputProcess.children[idx].children.add(mvSourceProcess) + return + else: + var child = childProcess + moveProcessObjectToChild(mvSourceProcess, child, + outputProcess.children[idx]) + +proc sysmonProcessTree(output: string = "", processGuid: string, + quiet: bool = false, timeline: string) = + ## Procedure for displaying Sysmon's process tree + + # Display the logo + if not quiet: + styledEcho(fgGreen, outputLogo()) + + echo "" + echo "Running the Process Tree module" + echo "" + + var stockedProcessObjectTable = newTable[string, processObject]() + + var processesFoundCount = 0 + var foundProcessTable = initTable[string, string]() + var passGuid = initHashSet[string]() + passGuid.incl(processGuid) + var addedProcess = initHashSet[string]() + + var parentsProcessStocks = newSeq[string]() + var parentsProcesStockDisableFlag = false + var parentProcessGUIDTable = newTable[string, string]() + for line in lines(timeline): + var processObjectTable = newTable[string, processObject]() + let + jsonLine = parseJson(line) + timeStamp = jsonLine["Timestamp"].getStr() + channel = jsonLine["Channel"].getStr() + eventId = jsonLine["EventID"].getInt() + eventLevel = jsonLine["Level"].getStr() + ruleTitle = jsonLine["RuleTitle"].getStr() + var eventProcessGUID = "" + # Found a Sysmon 1 process creation event. This assumes info level events are enabled and there won't be more than one Sysmon 1 event for a process. + if channel == "Sysmon" and eventId == 1 and (ruleTitle == "Proc Exec" or + ruleTitle == "Proc Exec (Sysmon Alert)"): + try: + eventProcessGUID = jsonLine["Details"]["PGUID"].getStr() + except KeyError: + echo "Could not find the PGUID field. Make sure you ran Hayabusa with the standard profile." 
+ if eventProcessGUID in passGuid or jsonLine["Details"][ + "ParentPGUID"].getStr() in passGuid: + parentsProcesStockDisableFlag = true + inc processesFoundCount + let keysToExtract = { + #"CmdLine": "Cmdline", + "Proc": "Proc", + #"ParentCmdline": "ParentCmdline", + #"LogonID": "LID", + #"LogonGUID": "LGUID", + #"ParentPID": "ParentPID", + "ParentPGUID": "ParentPGUID", + #"Hashes": "Hashes" + } + + for (foundKey, jsonKey) in keysToExtract: + try: + foundProcessTable[foundKey] = jsonLine["Details"][ + jsonKey].getStr() + except KeyError: + foundProcessTable[foundKey] = "" + # PID is an integer so getStr will fail + try: + let eventProcessID = jsonLine["Details"]["PID"].getInt() + foundProcessTable["PID"] = $eventProcessID + except KeyError: + foundProcessTable["PID"] = "No PID Found" + + let process = processObject( + timeStamp: timeStamp, + procName: foundProcessTable["Proc"], + processID: foundProcessTable["PID"], + processGUID: eventProcessGUID, + parentProcessGUID: foundProcessTable["ParentPGUID"]) + let key = timeStamp & "-" & process.processID + if addedProcess.contains(key): + continue + + if not passGuid.contains(eventProcessGUID): + passGuid.incl(eventProcessGUID) + if not passGuid.contains(process.parentProcessGUID): + passGuid.incl(process.parentProcessGUID) + if not addedProcess.contains(key): + processObjectTable[process.processGUID] = process + addedProcess.incl(key) + # Link child processes to their parents + if len(stockedProcessObjectTable) != 0 and + process.parentProcessGUID in stockedProcessObjectTable: + if process.parentProcessGUID == processGUID: + stockedProcessObjectTable[processGUID].children.add(process) + elif process.processGUID == processGUID: + stockedProcessObjectTable[processGUID] = process + else: + stockedProcessObjectTable[ + process.parentProcessGUID].children.add(process) + else: + stockedProcessObjectTable[process.processGUID] = process + parentProcessGUIDTable[process.parentProcessGUID] = process.processGUID + else: + if not 
parentsProcesStockDisableFlag: + parentsProcessStocks.add(line) + # search ancestor process + var parents_exist = false + var parents_key = "" + + # echo parentsProcessStocks.len + parentsProcessStocks.reverse() + for line in parentsProcessStocks: + let + jsonLine = parseJson(line) + timeStamp = jsonLine["Timestamp"].getStr() + channel = jsonLine["Channel"].getStr() + eventId = jsonLine["EventID"].getInt() + eventLevel = jsonLine["Level"].getStr() + ruleTitle = jsonLine["RuleTitle"].getStr() + var eventProcessGUID = "" + try: + eventProcessGUID = jsonLine["Details"]["PGUID"].getStr() + except KeyError: + echo "Could not find the PGUID field. Make sure you ran Hayabusa with the standard profile." + if eventProcessGUID in passGuid: + var processObjectTable = newTable[string, processObject]() + let keysToExtract = { + #"CmdLine": "Cmdline", + "Proc": "Proc", + #"ParentCmdline": "ParentCmdline", + #"LogonID": "LID", + #"LogonGUID": "LGUID", + #"ParentPID": "ParentPID", + "ParentPGUID": "ParentPGUID", + #"Hashes": "Hashes" + } + + for (foundKey, jsonKey) in keysToExtract: + try: + foundProcessTable[foundKey] = jsonLine["Details"][ + jsonKey].getStr() + except KeyError: + foundProcessTable[foundKey] = "" + # PID is an integer so getStr will fail + try: + let eventProcessID = jsonLine["Details"]["PID"].getInt() + foundProcessTable["PID"] = $eventProcessID + except KeyError: + foundProcessTable["PID"] = "No PID Found" + + let process = processObject( + timeStamp: timeStamp, + procName: foundProcessTable["Proc"], + processID: foundProcessTable["PID"], + processGUID: eventProcessGUID, + parentProcessGUID: foundProcessTable["ParentPGUID"]) + let key = timeStamp & "-" & process.processID + # if addedProcess.contains(key): + # continue + + if not passGuid.contains(eventProcessGUID): + passGuid.incl(eventProcessGUID) + if not passGuid.contains(process.parentProcessGUID): + passGuid.incl(process.parentProcessGUID) + if not addedProcess.contains(key): + 
processObjectTable[process.processGUID] = process + addedProcess.incl(key) + stockedProcessObjectTable[process.processGUID] = process + stockedProcessObjectTable[process.processGUID].children.add( + stockedProcessObjectTable[parentProcessGUIDTable[ + process.processGUID]]) + parentProcessGUIDTable[process.parentProcessGUID] = process.processGUID + parents_exist = true + parents_key = process.processGUID + var outputStrSeq: seq[string] = @[] + var outputProcessObjectTable = stockedProcessObjectTable + + # Sort process tree + for process in stockedProcessObjectTable.keys: + if ((not parents_exist) and process == processGuid) or (( + parents_exist) and process == parents_key): + continue + + if parents_exist: + moveProcessObjectToChild(stockedProcessObjectTable[process], + stockedProcessObjectTable[parents_key], + outputProcessObjectTable[parents_key]) + else: + moveProcessObjectToChild(stockedProcessObjectTable[process], + stockedProcessObjectTable[processGuid], + outputProcessObjectTable[processGuid]) + + + # Display process tree for the specified process root + let root_multi_child = outputProcessObjectTable[parents_key].children.len() > 1 + if parents_key != "": + outputStrSeq = concat(outputStrSeq, printIndentedProcessTree( + outputProcessObjectTable[parents_key], need_sameStair = @[ + root_multi_child], parentsStair = false + )) + elif outputProcessObjectTable.hasKey(processGuid): + outputStrSeq = concat(outputStrSeq, printIndentedProcessTree( + outputProcessObjectTable[processGuid], need_sameStair = @[ + root_multi_child], parentsStair = false)) + + if output != "": + let f = open(output, fmWrite) + defer: f.close() + for line in outputStrSeq: + f.writeLine(line) + echo fmt"Saved File {output}" + echo "" + else: + for line in outputStrSeq: + if line.contains(fmt" / {processGuid} / "): + styledEcho(fgGreen, line) + else: + echo line + discard + + if processesFoundCount == 0: + echo "The process was not found." 
+ echo "" + return diff --git a/src/takajopkg/timelineLogon.nim b/src/takajopkg/timelineLogon.nim new file mode 100644 index 00000000..42d53447 --- /dev/null +++ b/src/takajopkg/timelineLogon.nim @@ -0,0 +1,268 @@ +proc timelineLogon(calculateElapsedTime: bool = true, output: string, outputLogoffEvents: bool = false, outputAdminLogonEvents: bool = false, quiet: bool = false, timeline: string) = + let startTime = epochTime() + if not quiet: + styledEcho(fgGreen, outputLogo()) + + echo "Started the Timeline Logon command" + echo "" + echo "This command creates a CSV timeline of logon events." + echo "Elapsed time for successful logons are calculated by default but can be disabled with -c=false." + echo "Logoff events can be outputted on separate lines with -l, --outputLogoffEvents." + echo "Admin logon events can be outputted on separate lines with -a, --outputAdminLogonEvents." + echo "" + echo "" + + echo "Counting total lines. Please wait." + echo "" + let totalLines = countLinesInTimeline(timeline) + echo "Total lines: ", totalLines + echo "" + + echo "Creating a logon timeline. Please wait." 
+ echo "" + + var + seqOfResultsTables: seq[TableRef[string, string]] # Sequences are immutable so need to create a sequence of pointers to tables so we can update ["ElapsedTime"] + seqOfLogoffEventTables: seq[Table[string, string]] # This sequence can be immutable + logoffEvents: Table[string, string] = initTable[string, string]() + adminLogonEvents: Table[string, string] = initTable[string, string]() + EID_4624_count = 0 # Successful logon + EID_4625_count = 0 # Failed logon + EID_4634_count = 0 # Logoff + EID_4647_count = 0 # User initiated logoff + EID_4648_count = 0 # Explicit logon + EID_4672_count = 0 # Admin logon + bar: SuruBar = initSuruBar() + + bar[0].total = totalLines + bar.setup() + + for line in lines(timeline): + inc bar + bar.update(1000000000) + let jsonLine = parseJson(line) + let ruleTitle = jsonLine["RuleTitle"].getStr() + + #EID 4624 Successful Logon + if isEID_4624(ruleTitle) == true: + inc EID_4624_count + var singleResultTable = newTable[string, string]() + singleResultTable["Event"] = "Successful Logon" + singleResultTable["Timestamp"] = jsonLine["Timestamp"].getStr() + singleResultTable["Channel"] = jsonLine["Channel"].getStr() + singleResultTable["EventID"] = "4624" + let details = jsonLine["Details"] + let extraFieldInfo = jsonLine["ExtraFieldInfo"] + let logonType = details.extractStr("Type") # Will be Int if field mapping is turned off + singleResultTable["Type"] = logonType # Needs to be logonNumberToString(logonType) if data field mapping is turned off. 
TODO + singleResultTable["Auth"] = extraFieldInfo.extractStr("AuthenticationPackageName") + singleResultTable["TargetComputer"] = jsonLine.extractStr("Computer") + singleResultTable["TargetUser"] = details.extractStr("TgtUser") + let impersonationLevel = extraFieldInfo.extractStr("ImpersonationLevel") + singleResultTable["Impersonation"] = impersonationLevelIdToName(impersonationLevel) + singleResultTable["SourceIP"] = details.extractStr("SrcComp") + singleResultTable["Process"] = details.extractStr("LogonProcessName") + singleResultTable["LID"] = details.extractStr("LID") + singleResultTable["LGUID"] = extraFieldInfo.extractStr("LogonGuid") + singleResultTable["SourceComputer"] = details.extractStr("SrcIP") + let elevatedToken = extraFieldInfo.extractStr("ElevatedToken") + singleResultTable["ElevatedToken"] = elevatedTokenIdToName(elevatedToken) + singleResultTable["TargetUserSID"] = extraFieldInfo.extractStr("TargetUserSid") + singleResultTable["TargetDomainName"] = extraFieldInfo.extractStr("TargetDomainName") + singleResultTable["TargetLinkedLID"] = extraFieldInfo.extractStr("TargetLinkedLogonId") + singleResultTable["LogoffTime"] = "" + singleResultTable["ElapsedTime"] = "" + singleResultTable["AdminLogon"] = "" + + seqOfResultsTables.add(singleResultTable) + + #EID 4625 Failed Logon + if isEID_4625(ruleTitle) == true: + inc EID_4625_count + var singleResultTable = newTable[string, string]() + singleResultTable["Event"] = "Failed Logon" + singleResultTable["Timestamp"] = jsonLine["Timestamp"].getStr() + singleResultTable["Channel"] = jsonLine["Channel"].getStr() + singleResultTable["EventID"] = "4625" + let details = jsonLine["Details"] + let extraFieldInfo = jsonLine["ExtraFieldInfo"] + let logonType = details.extractStr("Type") + singleResultTable["Type"] = logonType # Needs to be logonNumberToString(logonType) if data field mapping is turned off. 
TODO + singleResultTable["Auth"] = details.extractStr("AuthPkg") + singleResultTable["TargetComputer"] = jsonLine.extractStr("Computer") + singleResultTable["TargetUser"] = details.extractStr("TgtUser") + singleResultTable["SourceIP"] = details.extractStr("SrcIP") + singleResultTable["Process"] = details.extractStr("Proc") + singleResultTable["SourceComputer"] = details.extractStr("SrcComp") + singleResultTable["TargetUserSID"] = extraFieldInfo.extractStr("TargetUserSid") # Don't output as it is always S-1-0-0 + singleResultTable["TargetDomainName"] = extraFieldInfo.extractStr("TargetDomainName") + singleResultTable["FailureReason"] = logonFailureReason(extraFieldInfo.extractStr("SubStatus")) + + seqOfResultsTables.add(singleResultTable) + + #EID 4634 Logoff + if ruleTitle == "Logoff": + inc EID_4634_count + # If we want to calculate ElapsedTime + if calculateElapsedTime == true: + # Create the key in the format of LID:Computer:User with a value of the timestamp + let key = jsonLine["Details"]["LID"].getStr() & ":" & jsonLine["Computer"].getStr() & ":" & jsonLine["Details"]["User"].getStr() + let logoffTime = jsonLine["Timestamp"].getStr() + logoffEvents[key] = logoffTime + if outputLogoffEvents == true: + var singleResultTable = newTable[string, string]() + singleResultTable["Event"] = "User Initiated Logoff" + singleResultTable["Timestamp"] = jsonLine["Timestamp"].getStr() + singleResultTable["Channel"] = jsonLine["Channel"].getStr() + singleResultTable["TargetComputer"] = jsonLine.extractStr("Computer") + singleResultTable["EventID"] = "4634" + let details = jsonLine["Details"] + singleResultTable["TargetUser"] = details.extractStr("User") + singleResultTable["LID"] = details.extractStr("LID") + seqOfResultsTables.add(singleResultTable) + + #EID 4647 User Initiated Logoff + if ruleTitle == "Logoff (User Initiated)": + inc EID_4647_count + # If we want to calculate ElapsedTime + if calculateElapsedTime == true: + # Create the key in the format of 
LID:Computer:User with a value of the timestamp + let key = jsonLine["Details"]["LID"].getStr() & ":" & jsonLine["Computer"].getStr() & ":" & jsonLine["Details"]["User"].getStr() + let logoffTime = jsonLine["Timestamp"].getStr() + logoffEvents[key] = logoffTime + if outputLogoffEvents == true: + var singleResultTable = newTable[string, string]() + singleResultTable["Event"] = "User Initiated Logoff" + singleResultTable["Timestamp"] = jsonLine["Timestamp"].getStr() + singleResultTable["Channel"] = jsonLine["Channel"].getStr() + singleResultTable["TargetComputer"] = jsonLine.extractStr("Computer") + singleResultTable["EventID"] = "4647" + let details = jsonLine["Details"] + singleResultTable["TargetUser"] = details.extractStr("User") + singleResultTable["LID"] = details.extractStr("LID") + seqOfResultsTables.add(singleResultTable) + + #EID 4648 Explicit Logon + if ruleTitle == "Explicit Logon" or ruleTitle == "Explicit Logon (Suspicious Process)": + inc EID_4648_count + var singleResultTable = newTable[string, string]() + singleResultTable["Event"] = "Explicit Logon" + singleResultTable["Timestamp"] = jsonLine["Timestamp"].getStr() + singleResultTable["Channel"] = jsonLine["Channel"].getStr() + singleResultTable["EventID"] = "4648" + let details = jsonLine["Details"] + let extraFieldInfo = jsonLine["ExtraFieldInfo"] + singleResultTable["TargetComputer"] = details.extractStr("TgtSvr") + singleResultTable["TargetUser"] = details.extractStr("TgtUser") + singleResultTable["SourceUser"] = details.extractStr("SrcUser") + singleResultTable["SourceIP"] = details.extractStr("SrcIP") + singleResultTable["Process"] = details.extractStr("Proc") + singleResultTable["LID"] = details.extractStr("LID") + singleResultTable["LGUID"] = extraFieldInfo.extractStr("LogonGuid") + singleResultTable["SourceComputer"] = jsonLine.extractStr("Computer") # 4648 is a little different in that the log is saved on the source computer so Computer will be the source. 
+ seqOfResultsTables.add(singleResultTable) + + # EID 4672 Admin Logon + # When someone logs in with administrator level privileges, there will be two logon sessions created. One for a lower privileged user and one with admin privileges. + # I am just going to add "Yes" to the "AdminLogon" column in the 4624 (Successful logon) event to save space. + # The timing will be very close to the 4624 log so I am checking if the Computer, LID and TgtUser are the same and then if the two events happened within 10 seconds. + if ruleTitle == "Admin Logon": + inc EID_4672_count + let key = jsonLine["Details"]["LID"].getStr() & ":" & jsonLine["Computer"].getStr() & ":" & jsonLine["Details"]["TgtUser"].getStr() + let adminLogonTime = jsonLine["Timestamp"].getStr() + adminLogonEvents[key] = adminLogonTime + if outputAdminLogonEvents == true: + var singleResultTable = newTable[string, string]() + singleResultTable["Event"] = "Admin Logon" + singleResultTable["Timestamp"] = jsonLine["Timestamp"].getStr() + singleResultTable["Channel"] = jsonLine["Channel"].getStr() + singleResultTable["EventID"] = "4672" + singleResultTable["TargetComputer"] = jsonLine.extractStr("Computer") + let eventId = jsonLine["EventID"].getInt() + let details = jsonLine["Details"] + singleResultTable["TargetUser"] = details.extractStr("TgtUser") + singleResultTable["LID"] = details.extractStr("LID") + seqOfResultsTables.add(singleResultTable) + + bar.finish() + + echo "" + echo "Calculating logon elapsed time. Please wait." 
+ echo "" + + # Calculating the logon elapsed time (default) + if calculateElapsedTime == true: + for tableOfResults in seqOfResultsTables: + if tableOfResults["EventID"] == "4624": + var logoffTime = "" + var logonTime = tableOfResults["Timestamp"] + + let key = tableOfResults["LID"] & ":" & tableOfResults["TargetComputer"] & ":" & tableOfResults["TargetUser"] + if logoffEvents.hasKey(key): + logoffTime = logoffEvents[key] + tableOfResults[]["LogoffTime"] = logoffTime + logonTime = logonTime[0 ..< logonTime.len - 7] + logoffTime = logoffTime[0 ..< logofftime.len - 7] + let parsedLogoffTime = parse(logoffTime, "yyyy-MM-dd HH:mm:ss'.'fff") + let parsedLogonTime = parse(logonTime, "yyyy-MM-dd HH:mm:ss'.'fff") + let duration = parsedLogoffTime - parsedLogonTime + tableOfResults[]["ElapsedTime"] = formatDuration(duration) + else: + logoffTime = "n/a" + + # Find admin logons + for tableOfResults in seqOfResultsTables: + if tableOfResults["EventID"] == "4624": + var logonTime = tableOfResults["Timestamp"] + logonTime = logonTime[0 ..< logonTime.len - 7] # Remove the timezone + #echo "4624 logon time: " & logonTime + let key = tableOfResults["LID"] & ":" & tableOfResults["TargetComputer"] & ":" & tableOfResults["TargetUser"] + if adminLogonEvents.hasKey(key): + var adminLogonTime = adminLogonEvents[key] + adminLogonTime = adminLogonTime[0 ..< adminLogonTime.len - 7] # Remove the timezone + let parsed_4624_logonTime = parse(logonTime, "yyyy-MM-dd HH:mm:ss'.'fff") + let parsed_4672_logonTime = parse(adminLogonTime, "yyyy-MM-dd HH:mm:ss'.'fff") + let duration = parsed_4624_logonTime - parsed_4672_logonTime + # If the 4624 logon event and 4672 admin logon event are within 10 seconds then flag as an Admin Logon + if duration.inSeconds < 10: + tableOfResults[]["AdminLogon"] = "Yes" + + echo "Found logon events:" + echo "EID 4624 (Successful Logon): ", EID_4624_count + echo "EID 4625 (Failed Logon): ", EID_4625_count + echo "EID 4634 (Logoff): ", EID_4634_count + echo "EID 4647 
(User Initiated Logoff): ", EID_4647_count + echo "EID 4648 (Explicit Logon): ", EID_4648_count + echo "EID 4672 (Admin Logon): ", EID_4672_count + echo "" + + # Save results + var outputFile = open(output, fmWrite) + let header = ["Timestamp", "Channel", "EventID", "Event", "LogoffTime", "ElapsedTime", "FailureReason", "TargetComputer", "TargetUser", "AdminLogon", "SourceComputer", "SourceUser", "SourceIP", "Type", "Impersonation", "ElevatedToken", "Auth", "Process", "LID", "LGUID", "TargetUserSID", "TargetDomainName", "TargetLinkedLID"] + + ## Write CSV header + for h in header: + outputFile.write(h & ",") + outputFile.write("\p") + + ## Write contents + for table in seqOfResultsTables: + for key in header: + if table.hasKey(key): + outputFile.write(escapeCsvField(table[key]) & ",") + else: + outputFile.write(",") + outputFile.write("\p") + let fileSize = getFileSize(output) + outputFile.close() + + echo "Saved results to " & output & " (" & formatFileSize(fileSize) & ")" + echo "" + + let endTime = epochTime() + let elapsedTime2 = int(endTime - startTime) + let hours = elapsedTime2 div 3600 + let minutes = (elapsedTime2 mod 3600) div 60 + let seconds = elapsedTime2 mod 60 + echo "Elapsed time: ", $hours & " hours, " & $minutes & " minutes, " & $seconds & " seconds" + echo "" \ No newline at end of file diff --git a/src/takajopkg/timelineSuspiciousProcesses.nim b/src/takajopkg/timelineSuspiciousProcesses.nim new file mode 100644 index 00000000..f316eb51 --- /dev/null +++ b/src/takajopkg/timelineSuspiciousProcesses.nim @@ -0,0 +1,237 @@ +proc timelineSuspiciousProcesses(level: string = "high", output: string = "", quiet: bool = false, timeline: string) = + let startTime = epochTime() + if not quiet: + styledEcho(fgGreen, outputLogo()) + + if level != "critical" and level != "high" and level != "medium" and level != "low" and level != "informational": + echo "You must specify a minimum level of critical, high, medium, low or informational. 
(default: high)" + echo "" + return + + echo "Started the Timeline Suspicious Processes command" + echo "" + echo "This command will a CSV timeline of suspicious processes." + echo "The default minimum level of alerts is high." + echo "You can change the minimum level with -l, --level=." + echo "" + + echo "Counting total lines. Please wait." + echo "" + let totalLines = countLinesInTimeline(timeline) + echo "Total lines: ", totalLines + echo "" + + if level == "critical": + echo "Scanning for processes with an alert level of critical" + else: + echo "Scanning for processes with a minimal alert level of " & level + echo "" + + var + seqOfResultsTables: seq[Table[string, string]] + suspicousProcessCount_Sec_4688, suspicousProcessCount_Sysmon_1, eventId,pidInt = 0 + channel, cmdLine, company, computer, description, eventLevel, eventType, hashes, hash_MD5, hash_SHA1, hash_SHA256, hash_IMPHASH, + lid, lguid, ruleAuthor,ruleTitle, parentCmdline, + parentGuid, parentPid, pidStr, process, processGuid, product, timestamp, user = "" + jsonLine: JsonNode + bar: SuruBar = initSuruBar() + + bar[0].total = totalLines + bar.setup() + + for line in lines(timeline): + inc bar + bar.update(1000000000) # refresh every second + jsonLine = parseJson(line) + channel = jsonLine["Channel"].getStr() + eventId = jsonLine["EventID"].getInt() + eventLevel = jsonLine["Level"].getStr() + + # Found a Security 4688 process creation event + if channel == "Sec" and eventId == 4688 and isMinLevel(eventLevel, level) == true: + inc suspicousProcessCount_Sec_4688 + try: + cmdLine = jsonLine["Details"]["Cmdline"].getStr() + except KeyError: + cmdLine = "" + eventType = "Security 4688" + timestamp = jsonLine["Timestamp"].getStr() + ruleTitle = jsonLine["RuleTitle"].getStr() + computer = jsonLine["Computer"].getStr() + process = jsonLine["Details"]["Proc"].getStr() + pidStr = jsonLine["Details"]["PID"].getStr() + pidStr = intToStr(fromHex[int](pidStr)) + user = jsonLine["Details"]["User"].getStr() + lid 
= jsonLine["Details"]["LID"].getStr() + try: + ruleAuthor = jsonLine["RuleAuthor"].getStr() + except KeyError: + ruleAuthor = "" + + if output == "": # Output to screen + echo "Timestamp: " & timestamp + echo "Computer: " & computer + echo "Type: " & eventType + echo "Level: " & eventLevel + echo "Rule: " & ruleTitle + echo "RuleAuthor: " & ruleAuthor + echo "Cmdline: " & cmdLine + echo "Process: " & process + echo "PID: " & pidStr + echo "User: " & user + echo "LID: " & lid + echo "" + else: # Add records to seqOfResultsTables to eventually save to file. + var singleResultTable = initTable[string, string]() + singleResultTable["Timestamp"] = timestamp + singleResultTable["Computer"] = computer + singleResultTable["Type"] = eventType + singleResultTable["Level"] = eventLevel + singleResultTable["Rule"] = ruleTitle + singleResultTable["RuleAuthor"] = ruleAuthor + singleResultTable["Cmdline"] = cmdLine + singleResultTable["Process"] = process + singleResultTable["PID"] = pidStr + singleResultTable["User"] = user + singleResultTable["LID"] = lid + seqOfResultsTables.add(singleResultTable) + + # Found a Sysmon 1 process creation event + if channel == "Sysmon" and eventId == 1 and isMinLevel(eventLevel, level) == true: + inc suspicousProcessCount_Sysmon_1 + cmdLine = jsonLine["Details"]["Cmdline"].getStr() + eventType = "Sysmon 1" + timestamp = jsonLine["Timestamp"].getStr() + ruleTitle = jsonLine["RuleTitle"].getStr() + computer = jsonLine["Computer"].getStr() + process = jsonLine["Details"]["Proc"].getStr() + pidInt = jsonLine["Details"]["PID"].getInt() + user = jsonLine["Details"]["User"].getStr() + lid = jsonLine["Details"]["LID"].getStr() + lguid = jsonLine["Details"]["LGUID"].getStr() + processGuid = jsonLine["Details"]["PGUID"].getStr() + parentCmdline = jsonLine["Details"]["ParentCmdline"].getStr() + parentPid = $jsonLine["Details"]["ParentPID"].getInt() + parentGuid = jsonLine["Details"]["ParentPGUID"].getStr() + description = 
jsonLine["Details"]["Description"].getStr() + product = jsonLine["Details"]["Product"].getStr() + try: + company = jsonLine["Details"]["Company"].getStr() + except KeyError: + company = "" + try: + ruleAuthor = jsonLine["RuleAuthor"].getStr() + except KeyError: + ruleAuthor = "" + try: + hashes = jsonLine["Details"]["Hashes"].getStr() # Hashes are not enabled by default so this field may not exist. + let pairs = hashes.split(",") # Split the string into key-value pairs. Ex: MD5=DE9C75F34F47B60A71BBA03760F0579E,SHA256=12F06D3B1601004DB3F7F1A07E7D3AF4CC838E890E0FF50C51E4A0C9366719ED,IMPHASH=336674CB3C8337BDE2C22255345BFF43 + for pair in pairs: + let keyVal = pair.split("=") + case keyVal[0]: + of "MD5": + hash_MD5 = keyVal[1] + of "SHA1": + hash_SHA1 = keyVal[1] + of "SHA256": + hash_SHA256 = keyVal[1] + of "IMPHASH": + hash_IMPHASH = keyVal[1] + except KeyError: + hashes = "" + hash_MD5 = "" + hash_SHA1 = "" + hash_SHA256 = "" + hash_IMPHASH = "" + + if output == "": # Output to screen + echo "Timestamp: " & timestamp + echo "Computer: " & computer + echo "Type: " & eventType + echo "Level: " & eventLevel + echo "Rule: " & ruleTitle + echo "RuleAuthor: " & ruleAuthor + echo "Cmdline: " & cmdLine + echo "Process: " & process + echo "PID: " & $pidInt + echo "User: " & user + echo "LID: " & lid + echo "LGUID: " & lguid + echo "ProcessGUID: " & processGuid + echo "ParentCmdline: " & parentCmdline + echo "ParentPID: " & parentPid + echo "ParentGUID: " & parentGuid + echo "Description: " & description + echo "Product: " & product + echo "Company: " & company + echo "MD5 Hash: " & hash_MD5 + echo "SHA1 Hash: " & hash_SHA1 + echo "SHA256 Hash: " & hash_SHA256 + echo "Import Hash: " & hash_IMPHASH + echo "" + else: # Add records to seqOfResultsTables to eventually save to file. 
+ var singleResultTable = initTable[string, string]() + singleResultTable["Timestamp"] = timestamp + singleResultTable["Computer"] = computer + singleResultTable["Type"] = eventType + singleResultTable["Level"] = eventLevel + singleResultTable["Rule"] = ruleTitle + singleResultTable["RuleAuthor"] = ruleAuthor + singleResultTable["Cmdline"] = cmdLine + singleResultTable["Process"] = process + singleResultTable["PID"] = pidStr + singleResultTable["User"] = user + singleResultTable["LID"] = lid + singleResultTable["LGUID"] = lguid + singleResultTable["ProcessGUID"] = processGuid + singleResultTable["ParentCmdline"] = parentCmdline + singleResultTable["ParentPID"] = parentPid + singleResultTable["ParentGUID"] = parentGuid + singleResultTable["Description"] = description + singleResultTable["Product"] = product + singleResultTable["Company"] = company + singleResultTable["MD5 Hash"] = hash_MD5 + singleResultTable["SHA1 Hash"] = hash_SHA1 + singleResultTable["SHA256 Hash"] = hash_SHA256 + singleResultTable["Import Hash"] = hash_IMPHASH + seqOfResultsTables.add(singleResultTable) + bar.finish() + + if output != "" and suspicousProcessCount_Sec_4688 != 0 and suspicousProcessCount_Sysmon_1 != 0: # Save results to CSV + # Open file to save results + var outputFile = open(output, fmWrite) + let header = ["Timestamp", "Computer", "Type", "Level", "Rule", "RuleAuthor", "Cmdline", "Process", "PID", "User", "LID", "LGUID", "ProcessGUID", "ParentCmdline", "ParentPID", "ParentPGUID", "Description", "Product", "Company", "MD5 Hash", "SHA1 Hash", "SHA256 Hash", "Import Hash"] + + ## Write CSV header + outputFile.write(header.join(",") & "\p") + + ## Write contents + for table in seqOfResultsTables: + for key in header: + if table.hasKey(key): + outputFile.write(escapeCsvField(table[key]) & ",") + else: + outputFile.write(",") + outputFile.write("\p") + outputFile.close() + let fileSize = getFileSize(output) + + echo "Saved results to " & output & " (" & formatFileSize(fileSize) & ")" 
+ echo "" + + if suspicousProcessCount_Sec_4688 == 0 and suspicousProcessCount_Sysmon_1 == 0: + echo "No suspicous processes were found. There are either no malicious processes or you need to change the level." + echo "" + return + + echo "Suspicous processes in Security 4688 process creation events: " & $suspicousProcessCount_Sec_4688 + echo "Suspicious processes in Sysmon 1 process creation events: " & $suspicousProcessCount_Sysmon_1 + echo "" + let endTime = epochTime() + let elapsedTime = int(endTime - startTime) + let hours = elapsedTime div 3600 + let minutes = (elapsedTime mod 3600) div 60 + let seconds = elapsedTime mod 60 + + echo "Elapsed time: ", $hours & " hours, " & $minutes & " minutes, " & $seconds & " seconds" + echo "" \ No newline at end of file diff --git a/src/takajopkg/vtDomainLookup.nim b/src/takajopkg/vtDomainLookup.nim new file mode 100644 index 00000000..9a609509 --- /dev/null +++ b/src/takajopkg/vtDomainLookup.nim @@ -0,0 +1,159 @@ +# TODO: +# Make asynchronous +# Handle OSError exception when the network gets disconnected while running +# Add categories and SAN info +# Add output not found to txt file +proc vtDomainLookup(apiKey: string, domainList: string, jsonOutput: string = "", output: string, rateLimit: int = 4, quiet: bool = false) = + let startTime = epochTime() + if not quiet: + styledEcho(fgGreen, outputLogo()) + + if not fileExists(domainList): + echo "The file " & domainList & " does not exist." + return + + echo "Started the VirusTotal Domain Lookup command" + echo "" + echo "This command will lookup a list of domains on VirusTotal." + echo "Specify -j results.json to save the original JSON responses." + echo "The default rate is 4 requests per minute so increase this if you have a premium membership." + echo "" + + echo "Loading domains. Please wait." + echo "" + + let file = open(domainList) + # Read each line into a sequence. 
+ var lines = newSeq[string]() + for line in file.lines: + lines.add(line) + file.close() + + echo "Loaded domains: ", len(lines) + echo "Rate limit per minute: ", rateLimit + echo "" + + let + timePerRequest = 60.0 / float(rateLimit) # time taken for one request + estimatedTimeInSeconds = float(len(lines)) * timePerRequest + estimatedHours = int(estimatedTimeInSeconds) div 3600 + estimatedMinutes = (int(estimatedTimeInSeconds) mod 3600) div 60 + estimatedSeconds = int(estimatedTimeInSeconds) mod 60 + echo "Estimated time: ", $estimatedHours & " hours, " & $estimatedMinutes & " minutes, " & $estimatedSeconds & " seconds" + echo "" + + let client = newHttpClient() + client.headers = newHttpHeaders({ "x-apikey": apiKey }) + + var + totalMaliciousDomainCount = 0 + bar: SuruBar = initSuruBar() + seqOfResultsTables: seq[TableRef[string, string]] + jsonResponses: seq[JsonNode] # Declare sequence to store Json responses + + bar[0].total = len(lines) + bar.setup() + + for domain in lines: + inc bar + bar.update(1000000000) # refresh every second + let response = client.request("https://www.virustotal.com/api/v3/domains/" & encodeUrl(domain), httpMethod = HttpGet) + var singleResultTable = newTable[string, string]() + singleResultTable["Domain"] = domain + singleResultTable["Link"] = "https://www.virustotal.com/gui/domain/" & domain + if response.status == $Http200: + singleResultTable["Response"] = "200" + let jsonResponse = parseJson(response.body) + jsonResponses.add(jsonResponse) + + # Parse values that need epoch time to human readable time + singleResultTable["CreationDate"] = getJsonDate(jsonResponse, @["data", "attributes", "creation_date"]) + singleResultTable["LastAnalysisDate"] = getJsonDate(jsonResponse, @["data", "attributes", "last_analysis_date"]) + singleResultTable["LastModifiedDate"] = getJsonDate(jsonResponse, @["data", "attributes", "last_modification_date"]) + singleResultTable["LastWhoisDate"] = getJsonDate(jsonResponse, @["data", "attributes", 
"whois_date"]) + + # Parse simple data + singleResultTable["MaliciousCount"] = getJsonValue(jsonResponse, @["data", "attributes", "last_analysis_stats", "malicious"]) + singleResultTable["HarmlessCount"] = getJsonValue(jsonResponse, @["data", "attributes", "last_analysis_stats", "harmless"]) + singleResultTable["SuspiciousCount"] = getJsonValue(jsonResponse, @["data", "attributes", "last_analysis_stats", "suspicious"]) + singleResultTable["UndetectedCount"] = getJsonValue(jsonResponse, @["data", "attributes", "last_analysis_stats", "undetected"]) + singleResultTable["CommunityVotesHarmless"] = getJsonValue(jsonResponse, @["data", "attributes", "total_votes", "harmless"]) + singleResultTable["CommunityVotesMalicious"] = getJsonValue(jsonResponse, @["data", "attributes", "total_votes", "malicious"]) + singleResultTable["Reputation"] = getJsonValue(jsonResponse, @["data", "attributes", "reputation"]) + singleResultTable["Registrar"] = getJsonValue(jsonResponse, @["data", "attributes", "registrar"]) + singleResultTable["WhoisInfo"] = getJsonValue(jsonResponse, @["data", "attributes", "whois"]) + singleResultTable["SSL-ValidAfter"] = getJsonValue(jsonResponse, @["data", "attributes", "last_https_certificate", "not_before"]) + singleResultTable["SSL-ValidUntil"] = getJsonValue(jsonResponse, @["data", "attributes", "last_https_certificate", "not_after"]) + singleResultTable["SSL-Issuer"] = getJsonValue(jsonResponse, @["data", "attributes", "last_https_certificate", "issuer", "O"]) + singleResultTable["SSL-IssuerCountry"] = getJsonValue(jsonResponse, @["data", "attributes", "last_https_certificate", "issuer", "C"]) + + # If it was found to be malicious, print to screen an alert + if parseInt(singleResultTable["MaliciousCount"]) > 0: + inc totalMaliciousDomainCount + echo "\pFound malicious domain: " & domain & " (Malicious count: " & singleResultTable["MaliciousCount"] & " )" + + # If we get a 404 not found + elif response.status == $Http404: + echo "\pDomain not found: ", 
domain + singleResultTable["Response"] = "404" + else: + echo "\pUnknown error: ", response.status, " - " & domain + singleResultTable["Response"] = response.status + + seqOfResultsTables.add(singleResultTable) + + # Sleep to respect the rate limit. + sleep(int(timePerRequest * 1000)) # Convert to milliseconds. + + bar.finish() + + echo "" + echo "Finished querying domains" + echo "Malicious domains found: ", totalMaliciousDomainCount + # Print elapsed time + + # If saving to a file + if output != "": + var outputFile = open(output, fmWrite) + let header = ["Response", "Domain", "CreationDate", "LastAnalysisDate", "LastModifiedDate", "LastWhoisDate", "MaliciousCount", "HarmlessCount", + "SuspiciousCount", "UndetectedCount", "CommunityVotesHarmless", "CommunityVotesMalicious", "Reputation", "Registrar", "WhoisInfo", + "SSL-ValidAfter", "SSL-ValidUntil", "SSL-Issuer", "SSL-IssuerCountry", "Link"] + + ## Write CSV header + for h in header: + outputFile.write(h & ",") + outputFile.write("\p") + + ## Write contents + for table in seqOfResultsTables: + for key in header: + if table.hasKey(key): + outputFile.write(escapeCsvField(table[key]) & ",") + else: + outputFile.write(",") + outputFile.write("\p") + let fileSize = getFileSize(output) + outputFile.close() + + echo "Saved CSV results to " & output & " (" & formatFileSize(fileSize) & ")" + + # After the for loop, check if jsonOutput is not blank and then write the JSON responses to a file + if jsonOutput != "": + var jsonOutputFile = open(jsonOutput, fmWrite) + let jsonArray = newJArray() # create empty JSON array + for jsonResponse in jsonResponses: # iterate over jsonResponse sequence + jsonArray.add(jsonResponse) # add each jsonResponse to jsonArray + jsonOutputFile.write(jsonArray.pretty) + jsonOutputFile.close() + let fileSize = getFileSize(jsonOutput) + echo "Saved JSON responses to " & jsonOutput & " (" & formatFileSize(fileSize) & ")" + + # Print elapsed time + echo "" + let endTime = epochTime() + let 
elapsedTime = int(endTime - startTime) + let hours = elapsedTime div 3600 + let minutes = (elapsedTime mod 3600) div 60 + let seconds = elapsedTime mod 60 + echo "Elapsed time: ", $hours & " hours, " & $minutes & " minutes, " & $seconds & " seconds" + echo "" \ No newline at end of file diff --git a/src/takajopkg/vtHashLookup.nim b/src/takajopkg/vtHashLookup.nim new file mode 100644 index 00000000..262e94ee --- /dev/null +++ b/src/takajopkg/vtHashLookup.nim @@ -0,0 +1,143 @@ +# Todo: add more info useful for triage, trusted_verdict, signature info, sandbox results etc... +# https://blog.virustotal.com/2021/08/introducing-known-distributors.html +# TODO: make asynchronous +# Add output not found to txt file +proc vtHashLookup(apiKey: string, hashList: string, jsonOutput: string = "", output: string = "", rateLimit: int = 4, quiet: bool = false) = + let startTime = epochTime() + if not quiet: + styledEcho(fgGreen, outputLogo()) + + if not fileExists(hashList): + echo "The file " & hashList & " does not exist." + return + + echo "Started the VirusTotal Hash Lookup command" + echo "" + echo "This command will lookup a list of file hashes on VirusTotal." + echo "Specify -j results.json to save the original JSON responses." + echo "The default rate is 4 requests per minute so increase this if you have a premium membership." + echo "" + + echo "Loading hashes. Please wait." + echo "" + + let file = open(hashList) + + # Read each line into a sequence. 
+ var lines = newSeq[string]() + for line in file.lines: + lines.add(line) + file.close() + + echo "Loaded hashes: ", len(lines) + echo "Rate limit per minute: ", rateLimit + echo "" + + let + timePerRequest = 60.0 / float(rateLimit) # time taken for one request + estimatedTimeInSeconds = float(len(lines)) * timePerRequest + estimatedHours = int(estimatedTimeInSeconds) div 3600 + estimatedMinutes = (int(estimatedTimeInSeconds) mod 3600) div 60 + estimatedSeconds = int(estimatedTimeInSeconds) mod 60 + echo "Estimated time: ", $estimatedHours & " hours, " & $estimatedMinutes & " minutes, " & $estimatedSeconds & " seconds" + echo "" + + let client = newHttpClient() + client.headers = newHttpHeaders({ "x-apikey": apiKey }) + + var + totalMaliciousHashCount = 0 + bar: SuruBar = initSuruBar() + seqOfResultsTables: seq[TableRef[string, string]] + jsonResponses: seq[JsonNode] # Declare sequence to store Json responses + + bar[0].total = len(lines) + bar.setup() + + for hash in lines: + inc bar + bar.update(1000000000) # refresh every second + let response = client.request("https://www.virustotal.com/api/v3/files/" & hash, httpMethod = HttpGet) + var singleResultTable = newTable[string, string]() + singleResultTable["Hash"] = hash + singleResultTable["Link"] = "https://www.virustotal.com/gui/file/" & hash + if response.status == $Http200: + singleResultTable["Response"] = "200" + let jsonResponse = parseJson(response.body) + jsonResponses.add(jsonResponse) + + # Parse values that need epoch time to human readable time + singleResultTable["CreationDate"] = getJsonDate(jsonResponse, @["data", "attributes", "creation_date"]) + singleResultTable["FirstInTheWildDate"] = getJsonDate(jsonResponse, @["data", "attributes", "first_seen_itw_date"]) + singleResultTable["FirstSubmissionDate"] = getJsonDate(jsonResponse, @["data", "attributes", "first_submission_date"]) + singleResultTable["LastSubmissionDate"] = getJsonDate(jsonResponse, @["data", "attributes", "last_submission_date"]) 
+ + # Parse simple data + singleResultTable["MaliciousCount"] = getJsonValue(jsonResponse, @["data", "attributes", "last_analysis_stats", "malicious"]) + singleResultTable["HarmlessCount"] = getJsonValue(jsonResponse, @["data", "attributes", "last_analysis_stats", "harmless"]) + singleResultTable["SuspiciousCount"] = getJsonValue(jsonResponse, @["data", "attributes", "last_analysis_stats", "suspicious"]) + + # If it was found to be malicious + if parseInt(singleResultTable["MaliciousCount"]) > 0: + inc totalMaliciousHashCount + echo "\pFound malicious hash: " & hash & " (Malicious count: " & singleResultTable["MaliciousCount"] & " )" + elif response.status == $Http404: + echo "\pHash not found: ", hash + singleResultTable["Response"] = "404" + else: + echo "\pUnknown error: ", response.status, " - " & hash + singleResultTable["Response"] = response.status + + seqOfResultsTables.add(singleResultTable) + # Sleep to respect the rate limit. + sleep(int(timePerRequest * 1000)) # Convert to milliseconds. 
+ + bar.finish() + echo "" + echo "Finished querying hashes" + echo "Malicious hashes found: ", totalMaliciousHashCount + # Print elapsed time + + # If saving to a file + if output != "": + var outputFile = open(output, fmWrite) + let header = ["Response", "Hash", "FirstInTheWildDate", "FirstSubmissionDate", "LastSubmissionDate", "MaliciousCount", "HarmlessCount", "SuspiciousCount", "Link"] + + ## Write CSV header + for h in header: + outputFile.write(h & ",") + outputFile.write("\p") + + ## Write contents + for table in seqOfResultsTables: + for key in header: + if table.hasKey(key): + outputFile.write(escapeCsvField(table[key]) & ",") + else: + outputFile.write(",") + outputFile.write("\p") + let fileSize = getFileSize(output) + outputFile.close() + + echo "Saved CSV results to " & output & " (" & formatFileSize(fileSize) & ")" + + # After the for loop, check if jsonOutput is not blank and then write the JSON responses to a file + if jsonOutput != "": + var jsonOutputFile = open(jsonOutput, fmWrite) + let jsonArray = newJArray() # create empty JSON array + for jsonResponse in jsonResponses: # iterate over jsonResponse sequence + jsonArray.add(jsonResponse) # add each jsonResponse to jsonArray + jsonOutputFile.write(jsonArray.pretty) + jsonOutputFile.close() + let fileSize = getFileSize(jsonOutput) + echo "Saved JSON responses to " & jsonOutput & " (" & formatFileSize(fileSize) & ")" + + # Print elapsed time + echo "" + let endTime = epochTime() + let elapsedTime = int(endTime - startTime) + let hours = elapsedTime div 3600 + let minutes = (elapsedTime mod 3600) div 60 + let seconds = elapsedTime mod 60 + echo "Elapsed time: ", $hours & " hours, " & $minutes & " minutes, " & $seconds & " seconds" + echo "" \ No newline at end of file diff --git a/src/takajopkg/vtIpLookup.nim b/src/takajopkg/vtIpLookup.nim new file mode 100644 index 00000000..81d3bdb7 --- /dev/null +++ b/src/takajopkg/vtIpLookup.nim @@ -0,0 +1,160 @@ +# TODO: add SAN array info +proc 
vtIpLookup(apiKey: string, ipList: string, jsonOutput: string = "", output: string, rateLimit: int = 4, quiet: bool = false) = + let startTime = epochTime() + if not quiet: + styledEcho(fgGreen, outputLogo()) + + if not fileExists(ipList): + echo "The file " & ipList & " does not exist." + return + + echo "Started the VirusTotal IP Lookup command" + echo "" + echo "This command will lookup a list of IP addresses on VirusTotal." + echo "Specify -j results.json to save the original JSON responses." + echo "The default rate is 4 requests per minute so increase this if you have a premium membership." + echo "" + + echo "Loading IP addresses. Please wait." + echo "" + + let file = open(ipList) + + # Read each line into a sequence. + var lines = newSeq[string]() + for line in file.lines: + lines.add(line) + file.close() + + echo "Loaded IP addresses: ", len(lines) + echo "Rate limit per minute: ", rateLimit + echo "" + + let + timePerRequest = 60.0 / float(rateLimit) # time taken for one request + estimatedTimeInSeconds = float(len(lines)) * timePerRequest + estimatedHours = int(estimatedTimeInSeconds) div 3600 + estimatedMinutes = (int(estimatedTimeInSeconds) mod 3600) div 60 + estimatedSeconds = int(estimatedTimeInSeconds) mod 60 + echo "Estimated time: ", $estimatedHours & " hours, " & $estimatedMinutes & " minutes, " & $estimatedSeconds & " seconds" + echo "" + + let client = newHttpClient() + client.headers = newHttpHeaders({ "x-apikey": apiKey }) + + var + totalMaliciousIpAddressCount = 0 + bar: SuruBar = initSuruBar() + seqOfResultsTables: seq[TableRef[string, string]] + jsonResponses: seq[JsonNode] # Declare sequence to store Json responses + + bar[0].total = len(lines) + bar.setup() + + for ipAddress in lines: + inc bar + bar.update(1000000000) # refresh every second + let response = client.request("https://www.virustotal.com/api/v3/ip_addresses/" & ipAddress, httpMethod = HttpGet) + var singleResultTable = newTable[string, string]() + 
singleResultTable["IP-Address"] = ipAddress + singleResultTable["Link"] = "https://www.virustotal.com/gui/ip_addresses/" & ipAddress + if response.status == $Http200: + singleResultTable["Response"] = "200" + let jsonResponse = parseJson(response.body) + jsonResponses.add(jsonResponse) + + # Parse values that need epoch time to human readable time + singleResultTable["LastAnalysisDate"] = getJsonDate(jsonResponse, @["data", "attributes", "last_analysis_date"]) + singleResultTable["LastModifiedDate"] = getJsonDate(jsonResponse, @["data", "attributes", "last_modification_date"]) + singleResultTable["LastHTTPSCertDate"] = getJsonDate(jsonResponse, @["data", "attributes", "last_https_certificate_date"]) + singleResultTable["LastWhoisDate"] = getJsonDate(jsonResponse, @["data", "attributes", "whois_date"]) + + # Parse simple data + singleResultTable["MaliciousCount"] = getJsonValue(jsonResponse, @["data", "attributes", "last_analysis_stats", "malicious"]) + singleResultTable["HarmlessCount"] = getJsonValue(jsonResponse, @["data", "attributes", "last_analysis_stats", "harmless"]) + singleResultTable["SuspiciousCount"] = getJsonValue(jsonResponse, @["data", "attributes", "last_analysis_stats", "suspicious"]) + singleResultTable["UndetectedCount"] = getJsonValue(jsonResponse, @["data", "attributes", "last_analysis_stats", "undetected"]) + singleResultTable["CommunityVotesHarmless"] = getJsonValue(jsonResponse, @["data", "attributes", "total_votes", "harmless"]) + singleResultTable["CommunityVotesMalicious"] = getJsonValue(jsonResponse, @["data", "attributes", "total_votes", "malicious"]) + singleResultTable["Reputation"] = getJsonValue(jsonResponse, @["data", "attributes", "reputation"]) + singleResultTable["RegionalInternetRegistry"] = getJsonValue(jsonResponse, @["data", "attributes", "regional_internet_registry"]) + singleResultTable["WhoisInfo"] = getJsonValue(jsonResponse, @["data", "attributes", "whois"]) + singleResultTable["Network"] = getJsonValue(jsonResponse, 
@["data", "attributes", "network"]) + singleResultTable["Country"] = getJsonValue(jsonResponse, @["data", "attributes", "country"]) + singleResultTable["AS-Owner"] = getJsonValue(jsonResponse, @["data", "attributes", "as_owner"]) + singleResultTable["SSL-ValidAfter"] = getJsonValue(jsonResponse, @["data", "attributes", "last_https_certificate", "validity", "not_before"]) + singleResultTable["SSL-ValidUntil"] = getJsonValue(jsonResponse, @["data", "attributes", "last_https_certificate", "validity", "not_after"]) + singleResultTable["SSL-Issuer"] = getJsonValue(jsonResponse, @["data", "attributes", "last_https_certificate", "issuer", "O"]) + singleResultTable["SSL-IssuerCountry"] = getJsonValue(jsonResponse, @["data", "attributes", "last_https_certificate", "issuer", "C"]) + singleResultTable["SSL-CommonName"] = getJsonValue(jsonResponse, @["data", "attributes", "last_https_certificate", "subject", "CN"]) + + # If it was found to be malicious + if parseInt(singleResultTable["MaliciousCount"]) > 0: + inc totalMaliciousIpAddressCount + echo "\pFound malicious IP address: " & ipAddress & " (Malicious count: " & singleResultTable["MaliciousCount"] & " )" + + # If we get a 404 not found + elif response.status == $Http404: + echo "\pIP address not found: ", ipAddress + singleResultTable["Response"] = "404" + else: + echo "\pUnknown error: ", response.status, " - " & ipAddress + singleResultTable["Response"] = response.status + + seqOfResultsTables.add(singleResultTable) + + # Sleep to respect the rate limit. + sleep(int(timePerRequest * 1000)) # Convert to milliseconds. 
+ + bar.finish() + + echo "" + echo "Finished querying IP addresses" + echo "Malicious IP addresses found: ", totalMaliciousIpAddressCount + # Print elapsed time + + # If saving to a file + if output != "": + var outputFile = open(output, fmWrite) + let header = ["Response", "IP-Address", "SSL-CommonName", "SSL-IssuerCountry", "LastAnalysisDate", "LastModifiedDate", "LastHTTPSCertDate", "LastWhoisDate", "MaliciousCount", "HarmlessCount", + "SuspiciousCount", "UndetectedCount", "CommunityVotesHarmless", "CommunityVotesMalicious", "Reputation", "RegionalInternetRegistry", + "Network", "Country", "AS-Owner", "SSL-ValidAfter", "SSL-ValidUntil", "SSL-Issuer", "WhoisInfo", "Link"] + + ## Write CSV header + for h in header: + outputFile.write(h & ",") + outputFile.write("\p") + + ## Write contents + for table in seqOfResultsTables: + for key in header: + if table.hasKey(key): + outputFile.write(escapeCsvField(table[key]) & ",") + else: + outputFile.write(",") + outputFile.write("\p") + let fileSize = getFileSize(output) + outputFile.close() + + echo "Saved CSV results to " & output & " (" & formatFileSize(fileSize) & ")" + + # After the for loop, check if jsonOutput is not blank and then write the JSON responses to a file + if jsonOutput != "": + var jsonOutputFile = open(jsonOutput, fmWrite) + let jsonArray = newJArray() # create empty JSON array + for jsonResponse in jsonResponses: # iterate over jsonResponse sequence + jsonArray.add(jsonResponse) # add each jsonResponse to jsonArray + jsonOutputFile.write(jsonArray.pretty) + jsonOutputFile.close() + let fileSize = getFileSize(jsonOutput) + echo "Saved JSON responses to " & jsonOutput & " (" & formatFileSize(fileSize) & ")" + + # Print elapsed time + echo "" + let endTime = epochTime() + let elapsedTime = int(endTime - startTime) + let hours = elapsedTime div 3600 + let minutes = (elapsedTime mod 3600) div 60 + let seconds = elapsedTime mod 60 + echo "Elapsed time: ", $hours & " hours, " & $minutes & " minutes, " & 
$seconds & " seconds" + echo "" \ No newline at end of file diff --git a/takajo.nimble b/takajo.nimble index 3f5e16da..fa4a1a27 100644 --- a/takajo.nimble +++ b/takajo.nimble @@ -1,8 +1,8 @@ # Package -version = "1.0.0" +version = "2.0.0" author = "Yamato Security @SecurityYamato" -description = "Takajo is Hayabusa output analyzer." +description = "Takajo is an analyzer for Hayabusa results." license = "GPL-3.0" srcDir = "src" installExt = @["nim"] @@ -11,5 +11,7 @@ bin = @["takajo"] # Dependencies -requires "nim >= 1.6.6" +requires "nim >= 1.6.12" requires "cligen >= 1.5" +#requires "terminaltables" +requires "suru" \ No newline at end of file diff --git a/tests/testsubmodule.nim b/tests/testsubmodule.nim index 03347d96..4cf71d5e 100644 --- a/tests/testsubmodule.nim +++ b/tests/testsubmodule.nim @@ -8,7 +8,7 @@ import unittest import std/tables -import takajopkg/submodule +import takajopkg/general test "csv file path import": let expect_content = """rule_path test1.yml