From abf749b8f37e88a4aaf87f12a583a388a7fb9a40 Mon Sep 17 00:00:00 2001 From: sonichen <1606673007@qq.com> Date: Wed, 7 Jun 2023 13:26:09 +0800 Subject: [PATCH] submit the project sensleak-rs --- sensleak-rs/CODE-OF-CONDUCT.md | 45 + sensleak-rs/CONTRIBUTING.md | 22 + sensleak-rs/Cargo.toml | 35 + sensleak-rs/LICENSE | 21 + sensleak-rs/LICENSE-MIT | 21 + sensleak-rs/README.md | 342 ++ .../assets/image-20230605104420197.png | Bin 0 -> 84308 bytes sensleak-rs/examples/test_gitleaks.toml | 56 + sensleak-rs/gitleaks.toml | 2805 +++++++++++++++++ sensleak-rs/src/api.rs | 9 + sensleak-rs/src/entity/mod.rs | 1 + sensleak-rs/src/entity/models.rs | 358 +++ sensleak-rs/src/errors.rs | 70 + sensleak-rs/src/lib.rs | 92 + sensleak-rs/src/main.rs | 8 + sensleak-rs/src/routes/rules.rs | 185 ++ sensleak-rs/src/routes/scan.rs | 159 + sensleak-rs/src/service/detect_service.rs | 723 +++++ sensleak-rs/src/service/git_service.rs | 672 ++++ sensleak-rs/src/service/mod.rs | 2 + sensleak-rs/src/utils/detect_utils.rs | 1071 +++++++ sensleak-rs/src/utils/git_util.rs | 661 ++++ sensleak-rs/src/utils/mod.rs | 2 + 23 files changed, 7360 insertions(+) create mode 100644 sensleak-rs/CODE-OF-CONDUCT.md create mode 100644 sensleak-rs/CONTRIBUTING.md create mode 100644 sensleak-rs/Cargo.toml create mode 100644 sensleak-rs/LICENSE create mode 100644 sensleak-rs/LICENSE-MIT create mode 100644 sensleak-rs/README.md create mode 100644 sensleak-rs/assets/image-20230605104420197.png create mode 100644 sensleak-rs/examples/test_gitleaks.toml create mode 100644 sensleak-rs/gitleaks.toml create mode 100644 sensleak-rs/src/api.rs create mode 100644 sensleak-rs/src/entity/mod.rs create mode 100644 sensleak-rs/src/entity/models.rs create mode 100644 sensleak-rs/src/errors.rs create mode 100644 sensleak-rs/src/lib.rs create mode 100644 sensleak-rs/src/main.rs create mode 100644 sensleak-rs/src/routes/rules.rs create mode 100644 sensleak-rs/src/routes/scan.rs create mode 100644 sensleak-rs/src/service/detect_service.rs create mode 100644 sensleak-rs/src/service/git_service.rs create mode 100644 sensleak-rs/src/service/mod.rs create mode 100644 sensleak-rs/src/utils/detect_utils.rs create mode 100644 sensleak-rs/src/utils/git_util.rs create mode 100644 sensleak-rs/src/utils/mod.rs diff --git a/sensleak-rs/CODE-OF-CONDUCT.md b/sensleak-rs/CODE-OF-CONDUCT.md new file mode 100644 index 00000000..6d507b7c --- /dev/null +++ b/sensleak-rs/CODE-OF-CONDUCT.md @@ -0,0 +1,45 @@ +# CODE OF CONDUCT + +This code of conduct outlines the expected behavior of all members of Open Rust Initiative to ensure a safe, productive, and inclusive environment for everyone. + +All members of Open Rust Initiative, including employees, contractors, interns, volunteers, and anyone else represents the company, are expected to behave in a professional, respectful, considerate, and collaborative manner. Harassment, discrimination, or toxic behavior of any kind will not be tolerated. + +Open Rust Initiative is committed to providing an environment free of harassment and discrimination for everyone, regardless of gender, gender identity and expression, sexual orientation, disability, physical appearance, body size, race, age, or religion. We do not tolerate harassment of participants in any form. Harassment includes offensive comments related to these characteristics, as well as deliberate intimidation, stalking, following, harassing photography or recording, sustained disruption of talks or other events, inappropriate physical contact, and unwelcome sexual attention. 
+
+If you experience or witness unacceptable behavior, see something that makes you feel unsafe, or have concerns about the well-being of a participant, please report it to Eli Ma or Charles Feng immediately. All reports will be handled confidentially.
+
+We value diverse opinions, skills, and experiences. We strive to build an inclusive environment where everyone feels safe and respected. Together, we can achieve great things.
+
+THANK YOU FOR YOUR COOPERATION IN ADVANCING OUR COMMITMENT TO INCLUSION AND RESPECT.
+
+# Responsibilities
+
+All members of Open Rust Initiative are expected to:
+
+- Treat all people with respect and consideration, valuing a diversity of views and opinions.
+- Communicate openly and thoughtfully.
+- Avoid personal attacks directed at other participants.
+- Be mindful of your surroundings and your fellow participants. Alert Eli Ma if you notice a dangerous situation or someone in distress.
+- Respect personal space and property.
+- Refrain from demeaning, discriminatory, or harassing behavior, speech, and imagery.
+- Be considerate in your use of space and resources. For example, avoid excessive noise from conversations, laptops, and other electronic devices. Be courteous when taking up shared space such as tables and walkways.
+- Follow the instructions of Open Rust Initiative staff and security.
+- Avoid using language that reinforces social and cultural structures of domination related to gender, gender identity and expression, sexual orientation, disability, physical appearance, body size, race, age, religion, or other personal characteristics.
+
+# Consequences
+
+Failure to comply with this Code of Conduct may result in disciplinary action, including removal from Open Rust Initiative spaces and events, and prohibition from future participation.
+
+# Contact Information
+
+If you have questions or concerns about this Code of Conduct, contact Eli Ma or Charles Feng.
+
+# Enforcement
+
+Open Rust Initiative prioritizes creating a safe and positive experience for everyone. We do not tolerate harassment or discrimination of any kind.
+
+We expect participants to follow these rules at all Open Rust Initiative venues and events. Open Rust Initiative staff will enforce this Code of Conduct.
+
+If a participant engages in harassing or discriminatory behavior, Open Rust Initiative staff will take reasonable action they deem appropriate, including warning the offender, expulsion from an event, or banning them from future events.
+
+At their discretion, Open Rust Initiative staff may report offenders to local law enforcement. Open Rust Initiative staff may take action against participants for other behaviors that violate this Code of Conduct or negatively impact the safety and inclusion of event participants.
\ No newline at end of file
diff --git a/sensleak-rs/CONTRIBUTING.md b/sensleak-rs/CONTRIBUTING.md
new file mode 100644
index 00000000..94f24d41
--- /dev/null
+++ b/sensleak-rs/CONTRIBUTING.md
@@ -0,0 +1,22 @@
+# CONTRIBUTING
+
+Thank you for your interest in contributing to this project. There are many ways you can contribute, from writing tutorials or blog posts, improving the documentation, and submitting bug reports and feature requests, all the way to developing code that can be incorporated into the project.
+
+As a contributor, you agree to abide by the Code of Conduct enforced in this community.
+
+## How to contribute
+
+Here are some guidelines for contributing to this project:
+
+1. Report issues/bugs: If you find any issues or bugs in the project, please report them by creating an issue on the issue tracker. Describe the issue in detail and also mention the steps to reproduce it. The more details you provide, the easier it will be for me to investigate and fix the issue.
+2. Suggest enhancements: If you have an idea to enhance or improve this project, you can suggest it by creating an issue on the issue tracker. Explain your enhancement in detail along with its use cases and benefits. I appreciate well-thought-out enhancement suggestions.
+3. Contribute code: If you want to develop and contribute code, follow these steps:
+   - Fork the repository and clone it locally.
+   - Create a new branch for your feature/bugfix.
+   - Make necessary changes and commit them with proper commit messages.
+   - Push your changes to your fork and create a pull request.
+   - I will review your changes and merge the PR if found suitable. Please ensure your code is properly formatted and follows the same style as the existing codebase.
+4. Write tutorials/blog posts: You can contribute by writing tutorials or blog posts to help users get started with this project. Submit your posts on the issue tracker for review and inclusion. High-quality posts that provide value to users are highly appreciated.
+5. Improve documentation: If you find any gaps in the documentation or think any part can be improved, you can make changes to files in the documentation folder and submit a PR. Ensure the documentation is up-to-date with the latest changes.
+
+Your contributions are highly appreciated. Feel free to ask any questions if you have any doubts or are facing issues while contributing. The more you contribute, the more you will learn and improve your skills.
\ No newline at end of file
diff --git a/sensleak-rs/Cargo.toml b/sensleak-rs/Cargo.toml
new file mode 100644
index 00000000..431b7cb9
--- /dev/null
+++ b/sensleak-rs/Cargo.toml
@@ -0,0 +1,35 @@
+[package]
+name = "sensleak"
+version = "0.2.1"
+edition = "2021"
+
+[[bin]]
+name = "scan"
+path = "src/main.rs"
+
+[[bin]]
+name = "api"
+path = "src/api.rs"
+
+[dependencies]
+regex = "1.5.4"
+clap = { version = "4.2.4", features = ["derive"] }
+toml = "0.7.3"
+walkdir = "2.3.2"
+rayon = "1.5.1"
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+chrono = "0.4"
+assert_cmd = "2.0.10"
+tempfile = "3.2.0"
+git2 = "0.17.1"
+mockito = "1.0.2"
+csv = "1.1"
+log = "0.4"
+env_logger = "0.10.0"
+axum = { version = "0.6.1", features = ["headers", "macros"] }
+tokio = { version = "1.21.2", features = ["macros", "rt-multi-thread"] }
+tower-http = { version = "0.4.0", features = ["cors"] }
+utoipa = { version = "3.3.0", features = ["axum_extras"] }
+utoipa-swagger-ui = { version = "3.1.3", features = ["axum"] }
+hyper = { version = "0.14", features = ["full"] }
\ No newline at end of file
diff --git a/sensleak-rs/LICENSE b/sensleak-rs/LICENSE
new file mode 100644
index 00000000..33202a32
--- /dev/null
+++ b/sensleak-rs/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2023 open-rust-initiative
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/sensleak-rs/LICENSE-MIT b/sensleak-rs/LICENSE-MIT
new file mode 100644
index 00000000..edf591c7
--- /dev/null
+++ b/sensleak-rs/LICENSE-MIT
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022 - 2023 Open Rust Initiative
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/sensleak-rs/README.md b/sensleak-rs/README.md
new file mode 100644
index 00000000..5d822073
--- /dev/null
+++ b/sensleak-rs/README.md
@@ -0,0 +1,342 @@
+# sensleak - scan git repo secrets
+
+sensleak is a Rust-based tool that scans Git repositories for sensitive data such as passwords, API keys, certificates, and private keys embedded within code.
+
+## Background
+
+Many developers store sensitive information such as keys and certificates in their code, which poses security risks. Therefore, commercial services such as GitGuardian scan GitHub and GitLab, and open-source tools such as truffleHog and Gitleaks provide similar functionality.
+
+## Features
+
+- **Enhanced Security**. Develop the tool in Rust to ensure improved security and memory safety.
+- **Command-line Interface**. Create a user-friendly command-line tool that generates a comprehensive scan report.
+- **REST API with Access Control**. Enable the tool to run as a service and provide access control through a REST API. Utilize Swagger to generate API documentation.
+- **Concurrent Scanning**. Utilize a thread pool to control concurrent scanning of secrets, thereby improving overall efficiency.
+- **Batch Processing**. Implement batch processing of files to further optimize the scanning process and enhance efficiency (see the sketch after this list).
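+
+The last two points can be pictured with the short, self-contained sketch below. It is an illustration only, not the code in `src/service/detect_service.rs`: the rule, the file list, and the function name `scan_in_chunks` are invented for the example, and it only uses the `rayon` and `regex` crates already listed in `Cargo.toml`.
+
+```rust
+use rayon::prelude::*;
+use regex::Regex;
+
+/// Scan `(path, content)` pairs in fixed-size batches, handing each batch to
+/// rayon's thread pool. Returns `(path, line_number)` for every suspicious line.
+fn scan_in_chunks(files: &[(String, String)], chunk_size: usize) -> Vec<(String, usize)> {
+    // A single, deliberately simple "generic API key"-style rule for demonstration.
+    let rule = Regex::new(r#"(?i)(secret|api[_-]?key)\s*[:=]\s*"[0-9a-f]{16,}""#).unwrap();
+
+    files
+        .chunks(chunk_size) // batch the files
+        .flat_map(|batch| {
+            batch
+                .par_iter() // scan each batch concurrently on the rayon thread pool
+                .flat_map(|(path, content)| {
+                    content
+                        .lines()
+                        .enumerate()
+                        .filter(|(_, line)| rule.is_match(line))
+                        .map(|(n, _)| (path.clone(), n + 1))
+                        .collect::<Vec<_>>()
+                })
+                .collect::<Vec<_>>()
+        })
+        .collect()
+}
+
+fn main() {
+    let files = vec![
+        ("a.java".to_string(), r#"String secret = "1708b0314f18f420d3fe8128652af43c";"#.to_string()),
+        ("b.rs".to_string(), "let answer = 42;".to_string()),
+    ];
+    for (path, line) in scan_in_chunks(&files, 10) {
+        println!("possible leak in {path} at line {line}");
+    }
+}
+```
+
+The real scanner applies the full rule set from `gitleaks.toml` and walks commits through `git2`, but the batch-then-parallelize shape is the same idea that the `--threads` and `--chunk` options in the CLI usage below control.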
+ +## Technology + +- Development Language: Rust +- Command-line Interaction: [clap.rs](https://github.com/clap-rs/clap) +- Git Repository Operations: [git2](https://github.com/rust-lang/git2-rs) +- Web Framework: [axum](https://github.com/tokio-rs/axum) +- Auto-generated OpenAPI Documentation: [utoipa](https://github.com/juhaku/utoipa) + +## Usage + +### CLI Usage + +Running the tool in the command-line interface (CLI) to perform sensitive data checks. + +``` +cargo run --bin scan -- -help +``` + +```shell +Usage: scan.exe [OPTIONS] --repo + +Options: + --repo Target repository + --config Config path [default: gitleaks.toml] + --threads Maximum number of threads sensleak spawns [default: 10] + --chunk The number of git files processed in each batch [default: 10] + --report Path to write json leaks file + --report-format json, csv, sarif [default: json] + -v, --verbose Show verbose output from scan + --pretty Pretty print json if leaks are present + --commit sha of commit to scan or "latest" to scan the last commit of the repository + --commits comma separated list of a commits to scan + --commits-file file of new line separated list of a commits to scan + --commit-since Scan commits more recent than a specific date. Ex: '2006-01-02' or '2023-01-02T15:04:05-0700' format + --commit-until Scan commits older than a specific date. Ex: '2006-01-02' or '2006-10-02T15:04:05-0700' format + --commit-from Commit to start scan from + --commit-to Commit to stop scan + --branch Branch to scan + --uncommitted Run sensleak on uncommitted code + --user Set user to scan [default: ] + --repo-config Load config from target repo. Config file must be ".gitleaks.toml" or "gitleaks.toml" + --debug log debug messages + --disk Clones repo(s) to disk + -h, --help Print help (see more with '--help') + -V, --version Print version + +run 'cargo run --bin api' to get REST API. +Repository: https://github.com/open-rust-initiative/sensleak-rs + +``` + +Example: + +Test https://github.com/sonichen/Expiry-Reminder-Assistant.git + +```shell +$ cargo run --bin scan -- --repo="D:/Workplace/Java/project/ExpiryReminderAssistant" -v --pretty +``` + +```shell +[INFO][2023-06-05 09:59:59] Clone repo ... 
+[ + Leak { + line: " String secret = \"1708b0314f18f420d3fe8128652af43c\"; //自己小程序的SECRET", + line_number: 67, + offender: "secret = \"1708b0314f18f420d3fe8128652af43c\"", + commit: "410eb5a84408d3e63edb4d0975e5516e56f6ea6a", + repo: "ExpiryReminderAssistant", + rule: "Generic API Key", + commit_message: "submit code\n", + author: "sonichen", + email: "1606673007@qq.com", + file: "/backend/src/main/java/com/cyj/controller/login/WXLoginController.java", + date: "2023-05-31 18:09:42 -08:00", + }, + Leak { + line: " businessException.apiResponse = apiResponse;", + line_number: 64, + offender: "apiResponse = apiResponse;", + commit: "410eb5a84408d3e63edb4d0975e5516e56f6ea6a", + repo: "ExpiryReminderAssistant", + rule: "Generic API Key", + commit_message: "submit code\n", + author: "sonichen", + email: "1606673007@qq.com", + file: "/backend/src/main/java/com/cyj/exception/BusinessException.java", + date: "2023-05-31 18:09:42 -08:00", + }, + Leak { + line: "// app_secret:bm92ZWk2WFdoR3RkV3ZiUk5SUnVXUT09", + line_number: 5, + offender: "secret:bm92ZWk2WFdoR3RkV3ZiUk5SUnVXUT09", + commit: "410eb5a84408d3e63edb4d0975e5516e56f6ea6a", + repo: "ExpiryReminderAssistant", + rule: "Generic API Key", + commit_message: "submit code\n", + author: "sonichen", + email: "1606673007@qq.com", + file: "/backend/src/main/java/com/cyj/utils/constants/DevelopConstants.java", + date: "2023-05-31 18:09:42 -08:00", + }, + Leak { + line: " public static final String APP_SECRET=\"bm92ZWk2WFdoR3RkV3ZiUk5SUnVXUT09\";", + line_number: 7, + offender: "SECRET=\"bm92ZWk2WFdoR3RkV3ZiUk5SUnVXUT09\"", + commit: "410eb5a84408d3e63edb4d0975e5516e56f6ea6a", + repo: "ExpiryReminderAssistant", + rule: "Generic API Key", + commit_message: "submit code\n", + author: "sonichen", + email: "1606673007@qq.com", + file: "/backend/src/main/java/com/cyj/utils/constants/DevelopConstants.java", + date: "2023-05-31 18:09:42 -08:00", + }, + Leak { + line: "// public static final String APPSECRET = \"94f391d306875101822ffa1b2c3cff09\";", + line_number: 17, + offender: "SECRET = \"94f391d306875101822ffa1b2c3cff09\"", + commit: "410eb5a84408d3e63edb4d0975e5516e56f6ea6a", + repo: "ExpiryReminderAssistant", + rule: "Generic API Key", + commit_message: "submit code\n", + author: "sonichen", + email: "1606673007@qq.com", + file: "/backend/src/main/java/com/cyj/utils/secret/AuthUtil.java", + date: "2023-05-31 18:09:42 -08:00", + }, + Leak { + line: " secret: \"c6e1180dda3eaca49f3d7ed912718e4d\" #小程序密钥", + line_number: 36, + offender: "secret: \"c6e1180dda3eaca49f3d7ed912718e4d\"", + commit: "410eb5a84408d3e63edb4d0975e5516e56f6ea6a", + repo: "ExpiryReminderAssistant", + rule: "Generic API Key", + commit_message: "submit code\n", + author: "sonichen", + email: "1606673007@qq.com", + file: "/backend/src/main/resources/application.yaml", + date: "2023-05-31 18:09:42 -08:00", + }, + Leak { + line: " secret: \"c6e1180dda3eaca49f3d7ed912718e4d\" #小程序密钥", + line_number: 36, + offender: "secret: \"c6e1180dda3eaca49f3d7ed912718e4d\"", + commit: "410eb5a84408d3e63edb4d0975e5516e56f6ea6a", + repo: "ExpiryReminderAssistant", + rule: "Generic API Key", + commit_message: "submit code\n", + author: "sonichen", + email: "1606673007@qq.com", + file: "/backend/target/classes/application.yaml", + date: "2023-05-31 18:09:42 -08:00", + }, +] +[WARN][2023-06-05 10:00:02]7 leaks detected. 1 commits scanned in 1.2538834s +``` + +### API Document + +Run the following code to read the project document. 
+ +```shell +cargo run --bin api +``` + +The API document is located at http://localhost:7000/swagger-ui/#/ + +### Project Document + +Run the following code to read the project document. + +```shell +cargo doc --document-private-items --open +``` + +### Configuration + +Use the [gitleaks configuration](https://github.com/gitleaks/gitleaks#configuration) in this project. The difference is that in this project, the paths need to start with a "/". + +```toml +# Title for the gitleaks configuration file. +title = "Gitleaks title" + +# Extend the base (this) configuration. When you extend a configuration +# the base rules take precedence over the extended rules. I.e., if there are +# duplicate rules in both the base configuration and the extended configuration +# the base rules will override the extended rules. +# Another thing to know with extending configurations is you can chain together +# multiple configuration files to a depth of 2. Allowlist arrays are appended +# and can contain duplicates. +# useDefault and path can NOT be used at the same time. Choose one. +[extend] +# useDefault will extend the base configuration with the default gitleaks config: +# https://github.com/zricethezav/gitleaks/blob/master/config/gitleaks.toml +useDefault = true +# or you can supply a path to a configuration. Path is relative to where gitleaks +# was invoked, not the location of the base config. +path = "common_config.toml" + +# An array of tables that contain information that define instructions +# on how to detect secrets +[[rules]] + +# Unique identifier for this rule +id = "awesome-rule-1" + +# Short human readable description of the rule. +description = "awesome rule 1" + +# Golang regular expression used to detect secrets. Note Golang's regex engine +# does not support lookaheads. +regex = '''one-go-style-regex-for-this-rule''' + +# Golang regular expression used to match paths. This can be used as a standalone rule or it can be used +# in conjunction with a valid `regex` entry. +path = '''a-file-path-regex''' + +# Array of strings used for metadata and reporting purposes. +tags = ["tag","another tag"] + +# Int used to extract secret from regex match and used as the group that will have +# its entropy checked if `entropy` is set. +secretGroup = 3 + +# Float representing the minimum shannon entropy a regex group must have to be considered a secret. +entropy = 3.5 + +# Keywords are used for pre-regex check filtering. Rules that contain +# keywords will perform a quick string compare check to make sure the +# keyword(s) are in the content being scanned. Ideally these values should +# either be part of the idenitifer or unique strings specific to the rule's regex +# (introduced in v8.6.0) +keywords = [ + "auth", + "password", + "token", +] + +# You can include an allowlist table for a single rule to reduce false positives or ignore commits +# with known/rotated secrets +[rules.allowlist] +description = "ignore commit A" +commits = [ "commit-A", "commit-B"] +paths = [ + '''\go\.mod''', + '''\go\.sum''' +] +# note: (rule) regexTarget defaults to check the _Secret_ in the finding. +# if regexTarget is not specified then _Secret_ will be used. +# Acceptable values for regexTarget are "match" and "line" +regexTarget = "match" +regexes = [ + '''process''', + '''getenv''', +] +# note: stopwords targets the extracted secret, not the entire regex match +# like 'regexes' does. 
(stopwords introduced in 8.8.0)
+stopwords = [
+    '''client''',
+    '''endpoint''',
+]
+
+
+# This is a global allowlist which has a higher order of precedence than rule-specific allowlists.
+# If a commit listed in the `commits` field below is encountered then that commit will be skipped and no
+# secrets will be detected for said commit. The same logic applies for regexes and paths.
+[allowlist]
+description = "global allow list"
+commits = [ "commit-A", "commit-B", "commit-C"]
+paths = [
+    '''gitleaks\.toml''',
+    '''(.*?)(jpg|gif|doc)'''
+]
+
+# note: (global) regexTarget defaults to check the _Secret_ in the finding.
+# if regexTarget is not specified then _Secret_ will be used.
+# Acceptable values for regexTarget are "match" and "line"
+regexTarget = "match"
+
+regexes = [
+    '''219-09-9999''',
+    '''078-05-1120''',
+    '''(9[0-9]{2}|666)-\d{2}-\d{4}''',
+]
+# note: stopwords targets the extracted secret, not the entire regex match
+# like 'regexes' does. (stopwords introduced in 8.8.0)
+stopwords = [
+    '''client''',
+    '''endpoint''',
+]
+```
+
+## Contributing
+
+The project relies on community contributions and aims to simplify getting started. To use sensleak, clone the repo, install dependencies, and run sensleak. Pick an issue, make changes, and submit a pull request for community review.
+
+To contribute to sensleak-rs, you should:
+
+- Familiarize yourself with the [Code of Conduct](https://github.com/open-rust-initiative/rkos/blob/main/CODE-OF-CONDUCT.md). sensleak-rs has a strict policy against abusive, unethical, or illegal behavior.
+- Review the [Contributing Guidelines](https://github.com/open-rust-initiative/rkos/blob/main/CONTRIBUTING.md). This document outlines the process for submitting bug reports, feature requests, and pull requests to sensleak-rs.
+- Sign the [Developer Certificate of Origin](https://developercertificate.org) (DCO) by adding a `Signed-off-by` line to your commit messages. This certifies that you wrote or have the right to submit the code you are contributing to the project.
+- Choose an issue to work on. Issues labeled `good first issue` are suitable for newcomers. You can also look for issues marked `help wanted`.
+- Fork the sensleak-rs repository and create a branch for your changes.
+- Make your changes and commit them with a clear commit message.
+- Push your changes to GitHub and open a pull request.
+- Respond to any feedback on your pull request. The sensleak-rs maintainers will review your changes and may request modifications before merging.
+- Once your pull request is merged, you will be listed as a contributor in the project repository and documentation.
+
+To comply with these requirements, contributors must include both a `Signed-off-by` line and a PGP signature in their commit messages. You can find more information about how to generate a PGP key [here](https://docs.github.com/en/github/authenticating-to-github/managing-commit-signature-verification/generating-a-new-gpg-key).
+
+Git has a `-s` command-line option to append the `Signed-off-by` line automatically to your commit message, and `-S` to sign your commit with your PGP key. For example:
+
+```shell
+$ git commit -S -s -m 'This is my commit message'
+```
+
+## License
+
+sensleak-rs is licensed under the following license:
+
+- MIT LICENSE (https://opensource.org/licenses/MIT)
+
+## References
+
+1. [What is Gitleaks and how to use it?](https://akashchandwani.medium.com/what-is-gitleaks-and-how-to-use-it-a05f2fb5b034)
+2. [Gitleaks](https://github.com/gitleaks/gitleaks)
diff --git a/sensleak-rs/assets/image-20230605104420197.png b/sensleak-rs/assets/image-20230605104420197.png
new file mode 100644
index 0000000000000000000000000000000000000000..56ec41e2cdf8a1315b1532a6f8fed18af052da66
GIT binary patch
literal 84308
[binary PNG data omitted]
zkInn#FyLtx&sUnC$CL=)k-Y6O0T$m$&!z zb(9&lNrd8v`3x?;J~Yr=cmtbzab=WUS>M0!GA>01Y-vE1!H_>r;fLCBfo9S-BbmzO z$Gy>^>*)wN7sL!k@lRcU{B7vo@tf2EeD=lScg(NT>pw34ubcqR5XTgy<^PF~gr|2H zi(#*m`@gtJ#{Q+zSjfL1H)H(`%#S zNjLki%j^;MmU-!&`6WP7o;C$^=6eV@juhz@ee;^9es)``@!p>=dx?_PHojB9fY2?n zBd)Q~bs?QdP{6sEudBH7*8rrvUjo*P@&40!XYh*chgN{oPkH4peeT5S#VC9U`11R% zg$?>6Wvicuyu3X0^dM;^mxyOkUgW`-)wdo|zP5~BPOQB02hMP2MG2ale_+}6s{31I za*5&9M#ltr;i(I8v}UCTa}F{Lc-i8YR(b#jDDQ2@qeKhdUZG4G&-$P3vG(Wf&opEk z{2#=9cT`i`w=N!!auk6B3MdF@RFE3!(p5qiP^z?`^p5lrij5{sq<4{CBfSPxdhdi5 z5JE3OI!Q?KHY%R;?)&}o?z>~WjKLUWXP33tnrrU0=l9KT=C)RQ`meMCEh-9*Q_3>V zYhrP#07i)M$A_CfWY1xZdbFz0?>Ay+U{hEUYp21!wjD6q2uvnBvf2-TUK&9${gI%s zFz2Q7HtXa4{U0+{;t4>l4eT<9ZN< zhpH#r$PCz{dr$axz{X|r|Np#$V`TkFSXfvu?{BO4fV_0xzfAeEhk@`xDI2tMDunFU zk_!OP@e|F4nMR1YS2b}wTctE7I*epwcm96znpai;|9UyLU;$eBp&(s1>7V1iO=Nz` zur-Hu)l^Xa1>m>idUe2n{u)k~pLOMd9(iGtP&A~Rr`njOazWY$n!p3eAEfr>l4&>VY%< z9i~ohl!T_*_s?WxUJGsER-E=0cV4_{(<7f^lP5L~j_Fyj;Ki*s^%V1Fad$6198D zh65G7uE14UL{F~toUfTxa^AWo?)&C@Xk=f2pSxK_{;^oT4x8TUlmva!xWNGlRj> zC!4xHvx{D$!&|kbz^PLVf8R+p#=yNsN87ceO!>Kw_sJTO?AvM`#tFQ?TYLWS-N~sw zkGP{2{zdLl{GXm=)vYn+g?beNlk4mJxH*GP_%L3dm2B0D+t=fRV9UVhh(D87Y3TToFXuSlAkM#$l(gt?zDe)=r#@csF7 zT~*EF$O!4{F_d+uDi1)qEhIEtERktY=qDHA;t?^WVYMqX^l@Zajw>z9uuxz$nm*g7 zoX@*(04F@=L@rs6!%@#z~>No(LsBUkY|d7GpG z;-!O*5_C37c+4=zQq(-Z@w*{5@<55AuabGgdGioP55syC=rIe8#QABJqKkH-j1mNW zoT}9J2B{C0SeH9ccjH7woZTJU-@9fo^LdUSgSYBbL*hG?3c?6d_?DwSx8%eI@2s?Z z^Z|uC;%Et+|CXx<#1F5E(qU26!($^r*`3?(Lv)@jf7U8@*sBYKWC9ghTDj*ud4_j% zWj$jQZ(6R7GPWr!gNL0Brw#}XWSAl8V$Jj<#+$x$Ss<)G4LY3`7Xj?#{Aa< z?riD~9i0{fr$g%VF1p^G2sC?rfEP8BRJ~FD9{^8OCB623-e4s@V+i+a%NgzI2ZXFS6P4n1Fi`f8y zN;?(lu5FQZ*;LguzDmaYpw?m`!0sGVrCn&!5GjhOQBgTYcL#$J3_WM?{?oI`C>oz6 zpJOx~*yUC|UCyS>OFKGymgfYnQ#+NnY{z0hJwtzdWDy4fje0?i__ZOXp%KWDhFUo! zI<=h(hjGoa>?+OLfh(a?DrC7=7%jDb4)NzbTi0$sNO!#JuEUH`$4$=~zmRx7o1RWB zf)?Fb2$AoDprCN&k%5EtC7OZ!L}`njT$sbri2I0<9*H@$-tpP`U#uy1-X@5=`;hce z;kL*hFnsLtk<4f@oV-<~PG$0{rw~-krcfw2$hg1%i%h`ntdjeXRh31rD7y!BdA2P< zLBmP-H8kN1XAzH%0^=bpU#NMN#5c83zD>k;i(x)SutMOh=rz(T!bGM(4 z?t_JW?YK00XV)mOk8pPbtdWBnC7jzZWyRV*2K1LnS}={zJp|~qYsgfWBkux>+Tn&G zO=M&Si-*r(a|!UWzwwQ4mSx)~ENF7KnkFMr2Ckf=-1xQ+-;0}^df`ajKzmtYP;A=A zDS75VEp9mI&GaH>RB-ADJ=(-+1G=a$21}mJfHEP2N z4LQHR(Y^0!J3HJU*c-1zS)$f$WS3hiZZ7fusCz{Ba*ujBn4?dc(@8x7nc`g9`7sn3 zRhDe%kDy}FmhZd7!)=aE@Rm_`l7ku^yk@pm8per&jK@;6?ft3�cx0MB1GfU1X~Y ztE-V1 zTlJ%i2GD(H--rk|(p9)9rj*zd*V|=`d{8?-aN*Yn(+-q~>9M0&G-o~4z~;ie^CBs& z6FWmsJ8TN8m>vEf$N+jsY0VmH>+&asNX=Cj9;>+8Ti>iX%*k@Q6j4TQ2T?daw>Il- z0XI{fw$RD0QhO(RQe8RGwI^`h+<_sgmfDIrOmF?dLd%`+a#Y|b3`pbKomCs^pR9H3ZZmr|e z^PTZglx;1VZgYVbrRj96=^JaXQZcgx5$WE$CiDY`{t$^W%qbFjgH_v;ysuoxT9v7i zXw^15$QflR;r;5>*F~O?znIa})}dLr<=*BL*baqFHwg4vUM+1N+xJI86^EObkra0I zJ-Cwn7E5m)U)NJLl0I9Xex)UjXOv5i;dJ=`e1-{XTz8))@~2llo(HwOMz#^q;&z=T+2tMc?VrhM%*?g^_@$gN!+WS50KAVCMrem ztJo#INv|^1+`4hn2QIKqB8h0+@tQiRXVGjxt&rsN3T;#0(7)-bbsLf4@ZH+i@fW#( zG&jBr*I@O$V1Et)f#f}&c`lkYI(p}uluJ<1k)5BJ-40Dvk`IX1kEk#eR;8yy(9G35 zALkjLbD^$rlskU1c3fHDD7zm3>Z2UCi}U$|4;h_Di#S0m8N9wyV}uSK4A1*hG4wj__K9L4_Ni=YtQbdD^!#;5VlGZRdMy z4@X{-5md19rT`YZuoW;k9XGw9*?_A?Ols`gt9ka|ww96lMi|sI+wN*yz6)B0Eg;}c zK)A40qO2S>hbnO+KO%y31ju9u1jj3H52`ohVV0k{?mmPSbK7(cf$gvpmn*$Y05*Ys z8YLeAsP2x#W*Xoda%+=CkoENffZ#tCy1F0Tc(eY9%XKvJXpS+XV|b<65X-++rN2%1 zh}KY_A)tr8WY@oD(vfVec-?@ zA{3&Iyv6VqU`Gh`T?4o*sip3)&Ll4|4Tpg3wBGQlc=BaB&i7?|hCm)ypi?DVPH%8U z>fq9~{3KX+&y;%q7v`S}$CDTZr~UUByjmtdVNMB?wq#3~WJt?))oiy^*e=7x(Eb7k;enPNq z=D~b)I*2g9b`vnhoD-!BA4+s$j9?loM>1?G6bY9^n?ucRV zlkIuhwQx&j$wI1Bs|ZaCJ5@nVg{PY9+jSDM%{F?P1`j_exLQB3F5#&u><~Y!(4F6J 
z>5{3}1nH$I4v?^&SJfhiZW+q4rSNxTF4)9QZQS7O394h&yCVz9cgM7s`e;KN2jwkg zGdssz+P#a*4Q`rFzk1P7^~_4#Cr5rplAJ?eU6mo>qRos8-H(v@r8u|piDxy}$FF@f za!@HNOdM5}7->P@o=Qo+0Z^9QBuyJ|bOWbAvbPL#%I-hjtgy0MFJhU_AX(zYKaT&5 z5`As~T*%kbW%kvzwK&O3F_lY0y#ZMpSeJ2`#{jzi1{&TZ78ovY+rtzxIg=x-xz2TX z1Z%mLB{&hDFs~knx8q=3Q>mOW7kL}>fNy!1DNrgeMHnyT0FT|jlhtNA6C^^urk&O< zI=Lz0xR)~C=-g;NZByl*qDjZXm2yx=IV2wv)ao#LI>bWsRQ%-i`5o<74GpFaA4n zvdP{-H@}xOV;`qR`5tyV$5^fpIaMc|Q>qbd=D8fv^@kv6j`=E$kJ%R)i(b<#^Ca2U z)lGX{H+2SnAwQ1i`gIZj&kk6+fIzY&?np&+VaC*HR%oR`Ob(MV{ff_}1wI?c zYMwu~l3nCq%+o8n`Ig+WeZRLtO?9qLIjTf@(=t_boi}XQl=Vn&9&<%Q66Xs~Sp(h^ z?&pV_E(9rfeLdPIMJRt!GXp{GsA2)4nnJ-X5&cUs(QjR*F2tq9@QyD0T~!W)XJ#5? zyG(#DiwR`fQ6h#3lB(w-&hgupP+2yrZk1aT#VrMX){eE^?wyUtu8fD zvAxC4#KZ)kjRXL6OnHoU1?0^N+FZ8U)QEtXf?kFCb>sZ1q7TB5p^jQ}j7)$aLDR^F zmgcb~O0br-ip3M=raeL85{IRyn-x~IF+2KFVT-UMW>U3WS@QnGa5`zLm=UV;cRTP_uUPGM#g3m!_@s1v#b_2n#kb$%p3TT^@qNx zx{z0`;UzN?w!68HXd*w(2_SA0CXzd+>jRtQT}tKk{PT-9rN7K4|Ju4Y|HKEMxO0F;<~Ze?0ZLO=3U~m_UthB20zUqAQO=3@T%b;$za0A*{L~oY2g~}Pfl|5A7mgAO)X}LrSf;xY zN6P`i9&;s!E-q4p{m`%lk`X!*t?b6DSH^R+p~8l~wpcW1n^<{}3Nx<=0#d<&jHz%9 zKjKm99=jblM#LNaJkew<7&_c5P=Ov16B;YKw9dOg#z~Nb3@>c34XFNti@l?bh3K zTX!LMRm}V2>mqjjp_KQo!ac=GOLKyL|I;>u<<)$p5I-ryo42iHGb4gL0H~j%^LWuW zUIvcXw3LoaepR^6k7!^Z7&j4KLC#+|=A9Bs9ki2#E<2ZLh_g<1!7e~cMYI>$GU?;A z6~B|=$$ti`UZj@D8Ue(B377GkAaH9jUF1xwkC&ZW(7WloULIi30Foq zQ+bm^|FSI*9#F3+T5p@kW>`(~p-1rN-_pGz>Viu`pOzPk8R&&3XTEgJ?2Y|LPQ^T7R>~IyQrFc* zI(;`z$=kc#cG>37F3&AF7;Gap>I_Ibc1zEOXE2x}~FoWbuDYF~Y)WPQag-vEi zDt%j^xz7-HI0^z5;A4w_S}J)we5Y`K8(l!D#Us;2SrEsz;C5H}yU+0hPCr&inuexC zD#Oj@bZoRBT$4izcN-CuS!^R|XTN=KqJ-~VXn(K;r+HSR%)6_kMB|XQ`12W{nLAOy3 zOoZpS_AU!BlPp?P9rgZb7<$M_Osh~z(Srz?_Q#7m=rQ`D{f8L3x>jW~_OJ=|x$e#w z*YwIm2hiJaUg2w(Hi>t;Zg@9w_hACv6f>L|XgGKp`=?5t_4anIc0I&sSOi*?mUSDk zbbE5&)kJ!a#mDU?NzZrtEEMIIsK|$JOh&FX+R`!t=Ae57$^c&@V9Vdm%wfyUdgc)+*Q-P>pRwwq@M75* zHDt(DQ{~o6F`P0<>3#z>rwsCQA&_^#I=K!CRVUC#Uj~cg?hk#1aEjAkt8fSPDo&LQ zf6BVIUCCCHsYDZu^w4?9N70m7KZwPAMBZXEH9cU_-t&u)L@KE$43h^b6kT_9bt}== zvHbOtcgNGMt(uPCI;2NNWEjl{mK&!-b-C-@(#&9p=1kSW<7hTcKE6@;%$qPBUqsMl)RDlTn&(SkC#6JL2xA}1)M#nD`Xe$+ zbTfeN@3c}AtAnAhBI{kKVt3JULD7Q=hNTNZYK{$FyS3%fheZ0ibv(<`&06h(8OKi% zlRvd3Uo0J3bqX|kVB^FO>K`t<-r#tcoo=+b+FCYlyRoqWK%=JohBZ!RDm~)(212`* zJR5-!Hp_GCJNYJ1i%||T-#ODu^iuXQGO@EO%BD~8U6c&gxRK+!c0oYsC=0++7jo8) zS}p6>SG3y`KedDqT0>W2?Z=Cg?QVDQ;)lo+ORS65EQV+){$Owa{Pv0-LXtGCX=d*& zCcTXnVGUWei^~Xa4PuF96DQrDDqfk_UjZ|7*z<*0{_*N7ec=TG3}<0HU76`Xy}o@C z3*9OhM7N@-Up!be==^oGmU^eOG(4ryPAFu#^^9B(!X0~q;sJA>B356`Ud*X&bhly$ zr=M4xE(j)UBwrMgqd`?$?XV`>^)5qWg5vw=5LHW`)3FM1^!w}@D)2aC)2#N2X_7XTy?oRxv zLYXA7wI;@lpu--HvH$o{|uHo_OY~v~;2!IR`T`edm}u&kM|mxy;$_nb8zE zEbo58QnRylyH0#6Ix1qXN~v}2ZO$Gn)d(|>yg&URNN^#ixFfm2WV2AisPxCiv{(W(ohQQJ(H-DoVma?%@>T9aB6A0_42>6lkRK~-?K!fZSkH8nbFx@pd+ zz$ddZ;RHr^AcEn(*n6wtF@!y>(7Qz~R!ka=Z)XTf-t7wGu@^FAcS?$w+*0E4iBq|doa6^F_JqOW*58IN7lZf zRC$8=eAApoPP@eJu&k^!&pxFfCRbo3Q?jmEV@~Wm^9Fi3x64d4>44C}?zA8&C3-6n zn`HbO^?aK94n`@IC3n%oE+l|MlQAYu?v}7VY$>t;;HCIiRkcp)+Y005jP^}@T51`= z@Oy9@&Q>0mo%hh>B@F1SJVP0$Kr2eNS{KeGSG(GI$v~d_iNm~LHSw0=- z(x1&Jx-b`DpK*>$=d;vBN4P$$I=!y@2fV#ppWj8Pf%qd@=~gh3%u^F%79pm^sa0s- zN*+ds`6adsobc&)Da*p3k)B6+nu0LOcY(l51W^h|eOJ$$bD3qBNTdL#y1Ad|zLJr> ze(bTe>tDHe*RWbCtF+2H(sxh!8Gpi*VN?I7aa-Y}A_M|;%qWT7vFll3kWO4*Xnbzq zk=qBUc(8!^rC1AG75A(}w}4l~yZ?z}dXZ<-n@!cNZJi%@K8;UJ-DjE}XD9GVXhR|3 zlC3LpjQm^Qg3vlk6N%iy1$70#`)N5)FFkRU?(c#mnjDHMb|`1e+bk;b_8})Qjg`ak zr%JDA-){rF&N@!|4O?HfS}%z;(O}1K2kiR++!PZs+y^)JQ!)TIWz1IC|G~2e$;rt{ z;0L{;N>?ZMmnS1dhETuz*v-j%etJUgOhhN5c}2ep>wY@#94`8%d5zKYDaH)B;r_YrH{c%%f-t@QR#Fqk+<3xiCVP?e 
zW)Ste-DG4kFMf&k9yclV|NobJ8<;-=;(E-T-G4Wk4EZqqQFYy$fi@FoFl=t2{ole3 z?MNOBh)~PM5)Tul1(W|vxFO@EgEV2(zxTU?D(91Wv{J8$Ig)v>cdB+66JqHZLSN58n5VQ1w*^`QskCxbytp?KQ)y zz!uO;i198|MJUy=qe*^Zt9?9+|K*I-iXQ3OYX8d_ZN>baoIjkHwL8O|-Lu=MrQsBA z>*88V;prD%_AD0#YoCz%>7tDPqkajA1C*qs@SGy&F?e!q)Ej8iYT4)n@E_(M2H}*8eeG01ZcQiZmWLF}2|<;t zdJEVLs5aP>{g-XOpEzVJ!T-}!?1q8LY-<=^EfHnd`gC|f>cCWPC6}A|S4Y2i95h97 zBN6LCWU>Hl9#*!Z{0-{5*k`(={My#rA3r%skR!`@KY=>)Ie0VFUkW-ntvBs~c|giI zy_tLm%5E&2LYi^V3uwGsl`Ea7aS3`D-+ z^|-ES*iQFRMMPjOj&oIIsz(Pvl zaD+p6>xZzpI=hv*k0b+^k@XtfHXEmF@(O#IRQ;4NUCwZjy`ef#R~L;* z+}4N#D|d1_x}S`_M0-m>09Xotgj>SSmGyyz1r10NpKcs5T5kyJBW*%Dtod|#4AZQ$ zr(j(N=I#wi-d(3VqW-9(1t+zbR>V)#1*~~n2ns4PgmAMtx5?J2EZlAd4&q)+uSGp`!SyHIilm1BOU_r8DD zZr{3CT%d3VEtjjHrq{qd@@NfnVu1!3Dr=-0_Fq0~_~GZh*f@Bw`ctoD)ubC7bMwFE zF5|Cc6-Rr=?hnBs5TNQG$v;;-n?n{-E~UYM?_lQvL3cFRR0dqBr1W=|h6f|KWFa?g zanx1*@~V{)2PrAE+Z`82ct8X0I4{==B7O;uNA8=IV#RbHBKiZXLfYB&BUsDIU0K5Q z9IDNXtBfHK7n6E#`NKcFdt@D0tqgX@?g+ssxzK@FQ5|EA2J!kH=-{E^z{)J*j{qYd z*O)rxU-RwG?W$>dy~DOwtlZFjH{zzr;`BJ_oMf`Z*4FDS>FV8{Ed3^XrtNKeyUsm@u;;K%azs%3Tpmr8s}zw!s8NlRP(3IG%_x-E!8E22kJfs1?J25l zDudb&x5AGl6h^Zjuhfr-IaKs2=UMSZFCp9>Lm&g5j{z2k3(+ejqOLgaE^Q3!gdhL+ zNOEJr*LRVcCJMeTST>ocAwG2QI=e%+lKW_`W_2;!3Z;0ux8>(Pi{iigc+m;77C9#G=DZ=aUm9O7f;&h8F^ zEu`$*t%6m;DH=L0D6>3Q9F~%E`Q8RuJ}~Fi+wL=}PtqSZ9b6XK)FYvKXJqWGdW*a3 z2hB5r>RuKjZOfHN!&P^T^C8EXrZ4`B`^rO#aAhWW%c`DLk zKKab#Kb2WsU7y1o7?a-aiBqu!Q0Ed0Bq-z5;2Uwj7HOf3;H*vh$aNpxuDE6r@6rYq z5RW5Mlh|LPCT$SOAKKXkD;BIrJGsgikQ|8lfIjo6a+m?nHujj#VvO0`$`!C zZEel+*L~D26S6c`%JhXbX8)`RsX5MqBP<5LhR%QD&l;M(k!3R;uV?Rz(l$( zgCvCDMTf9dSNYv_{Wc*XKx!wnB`M99_-Qe6*Usj3%apoCb=#xs$Kf6~xJww<;?2c| zop|4OS@8v>Y5G+g-#fk?qBen`PovN?FBPcVL~T!i*xE>dzRd*(`Y`?ch|#^nWv6kX zN@J(kW7Y;?A(71rl4^KlAN3~6NK{IxaqcDg9u0GPT+PwvSl7l0BbLMO#k*7WyGyLe z+CFz-T@#uzDoYZ_0Vuo^HKq$?-^fm&!cV@&7xU@$hbD=xg)i&!(GZvPxfxpUpY*n77ofqI^yxUv{HCkxqHMr(vlU+i8>4U^tuY2ynUA zc;C96FYyzDCTSufhF7wERys>#Z;1v%LF&no_1?N}{%D_r>c^Q9urOt{XS$=M#r<7U zGS>U98=n_xnS5v6X5u6 zfb9;yo&(|@u(@?VX5itdPl`l>_*?o)97k>`XDAJsEZon^o;DU#d;#$LiiU!Mw*6F> zmzqIzfJL|MPGH2UtNK?SEHQtPG0iSThU;Ssydeaa~IN>Zi6&zDDi4K z0cjjwI|lQuacPOHw#CpZjdKDCE^*V-N&KxdM$LP0k{BsfbofQ|5@yUIC$XeuXwQvn^&FC0HS3i9Y!$-OArD(C1K&M_C|Nw>#D_i zGIh(?g4=g-x$c}HIcXe3LzVT`;jn{06Z@tb^J~=YgLR*7ChY7`FZ&k5?Vsa|Dc3!< zubk(CeDwYP!8aQov<%u^GkVubIo`6gngsE>dpM(82A|RBysSRNySj)VHZfg1q3luBE+{EzRWOV8V*Y&TCo7 z#nNSB%o+yS$GB~FwI_#)C67L`Zpn4eA@A$Q1}>2LF3~8dHUiFfRBSmlcsN#I_MpN7 zSV?66DQ3T!JrQa3c*k^Y#e8RJ9HcWoi5K(Zp%Wc% zp{7Xr#waivOrl>-U6 zq--`Knjrl+pr@RUx_fmq-6-d!RNuQdnRJ^Z&ZQ}>UviNCne!*JS!HYXyvxRH6@nzC{918l-estOPP6g>cAJ+K%^WtDiTh&086AMqi%o4#l?_MEu(r1q zCG*}L>F^{7x@ZfKb1^8ayCdmR59l$3IudQ~4 zKvfL9)!lTLE>82>i(uzbF^G(d)p(EKlIFi9)&C^5QMys<=3ykA;beRQxazHoZH(Ye zk>@<+{LLssNyl>XX3pwP!(7cY3fj0GAOf5-hY4&g<2nU-^I3@D{S zgsLXy!);jMr_PK!rWS*$RK9< z(9%zP->=y3RsY)AYX^){u^|l9>xZ3N+IhQsT6pVA%r=a^yY43IRRmydD6lvEcViPu zui^+zzgk=Qr?~C=UO<=O`}U}Z=lDbOu7#m1vL;Qt2QGs0R!10G(L03)g7w{WqU3Mc zdA%H`pNlwAMr(5!JB_#HiY&fw5?vs2jkfMi@MlprnAkt>O-st>kBYMZV240C?y+jz z7wk|?DUD1|fWfZy zk7wu)zg2P=`ax19e2KkRW~0Q<>Jxp58ETp!e(_s##I(>V3$ye7+67yz1(dtMVvMRs zr{OX&iBT@BZRfn6$;G%gR50`anM$LO5@ddO+}C~7nzvJExTnTX5dy!lN_@BPeP2a$ zZU;{CHNt%HsS&M%dSAC9K8$+_dt*@`xrN@nh5Wl?;&uRQqGN zU-tFgvJZ2J#f7N_wF{`(du@!6e1X5pK&Z5eXTVSEQdO!@FVMCgQ3@;N+FCt&=$U>E zzs8la>AiWuxB6*sZMOsIw(~wZp-N8PZQsifhwIBDRc8V+4q;oVUZf($O*w+4#S??= z(CTOh!eNgniFP!B*2gDr)j?#gI6zsecT(0jBo4g2>1UZHC}u5lRnWv8rU0dXavJMc;Wr4+tSO`alY7g8!KOcUH@~6A} zt`NSwHa_3e$L3JUiUr_QOz~RwXR$#0jPDJ}c;t*)wE!^?-Rg2x-AiSE%ZbG~ZvoBB zPMo+ijZC#0w*`U3HVzdw4l&9B6+YPBxHQh_jC~3_?oKXCN;QLihcaZ=;$j2&@?vwd 
zlGWp`NCFlYops7J3k$3I4XlTSX!D+TV{xc;8nJ>v1b2MP?X1RQUjgv?Q59i5{kx-a zp;%&>q_|qa%+_nGl#IlXX(@FPAMC41Jh$?KpUu10%N4i|Y=epgCs%`nhU0dW43<3f zV=y6NIER7!ZO~z}#C9QZs3_oTO5^1md>@_jQP$RaNukcr4DzOJ#2U*{Hvp_t`e&nM z*)i0l9JZ$3su1>7_uYnGW4K--UQy`?WU|;^@_sb=Vbd#yU9@%A^yHp(s*hH$hCf~) zac|AoaNk6L2`(0PVb}AmrR>8ju)Vh{x{oDE+VDp9V*b?2D^urBxB#aEA%c9kI7C#y2vn>z(>mGbNeRR*-P6l!d@-A#wfKk4yCS=YmsfTz;jG_a|AnA5H1xz&Q3An>o%l zQQomOp{4#XoZIFdvlZ9fXE7ahSI8-GbWcO0$;djq!N9k+*C90f#`%2wR}YSEhwga$ zB9lrk3nrz~F5>P>^TOK6n;G&Wn=@i;C4FE@j4R|Pp#xoncECtxymF)%B^I{s0`J|S z^9xrJUwrsXJIvtKatZVFg(=f~Ac67k(sjJ(Tsw`nkKM+O{ znwWGrcMOXo3>O0T)1s@3LJUgN>={D_qsk(JedWDubCpa-3oU7A`1}r6WvtjkrtfI- z_}hAsPiwFL*Gjo>B;Jun{#MZG3WgNLuxmDn#Yi4_QlAJE!Rnkol3j*AzXkQ zx0mmd%&e5pr2!ShGmY(3r#5~0+#zS=Z4#C#Enfxi+Pkvur`A4tc&4>m*q$zl2 zqL1b2EYskqj;J5aX}X4IMi|55KJRZO2~o!j*(@7)>9VU8#}=jT6`$0LnQEeUyY>#y z*~VOs07 z(3zO%zVCyNEC_#y!!tGME7gsAmsA)+AQ`FO3RW$yD<*Ap(hZ3Ks$rxAFGKk<o9!Iy^)r6P(CM7^5VAsk@3C^FQpy(0gvB)?w>P_K$_h1BJ@v&2pQSs zp7%sz>)ID;4;akmL50`LI_@?^tu@FR;YK+Yy+@N0v5TvBx<9Z#AflJoo z{XesdEX%Y71N5ORc_*k8=f_~;eePeyYXP5&jPQj=HksROdSoq&HmHsa@vNFH+9R)LsFQY;*Xj?RNm!Ig_Kw2>|!Wyhji8oo8EyF#ia>KfQAvoZ8SodNqea zCr@)f7CZ>|<#@;zRcCXVQ8=l`ysVhPC>nwkgJ_LWq9r6 zs`sS1g>6O_0;n0ec*}(x;=iLfphW9qxq0!AV0VVQA3aquw^9b;04N^7({~I0Hwg7t z-A>?K{~Lt5-wdcWlnl?30f6o;5J0Z_$*&^93I2o|_{Nh8>4h9<#(F-)AHvoKB9l6BVQc1L3IX!AaoHE|H1b{y?glr-+36`pzcPj*Al1> zbx&IL;_(|xlfNYZsRgviZ)<-Z^{BS)WDkHh@z;OH`2S~*$puIy-%H1BG5-ifd`tXc z(06biwiq+D`l~ezr%v>{$u47STL7U5Wb9P@wSN&41V51*zE$ld9{{U$A+Fxn zf#ul$bKK|l%;z81=MUQv6=CO(F>JlMpWdBdgr}%&IO&tm3X99T0HX?!iO5nPH@Y38 z1du7p#)BGg3&#WG4+RX-xa1Ne*V|B|6u@hb6=HaQEh>SU1+3Qo*7-X33hm{Rc@2s% z?T*Pv2?*;Ywx|x2&;?4oQUG8;`^{_K`p&qP2ml2KUu9rf6?k0qsHX1TNef>1KCvAI zXfLdS+6I<3(|2a;H0qCjSZ$@d55QFE$#4P#9Dxo~uLjI_15$tWxvIKfg`|N~ys*&* zxGlkp`Ah6Pqd#OKO1`DntuYQn0|se1CN`b{e_+yL3if;OD{=t13#As7YM;_Lzx0x? z))kdiRpKB2c1H?*^i;r@S&$lvv~41{3jEPuQUAz9eEtRi@=(*GPV0*I9+dCo7odYR z6SFmy*#MgW^N&o#)9(*_*iwOB5`op!#SVM{d`r^Bke>YGiTf}AllOxk^e_L@@Ehs> z>9+t^Hd}Y{Y$IcN^UdTNFuy)aI<2bG3+WgNGNBBEoB~w*(N3!vT^vqvai=>OlW*D1 zLkm#Z9xV*-mz}MXwFBAbFn3_{`ikr6@Bd}n$$@0w75*_$vhRQX4}7^iYkgkCe#&of z9m9V%gZH3@T|i0RY~|#Np%3Pvmj`}^w>;abgfP7G6NXY)=ss55Tt58kKPnM1?Fc#A znJX0RZ@^bYT(@7ykxz?W_nv+8eKT~AX|Z%FmuRW_c=_}(WCy53jEoIhOXzs6rx-~$ zf~-8f#`{%gc5}Xd<=9eQr1D#Ltr;0p*N7yI7Vk0b$6de!*{|70!wX&S)pyoVhVA#l zYG3{GK~FOu*VU0l2&^=8>t}~=I*!?5YfBe)9eTaDW&1R|E!fr4U^x+R)dAOTQ!EaR zy9h^W8Z1kReV@K^Ph{= z-R&;D*jRQPGx^0PnD=OOS}j`!zn7t&lJ^HDrC)@Q7cXnZ`ulfX(RMS5o+&%tTjNWo z&}}O5cO4au1sh~k(isahtzbtvKwe~O_4gQE>Z9fvv%?e(R$me0*#|~($GQxwTQdo? z2TJo_#Zw}@WF11ho)g|+ekeKkMvZ_z4(1m-?Xi%;-qx4Z+(uqgMw_%zcCDW}kRZ+- zL-80TU9sILCh5+;DA{xF_yg(PT7S3_faIGvB1IH`VfQi`1It9=(;#skoagN9x<}Q|LX@EX4(c9kTw(vD8pe&BDml14e%|KI9 z5fap5imA4&%DdVUUq~O-BB5C&9;(XfRd#cGmhKs*7(JoOMQs+_QVgXPJL-WldwW+P zU45NV3D@&YH4BRJ+a1W)tV#%H4xSWTuait2Z8JZX|usghD+bdg#1)_2-Uz( zU$aTi^*!cI@gBhaTu+mDi~`a6p6|q6Y4baNUl`H9~*KF#@-@Isx)$8j4=L0@&336#*U z?*E*-Jw-9u2#g8LXp}k*b%6PDV0;YSi>ea08{2ez{X7dwgaB-dG}&Mm|F-_zz@||0 zFSP`4vh;VS6q5}>xT6#Z6p|x1HHtk14mTf&rlFQoBUIPGpa8`E9qtt^5SsEJICD|Cgib3p}s}!a-drmSIp|VG0ic; zXL5E)`Jn3WkBHFq^KH6G7gSQdxSqC{nOanR z4Fuvea!4AnZp>ECSyQSTHaw-H!SiNIlOC?P{_wWG?tNdY_kC69a%cCT@wqOzbxKdqxU0H@glaY&K}|19EgcZsC z6Da!vc8iNf! 
zQVizGBK_4qAuM3wk(!9!XNn;Fy7@eK0WCP72s?BK=!5m0onIR zFk(+Ld@C@T7E-#XroL^y5BERpEW>8T5Sg;TIE(ol{vwkGfC z8U@F%TL#tO?i#?_fHJsXj)5VImz?eg&=etZCH?#@+) zttD8e=Es2$VapQMUOjwSh@2Y9;rFszMSmpmv(EZD+xS|Tq$)bOuUYbe^^CRrYFz_6 z2MXK%suye+DvZ$|V~m~z&NwT&{hXgN>ndY3NN>)FAE|X>f3>V}ffCb%k!ZIHpQ`KL z;yhOyKuG*(PY7wV}hx!Jl8Z2`0kEtKdK z2v%(M-;ddzzS7XmzI-7S2+K}-T*4_>vE}hd0YUN=OJ`O*_?&HEY~q#PzBMa&ISf8x zXg8EB(5K4#$bKk=tyax1Pwm~zt{rcc#~+L5Q>7;3uq}yvD2NjuRM>vI$=Ht!BJlO< zo_zR^{){kFv@vcWjp~Xggf}`6eI|Ci#<05EJXU}_Do~pFL?HCk?KLbuh+)LxAB=ZRUP=&s0HXw@ghXMsTbZ= zQeB-WYg@FzzU7l06Z%-g>?J(ZUlnp?eLZlcv*(zWrgSDJ80>JCF<~@1*nQd{MpPld zYwd?Nc;NVOq#wT!lGdDQ?9A2N3a^aok6yUNT^{UQGvU8doT33_SI3yI?Yb+aM(bR6 z?|Ej%CX;FsDne+p*qUJ3;uO^zD=zur!ifC_k=>(+HLr>n@2d!SQ4i5KeY*>O`(YFy z!I|`}fSn2ki-@h09kZXk1K@tZ$Gtq=8=ddxIm zGCQ=etQbFVR>VFeR%nAaJ2so_@VjN+B8TRQMMycoW6LWaUO3IN7%Om;%I8Kq6n~A` z0h8PW719sT`|wLL051;aOG8YtB0)O)&*y-;1GM_J>@R??4;nAV$4Q^r(i?j`pGa0O&>m7ltrnL3 zc%;{Yx5)-c0J1C%I?77+LeXnDzlxzGI1o=-nlyZ&5oSu`hV^VCyy{CV+YB5xt$RKe zbyTSeReG|gy$7R_gk{<=%ZDhxJ8~Lgq?zIqv9ESTnt?I^>|7B_ytv?FA^(a?4mc+F zx=~E?(Wnb~Bm>Ul%L{d~A*maX4uV`*LLSoDW)nD8t65>o9v{DtO4R9GFA`yLWq$qJ3*o z0t@mN=@o$jw(GCET`Zd^G5bMsez^&buUPL3eMIR=@onoik(_k>FHVbOVzNx;#Uz9k z@z|t{qmqre6ySTyVIiHc(izV#?YfCK=MSDw{?sv}%tve68c#iExCQm^ygK>i!S%dg zF^mIaEy1r6ZJQeRJwimLX*a&k#`Vgc2UpUuF&!_xRBh;NVzinmPyg{K{2yFKVFTM+h|8D7r7FrjWLa0%=D+Cx_5(6rW$X2c ze;XBO#1^5KvaLc^!TwD1w{8Vo>VP0ffb|cxb(dQfbgxe*K84m9LjgrrH<~OcNx9=+ z5kWuYP8KwQ!KRFH4;)r0vAOb7T@}J1dYVO~;FzOcJHEr5WKEKPUyc5+>-Rv$(z1EZ z73e?1vORRYLbbsrXlWAUbG=3t9gq;+SVgD0=8g%2{}~jk#>aMA1REWlA{1)G^zDLK z-p{wqe2&e_z=Kz2DLrzNe`T_Z3d$NfqbsemUr%7(i8e`^I`f!s4ZTluA|@OT#{=FB0S7#>tQlfbC`bP}Vt$f(|%rK|_yPUVfs46V`EL16{w zl7=s{(V0$-ss|bSOf#=HlVx<3E}hD8HI zijH3~V8DL{N3b|~eZcqy3Kq|m^y~w1(OEz0mqaw?YwvbLphR6_ISfO0_b2CbUWL@Q z-#1+)e(RK%^=~kN6X!koQS$(h6*SDnx}?lNw`aA@MUK`RMg45p0OW#{%zQr@u3ce@B8FlmV)E%osmfl~ganG@_Bz%6buoV*; zM)M7;(2yPZY!=10xC;U+{tR5E!7QLU&{m*OBJ}>4_h@glLJ6)zKLfF zUhese3q3RRCSGoaHoTE@udV!V^u0C+9D5HUt)BKe+#OJnXnw#Ds|Rwun0 z`~6{ya>K5TK*7Z|N+LVL>*q?en3p^oTTVb|Ap($lPEVd6L+SH+?GwIy-BrC{p3z zRT<7Cl!h!n)za(PBexiq%t^{{uk5t$rF1*U5yIR+#WB`dJfmzz?gi)A9mAkH=s+SN zF1l*AqF<1$3P8jZmGH0PM#zLl}GDa(FQ;cAdDt z7<@f2b0rBG{7U#Rr(J0acyw5?lzUQhvH?D^%XQ9?p)3~qS&guL&O9@Fv}(%fjoNV0 z2!LPfZr5^&0F(EpAoyRNM~<8Voc?d#V`2gWWejtikF-zv;ks}w23###HNT0RDv})^ zd{y`q3KNnkGJ(Yx*8Fba<|rF%pHfy;#BCEDLlug>k`6@jQ_R%RVu-s){ zi_@3>F82DXSH!vJ#rSZBqRoZC>WlALtd$y8_rkeTy1VRy0$f)1f^aNHJ~dE@dbZnx zX{%v@qg*{C?5lncZnHDzA!{l=@>vO*Z%vR?8f_wEwzR!+^Z`yB(!|&#@;eWT@8D4>YKC|mEGI=1NtVWl8gmLo$g#t&3O}2Fy$+#_QFu${iRpFMei*AR;(YB zDvV0*C?dUySXAIHHD-k;(nkH~Z2Agfnh{kS z9zvhWgR41r?a2Svzben&0q8JmOHa}b(&y!x2 zs?6^18LsnPjvRnkhs|ZUT8HF~GvaLbh%or6fyH%`u^>mLkn7&l1dQ#(-Vhwo!0ZOv z1@>XJxF*krKEd@mcmea?~D~(W-mv|d2BZ=@kK>G!+Sa}=mOC1{DilSQ{1sPbA8a!PAvVgC0j4uL z0+fK!?UU7Rqi}T*i$fA>T@Xx)QTLy>E~UHN!9|+C>s+6h7-{wtjQC#VW?W;avYnS} zYC}FNC=66h$W<{DTWjk`p*FpLK%<(nPUN8{s5@0}fsNJbmSfk3CpY$*n%GimU1GKxI*ge@uD(jT*arX7Vf2Mnddv&vhe{{h$sHc$; z2QKfT3ge^#g-K*ha$%UEWnDjc5gEvw*;*0An_emMu*}S%jO&eFamNNM42ipDpL&&A zeR2?y$ z`MIgm$>rI4lCFaItfTns273|hkdQj8>TJ-%@1~To7ijM~!wloPclH(q^`SGEbWCux zu90;xoo3JK&YR>=bvfeCmV7Vz{jq>$hE~qNK|GH8Hl0^F*(ri9*!Y0t5Sm%*(%0u{ zp?dITylXx8)qswtzi!*{W2YeT2RCrLJAK%LSJ6*b4(RudGmj2Il*5}tDF5g4-w9Zk zSI7zh!SD&EU)xV%A=C8)bmH;<_8H(oiOL9dMwUbm%cY>KAR+F^l7lWpvw>!Aonq~3 zUZ!Q_th7RwfHpCBa;V*&SB1dAj=ql8sSvQ+5YfIymLs`H*Q6Zof0)qkAP$F7`r7b8 z;6EuzUGq}zO6r-rI_TgDH!Ad?i}cD|h>o`WhbJF`3#z1YSBzoF!g_smsqRtmz?$HL zkk%93z|BX5v2Pz_^*B*%v1K8grPn*r&I4Nw6N*Di4-XoW0t`UTN+EH0bQhX%N~0Hh zg1+4=+l{{O?@#GzTFGgr*90H<`#;gGTHkDX8#aza##Kv*n>6*K>_q^gJ#alfQ3b!g 
ze;uO+V{K4OBr1eNQOBZsA9ilEvWRv|h?pZ!9vmKzrmD{cwecEK&U3Bp`euch&#S0M zK#6$6QrXu{7OGRPx#0BEM;DPjZnC@y1>wtN#g7g1H4uh=|GFl!w}VvDN?c_Ek=R<&omQ+{|3IZl4>V&g!?aoE$ZWK>9%kLP zvANHjgN|ZPH?be|!44f+`UgyLEF4nzFEN)c_^$O*`{BZHg`+vN)hCa0Nmff@e0@YI?KL7v# literal 0 HcmV?d00001 diff --git a/sensleak-rs/examples/test_gitleaks.toml b/sensleak-rs/examples/test_gitleaks.toml new file mode 100644 index 00000000..c1f60887 --- /dev/null +++ b/sensleak-rs/examples/test_gitleaks.toml @@ -0,0 +1,56 @@ +[[rules]] +description = "11111111111" +id = "stripe-access-token" +keywords = ["adafruit"] +regex = '''(?i)(?:adafruit)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9_-]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' + +[[rules]] +description = "Generic API Key" +entropy = 3.5 +id = "generic-api-key" +keywords = [ + "key", + "api", + "token", + "secret", + "client", + "passwd", + "password", + "auth", + "access", +] +regex = '''(?i)(?:key|api|token|secret|client|passwd|password|auth|access)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([0-9a-z\-_.=]{10,150})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 + +[[rules]] +description = "Digit" +id = "key" +keywords = ["1212"] +regex = '\d+' +secretGroup = 1 + +[rules.allowlist] +regexTarget = "line" +stopwords = ["token"] + +[[rules]] +description = "Adafruit API Key" +id = "adafruit-api-key" +keywords = ["adafruit"] +regex = '''(?i)(?:adafruit)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9_-]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' + +[[rules]] +description = "Adafruit API Key" +id = "adafruit-api-key" +keywords = ["adafruit"] +regex = '''(?i)(?:adafruit)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9_-]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' + +[allowlist] +description = "global allow lists" +paths = [ + "(.*?)(jpg|gif|doc|docx|zip|xls|pdf|bin|svg|socket)$", + "gradle.lockfile", + "node_modules", + "package-lock.json", +] +stopwords = ["token"] diff --git a/sensleak-rs/gitleaks.toml b/sensleak-rs/gitleaks.toml new file mode 100644 index 00000000..71f886df --- /dev/null +++ b/sensleak-rs/gitleaks.toml @@ -0,0 +1,2805 @@ +# Use the [gitleaks configuration](https://github.com/gitleaks/gitleaks#configuration) in this project. +# The difference is that in this project, the paths need to start with a "/". + + +# This is the default gitleaks configuration file. +# Rules and allowlists are defined within this file. +# Rules instruct gitleaks on what should be considered a secret. +# Allowlists instruct gitleaks on what is allowed, i.e. not a secret. +# +title = "gitleaks config" + +[allowlist] +description = "global allow lists" +# The paths need to start with a "/". 
+paths = [ + + '''(.*?)(jpg|gif|doc|docx|zip|xls|pdf|bin|svg|socket)$''', + '''/gradle.lockfile''', + '''/node_modules''', + '''/package-lock.json''', + '''/pnpm-lock.yaml''', + '''/Database.refactorlog''', + '''/vendor''', + '''/README.md''', + '''/gitleaks.toml''', + '''/.gitleaks.toml''', + +] + +[[rules]] +description = "Adafruit API Key" +id = "adafruit-api-key" +regex = '''(?i)(?:adafruit)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9_-]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "adafruit", +] + +[[rules]] +description = "Adobe Client ID (OAuth Web)" +id = "adobe-client-id" +regex = '''(?i)(?:adobe)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-f0-9]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "adobe", +] + +[[rules]] +description = "Adobe Client Secret" +id = "adobe-client-secret" +regex = '''(?i)\b((p8e-)(?i)[a-z0-9]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +keywords = [ + "p8e-", +] + +[[rules]] +description = "Age secret key" +id = "age secret key" +regex = '''AGE-SECRET-KEY-1[QPZRY9X8GF2TVDW0S3JN54KHCE6MUA7L]{58}''' +keywords = [ + "age-secret-key-1", +] + +[[rules]] +description = "Airtable API Key" +id = "airtable-api-key" +regex = '''(?i)(?:airtable)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{17})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "airtable", +] + +[[rules]] +description = "Algolia API Key" +id = "algolia-api-key" +regex = '''(?i)(?:algolia)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +keywords = [ + "algolia", +] + +[[rules]] +description = "Alibaba AccessKey ID" +id = "alibaba-access-key-id" +regex = '''(?i)\b((LTAI)(?i)[a-z0-9]{20})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +keywords = [ + "ltai", +] + +[[rules]] +description = "Alibaba Secret Key" +id = "alibaba-secret-key" +regex = '''(?i)(?:alibaba)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{30})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "alibaba", +] + +[[rules]] +description = "Asana Client ID" +id = "asana-client-id" +regex = '''(?i)(?:asana)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([0-9]{16})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "asana", +] + +[[rules]] +description = "Asana Client Secret" +id = "asana-client-secret" +regex = '''(?i)(?:asana)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "asana", +] + +[[rules]] +description = "Atlassian API token" +id = "atlassian-api-token" +regex = '''(?i)(?:atlassian|confluence|jira)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{24})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "atlassian","confluence","jira", +] + +[[rules]] +description = "Authress Service Client Access Key" +id = "authress-service-client-access-key" +regex = '''(?i)\b((?:sc|ext|scauth|authress)_[a-z0-9]{5,30}\.[a-z0-9]{4,6}\.acc_[a-z0-9-]{10,32}\.[a-z0-9+/_=-]{30,120})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "sc_","ext_","scauth_","authress_", +] + +[[rules]] +description = "AWS" +id = "aws-access-token" 
+regex = '''(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}''' +keywords = [ + "akia","agpa","aida","aroa","aipa","anpa","anva","asia", +] + +[[rules]] +description = "Beamer API token" +id = "beamer-api-token" +regex = '''(?i)(?:beamer)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}(b_[a-z0-9=_\-]{44})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "beamer", +] + +[[rules]] +description = "Bitbucket Client ID" +id = "bitbucket-client-id" +regex = '''(?i)(?:bitbucket)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "bitbucket", +] + +[[rules]] +description = "Bitbucket Client Secret" +id = "bitbucket-client-secret" +regex = '''(?i)(?:bitbucket)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9=_\-]{64})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "bitbucket", +] + +[[rules]] +description = "Bittrex Access Key" +id = "bittrex-access-key" +regex = '''(?i)(?:bittrex)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "bittrex", +] + +[[rules]] +description = "Bittrex Secret Key" +id = "bittrex-secret-key" +regex = '''(?i)(?:bittrex)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "bittrex", +] + +[[rules]] +description = "Clojars API token" +id = "clojars-api-token" +regex = '''(?i)(CLOJARS_)[a-z0-9]{60}''' +keywords = [ + "clojars", +] + +[[rules]] +description = "Codecov Access Token" +id = "codecov-access-token" +regex = '''(?i)(?:codecov)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "codecov", +] + +[[rules]] +description = "Coinbase Access Token" +id = "coinbase-access-token" +regex = '''(?i)(?:coinbase)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9_-]{64})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "coinbase", +] + +[[rules]] +description = "Confluent Access Token" +id = "confluent-access-token" +regex = '''(?i)(?:confluent)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{16})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "confluent", +] + +[[rules]] +description = "Confluent Secret Key" +id = "confluent-secret-key" +regex = '''(?i)(?:confluent)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{64})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "confluent", +] + +[[rules]] +description = "Contentful delivery API token" +id = "contentful-delivery-api-token" +regex = '''(?i)(?:contentful)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9=_\-]{43})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "contentful", +] + +[[rules]] +description = "Databricks API token" +id = "databricks-api-token" +regex = '''(?i)\b(dapi[a-h0-9]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +keywords = [ + "dapi", +] + +[[rules]] +description = "Datadog Access Token" +id = "datadog-access-token" +regex 
= '''(?i)(?:datadog)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{40})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "datadog", +] + +[[rules]] +description = "Defined Networking API token" +id = "defined-networking-api-token" +regex = '''(?i)(?:dnkey)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}(dnkey-[a-z0-9=_\-]{26}-[a-z0-9=_\-]{52})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "dnkey", +] + +[[rules]] +description = "DigitalOcean OAuth Access Token" +id = "digitalocean-access-token" +regex = '''(?i)\b(doo_v1_[a-f0-9]{64})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "doo_v1_", +] + +[[rules]] +description = "DigitalOcean Personal Access Token" +id = "digitalocean-pat" +regex = '''(?i)\b(dop_v1_[a-f0-9]{64})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "dop_v1_", +] + +[[rules]] +description = "DigitalOcean OAuth Refresh Token" +id = "digitalocean-refresh-token" +regex = '''(?i)\b(dor_v1_[a-f0-9]{64})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "dor_v1_", +] + +[[rules]] +description = "Discord API key" +id = "discord-api-token" +regex = '''(?i)(?:discord)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-f0-9]{64})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "discord", +] + +[[rules]] +description = "Discord client ID" +id = "discord-client-id" +regex = '''(?i)(?:discord)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([0-9]{18})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "discord", +] + +[[rules]] +description = "Discord client secret" +id = "discord-client-secret" +regex = '''(?i)(?:discord)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9=_\-]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "discord", +] + +[[rules]] +description = "Doppler API token" +id = "doppler-api-token" +regex = '''(dp\.pt\.)(?i)[a-z0-9]{43}''' +keywords = [ + "doppler", +] + +[[rules]] +description = "Droneci Access Token" +id = "droneci-access-token" +regex = '''(?i)(?:droneci)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "droneci", +] + +[[rules]] +description = "Dropbox API secret" +id = "dropbox-api-token" +regex = '''(?i)(?:dropbox)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{15})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "dropbox", +] + +[[rules]] +description = "Dropbox long lived API token" +id = "dropbox-long-lived-api-token" +regex = '''(?i)(?:dropbox)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{11}(AAAAAAAAAA)[a-z0-9\-_=]{43})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +keywords = [ + "dropbox", +] + +[[rules]] +description = "Dropbox short lived API token" +id = "dropbox-short-lived-api-token" +regex = '''(?i)(?:dropbox)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}(sl\.[a-z0-9\-=_]{135})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +keywords = [ + "dropbox", +] + +[[rules]] +description = "Duffel API token" +id = "duffel-api-token" +regex = '''duffel_(test|live)_(?i)[a-z0-9_\-=]{43}''' +keywords = [ + 
"duffel", +] + +[[rules]] +description = "Dynatrace API token" +id = "dynatrace-api-token" +regex = '''dt0c01\.(?i)[a-z0-9]{24}\.[a-z0-9]{64}''' +keywords = [ + "dynatrace", +] + +[[rules]] +description = "EasyPost API token" +id = "easypost-api-token" +regex = '''\bEZAK(?i)[a-z0-9]{54}''' +keywords = [ + "ezak", +] + +[[rules]] +description = "EasyPost test API token" +id = "easypost-test-api-token" +regex = '''\bEZTK(?i)[a-z0-9]{54}''' +keywords = [ + "eztk", +] + +[[rules]] +description = "Etsy Access Token" +id = "etsy-access-token" +regex = '''(?i)(?:etsy)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{24})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "etsy", +] + +[[rules]] +description = "Facebook Access Token" +id = "facebook" +regex = '''(?i)(?:facebook)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-f0-9]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "facebook", +] + +[[rules]] +description = "Fastly API key" +id = "fastly-api-token" +regex = '''(?i)(?:fastly)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9=_\-]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "fastly", +] + +[[rules]] +description = "Finicity API token" +id = "finicity-api-token" +regex = '''(?i)(?:finicity)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-f0-9]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "finicity", +] + +[[rules]] +description = "Finicity Client Secret" +id = "finicity-client-secret" +regex = '''(?i)(?:finicity)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{20})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "finicity", +] + +[[rules]] +description = "Finnhub Access Token" +id = "finnhub-access-token" +regex = '''(?i)(?:finnhub)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{20})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "finnhub", +] + +[[rules]] +description = "Flickr Access Token" +id = "flickr-access-token" +regex = '''(?i)(?:flickr)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "flickr", +] + +[[rules]] +description = "Flutterwave Encryption Key" +id = "flutterwave-encryption-key" +regex = '''FLWSECK_TEST-(?i)[a-h0-9]{12}''' +keywords = [ + "flwseck_test", +] + +[[rules]] +description = "Finicity Public Key" +id = "flutterwave-public-key" +regex = '''FLWPUBK_TEST-(?i)[a-h0-9]{32}-X''' +keywords = [ + "flwpubk_test", +] + +[[rules]] +description = "Flutterwave Secret Key" +id = "flutterwave-secret-key" +regex = '''FLWSECK_TEST-(?i)[a-h0-9]{32}-X''' +keywords = [ + "flwseck_test", +] + +[[rules]] +description = "Frame.io API token" +id = "frameio-api-token" +regex = '''fio-u-(?i)[a-z0-9\-_=]{64}''' +keywords = [ + "fio-u-", +] + +[[rules]] +description = "Freshbooks Access Token" +id = "freshbooks-access-token" +regex = '''(?i)(?:freshbooks)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{64})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "freshbooks", +] + +[[rules]] +description = "GCP API key" +id = "gcp-api-key" +regex = 
'''(?i)\b(AIza[0-9A-Za-z\\-_]{35})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "aiza", +] + +[[rules]] +description = "Generic API Key" +id = "generic-api-key" +regex = '''(?i)(?:key|api|token|secret|client|passwd|password|auth|access)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([0-9a-z\-_.=]{10,150})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +entropy = 3.5 +keywords = [ + "key","api","token","secret","client","passwd","password","auth","access", +] +[rules.allowlist] +stopwords= [ + "client", + "endpoint", + "vpn", + "_ec2_", + "aws_", + "authorize", + "author", + "define", + "config", + "credential", + "setting", + "sample", + "xxxxxx", + "000000", + "buffer", + "delete", + "aaaaaa", + "fewfwef", + "getenv", + "env_", + "system", + "example", + "ecdsa", + "sha256", + "sha1", + "sha2", + "md5", + "alert", + "wizard", + "target", + "onboard", + "welcome", + "page", + "exploit", + "experiment", + "expire", + "rabbitmq", + "scraper", + "widget", + "music", + "dns_", + "dns-", + "yahoo", + "want", + "json", + "action", + "script", + "fix_", + "fix-", + "develop", + "compas", + "stripe", + "service", + "master", + "metric", + "tech", + "gitignore", + "rich", + "open", + "stack", + "irc_", + "irc-", + "sublime", + "kohana", + "has_", + "has-", + "fabric", + "wordpres", + "role", + "osx_", + "osx-", + "boost", + "addres", + "queue", + "working", + "sandbox", + "internet", + "print", + "vision", + "tracking", + "being", + "generator", + "traffic", + "world", + "pull", + "rust", + "watcher", + "small", + "auth", + "full", + "hash", + "more", + "install", + "auto", + "complete", + "learn", + "paper", + "installer", + "research", + "acces", + "last", + "binding", + "spine", + "into", + "chat", + "algorithm", + "resource", + "uploader", + "video", + "maker", + "next", + "proc", + "lock", + "robot", + "snake", + "patch", + "matrix", + "drill", + "terminal", + "term", + "stuff", + "genetic", + "generic", + "identity", + "audit", + "pattern", + "audio", + "web_", + "web-", + "crud", + "problem", + "statu", + "cms-", + "cms_", + "arch", + "coffee", + "workflow", + "changelog", + "another", + "uiview", + "content", + "kitchen", + "gnu_", + "gnu-", + "gnu.", + "conf", + "couchdb", + "client", + "opencv", + "rendering", + "update", + "concept", + "varnish", + "gui_", + "gui-", + "gui.", + "version", + "shared", + "extra", + "product", + "still", + "not_", + "not-", + "not.", + "drop", + "ring", + "png_", + "png-", + "png.", + "actively", + "import", + "output", + "backup", + "start", + "embedded", + "registry", + "pool", + "semantic", + "instagram", + "bash", + "system", + "ninja", + "drupal", + "jquery", + "polyfill", + "physic", + "league", + "guide", + "pack", + "synopsi", + "sketch", + "injection", + "svg_", + "svg-", + "svg.", + "friendly", + "wave", + "convert", + "manage", + "camera", + "link", + "slide", + "timer", + "wrapper", + "gallery", + "url_", + "url-", + "url.", + "todomvc", + "requirej", + "party", + "http", + "payment", + "async", + "library", + "home", + "coco", + "gaia", + "display", + "universal", + "func", + "metadata", + "hipchat", + "under", + "room", + "config", + "personal", + "realtime", + "resume", + "database", + "testing", + "tiny", + "basic", + "forum", + "meetup", + "yet_", + "yet-", + "yet.", + "cento", + "dead", + "fluentd", + "editor", + "utilitie", + "run_", + "run-", + "run.", + "box_", + "box-", + "box.", + "bot_", + "bot-", + "bot.", + "making", + "sample", + "group", + "monitor", + "ajax", + 
"parallel", + "cassandra", + "ultimate", + "site", + "get_", + "get-", + "get.", + "gen_", + "gen-", + "gen.", + "gem_", + "gem-", + "gem.", + "extended", + "image", + "knife", + "asset", + "nested", + "zero", + "plugin", + "bracket", + "mule", + "mozilla", + "number", + "act_", + "act-", + "act.", + "map_", + "map-", + "map.", + "micro", + "debug", + "openshift", + "chart", + "expres", + "backend", + "task", + "source", + "translate", + "jbos", + "composer", + "sqlite", + "profile", + "mustache", + "mqtt", + "yeoman", + "have", + "builder", + "smart", + "like", + "oauth", + "school", + "guideline", + "captcha", + "filter", + "bitcoin", + "bridge", + "color", + "toolbox", + "discovery", + "new_", + "new-", + "new.", + "dashboard", + "when", + "setting", + "level", + "post", + "standard", + "port", + "platform", + "yui_", + "yui-", + "yui.", + "grunt", + "animation", + "haskell", + "icon", + "latex", + "cheat", + "lua_", + "lua-", + "lua.", + "gulp", + "case", + "author", + "without", + "simulator", + "wifi", + "directory", + "lisp", + "list", + "flat", + "adventure", + "story", + "storm", + "gpu_", + "gpu-", + "gpu.", + "store", + "caching", + "attention", + "solr", + "logger", + "demo", + "shortener", + "hadoop", + "finder", + "phone", + "pipeline", + "range", + "textmate", + "showcase", + "app_", + "app-", + "app.", + "idiomatic", + "edit", + "our_", + "our-", + "our.", + "out_", + "out-", + "out.", + "sentiment", + "linked", + "why_", + "why-", + "why.", + "local", + "cube", + "gmail", + "job_", + "job-", + "job.", + "rpc_", + "rpc-", + "rpc.", + "contest", + "tcp_", + "tcp-", + "tcp.", + "usage", + "buildout", + "weather", + "transfer", + "automated", + "sphinx", + "issue", + "sas_", + "sas-", + "sas.", + "parallax", + "jasmine", + "addon", + "machine", + "solution", + "dsl_", + "dsl-", + "dsl.", + "episode", + "menu", + "theme", + "best", + "adapter", + "debugger", + "chrome", + "tutorial", + "life", + "step", + "people", + "joomla", + "paypal", + "developer", + "solver", + "team", + "current", + "love", + "visual", + "date", + "data", + "canva", + "container", + "future", + "xml_", + "xml-", + "xml.", + "twig", + "nagio", + "spatial", + "original", + "sync", + "archived", + "refinery", + "science", + "mapping", + "gitlab", + "play", + "ext_", + "ext-", + "ext.", + "session", + "impact", + "set_", + "set-", + "set.", + "see_", + "see-", + "see.", + "migration", + "commit", + "community", + "shopify", + "what'", + "cucumber", + "statamic", + "mysql", + "location", + "tower", + "line", + "code", + "amqp", + "hello", + "send", + "index", + "high", + "notebook", + "alloy", + "python", + "field", + "document", + "soap", + "edition", + "email", + "php_", + "php-", + "php.", + "command", + "transport", + "official", + "upload", + "study", + "secure", + "angularj", + "akka", + "scalable", + "package", + "request", + "con_", + "con-", + "con.", + "flexible", + "security", + "comment", + "module", + "flask", + "graph", + "flash", + "apache", + "change", + "window", + "space", + "lambda", + "sheet", + "bookmark", + "carousel", + "friend", + "objective", + "jekyll", + "bootstrap", + "first", + "article", + "gwt_", + "gwt-", + "gwt.", + "classic", + "media", + "websocket", + "touch", + "desktop", + "real", + "read", + "recorder", + "moved", + "storage", + "validator", + "add-on", + "pusher", + "scs_", + "scs-", + "scs.", + "inline", + "asp_", + "asp-", + "asp.", + "timeline", + "base", + "encoding", + "ffmpeg", + "kindle", + "tinymce", + "pretty", + "jpa_", + "jpa-", + "jpa.", + "used", + "user", 
+ "required", + "webhook", + "download", + "resque", + "espresso", + "cloud", + "mongo", + "benchmark", + "pure", + "cakephp", + "modx", + "mode", + "reactive", + "fuel", + "written", + "flickr", + "mail", + "brunch", + "meteor", + "dynamic", + "neo_", + "neo-", + "neo.", + "new_", + "new-", + "new.", + "net_", + "net-", + "net.", + "typo", + "type", + "keyboard", + "erlang", + "adobe", + "logging", + "ckeditor", + "message", + "iso_", + "iso-", + "iso.", + "hook", + "ldap", + "folder", + "reference", + "railscast", + "www_", + "www-", + "www.", + "tracker", + "azure", + "fork", + "form", + "digital", + "exporter", + "skin", + "string", + "template", + "designer", + "gollum", + "fluent", + "entity", + "language", + "alfred", + "summary", + "wiki", + "kernel", + "calendar", + "plupload", + "symfony", + "foundry", + "remote", + "talk", + "search", + "dev_", + "dev-", + "dev.", + "del_", + "del-", + "del.", + "token", + "idea", + "sencha", + "selector", + "interface", + "create", + "fun_", + "fun-", + "fun.", + "groovy", + "query", + "grail", + "red_", + "red-", + "red.", + "laravel", + "monkey", + "slack", + "supported", + "instant", + "value", + "center", + "latest", + "work", + "but_", + "but-", + "but.", + "bug_", + "bug-", + "bug.", + "virtual", + "tweet", + "statsd", + "studio", + "path", + "real-time", + "frontend", + "notifier", + "coding", + "tool", + "firmware", + "flow", + "random", + "mediawiki", + "bosh", + "been", + "beer", + "lightbox", + "theory", + "origin", + "redmine", + "hub_", + "hub-", + "hub.", + "require", + "pro_", + "pro-", + "pro.", + "ant_", + "ant-", + "ant.", + "any_", + "any-", + "any.", + "recipe", + "closure", + "mapper", + "event", + "todo", + "model", + "redi", + "provider", + "rvm_", + "rvm-", + "rvm.", + "program", + "memcached", + "rail", + "silex", + "foreman", + "activity", + "license", + "strategy", + "batch", + "streaming", + "fast", + "use_", + "use-", + "use.", + "usb_", + "usb-", + "usb.", + "impres", + "academy", + "slider", + "please", + "layer", + "cros", + "now_", + "now-", + "now.", + "miner", + "extension", + "own_", + "own-", + "own.", + "app_", + "app-", + "app.", + "debian", + "symphony", + "example", + "feature", + "serie", + "tree", + "project", + "runner", + "entry", + "leetcode", + "layout", + "webrtc", + "logic", + "login", + "worker", + "toolkit", + "mocha", + "support", + "back", + "inside", + "device", + "jenkin", + "contact", + "fake", + "awesome", + "ocaml", + "bit_", + "bit-", + "bit.", + "drive", + "screen", + "prototype", + "gist", + "binary", + "nosql", + "rest", + "overview", + "dart", + "dark", + "emac", + "mongoid", + "solarized", + "homepage", + "emulator", + "commander", + "django", + "yandex", + "gradle", + "xcode", + "writer", + "crm_", + "crm-", + "crm.", + "jade", + "startup", + "error", + "using", + "format", + "name", + "spring", + "parser", + "scratch", + "magic", + "try_", + "try-", + "try.", + "rack", + "directive", + "challenge", + "slim", + "counter", + "element", + "chosen", + "doc_", + "doc-", + "doc.", + "meta", + "should", + "button", + "packet", + "stream", + "hardware", + "android", + "infinite", + "password", + "software", + "ghost", + "xamarin", + "spec", + "chef", + "interview", + "hubot", + "mvc_", + "mvc-", + "mvc.", + "exercise", + "leaflet", + "launcher", + "air_", + "air-", + "air.", + "photo", + "board", + "boxen", + "way_", + "way-", + "way.", + "computing", + "welcome", + "notepad", + "portfolio", + "cat_", + "cat-", + "cat.", + "can_", + "can-", + "can.", + "magento", + "yaml", + "domain", + 
"card", + "yii_", + "yii-", + "yii.", + "checker", + "browser", + "upgrade", + "only", + "progres", + "aura", + "ruby_", + "ruby-", + "ruby.", + "polymer", + "util", + "lite", + "hackathon", + "rule", + "log_", + "log-", + "log.", + "opengl", + "stanford", + "skeleton", + "history", + "inspector", + "help", + "soon", + "selenium", + "lab_", + "lab-", + "lab.", + "scheme", + "schema", + "look", + "ready", + "leveldb", + "docker", + "game", + "minimal", + "logstash", + "messaging", + "within", + "heroku", + "mongodb", + "kata", + "suite", + "picker", + "win_", + "win-", + "win.", + "wip_", + "wip-", + "wip.", + "panel", + "started", + "starter", + "front-end", + "detector", + "deploy", + "editing", + "based", + "admin", + "capture", + "spree", + "page", + "bundle", + "goal", + "rpg_", + "rpg-", + "rpg.", + "setup", + "side", + "mean", + "reader", + "cookbook", + "mini", + "modern", + "seed", + "dom_", + "dom-", + "dom.", + "doc_", + "doc-", + "doc.", + "dot_", + "dot-", + "dot.", + "syntax", + "sugar", + "loader", + "website", + "make", + "kit_", + "kit-", + "kit.", + "protocol", + "human", + "daemon", + "golang", + "manager", + "countdown", + "connector", + "swagger", + "map_", + "map-", + "map.", + "mac_", + "mac-", + "mac.", + "man_", + "man-", + "man.", + "orm_", + "orm-", + "orm.", + "org_", + "org-", + "org.", + "little", + "zsh_", + "zsh-", + "zsh.", + "shop", + "show", + "workshop", + "money", + "grid", + "server", + "octopres", + "svn_", + "svn-", + "svn.", + "ember", + "embed", + "general", + "file", + "important", + "dropbox", + "portable", + "public", + "docpad", + "fish", + "sbt_", + "sbt-", + "sbt.", + "done", + "para", + "network", + "common", + "readme", + "popup", + "simple", + "purpose", + "mirror", + "single", + "cordova", + "exchange", + "object", + "design", + "gateway", + "account", + "lamp", + "intellij", + "math", + "mit_", + "mit-", + "mit.", + "control", + "enhanced", + "emitter", + "multi", + "add_", + "add-", + "add.", + "about", + "socket", + "preview", + "vagrant", + "cli_", + "cli-", + "cli.", + "powerful", + "top_", + "top-", + "top.", + "radio", + "watch", + "fluid", + "amazon", + "report", + "couchbase", + "automatic", + "detection", + "sprite", + "pyramid", + "portal", + "advanced", + "plu_", + "plu-", + "plu.", + "runtime", + "git_", + "git-", + "git.", + "uri_", + "uri-", + "uri.", + "haml", + "node", + "sql_", + "sql-", + "sql.", + "cool", + "core", + "obsolete", + "handler", + "iphone", + "extractor", + "array", + "copy", + "nlp_", + "nlp-", + "nlp.", + "reveal", + "pop_", + "pop-", + "pop.", + "engine", + "parse", + "check", + "html", + "nest", + "all_", + "all-", + "all.", + "chinese", + "buildpack", + "what", + "tag_", + "tag-", + "tag.", + "proxy", + "style", + "cookie", + "feed", + "restful", + "compiler", + "creating", + "prelude", + "context", + "java", + "rspec", + "mock", + "backbone", + "light", + "spotify", + "flex", + "related", + "shell", + "which", + "clas", + "webapp", + "swift", + "ansible", + "unity", + "console", + "tumblr", + "export", + "campfire", + "conway'", + "made", + "riak", + "hero", + "here", + "unix", + "unit", + "glas", + "smtp", + "how_", + "how-", + "how.", + "hot_", + "hot-", + "hot.", + "debug", + "release", + "diff", + "player", + "easy", + "right", + "old_", + "old-", + "old.", + "animate", + "time", + "push", + "explorer", + "course", + "training", + "nette", + "router", + "draft", + "structure", + "note", + "salt", + "where", + "spark", + "trello", + "power", + "method", + "social", + "via_", + "via-", + "via.", + 
"vim_", + "vim-", + "vim.", + "select", + "webkit", + "github", + "ftp_", + "ftp-", + "ftp.", + "creator", + "mongoose", + "led_", + "led-", + "led.", + "movie", + "currently", + "pdf_", + "pdf-", + "pdf.", + "load", + "markdown", + "phalcon", + "input", + "custom", + "atom", + "oracle", + "phonegap", + "ubuntu", + "great", + "rdf_", + "rdf-", + "rdf.", + "popcorn", + "firefox", + "zip_", + "zip-", + "zip.", + "cuda", + "dotfile", + "static", + "openwrt", + "viewer", + "powered", + "graphic", + "les_", + "les-", + "les.", + "doe_", + "doe-", + "doe.", + "maven", + "word", + "eclipse", + "lab_", + "lab-", + "lab.", + "hacking", + "steam", + "analytic", + "option", + "abstract", + "archive", + "reality", + "switcher", + "club", + "write", + "kafka", + "arduino", + "angular", + "online", + "title", + "don't", + "contao", + "notice", + "analyzer", + "learning", + "zend", + "external", + "staging", + "busines", + "tdd_", + "tdd-", + "tdd.", + "scanner", + "building", + "snippet", + "modular", + "bower", + "stm_", + "stm-", + "stm.", + "lib_", + "lib-", + "lib.", + "alpha", + "mobile", + "clean", + "linux", + "nginx", + "manifest", + "some", + "raspberry", + "gnome", + "ide_", + "ide-", + "ide.", + "block", + "statistic", + "info", + "drag", + "youtube", + "koan", + "facebook", + "paperclip", + "art_", + "art-", + "art.", + "quality", + "tab_", + "tab-", + "tab.", + "need", + "dojo", + "shield", + "computer", + "stat", + "state", + "twitter", + "utility", + "converter", + "hosting", + "devise", + "liferay", + "updated", + "force", + "tip_", + "tip-", + "tip.", + "behavior", + "active", + "call", + "answer", + "deck", + "better", + "principle", + "ches", + "bar_", + "bar-", + "bar.", + "reddit", + "three", + "haxe", + "just", + "plug-in", + "agile", + "manual", + "tetri", + "super", + "beta", + "parsing", + "doctrine", + "minecraft", + "useful", + "perl", + "sharing", + "agent", + "switch", + "view", + "dash", + "channel", + "repo", + "pebble", + "profiler", + "warning", + "cluster", + "running", + "markup", + "evented", + "mod_", + "mod-", + "mod.", + "share", + "csv_", + "csv-", + "csv.", + "response", + "good", + "house", + "connect", + "built", + "build", + "find", + "ipython", + "webgl", + "big_", + "big-", + "big.", + "google", + "scala", + "sdl_", + "sdl-", + "sdl.", + "sdk_", + "sdk-", + "sdk.", + "native", + "day_", + "day-", + "day.", + "puppet", + "text", + "routing", + "helper", + "linkedin", + "crawler", + "host", + "guard", + "merchant", + "poker", + "over", + "writing", + "free", + "classe", + "component", + "craft", + "nodej", + "phoenix", + "longer", + "quick", + "lazy", + "memory", + "clone", + "hacker", + "middleman", + "factory", + "motion", + "multiple", + "tornado", + "hack", + "ssh_", + "ssh-", + "ssh.", + "review", + "vimrc", + "driver", + "driven", + "blog", + "particle", + "table", + "intro", + "importer", + "thrift", + "xmpp", + "framework", + "refresh", + "react", + "font", + "librarie", + "variou", + "formatter", + "analysi", + "karma", + "scroll", + "tut_", + "tut-", + "tut.", + "apple", + "tag_", + "tag-", + "tag.", + "tab_", + "tab-", + "tab.", + "category", + "ionic", + "cache", + "homebrew", + "reverse", + "english", + "getting", + "shipping", + "clojure", + "boot", + "book", + "branch", + "combination", + "combo", +] +[[rules]] +description = "GitHub App Token" +id = "github-app-token" +regex = '''(ghu|ghs)_[0-9a-zA-Z]{36}''' +keywords = [ + "ghu_","ghs_", +] + +[[rules]] +description = "GitHub Fine-Grained Personal Access Token" +id = "github-fine-grained-pat" 
+regex = '''github_pat_[0-9a-zA-Z_]{82}''' +keywords = [ + "github_pat_", +] + +[[rules]] +description = "GitHub OAuth Access Token" +id = "github-oauth" +regex = '''gho_[0-9a-zA-Z]{36}''' +keywords = [ + "gho_", +] + +[[rules]] +description = "GitHub Personal Access Token" +id = "github-pat" +regex = '''ghp_[0-9a-zA-Z]{36}''' +keywords = [ + "ghp_", +] + +[[rules]] +description = "GitHub Refresh Token" +id = "github-refresh-token" +regex = '''ghr_[0-9a-zA-Z]{36}''' +keywords = [ + "ghr_", +] + +[[rules]] +description = "GitLab Personal Access Token" +id = "gitlab-pat" +regex = '''glpat-[0-9a-zA-Z\-\_]{20}''' +keywords = [ + "glpat-", +] + +[[rules]] +description = "GitLab Pipeline Trigger Token" +id = "gitlab-ptt" +regex = '''glptt-[0-9a-f]{40}''' +keywords = [ + "glptt-", +] + +[[rules]] +description = "GitLab Runner Registration Token" +id = "gitlab-rrt" +regex = '''GR1348941[0-9a-zA-Z\-\_]{20}''' +keywords = [ + "gr1348941", +] + +[[rules]] +description = "Gitter Access Token" +id = "gitter-access-token" +regex = '''(?i)(?:gitter)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9_-]{40})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "gitter", +] + +[[rules]] +description = "GoCardless API token" +id = "gocardless-api-token" +regex = '''(?i)(?:gocardless)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}(live_(?i)[a-z0-9\-_=]{40})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "live_","gocardless", +] + +[[rules]] +description = "Grafana api key (or Grafana cloud api key)" +id = "grafana-api-key" +regex = '''(?i)\b(eyJrIjoi[A-Za-z0-9]{70,400}={0,2})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "eyjrijoi", +] + +[[rules]] +description = "Grafana cloud api token" +id = "grafana-cloud-api-token" +regex = '''(?i)\b(glc_[A-Za-z0-9+/]{32,400}={0,2})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "glc_", +] + +[[rules]] +description = "Grafana service account token" +id = "grafana-service-account-token" +regex = '''(?i)\b(glsa_[A-Za-z0-9]{32}_[A-Fa-f0-9]{8})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "glsa_", +] + +[[rules]] +description = "HashiCorp Terraform user/org API token" +id = "hashicorp-tf-api-token" +regex = '''(?i)[a-z0-9]{14}\.atlasv1\.[a-z0-9\-_=]{60,70}''' +keywords = [ + "atlasv1", +] + +[[rules]] +description = "Heroku API Key" +id = "heroku-api-key" +regex = '''(?i)(?:heroku)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "heroku", +] + +[[rules]] +description = "HubSpot API Token" +id = "hubspot-api-key" +regex = '''(?i)(?:hubspot)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "hubspot", +] + +[[rules]] +description = "Intercom API Token" +id = "intercom-api-key" +regex = '''(?i)(?:intercom)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9=_\-]{60})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "intercom", +] + +[[rules]] +description = "JSON Web Token" +id = "jwt" +regex = 
'''(?i)\b(ey[0-9a-z]{30,34}\.ey[0-9a-z-\/_]{30,500}\.[0-9a-zA-Z-\/_]{10,200}={0,2})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +keywords = [ + "ey", +] + +[[rules]] +description = "Kraken Access Token" +id = "kraken-access-token" +regex = '''(?i)(?:kraken)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9\/=_\+\-]{80,90})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "kraken", +] + +[[rules]] +description = "Kucoin Access Token" +id = "kucoin-access-token" +regex = '''(?i)(?:kucoin)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-f0-9]{24})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "kucoin", +] + +[[rules]] +description = "Kucoin Secret Key" +id = "kucoin-secret-key" +regex = '''(?i)(?:kucoin)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "kucoin", +] + +[[rules]] +description = "Launchdarkly Access Token" +id = "launchdarkly-access-token" +regex = '''(?i)(?:launchdarkly)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9=_\-]{40})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "launchdarkly", +] + +[[rules]] +description = "Linear API Token" +id = "linear-api-key" +regex = '''lin_api_(?i)[a-z0-9]{40}''' +keywords = [ + "lin_api_", +] + +[[rules]] +description = "Linear Client Secret" +id = "linear-client-secret" +regex = '''(?i)(?:linear)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-f0-9]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "linear", +] + +[[rules]] +description = "LinkedIn Client ID" +id = "linkedin-client-id" +regex = '''(?i)(?:linkedin|linked-in)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{14})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "linkedin","linked-in", +] + +[[rules]] +description = "LinkedIn Client secret" +id = "linkedin-client-secret" +regex = '''(?i)(?:linkedin|linked-in)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{16})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "linkedin","linked-in", +] + +[[rules]] +description = "Lob API Key" +id = "lob-api-key" +regex = '''(?i)(?:lob)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}((live|test)_[a-f0-9]{35})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "test_","live_", +] + +[[rules]] +description = "Lob Publishable API Key" +id = "lob-pub-api-key" +regex = '''(?i)(?:lob)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}((test|live)_pub_[a-f0-9]{31})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "test_pub","live_pub","_pub", +] + +[[rules]] +description = "Mailchimp API key" +id = "mailchimp-api-key" +regex = '''(?i)(?:mailchimp)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-f0-9]{32}-us20)(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "mailchimp", +] + +[[rules]] +description = "Mailgun private API token" +id = "mailgun-private-api-token" +regex = '''(?i)(?:mailgun)(?:[0-9a-z\-_\t 
.]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}(key-[a-f0-9]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "mailgun", +] + +[[rules]] +description = "Mailgun public validation key" +id = "mailgun-pub-key" +regex = '''(?i)(?:mailgun)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}(pubkey-[a-f0-9]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "mailgun", +] + +[[rules]] +description = "Mailgun webhook signing key" +id = "mailgun-signing-key" +regex = '''(?i)(?:mailgun)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-h0-9]{32}-[a-h0-9]{8}-[a-h0-9]{8})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "mailgun", +] + +[[rules]] +description = "MapBox API token" +id = "mapbox-api-token" +regex = '''(?i)(?:mapbox)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}(pk\.[a-z0-9]{60}\.[a-z0-9]{22})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "mapbox", +] + +[[rules]] +description = "Mattermost Access Token" +id = "mattermost-access-token" +regex = '''(?i)(?:mattermost)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{26})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "mattermost", +] + +[[rules]] +description = "MessageBird API token" +id = "messagebird-api-token" +regex = '''(?i)(?:messagebird|message-bird|message_bird)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{25})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "messagebird","message-bird","message_bird", +] + +[[rules]] +description = "MessageBird client ID" +id = "messagebird-client-id" +regex = '''(?i)(?:messagebird|message-bird|message_bird)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "messagebird","message-bird","message_bird", +] + +[[rules]] +description = "Microsoft Teams Webhook" +id = "microsoft-teams-webhook" +regex = '''https:\/\/[a-z0-9]+\.webhook\.office\.com\/webhookb2\/[a-z0-9]{8}-([a-z0-9]{4}-){3}[a-z0-9]{12}@[a-z0-9]{8}-([a-z0-9]{4}-){3}[a-z0-9]{12}\/IncomingWebhook\/[a-z0-9]{32}\/[a-z0-9]{8}-([a-z0-9]{4}-){3}[a-z0-9]{12}''' +keywords = [ + "webhook.office.com","webhookb2","incomingwebhook", +] + +[[rules]] +description = "Netlify Access Token" +id = "netlify-access-token" +regex = '''(?i)(?:netlify)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9=_\-]{40,46})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "netlify", +] + +[[rules]] +description = "New Relic ingest browser API token" +id = "new-relic-browser-api-token" +regex = '''(?i)(?:new-relic|newrelic|new_relic)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}(NRJS-[a-f0-9]{19})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "nrjs-", +] + +[[rules]] +description = "New Relic user API ID" +id = "new-relic-user-api-id" +regex = '''(?i)(?:new-relic|newrelic|new_relic)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{64})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "new-relic","newrelic","new_relic", +] + 
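Each [[rules]] table pairs a detection regex with cheap keyword pre-filters: a rule's regex is only evaluated when one of its keywords appears in the scanned content. As a rough, self-contained illustration (not part of this configuration), the github-pat pattern defined earlier in this file can be exercised with the regex crate, the same crate the detection service uses; the token value below is made up:

use regex::Regex;

fn main() {
    // Pattern copied from the `github-pat` rule earlier in this config; the sample token is fabricated.
    let rule_regex = r"ghp_[0-9a-zA-Z]{36}";
    let keyword = "ghp_";
    let line = r#"token = "ghp_0123456789abcdefghijklmnopqrstuvwxyz""#;

    // Keyword pre-filter first, then the (more expensive) regex match.
    if line.to_lowercase().contains(keyword) {
        if let Some(m) = Regex::new(rule_regex).unwrap().find(line) {
            println!("offender: {}", m.as_str());
        }
    }
}
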
+[[rules]] +description = "New Relic user API Key" +id = "new-relic-user-api-key" +regex = '''(?i)(?:new-relic|newrelic|new_relic)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}(NRAK-[a-z0-9]{27})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "nrak", +] + +[[rules]] +description = "npm access token" +id = "npm-access-token" +regex = '''(?i)\b(npm_[a-z0-9]{36})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "npm_", +] + +[[rules]] +description = "Nytimes Access Token" +id = "nytimes-access-token" +regex = '''(?i)(?:nytimes|new-york-times,|newyorktimes)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9=_\-]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "nytimes","new-york-times","newyorktimes", +] + +[[rules]] +description = "Okta Access Token" +id = "okta-access-token" +regex = '''(?i)(?:okta)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9=_\-]{42})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "okta", +] + +[[rules]] +description = "Plaid API Token" +id = "plaid-api-token" +regex = '''(?i)(?:plaid)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}(access-(?:sandbox|development|production)-[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "plaid", +] + +[[rules]] +description = "Plaid Client ID" +id = "plaid-client-id" +regex = '''(?i)(?:plaid)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{24})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "plaid", +] + +[[rules]] +description = "Plaid Secret key" +id = "plaid-secret-key" +regex = '''(?i)(?:plaid)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{30})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "plaid", +] + +[[rules]] +description = "PlanetScale API token" +id = "planetscale-api-token" +regex = '''(?i)\b(pscale_tkn_(?i)[a-z0-9=\-_\.]{32,64})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "pscale_tkn_", +] + +[[rules]] +description = "PlanetScale OAuth token" +id = "planetscale-oauth-token" +regex = '''(?i)\b(pscale_oauth_(?i)[a-z0-9=\-_\.]{32,64})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "pscale_oauth_", +] + +[[rules]] +description = "PlanetScale password" +id = "planetscale-password" +regex = '''(?i)\b(pscale_pw_(?i)[a-z0-9=\-_\.]{32,64})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "pscale_pw_", +] + +[[rules]] +description = "Postman API token" +id = "postman-api-token" +regex = '''(?i)\b(PMAK-(?i)[a-f0-9]{24}\-[a-f0-9]{34})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "pmak-", +] + +[[rules]] +description = "Prefect API token" +id = "prefect-api-token" +regex = '''(?i)\b(pnu_[a-z0-9]{36})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "pnu_", +] + +[[rules]] +description = "Private Key" +id = "private-key" +regex = '''(?i)-----BEGIN[ A-Z0-9_-]{0,100}PRIVATE KEY( BLOCK)?-----[\s\S-]*KEY( BLOCK)?----''' +keywords = [ + "-----begin", +] + +[[rules]] +description = "Pulumi API token" +id = "pulumi-api-token" +regex = '''(?i)\b(pul-[a-f0-9]{40})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "pul-", +] + +[[rules]] +description = "PyPI 
upload token" +id = "pypi-upload-token" +regex = '''pypi-AgEIcHlwaS5vcmc[A-Za-z0-9\-_]{50,1000}''' +keywords = [ + "pypi-ageichlwas5vcmc", +] + +[[rules]] +description = "RapidAPI Access Token" +id = "rapidapi-access-token" +regex = '''(?i)(?:rapidapi)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9_-]{50})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "rapidapi", +] + +[[rules]] +description = "Readme API token" +id = "readme-api-token" +regex = '''(?i)\b(rdme_[a-z0-9]{70})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "rdme_", +] + +[[rules]] +description = "Rubygem API token" +id = "rubygems-api-token" +regex = '''(?i)\b(rubygems_[a-f0-9]{48})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "rubygems_", +] + +[[rules]] +description = "Sendbird Access ID" +id = "sendbird-access-id" +regex = '''(?i)(?:sendbird)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "sendbird", +] + +[[rules]] +description = "Sendbird Access Token" +id = "sendbird-access-token" +regex = '''(?i)(?:sendbird)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-f0-9]{40})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "sendbird", +] + +[[rules]] +description = "SendGrid API token" +id = "sendgrid-api-token" +regex = '''(?i)\b(SG\.(?i)[a-z0-9=_\-\.]{66})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "sg.", +] + +[[rules]] +description = "Sendinblue API token" +id = "sendinblue-api-token" +regex = '''(?i)\b(xkeysib-[a-f0-9]{64}\-(?i)[a-z0-9]{16})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "xkeysib-", +] + +[[rules]] +description = "Sentry Access Token" +id = "sentry-access-token" +regex = '''(?i)(?:sentry)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-f0-9]{64})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "sentry", +] + +[[rules]] +description = "Shippo API token" +id = "shippo-api-token" +regex = '''(?i)\b(shippo_(live|test)_[a-f0-9]{40})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "shippo_", +] + +[[rules]] +description = "Shopify access token" +id = "shopify-access-token" +regex = '''shpat_[a-fA-F0-9]{32}''' +keywords = [ + "shpat_", +] + +[[rules]] +description = "Shopify custom access token" +id = "shopify-custom-access-token" +regex = '''shpca_[a-fA-F0-9]{32}''' +keywords = [ + "shpca_", +] + +[[rules]] +description = "Shopify private app access token" +id = "shopify-private-app-access-token" +regex = '''shppa_[a-fA-F0-9]{32}''' +keywords = [ + "shppa_", +] + +[[rules]] +description = "Shopify shared secret" +id = "shopify-shared-secret" +regex = '''shpss_[a-fA-F0-9]{32}''' +keywords = [ + "shpss_", +] + +[[rules]] +description = "Sidekiq Secret" +id = "sidekiq-secret" +regex = '''(?i)(?:BUNDLE_ENTERPRISE__CONTRIBSYS__COM|BUNDLE_GEMS__CONTRIBSYS__COM)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-f0-9]{8}:[a-f0-9]{8})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "bundle_enterprise__contribsys__com","bundle_gems__contribsys__com", +] + +[[rules]] +description = "Sidekiq Sensitive URL" +id = "sidekiq-sensitive-url" +regex = 
'''(?i)\b(http(?:s??):\/\/)([a-f0-9]{8}:[a-f0-9]{8})@(?:gems.contribsys.com|enterprise.contribsys.com)(?:[\/|\#|\?|:]|$)''' +secretGroup = 2 +keywords = [ + "gems.contribsys.com","enterprise.contribsys.com", +] + +[[rules]] +description = "Slack token" +id = "slack-access-token" +regex = '''xox[baprs]-([0-9a-zA-Z]{10,48})''' +keywords = [ + "xoxb","xoxa","xoxp","xoxr","xoxs", +] + +[[rules]] +description = "Slack Webhook" +id = "slack-web-hook" +regex = '''https:\/\/hooks.slack.com\/(services|workflows)\/[A-Za-z0-9+\/]{44,46}''' +keywords = [ + "hooks.slack.com", +] + +[[rules]] +description = "Square Access Token" +id = "square-access-token" +regex = '''(?i)\b(sq0atp-[0-9A-Za-z\-_]{22})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +keywords = [ + "sq0atp-", +] + +[[rules]] +description = "Squarespace Access Token" +id = "squarespace-access-token" +regex = '''(?i)(?:squarespace)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "squarespace", +] + +[[rules]] +description = "Stripe Access Token" +id = "stripe-access-token" +regex = '''(?i)(sk|pk)_(test|live)_[0-9a-z]{10,32}''' +keywords = [ + "sk_test","pk_test","sk_live","pk_live", +] + +[[rules]] +description = "SumoLogic Access ID" +id = "sumologic-access-id" +regex = '''(?i)(?:sumo)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{14})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "sumo", +] + +[[rules]] +description = "SumoLogic Access Token" +id = "sumologic-access-token" +regex = '''(?i)(?:sumo)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{64})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "sumo", +] + +[[rules]] +description = "Telegram Bot API Token" +id = "telegram-bot-api-token" +regex = '''(?i)(?:^|[^0-9])([0-9]{5,16}:A[a-zA-Z0-9_\-]{34})(?:$|[^a-zA-Z0-9_\-])''' +secretGroup = 1 +keywords = [ + "telegram","api","bot","token","url", +] + +[[rules]] +description = "Travis CI Access Token" +id = "travisci-access-token" +regex = '''(?i)(?:travis)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{22})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "travis", +] + +[[rules]] +description = "Twilio API Key" +id = "twilio-api-key" +regex = '''SK[0-9a-fA-F]{32}''' +keywords = [ + "twilio", +] + +[[rules]] +description = "Twitch API token" +id = "twitch-api-token" +regex = '''(?i)(?:twitch)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{30})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "twitch", +] + +[[rules]] +description = "Twitter Access Secret" +id = "twitter-access-secret" +regex = '''(?i)(?:twitter)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{45})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "twitter", +] + +[[rules]] +description = "Twitter Access Token" +id = "twitter-access-token" +regex = '''(?i)(?:twitter)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([0-9]{15,25}-[a-zA-Z0-9]{20,40})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "twitter", +] + +[[rules]] +description = "Twitter API Key" +id = "twitter-api-key" +regex = 
'''(?i)(?:twitter)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{25})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "twitter", +] + +[[rules]] +description = "Twitter API Secret" +id = "twitter-api-secret" +regex = '''(?i)(?:twitter)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{50})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "twitter", +] + +[[rules]] +description = "Twitter Bearer Token" +id = "twitter-bearer-token" +regex = '''(?i)(?:twitter)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}(A{22}[a-zA-Z0-9%]{80,100})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "twitter", +] + +[[rules]] +description = "Typeform API token" +id = "typeform-api-token" +regex = '''(?i)(?:typeform)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}(tfp_[a-z0-9\-_\.=]{59})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "tfp_", +] + +[[rules]] +description = "Vault Batch Token" +id = "vault-batch-token" +regex = '''(?i)\b(hvb\.[a-z0-9_-]{138,212})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +keywords = [ + "hvb", +] + +[[rules]] +description = "Vault Service Token" +id = "vault-service-token" +regex = '''(?i)\b(hvs\.[a-z0-9_-]{90,100})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +keywords = [ + "hvs", +] + +[[rules]] +description = "Yandex Access Token" +id = "yandex-access-token" +regex = '''(?i)(?:yandex)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}(t1\.[A-Z0-9a-z_-]+[=]{0,2}\.[A-Z0-9a-z_-]{86}[=]{0,2})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "yandex", +] + +[[rules]] +description = "Yandex API Key" +id = "yandex-api-key" +regex = '''(?i)(?:yandex)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}(AQVN[A-Za-z0-9_\-]{35,38})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "yandex", +] + +[[rules]] +description = "Yandex AWS Access Token" +id = "yandex-aws-access-token" +regex = '''(?i)(?:yandex)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}(YC[a-zA-Z0-9_\-]{38})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "yandex", +] + +[[rules]] +description = "Zendesk Secret Key" +id = "zendesk-secret-key" +regex = '''(?i)(?:zendesk)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9]{40})(?:['|\"|\n|\r|\s|\x60|;]|$)''' +secretGroup = 1 +keywords = [ + "zendesk", +] + + diff --git a/sensleak-rs/src/api.rs b/sensleak-rs/src/api.rs new file mode 100644 index 00000000..b8d0d1a7 --- /dev/null +++ b/sensleak-rs/src/api.rs @@ -0,0 +1,9 @@ +use sensleak::start; +#[tokio::main] +async fn main() -> Result<(), Box> { + println!("The API document is located at http://localhost:7000/swagger-ui/#/"); + start().await?; + Ok(()) +} + + \ No newline at end of file diff --git a/sensleak-rs/src/entity/mod.rs b/sensleak-rs/src/entity/mod.rs new file mode 100644 index 00000000..c446ac88 --- /dev/null +++ b/sensleak-rs/src/entity/mod.rs @@ -0,0 +1 @@ +pub mod models; diff --git a/sensleak-rs/src/entity/models.rs b/sensleak-rs/src/entity/models.rs new file mode 100644 index 00000000..21f09da5 --- /dev/null +++ b/sensleak-rs/src/entity/models.rs @@ -0,0 +1,358 @@ +use chrono::{DateTime, FixedOffset}; +use clap::Parser; +use serde::{Deserialize, 
Serialize}; +use utoipa::{ToSchema}; +/// Represents the configuration for sensleaks tool. +#[derive(Parser, Debug)] +#[command( + author = "yjchen", + version = "0.1.0", + about = "sensleaks-rs", + long_about = "sensleaks: A tool to detect sensitive information in Git repository", + after_help = "run 'cargo run --bin api' to get REST API.\nRepository: https://github.com/open-rust-initiative/sensleak-rs" +)] +#[derive(Deserialize, Serialize)] +pub struct Config { + /// Target repository. + #[arg(long)] + pub repo: String, + + /// Config path + #[arg(long, default_value = "gitleaks.toml")] + pub config: String, + + /// Maximum number of threads sensleak spawns + #[arg(long, default_value = "10")] + pub threads: Option, + + /// The number of files processed in each batch + #[arg(long, default_value = "10")] + pub chunk: Option, + + /// Path to write json leaks file. + #[arg(long)] + pub report: Option, + + /// json, csv, sarif + #[arg(long, default_value = "json")] + pub report_format: Option, + + /// Show verbose output from scan. + #[arg(short, long, default_value = "false")] + pub verbose: bool, + + /// Pretty print json if leaks are present. + #[arg(long, default_value = "false")] + pub pretty: bool, + + /// sha of commit to scan + #[arg(long)] + pub commit: Option, + + /// comma separated list of a commits to scan + #[arg(long)] + pub commits: Option, + + /// file of new line separated list of a commits to scan + #[arg(long)] + pub commits_file: Option, + + /// Scan commits more recent than a specific date. Ex: '2006-01-02' or '2023-01-02T15:04:05-0700' format. + #[arg(long)] + pub commit_since: Option, + + /// Scan commits older than a specific date. Ex: '2006-01-02' or '2006-10-02T15:04:05-0700' format. + #[arg(long)] + pub commit_until: Option, + + /// Commit to start scan from + #[arg(long)] + pub commit_from: Option, + + /// Commit to stop scan + #[arg(long)] + pub commit_to: Option, + + /// Branch to scan + #[arg(long)] + pub branch: Option, + + /// Run sensleak on uncommitted code + #[arg(long, default_value = "false")] + // pub uncommitted: bool , + pub uncommitted: bool, + + /// Set user to scan + #[arg(long, default_value = "")] + pub user: Option, + + /// Load config from target repo. Config file must be ".gitleaks.toml" or "gitleaks.toml" + #[arg(long)] + pub repo_config: bool, + + /// log debug messages. + #[arg(long, default_value = "false")] + pub debug: bool, + + /// Clones repo(s) to disk. + #[arg(long)] + pub disk: Option, + + // /// Start API + // #[arg(long, default_value = "false")] + // pub api: bool, +} +impl Default for Config { + fn default() -> Self { + Config { + repo: String::default(), + config: String::from("gitleaks.toml"), + threads: Some(50), + chunk: Some(10), + report: None, + report_format: Some(String::from("json")), + verbose: false, + pretty: false, + commit: None, + commits: None, + commits_file: None, + commit_since: None, + commit_until: None, + commit_from: None, + commit_to: None, + branch: None, + uncommitted: false, + user: Some("".to_string()), + repo_config: false, + debug: false, + disk: None, + // api: false, + } + } +} + +/// # An array of tables that contain information that define instructions on how to detect secrets. +#[derive(Debug, Serialize, Clone, Deserialize,ToSchema)] +pub struct Rule { + /// Short human readable description of the rule. + pub description: String, + + /// Unique identifier for this rule. + pub id: String, + + /// Regular expression used to detect secrets. 
+ pub regex: String, + + /// Float representing the minimum shannon entropy a regex group must have to be considered a secret. + // pub entropy: Option, + + /// Keywords are used for pre-regex check filtering. Rules that contain keywords will perform a quick string compare check to make sure the keyword(s) are in the content being scanned. Ideally these values should either be part of the idenitifer or unique strings specific to the rule's regex + pub keywords: Vec, + + /// You can include an allowlist table for a single rule to reduce false positives or ignore commits with known/rotated secrets. + pub allowlist: Option, +} + +impl Rule { + pub fn new() -> Rule { + Rule { + description: String::from("11"), + id: String::from("11"), + regex: String::from("(?i)(?:key|api|token|secret|client|passwd|password|auth|access)"), + // entropy: Some(3.1), + keywords: Vec::new(), + allowlist: None, + } + } +} + +impl Default for Rule { + fn default() -> Self { + Self::new() + } +} + +/// Skip the allowlist +#[derive(Debug, Deserialize, Serialize, Clone,ToSchema)] +pub struct Allowlist { + /// Skip the paths. + pub paths: Vec, + + /// Skip the commits. + pub commits: Vec, + + /// Acceptable values for regexTarget are "match" and "line". + pub regex_target: String, + + /// Skip the secrets that satisfy the regexes. + pub regexes: Vec, + + /// Skip the secrets that contain the stopwords. + pub stopwords: Vec, +} +impl Allowlist { + pub fn new() -> Allowlist { + Allowlist { + paths: Vec::new(), + commits: Vec::new(), + regex_target: String::from("match"), + regexes: Vec::new(), + stopwords: Vec::new(), + } + } +} +impl Default for Allowlist { + fn default() -> Self { + Self::new() + } +} +/// Represents an item in the scanned output. +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct Leak { + /// The line containing the sensitive information. + pub line: String, + + /// The line number where the sensitive information is found. + pub line_number: u32, + + /// The sensitive information detected. + pub offender: String, + + /// The commit info. + pub commit: String, + + /// The repository where the sensitive information is found. + pub repo: String, + + /// The rule used to detect the sensitive information. + pub rule: String, + + /// The commit message associated with the sensitive information. + pub commit_message: String, + + /// The author of the commit. + pub author: String, + + /// The email of the commit author. + pub email: String, + + /// The file path where the sensitive information is found. + pub file: String, + + /// The date of the commit. 
+ pub date: String, +} + +/// The scan condition +#[derive(Debug, Clone)] +pub struct Scan { + /// allow list + pub allowlist: Allowlist, + + /// the rules list + pub ruleslist: Vec, + + pub threads: Option, + pub chunk: Option, +} +impl Scan { + pub fn new() -> Self { + Scan { + allowlist: Allowlist::new(), + ruleslist: Vec::new(), + // keywords:Vec::new(), + threads: Some(10), + chunk: Some(10), + } + } +} + +impl Default for Scan { + fn default() -> Self { + Self::new() + } +} + +/// The commit info +#[derive(Debug, Clone)] +pub struct CommitInfo { + /// repo name + pub repo: String, + + /// commit id + pub commit: git2::Oid, + + /// author name + pub author: String, + + /// the email of author + pub email: String, + + /// commit message + pub commit_message: String, + + /// commit date + pub date: DateTime, + + /// file + pub files: Vec<(String, String)>, +} + +/// The Results of the project +#[derive(Debug)] +pub struct Results { + /// The number of commits being scanned + pub commits_number: usize, + + /// The leaks + pub outputs: Vec, +} +impl Results { + pub fn new() -> Self { + Results { + commits_number: 0, + outputs: Vec::new(), + } + } +} +impl Default for Results { + fn default() -> Self { + Self::new() + } +} +/// CSV Object +#[derive(Debug, Serialize, Deserialize)] +pub struct CsvResult { + /// The line containing the sensitive information. + pub line: String, + + /// The line number where the sensitive information is found. + pub line_number: u32, + + /// The sensitive information detected. + pub offender: String, + + /// The commit info. + pub commit: String, + + /// The repository where the sensitive information is found. + pub repo: String, + + /// The rule used to detect the sensitive information. + pub rule: String, + + /// The commit message associated with the sensitive information. + pub commit_message: String, + + /// The author of the commit. + pub author: String, + + /// The email of the commit author. + pub email: String, + + /// The file path where the sensitive information is found. + pub file: String, + + /// The date of the commit. + pub date: String, +} diff --git a/sensleak-rs/src/errors.rs b/sensleak-rs/src/errors.rs new file mode 100644 index 00000000..462904f9 --- /dev/null +++ b/sensleak-rs/src/errors.rs @@ -0,0 +1,70 @@ +use std::error::Error; +use std::fmt; + +/// CustomError represents custom errors that can occur in the application. +#[derive(Debug)] +pub enum CustomError { + EmptyFileError, + EmptyConfigFileError, + + FailDeleteDir, + FailCreateDir, + FailLoadRepo, + FailCloneRepo, + InvalidRepoName, + ObjectNotFound, + RepoInternalError, + ObjectNotAccess, + ObjectConvertFail, + AccessWalkerError, + RepoCommitError, + WalkerSortError, + PushWalkerHeadError, + InvalidDateFormat, + InvalidTimeFormat, + InvalidTomlFile, + + ExportCsvError, + ExportSarifError, + ExportJsonError, +} + +impl fmt::Display for CustomError { + /// Formats the error message for display. 
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let error_message = match *self { + CustomError::EmptyFileError => "Empty file", + CustomError::EmptyConfigFileError => "Empty Config file", + + CustomError::ExportCsvError => "Export CSV Error", + CustomError::ExportSarifError => "Export Sarif Error", + CustomError::ExportJsonError => "Export Json Error", + + CustomError::FailDeleteDir => "Failed to delete directory", + CustomError::FailCreateDir => "Failed to create directory", + CustomError::FailLoadRepo => "Failed to load repository", + CustomError::FailCloneRepo => "Failed to clone repository", + CustomError::InvalidRepoName => "Invalid repository name", + CustomError::RepoInternalError => "Internal error within the repository", + CustomError::ObjectNotFound => { + "Failure to find a blob or tree object in the repository" + } + CustomError::ObjectNotAccess => "Failed to access the repository's object database", + CustomError::ObjectConvertFail => "Failed to convert object to commit", + CustomError::AccessWalkerError => "Failure to create or access the revision walker", + CustomError::RepoCommitError => "Failed to find a commit in the repository", + CustomError::WalkerSortError => { + "Failed to set the sorting order of the revision walker" + } + CustomError::PushWalkerHeadError => { + "Failed to push the HEAD reference to the revision walker" + } + CustomError::InvalidDateFormat => "Invalid date format", + CustomError::InvalidTimeFormat => "Invalid time format", + CustomError::InvalidTomlFile => "Invalid TOML file", + }; + write!(f, "{}", error_message) + } +} + +impl Error for CustomError {} diff --git a/sensleak-rs/src/lib.rs b/sensleak-rs/src/lib.rs new file mode 100644 index 00000000..60549fab --- /dev/null +++ b/sensleak-rs/src/lib.rs @@ -0,0 +1,92 @@ +mod errors; + +mod utils { + pub mod detect_utils; + pub mod git_util; + +} +pub mod entity{ + pub mod models; +} +pub mod service{ + pub mod detect_service; + pub mod git_service; +} + + pub use entity::models; + + +pub use errors::*; + + +pub use utils::detect_utils; +pub use utils::git_util; + +pub use git_util::*; +pub use models::*; + + + + +use std::{ + net::{Ipv4Addr, SocketAddr}, + +}; + +use axum::{routing, Router, Server}; +use hyper::Error; +use utoipa::{ + OpenApi, +}; +use utoipa_swagger_ui::SwaggerUi; + + +mod routes{ + pub mod scan; + pub mod rules; +} +pub use routes::scan::*; +pub use routes::rules::*; + + use crate::routes::*; + + + + +pub async fn start() -> Result<(), Error> { + #[derive(OpenApi)] + #[openapi( + paths( + scan::scan_repo, + rules::get_all, + rules::add_rules, + rules::delete_rules_by_id, + rules::update_rules + ), + components( + schemas(ConfigDto,ScanResponse,RulesDto,JsonResponse,Rule,Allowlist) + ), + + tags( + (name = "scan", description = "Scan Git repositories API"), + (name = "rules", description = "Rules management API"), + + ) + )] + struct ApiDoc; + + + let app = Router::new() + .merge(SwaggerUi::new("/swagger-ui").url("/api-docs/openapi.json", ApiDoc::openapi())) + .route("/scan", routing::post(scan_repo)) + .route("/rules/get_all", routing::post(get_all)) + .route("/rules/add_rules", routing::post(add_rules)) + .route("/rules/delete_rules_by_id", routing::post(delete_rules_by_id)) + .route("/rules/update", routing::post(update_rules)); + + + let address = SocketAddr::from((Ipv4Addr::UNSPECIFIED, 7000)); + Server::bind(&address).serve(app.into_make_service()).await +} + + \ No newline at end of file diff --git a/sensleak-rs/src/main.rs b/sensleak-rs/src/main.rs new file 
mode 100644 index 00000000..a7521159 --- /dev/null +++ b/sensleak-rs/src/main.rs @@ -0,0 +1,8 @@ +use sensleak::service::detect_service::sensleaks; + +/// The entry of the project +fn main() { + sensleaks(); +} + + \ No newline at end of file diff --git a/sensleak-rs/src/routes/rules.rs b/sensleak-rs/src/routes/rules.rs new file mode 100644 index 00000000..9af16c7f --- /dev/null +++ b/sensleak-rs/src/routes/rules.rs @@ -0,0 +1,185 @@ +use crate::utils::detect_utils::*; +use crate::models::{Allowlist, Rule}; +use axum::Json; +use serde::{Deserialize, Serialize}; +use utoipa::ToSchema; +/// Rules Dto +#[derive(Serialize, Deserialize, Debug, ToSchema)] +pub struct RulesDto { + config: String, + rule: Option, + rule_id: Option, +} + +/// The response object +#[derive(Serialize, ToSchema)] +pub struct JsonResponse { + code: usize, + allowlist: Option, + ruleslist: Option>, + message: Option, +} +/// Load the rules +/// +/// Load the allowlists and ruleslist. +#[utoipa::path( + post, + path = "/rule/get_rules", + request_body = RulesDto, + responses( + (status = 200, description = "success", body = JsonResponse), + (status = 400, description = "fail", body = JsonResponse) + ) +)] +pub async fn get_all(Json(body): Json) -> Json { + match load_config_file(&body.config) { + Ok(scan) => Json(JsonResponse { + code: 200, + allowlist: Some(scan.allowlist), + ruleslist: Some(scan.ruleslist), + message: None, + }), + Err(err) => Json(JsonResponse { + code: 400, + message: Some(err.to_string()), + allowlist: None, + ruleslist: None, + // message: Some(String::from("Failed to load the configuration file.")), + }), + } +} + +/// Add rules. +/// +/// Add one single rule. +#[utoipa::path( + post, + path = "/rule/add_rules", + request_body = RulesDto, + responses( + (status = 200, description = "success", body = JsonResponse), + (status = 400, description = "fail", body = JsonResponse) + ) +)] +pub async fn add_rules(Json(body): Json) -> Json { + let rule: Rule = match body.rule { + Some(value) => value, + None => { + return Json(JsonResponse { + code: 400, + message: Some("It is not a Rule struct".to_string()), + allowlist: None, + ruleslist: None, + }) + } + }; + + match append_rule_to_toml(&rule, &body.config) { + Ok(_) => Json(JsonResponse { + code: 200, + message: Some("success".to_string()), + allowlist: None, + ruleslist: None, + }), + Err(err) => Json(JsonResponse { + code: 400, + message: Some(err.to_string()), + allowlist: None, + ruleslist: None, + }), + } +} + +/// Delete rules. +/// +/// Delete one rule by id. +#[utoipa::path( + post, + path = "/rule/delete_rules_by_id", + request_body = RulesDto, + responses( + (status = 200, description = "success", body = JsonResponse), + (status = 400, description = "fail", body = JsonResponse) + ) +)] +pub async fn delete_rules_by_id(Json(body): Json) -> Json { + let rule_id = match body.rule_id { + Some(value) => value, + None => { + return Json(JsonResponse { + code: 400, + message: Some("It is not a rule id".to_string()), + allowlist: None, + ruleslist: None, + }) + } + }; + + match delete_rule_by_id(&body.config, &rule_id) { + Ok(_) => Json(JsonResponse { + code: 200, + message: Some("success".to_string()), + allowlist: None, + ruleslist: None, + }), + Err(err) => Json(JsonResponse { + code: 400, + message: Some(err.to_string()), + allowlist: None, + ruleslist: None, + }), + } +} + + +/// Update rules. +/// +/// Update one rule by id. 
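All of the rule-management handlers in this module take the same RulesDto body: config is the path to a rules TOML file, while rule and rule_id are only needed by the add, update, and delete operations. A request payload for the add-rules route (registered as /rules/add_rules in lib.rs) might look like the following sketch; the rule shown is made up for illustration and the snippet assumes the serde_json crate:

use serde_json::json;

fn main() {
    // Field names mirror RulesDto in this module and Rule in models.rs;
    // the concrete values are illustrative only.
    let body = json!({
        "config": "gitleaks.toml",
        "rule": {
            "description": "Example API token",
            "id": "example-api-token",
            "regex": "example_[0-9a-f]{32}",
            "keywords": ["example_"],
            "allowlist": null
        },
        "rule_id": null
    });
    println!("{}", serde_json::to_string_pretty(&body).unwrap());
}
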
+#[utoipa::path( + post, + path = "/rule/update", + request_body = RulesDto, + responses( + (status = 200, description = "success", body = JsonResponse), + (status = 400, description = "fail", body = JsonResponse) + ) +)] +pub async fn update_rules(Json(body): Json) -> Json { + let rule_id = match body.rule_id { + Some(value) => value, + None => { + return Json(JsonResponse { + code: 400, + message: Some("It is not a rule id".to_string()), + allowlist: None, + ruleslist: None, + }) + } + }; + let rule: Rule = match body.rule { + Some(value) => value, + None => { + return Json(JsonResponse { + code: 400, + message: Some("It is not a Rule struct".to_string()), + allowlist: None, + ruleslist: None, + }) + } + }; + + match update_rule_by_id(&body.config, &rule_id,&rule) { + Ok(_) => Json(JsonResponse { + code: 200, + message: Some("success".to_string()), + allowlist: None, + ruleslist: None, + }), + Err(err) => Json(JsonResponse { + code: 400, + message: Some(err.to_string()), + allowlist: None, + ruleslist: None, + }), + } +} diff --git a/sensleak-rs/src/routes/scan.rs b/sensleak-rs/src/routes/scan.rs new file mode 100644 index 00000000..0916842d --- /dev/null +++ b/sensleak-rs/src/routes/scan.rs @@ -0,0 +1,159 @@ +use axum::Json; +use serde::{Deserialize, Serialize}; +use utoipa::ToSchema; + +use crate::service::detect_service::detect; +use crate::{Config, Leak}; + +/// The scan configuration +#[derive(Deserialize, Serialize, ToSchema)] +pub struct ConfigDto { + /// Target repository. + pub repo: String, + /// Config path + pub config: String, + /// Maximum number of threads sensleak spawns + pub report: Option, + /// The number of git files processed in each batch + pub report_format: Option, + /// Path to write json leaks file. + pub repo_config: Option, + /// json, csv, sarif + pub threads: Option, + /// Show verbose output from scan. + pub chunk: Option, + /// Pretty print json if leaks are present. + pub commit: Option, + /// comma separated list of a commits to scan + pub commits: Option, + /// file of new line separated list of a commits to scan + pub commits_file: Option, + /// Scan commits more recent than a specific date. Ex: '2006-01-02' or '2023-01-02T15:04:05-0700' format. + pub commit_since: Option, + /// Scan commits older than a specific date. Ex: '2006-01-02' or '2006-10-02T15:04:05-0700' format. + pub commit_until: Option, + + /// Commit to start scan from + pub commit_from: Option, + /// Commit to stop scan + pub commit_to: Option, + /// Branch to scan + pub branch: Option, + /// Run sensleak on uncommitted code + pub uncommitted: Option, + /// Set user to scan + pub user: Option, + + /// Clones repo(s) to disk. + pub disk: Option, +} + +/// The return results of the scan. +#[derive(Deserialize, Serialize, ToSchema)] +pub struct ScanResponse { + /// 200-success, 400-fail + code: usize, + /// the leaks number + leaks_number: Option, + /// the number of scanned commits + commits_number: Option, + /// leaks + leaks: Option>, + /// message + message: Option, +} + +/// Scan the repo. +/// +/// Scan Git repositories for sensitive data. 
+#[utoipa::path( + post, + path = "/scan", + request_body = ConfigDto, + responses( + (status = 200, description = "success", body = ScanResponse), + (status = 400, description = "fail", body = ScanResponse) + ) +)] +pub async fn scan_repo(Json(json_config): Json) -> Json { + let mut config: Config = Default::default(); + config.repo = json_config.repo; + config.config = json_config.config; + config.report = json_config.report; + config.threads = json_config.threads; + config.chunk = json_config.chunk; + config.report_format = json_config.report_format; + config.commit = json_config.commit; + config.commits = json_config.commits; + config.commit_from = json_config.commit_from; + config.commit_to = json_config.commit_to; + config.commit_since = json_config.commit_since; + config.commits_file = json_config.commits_file; + config.branch = json_config.branch; + config.uncommitted = false; + config.user = json_config.user; + config.disk = json_config.disk; + config.repo_config = json_config.repo_config.unwrap_or(false); + + match detect(config) { + Ok(results) => Json(ScanResponse { + code: 200, + leaks_number: Some(results.outputs.len()), + commits_number: Some(results.commits_number), + leaks: Some(results.outputs), + message: None, + }), + Err(err) => Json(ScanResponse { + code: 400, + message: Some(err.to_string()), + leaks_number: None, + commits_number: None, + leaks: None, + }), + } +} + + + +#[cfg(test)] +mod tests { + // use super::*; + // use axum::{ + // extract::Json, + + // }; + + + // #[tokio::test] + // async fn test_scan_repo_success() { + // let config = ConfigDto { + // repo: String::from("example/repo"), + // config: String::from("example/config"), + // report: Some(String::from("example/report")), + // report_format: Some(String::from("json")), + // repo_config: Some(true), + // threads: Some(4), + // chunk: Some(10), + // commit: Some(String::from("abcd1234")), + // commits: Some(String::from("commit1,commit2")), + // commits_file: Some(String::from("path/to/file")), + // commit_since: Some(String::from("2023-01-01")), + // commit_until: Some(String::from("2023-01-31")), + // commit_from: Some(String::from("abcd1234")), + // commit_to: Some(String::from("efgh5678")), + // branch: Some(String::from("main")), + // uncommitted: Some(false), + // user: Some(String::from("john")), + // disk: Some(String::from("path/to/disk")), + // }; + + // let json_config = Json(config); + // let response = scan_repo(json_config).await; + + // assert_eq!(response.code, 200); + // assert_eq!(response.leaks_number, Some(10)); + // assert_eq!(response.commits_number, Some(2)); + // assert_eq!(response.message, None); + + // } +} \ No newline at end of file diff --git a/sensleak-rs/src/service/detect_service.rs b/sensleak-rs/src/service/detect_service.rs new file mode 100644 index 00000000..ae13af9b --- /dev/null +++ b/sensleak-rs/src/service/detect_service.rs @@ -0,0 +1,723 @@ +use crate::errors::CustomError; +use crate::models::{Allowlist, CommitInfo, Config, Leak, Results, Rule, Scan}; +use crate::service::git_service::*; +use crate::utils::detect_utils::{ + is_commit_in_allowlist, is_contains_strs, is_link, is_path_in_allowlist, is_string_matched, + load_config, remove_duplicates, write_csv_report, write_json_report, write_sarif_report, +}; +use crate::utils::git_util::{clone_or_load_repository, extract_repo_name}; +use chrono::Local; +use clap::Parser; +use git2::Repository; +use rayon::ThreadPoolBuilder; +use regex::Regex; +use std::error::Error; +use std::fs; +use std::sync::{Arc, Mutex}; 
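For the /scan handler above, the request body mirrors ConfigDto: only repo and config are plain strings, and the remaining fields are Option values. A minimal body, with illustrative values and assuming the serde_json crate, could be built like this and POSTed to http://localhost:7000/scan (the server started with `cargo run --bin api` listens on port 7000):

use serde_json::json;

fn main() {
    // Keys correspond to ConfigDto fields; the optional fields not shown are simply left out.
    // The repository URL and other values are examples only.
    let body = json!({
        "repo": "https://github.com/open-rust-initiative/sensleak-rs",
        "config": "gitleaks.toml",
        "threads": 10,
        "chunk": 10,
        "report_format": "json"
    });
    println!("{}", serde_json::to_string_pretty(&body).unwrap());
}
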
+use std::time::Instant; + +/// Starts the Git detector application. +pub fn sensleaks() { + let args = Config::parse(); + + match detect(args) { + Ok(results) => results, + Err(err) => { + eprintln!("Application: {}", err); + std::process::exit(0); + } + }; +} + +/// Searches for sensitive information in a repository. +/// +/// # Arguments +/// +/// * `config` - A `Config` struct containing the configuration settings for the detection process. +/// +/// # Returns +/// +/// Returns the detection results as a `Result` containing the scan results or an error. +/// +pub fn detect(config: Config) -> Result> { + // load repo and record the time of clone repo + let start_clone_repo = Instant::now(); + let repo = clone_or_load_repository(&config)?; + let duration_repo: std::time::Duration = Instant::now().duration_since(start_clone_repo); + + // load scan, which contains allowlist, ruleslist, keywords + let mut scan = load_config(&repo, &config)?; + + // Set threads and chunk in scan + scan.threads = config.threads; + scan.chunk = config.chunk; + + // Record the start time of the scan + let start_scan = Instant::now(); + + // Scan + let results = process_scan(&config, repo, scan)?; + + // To output content in the console. + config_info_after_detect(&config, &results, start_scan, duration_repo)?; + + Ok(results) +} + +/// Processes the scan based on the provided configuration, repository, and scan settings. +/// +/// # Arguments +/// +/// * `config` - A reference to the `Config` object containing the scan configuration settings. +/// * `repo` - The `Repository` object representing the repository to scan. +/// * `scan` - The `Scan` object containing additional scan settings such as allowlist, ruleslist, and keywords. +/// +/// # Returns +/// +/// Returns the scan results as a `Result` containing the `Results` or an error. +fn process_scan(config: &Config, repo: Repository, scan: Scan) -> Result> { + // Scan the files that have not been submitted. + if config.uncommitted { + return handle_uncommitted_files(repo, &config.repo, scan); + } + + match ( + &config.commit, + &config.commits, + &config.commits_file, + &config.commit_since, + &config.commit_until, + &config.commit_from, + &config.commit_to, + &config.uncommitted, + &config.user, + &config.branch, + ) { + (Some(commit), _, _, _, _, _, _, _, Some(user), _) => { + handle_single_commit(repo, commit, scan, user) + } + (_, Some(commits), _, _, _, _, _, _, Some(user), _) => { + let commit_ids: Vec<&str> = commits.split(',').collect(); + handle_multiple_commits(repo, &commit_ids, scan, user) + } + (_, _, Some(file_path), _, _, _, _, _, Some(user), _) => { + handle_commits_file(repo, file_path, scan, user) + } + (_, _, _, Some(since), Some(until), _, _, _, Some(user), _) => { + handle_commit_range_by_time(repo, since, until, scan, user) + } + (_, _, _, _, _, Some(commit_from), Some(commit_to), _, Some(user), _) => { + handle_commit_range( + repo, + Some(commit_from.clone()), + Some(commit_to.clone()), + scan, + user, + ) + } + (_, _, _, _, _, _, _, _, Some(_user), Some(branch)) => { + handle_branches_by_name(repo, branch, scan) + } + (_, _, _, _, _, _, _, _, Some(user), _) => handle_all_commits(repo, scan, user), + + _ => handle_all_commits(repo, scan, ""), + } +} + +/// Detects leaks in the provided file contents based on the specified rules and configurations. +/// +/// +/// The function utilizes a thread pool to execute detection operations concurrently, improving performance. 
+/// Detected leaks are stored in a shared mutable vector wrapped in an `Arc`. +/// +/// # Arguments +/// +/// * `contents` - The contents of the file to be scanned for leaks. +/// * `path` - The path to the file being scanned. +/// * `ruleslist` - A slice of `Rule` objects representing the rules to be applied during the detection process. +/// * `allowlist` - An `Allowlist` object containing patterns to exclude from the detection process. +/// * `commit_info` - A reference to the `CommitInfo` object containing information about the commit associated with the file. +/// * `threads` - An optional `usize` value specifying the number of threads to use in the thread pool. Default is 50. +/// +/// # Returns +/// +/// Returns a `Result` containing a cloned vector of `Leak` objects representing the detected leaks, or an error. +/// +/// # Errors +/// +/// This function can return an error if there are any issues during the detection process. +/// +pub fn detect_file( + contents: &str, + path: &str, + ruleslist: &[Rule], + allowlist: &Allowlist, + commit_info: &CommitInfo, + threads: Option, +) -> Result, Box> { + // Create a shared mutable vector to store detection results + let detect_info: Arc>> = Arc::new(Mutex::new(Vec::new())); + + // Create a thread pool with the setting threads + let thread_pool = ThreadPoolBuilder::new() + .num_threads(threads.unwrap_or(50)) + .build() + .unwrap(); + + // Use the thread pool to execute the detection operations + thread_pool.scope(|s| { + for rule in ruleslist { + // Check if the contents contain any keywords from the rule + if is_contains_strs(&rule.keywords, contents) { + let cloned_path = path.to_string(); + let cloned_rule = rule.clone(); + let cloned_contents = contents.to_string(); + let cloned_allowlist = allowlist.clone(); + let cloned_commits = commit_info.commit.to_string(); + let cloned_commit_info = commit_info.clone(); + let detect_info_clone = Arc::clone(&detect_info); + + // Spawn a thread to perform the detection using regex + s.spawn(move |_| { + let results = detect_by_regex( + &cloned_path, + &cloned_rule, + &cloned_contents, + &cloned_allowlist, + &cloned_commits, + ); + + // Acquire the lock for detection results and update the vector + let mut detect_info = detect_info_clone.lock().unwrap(); + for (line_number, line, matched) in results.iter() { + let output_item = Leak { + line: line.to_string(), + line_number: *line_number as u32, + offender: matched.to_string(), + commit: cloned_commit_info.commit.to_string(), + repo: cloned_commit_info.repo.to_string(), + rule: cloned_rule.description.to_string(), + commit_message: cloned_commit_info.commit_message.to_string(), + author: cloned_commit_info.author.to_string(), + email: cloned_commit_info.email.to_string(), + file: cloned_path.to_string(), + date: cloned_commit_info.date.to_string(), + }; + detect_info.push(output_item); + } + }); + } + } + }); + + // Acquire the lock for detection results and return a clone of the results + let detect_info = detect_info.lock().unwrap(); + Ok(detect_info.clone()) +} + +/// Searches a string for matches of a given regular expression and returns a vector of tuples. +/// +/// # Arguments +/// +/// * `path` - The path to the file being searched. This is used for allowlist checks. +/// * `rules` - A `Rule` object representing the rule to apply during the detection process. It contains the regular expression to match against. +/// * `contents` - A string containing the contents to search for matches. 
+/// * `allowlist` - An `Allowlist` object containing the allowlist configurations. +/// +/// # Returns +/// +/// A vector of tuples `(usize, &str, &str)`, where each tuple represents a match found in the string. +/// The first element of the tuple is the line number (1-indexed), the second element is the matched line, and the third element is the matched substring. +/// +fn detect_by_regex<'a>( + path: &str, + rules: &Rule, + contents: &'a str, + allowlist: &Allowlist, + commits: &str, +) -> Vec<(usize, &'a str, &'a str)> { + // Create a regular expression object. + let regex = Regex::new(&rules.regex).unwrap(); + + // Iterate over the lines in the string. + let results: Vec<(usize, &str, &str)> = contents + .lines() + .enumerate() + .filter_map(|(i, line)| { + // Match the regular expression against each line. + regex + .captures(line) + .and_then(|captures| captures.get(0)) + .map(|matched| (i + 1, line, matched.as_str())) + }) + .collect(); + if results.is_empty() { + return Vec::new(); + } + + // The secrets that should be skipped + let mut filtered_results: Vec<(usize, &str, &str)> = Vec::new(); + + // Handle global allowlist + if allowlist.regex_target == "line" { + for (line_number, line, matched) in &results { + if (allowlist.regexes.is_empty() || allowlist.stopwords.is_empty()) + && (is_string_matched(&allowlist.regexes, line) + || is_contains_strs(&allowlist.stopwords, line)) + { + filtered_results.push((*line_number, line, matched)); + } + } + } else { + for (line_number, line, matched) in &results { + if (allowlist.regexes.is_empty() || allowlist.stopwords.is_empty()) + && (is_string_matched(&allowlist.regexes, matched) + || is_contains_strs(&allowlist.stopwords, matched)) + { + filtered_results.push((*line_number, line, matched)); + } + } + } + + // Handle rules.allowlist + if let Some(rules_allowlist) = &rules.allowlist { + // check commits and paths + if (is_path_in_allowlist(path, &rules_allowlist.paths)) + || (is_commit_in_allowlist(commits, &rules_allowlist.commits)) + { + return vec![]; + } + + // check regexes and stopwords + if rules_allowlist.regex_target == "line" { + for (line_number, line, matched) in &results { + if (rules_allowlist.regexes.is_empty() || rules_allowlist.stopwords.is_empty()) + && (is_string_matched(&rules_allowlist.regexes, line) + || is_contains_strs(&rules_allowlist.stopwords, line)) + { + filtered_results.push((*line_number, line, matched)); + } + } + } else { + for (line_number, line, matched) in &results { + if (rules_allowlist.regexes.is_empty() || rules_allowlist.stopwords.is_empty()) + && (is_string_matched(&rules_allowlist.regexes, matched) + || is_contains_strs(&rules_allowlist.stopwords, matched)) + { + filtered_results.push((*line_number, line, matched)); + } + } + } + } + + if filtered_results.is_empty() { + results + } else { + remove_duplicates(results, filtered_results) + } +} + +/// Detects uncommitted files for sensitive information leaks. +/// +/// # Arguments +/// +/// * `contents` - A string slice representing the contents of the file. +/// * `path` - A string slice representing the path of the file. +/// * `ruleslist` - A reference to a slice of `Rule` objects to match against. +/// * `allowlist` - A reference to an `Allowlist` object for paths that should be skipped. +/// +/// # Returns +/// +/// Returns a `Result` containing a vector of `Leak` objects if sensitive information leaks are detected, +/// or an empty vector if no leaks are found. 
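In detect_by_regex above, allowlist patterns are applied against different targets depending on regex_target: "line" tests the whole offending line, while "match" tests only the matched substring. A minimal sketch of that distinction, separate from the project's actual filtering code and assuming the regex crate:

use regex::Regex;

// Decide whether an allowlist regex suppresses a finding, depending on regex_target.
fn allowlisted(regex_target: &str, line: &str, matched: &str, allow: &Regex) -> bool {
    let candidate = if regex_target == "line" { line } else { matched };
    allow.is_match(candidate)
}

fn main() {
    let allow = Regex::new(r"(?i)test fixture").unwrap();
    let line = r#"let password = "hunter2"; // test fixture, not a real credential"#;
    let matched = "hunter2";

    // Targeting the whole line suppresses this finding; targeting only the match does not.
    assert!(allowlisted("line", line, matched, &allow));
    assert!(!allowlisted("match", line, matched, &allow));
    println!("allowlist target comparison passed");
}
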
+pub fn detect_uncommitted_file( + contents: &str, + path: &str, + ruleslist: &[Rule], + allowlist: &Allowlist, + threads: Option, +) -> Result, Box> { + // Create a shared mutable vector to store detection results + let detect_info: Arc>> = Arc::new(Mutex::new(Vec::new())); + + // Create a thread pool with the setting threads + let thread_pool = ThreadPoolBuilder::new() + .num_threads(threads.unwrap_or(50)) + .build() + .unwrap(); + + // Use the thread pool to execute the detection operations + thread_pool.scope(|s| { + for rule in ruleslist { + // Check if the contents contain any keywords from the rule + if is_contains_strs(&rule.keywords, contents) { + let cloned_path = path.to_string(); + let cloned_rule = rule.clone(); + let cloned_contents = contents.to_string(); + let cloned_allowlist = allowlist.clone(); + let detect_info_clone = Arc::clone(&detect_info); + + // Spawn a thread to perform the detection using regex + s.spawn(move |_| { + let results = detect_by_regex( + &cloned_path, + &cloned_rule, + &cloned_contents, + &cloned_allowlist, + "", + ); + + // Acquire the lock for detection results and update the vector + let mut detect_info = detect_info_clone.lock().unwrap(); + for (line_number, line, matched) in results.iter() { + let output_item = Leak { + line: line.to_string(), + line_number: *line_number as u32, + offender: matched.to_string(), + commit: "".to_string(), + repo: "".to_string(), + rule: cloned_rule.description.to_string(), + commit_message: "".to_string(), + author: "".to_string(), + email: "".to_string(), + file: cloned_path.to_string(), + date: "".to_string(), + }; + detect_info.push(output_item); + } + }); + } + } + }); + + // Acquire the lock for detection results and return a clone of the results + let detect_info = detect_info.lock().unwrap(); + Ok(detect_info.clone()) +} + +/// Handles post-detection configuration information and performs actions based on the configuration settings. +/// +/// # Arguments +/// +/// * `config` - A reference to the `Config` object containing the scan configuration settings. +/// * `results` - A reference to the `Results` object containing the detection results. +/// * `start_scan` - The start time of the scan as an `Instant` object. +/// * `duration_repo` - The duration of the repository scanning process as a `std::time::Duration` object. +/// +/// # Returns +/// +/// Returns `Ok(())` if the post-detection actions are performed successfully, or an error of type `Box` if any issues occur. +/// +/// # Errors +/// +/// This function can return an error if there are any issues during the post-detection actions, such as writing reports. 
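+///
+/// # Example
+///
+/// A minimal sketch, assuming `config` and `results` were produced earlier in
+/// the scan pipeline; the durations below are illustrative.
+///
+/// ```ignore
+/// use std::time::{Duration, Instant};
+///
+/// let start_scan = Instant::now();
+/// // ... run the detection ...
+/// let duration_repo = Duration::from_millis(1200); // time spent loading the repo
+/// config_info_after_detect(&config, &results, start_scan, duration_repo)?;
+/// ```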
+/// +fn config_info_after_detect( + config: &Config, + results: &Results, + start_scan: Instant, + duration_repo: std::time::Duration, +) -> Result<(), Box> { + // Calculate the scan duration + let duration_scan = Instant::now().duration_since(start_scan); + + // If the verbose flag is set, print the scan results to the console + if config.verbose { + if config.pretty { + println!("{:#?}", results.outputs); + } else { + println!("{:?}", results.outputs); + } + } + + // If the debug flag is set, print the scan results to the console + if config.debug { + debug_info(duration_repo, duration_scan, results.commits_number); + } + + // Write output report + match &config.report { + Some(report) => { + match &config.report_format { + Some(format) => { + if format == "sarif" { + if write_sarif_report(report, &results.outputs).is_err() { + return Err(Box::new(CustomError::ExportSarifError)); + } + } else if format == "csv" { + if write_csv_report(report, &results.outputs).is_err() { + return Err(Box::new(CustomError::ExportCsvError)); + } + } else if write_json_report(report, &results.outputs).is_err() { + return Err(Box::new(CustomError::ExportJsonError)); + } + } + None => {} + }; + } + None => {} + } + + println!( + "\x1b[38;5;208m[WARN]\x1b[0m[{}]{} leaks detected. {} commits scanned in {:?}", + Local::now().format("%Y-%m-%d %H:%M:%S"), + results.outputs.len(), + results.commits_number, + duration_scan + ); + + match &config.disk { + Some(_disk) => {} + None => { + if is_link(&config.repo) { + let dest = "workplace/"; + let mut repo_path = String::new(); + if let Some(name) = extract_repo_name(&config.repo) { + repo_path = format!("{}{}", dest, name); + } + match fs::remove_dir_all(repo_path) { + Ok(_) => {} + Err(e) => eprintln!("Delete dir fail: {}", e), + } + } + } + }; + Ok(()) +} + +/// Prints debug information. +/// +/// # Arguments +/// +/// * `total_clone_time` - The total time taken for repository cloning, represented as a `Duration` object. +/// * `total_scan_time` - The total time taken for the scan, represented as a `Duration` object. +/// * `commits` - The number of commits. 
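+///
+/// # Example
+///
+/// A minimal sketch with illustrative durations and commit count:
+///
+/// ```ignore
+/// use std::time::Duration;
+///
+/// debug_info(Duration::from_millis(1200), Duration::from_millis(3400), 42);
+/// ```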
+fn debug_info( + total_clone_time: std::time::Duration, + total_scan_time: std::time::Duration, + commits: usize, +) { + let timestamp = Local::now().format("%Y-%m-%dT%H:%M:%S%.3f%:z").to_string(); + println!( + "\x1b[34m[DEBUG]\x1b[0m[{}] -------------------------", + timestamp + ); + println!( + "\x1b[34m[DEBUG]\x1b[0m[{}] | Times and Commit Counts|", + timestamp + ); + println!( + "\x1b[34m[DEBUG]\x1b[0m[{}] -------------------------", + timestamp + ); + println!("totalScanTime: {:?}", total_scan_time); + println!("totalCloneTime: {:?}", total_clone_time); + println!("totalCommits: {}", commits); +} + +#[cfg(test)] +mod tests { + use super::*; + extern crate git2; + + use chrono::DateTime; + // Helper function to create a mock scan + fn create_mock_scan() -> Scan { + let rule = Rule { + description: String::from("Stripe Access Token"), + id: String::from("stripe-access-token"), + regex: String::from(r"(?i)(sk|pk)_(test|live)_[0-9a-z]{10,32}"), + // entropy: Some(0.5), + keywords: vec![ + String::from("sk_test"), + String::from("pk_test"), + String::from("sk_live"), + String::from("pk_live"), + ], + allowlist: None, + }; + let ruleslist: Vec = vec![rule]; + + + let allowlist = Allowlist { + paths: vec![], + commits: vec![], + regex_target: String::from("match"), + regexes: vec![], + stopwords: vec![], + }; + + let scan = Scan { + allowlist, + ruleslist, + + threads: Some(50), + chunk: Some(10), + }; + scan + } + + // test detect_file + static PATH: &str = "tests/files/testdir/test.txt"; + #[test] + fn test_detect_file() { + let scan = create_mock_scan(); + let content = "twilio_api_key = SK12345678901234567890123456789012"; + let commit_info = CommitInfo { + repo: "example/repo".to_string(), + commit: git2::Oid::from_str("1234567890abcdef1234567890abcdef12345678").unwrap(), + author: "John Doe".to_string(), + email: "johndoe@example.com".to_string(), + commit_message: "Example commit message".to_string(), + date: DateTime::parse_from_rfc3339("2023-05-26T12:34:56+00:00") + .unwrap() + .into(), + files: vec![ + ("/path/to/file1".to_string(), "File 1 contents".to_string()), + ("/path/to/file2".to_string(), "File 2 contents".to_string()), + ], + }; + // Call the detect_file function + let result = detect_file( + PATH, + content, + &scan.ruleslist, + &scan.allowlist, + &commit_info, + scan.threads, + ); + + // Assert that the result is as expected + let output = result.unwrap(); + assert_eq!(output.len(), 0); + } + // test detect_by_regex + + #[test] + fn test_detect_by_regex() { + let rules = Rule { + description: "Digits".to_string(), + id: "key".to_string(), + regex: r"\d+".to_string(), + // entropy: None, + keywords: vec![], + allowlist: None, + }; + let contents = "123\n456\n789\naaaaaxwsd\ntoken=wkwk121"; + let allowlist = Allowlist { + commits: vec![], + paths: vec![], + regex_target: String::new(), + regexes: vec![], + stopwords: vec![], + }; + + let result = detect_by_regex(PATH, &rules, contents, &allowlist, ""); + + assert_eq!(result.len(), 4); + assert_eq!(result[0], (1, "123", "123")); + assert_eq!(result[1], (2, "456", "456")); + assert_eq!(result[2], (3, "789", "789")); + assert_eq!(result[3], (5, "token=wkwk121", "121")); + } + + #[test] + fn test_detect_by_regex_with_rules_allowlist_regex_target_match() { + let rules = Rule { + description: "Digits".to_string(), + id: "key".to_string(), + regex: r"\d+".to_string(), + // entropy: None, + keywords: vec![], + allowlist: Some(Allowlist { + commits: vec![], + paths: vec!["tests/files/test90.txt".to_string()], + regex_target: 
"match".to_string(), + regexes: vec![], + stopwords: vec!["token".to_string()], + }), + }; + let contents = "123\n456\n789\naaaaaxwsd\ntoken=wkwk121"; + let allowlist = Allowlist { + commits: vec![], + paths: vec![], + regex_target: String::new(), + regexes: vec![], + stopwords: vec![], + }; + + let result = detect_by_regex(PATH, &rules, contents, &allowlist, ""); + println!("{:?}", result); + assert_eq!(result.len(), 4); + assert_eq!(result[0], (1, "123", "123")); + assert_eq!(result[1], (2, "456", "456")); + assert_eq!(result[2], (3, "789", "789")); + assert_eq!(result[3], (5, "token=wkwk121", "121")); + } + + #[test] + fn test_detect_by_regex_with_rules_allowlist_regex_target_line() { + let rules = Rule { + description: "Digits".to_string(), + id: "key".to_string(), + regex: r"\d+".to_string(), + // entropy: None, + keywords: vec![], + allowlist: Some(Allowlist { + commits: vec![], + paths: vec!["tests/files/test90.txt".to_string()], + regex_target: "line".to_string(), + regexes: vec![], + stopwords: vec!["token".to_string()], + }), + }; + let contents = "123\n456\n789\naaaaaxwsd\ntoken=wkwk121"; + let allowlist = Allowlist { + commits: vec![], + paths: vec![], + regex_target: String::new(), + regexes: vec![], + stopwords: vec![], + }; + + let result = detect_by_regex(PATH, &rules, contents, &allowlist, ""); + println!("{:?}", result); + assert_eq!(result.len(), 3); + assert_eq!(result[0], (1, "123", "123")); + assert_eq!(result[1], (2, "456", "456")); + assert_eq!(result[2], (3, "789", "789")); + } + + #[test] + fn test_detect_by_regex_with_global_allowlist() { + let rules = Rule { + description: "Digits".to_string(), + id: "key".to_string(), + regex: r"\d+".to_string(), + // entropy: None, + keywords: vec![], + allowlist: Some(Allowlist { + commits: vec![], + paths: vec!["tests/files/test90.txt".to_string()], + regex_target: "line".to_string(), + regexes: vec![], + stopwords: vec!["token".to_string()], + }), + }; + let contents = "123\n456\n789\naaaaaxwsd\ntoken=wkwk121\nclient22222\n22"; + let allowlist = Allowlist { + commits: vec![], + paths: vec![], + regex_target: "line".to_string(), + regexes: vec![], + stopwords: vec!["client".to_string()], + }; + + let result = detect_by_regex(PATH, &rules, contents, &allowlist, ""); + assert_eq!(result.len(), 4); + assert_eq!(result[0], (1, "123", "123")); + assert_eq!(result[1], (2, "456", "456")); + assert_eq!(result[2], (3, "789", "789")); + assert_eq!(result[3], (7, "22", "22")); + } +} diff --git a/sensleak-rs/src/service/git_service.rs b/sensleak-rs/src/service/git_service.rs new file mode 100644 index 00000000..de9bb8bb --- /dev/null +++ b/sensleak-rs/src/service/git_service.rs @@ -0,0 +1,672 @@ +extern crate chrono; +extern crate git2; +use chrono::{DateTime, FixedOffset, TimeZone, Utc}; + +use git2::{BranchType, Repository, StatusOptions}; +use std::sync::{Arc, Mutex}; +use rayon::prelude::*; + +use crate::models::{CommitInfo, Leak, Results, Scan}; +use std::collections::HashSet; +use std::error::Error; +use std::fs; +use std::fs::File; + +use crate::errors::CustomError; +use crate::service::detect_service::{detect_file, detect_uncommitted_file}; +use crate::utils::git_util::{ + config_commit_info, is_valid_date_format, load_all_commits, load_commits_by_conditions, + parse_date_to_datetime, +}; + +use std::io::{BufRead, BufReader, Read}; + +/// Handles a single commit by scanning its content. +/// +/// # Arguments +/// +/// * `repo` - A `Repository` object representing the Git repository. 
+/// * `commit_id` - The ID of the commit to handle, provided as a string. +/// * `scan` - A `Scan` object representing the scanning configuration. +/// +/// # Returns +/// +/// A `Result` containing the scanning results (`Results`) if successful, +/// otherwise an error (`Box`). +pub fn handle_single_commit( + repo: Repository, + commit_id: &str, + scan: Scan, + user: &str, +) -> Result> { + let commit = repo.find_commit(git2::Oid::from_str(commit_id)?)?; + if !user.is_empty() && user != commit.author().name().unwrap_or("") { + return Ok(Results::new()); + } + let commit_info = config_commit_info(&repo, &commit, &scan)?; + let commits_list = vec![commit_info]; + + // Handle the commit information and perform the scan + handle_commit_info(&commits_list, scan) +} + +/// Handles multiple commits by scanning their content. +/// +/// # Arguments +/// +/// * `repo` - A `Repository` object representing the Git repository. +/// * `commit_ids` - An array slice of commit IDs to handle, provided as strings. +/// * `scan` - A `Scan` object representing the scanning configuration. +/// +/// # Returns +/// +/// A `Result` containing the scanning results (`Results`) if successful, +/// otherwise an error (`Box`). +pub fn handle_multiple_commits( + repo: Repository, + commit_ids: &[&str], + scan: Scan, + user: &str, +) -> Result> { + let mut commits_list = vec![]; + + // Iterate over each commit ID + for commit_id in commit_ids { + let commit = repo.find_commit(git2::Oid::from_str(commit_id)?)?; + if user.is_empty() || user == commit.author().name().unwrap_or("") { + let commit_info = config_commit_info(&repo, &commit, &scan)?; + commits_list.push(commit_info); + } + } + if commits_list.is_empty() { + return Ok(Results::new()); + } + // Handle the commit information and perform the scan + handle_commit_info(&commits_list, scan) +} + +/// Handles commits from a file by scanning their content. +/// +/// # Arguments +/// +/// * `repo` - A `Repository` object representing the Git repository. +/// * `file_name` - The name of the file containing commit IDs, provided as a string. +/// * `scan` - A `Scan` object representing the scanning configuration. +/// +/// # Returns +/// +/// A `Result` containing the scanning results (`Results`) if successful, +/// otherwise an error (`Box`). +pub fn handle_commits_file( + repo: Repository, + file_name: &str, + scan: Scan, + user: &str, +) -> Result> { + // Open the commits file + let file = fs::File::open(file_name).expect("Failed to open commits file"); + let reader = BufReader::new(file); + + let mut commits: Vec = Vec::new(); + + // Read each line from the file and store it in the commits vector + for line in reader.lines().flatten() { + commits.push(line); + } + + // Convert commit IDs to a vector of string slices + let commit_ids: Vec<&str> = commits.iter().map(|s| s.as_str()).collect(); + + // Handle multiple commits using the commit IDs and perform the scan + handle_multiple_commits(repo, &commit_ids, scan, user) +} + +/// Handles commits within a specified time range by scanning their content. +/// +/// # Arguments +/// +/// * `repo` - A `Repository` object representing the Git repository. +/// * `since` - The starting time of the commit range, provided as a string. +/// * `until` - The ending time of the commit range, provided as a string. +/// * `scan` - A `Scan` object representing the scanning configuration. +/// +/// # Returns +/// +/// A `Result` containing the scanning results (`Results`) if successful, +/// otherwise an error (`Box`). 
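+///
+/// # Example
+///
+/// A minimal sketch, assuming `scan` has already been loaded. Both bounds must
+/// use the same format (plain `YYYY-MM-DD` dates or RFC 3339 timestamps); the
+/// repository path is a placeholder.
+///
+/// ```ignore
+/// let repo = Repository::open("workplace/repo")?;
+/// let results = handle_commit_range_by_time(
+///     repo,
+///     "2023-05-20T00:00:00Z",
+///     "2023-05-26T00:00:00Z",
+///     scan,
+///     "", // an empty user filter scans commits from all authors
+/// )?;
+/// println!("{} commits scanned", results.commits_number);
+/// ```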
+#[allow(deprecated)] +pub fn handle_commit_range_by_time( + repo: Repository, + since: &str, + until: &str, + scan: Scan, + user: &str, +) -> Result> { + let excluded_commits: Vec = vec![]; + let is_since_rfc3339 = DateTime::parse_from_rfc3339(since).is_ok(); + let is_until_rfc3339 = DateTime::parse_from_rfc3339(until).is_ok(); + + let is_since_date = is_valid_date_format(since); + let is_until_date = is_valid_date_format(until); + + if is_since_date && is_until_date { + // Convert since and until to start_time and end_time + let start_time = match parse_date_to_datetime(since, "start") { + Ok(datetime) => datetime.with_timezone(&FixedOffset::east(0)), + Err(err) => { + return Err(err); + } + }; + + let end_time = match parse_date_to_datetime(until, "until") { + Ok(datetime) => datetime.with_timezone(&FixedOffset::east(0)), + Err(err) => { + return Err(err); + } + }; + + handle_multiple_commits_by_time(&repo, &excluded_commits, start_time, end_time, scan, user) + } else if is_since_rfc3339 && is_until_rfc3339 { + let start_time = DateTime::parse_from_rfc3339(since).unwrap(); + let end_time = DateTime::parse_from_rfc3339(until).unwrap(); + + handle_multiple_commits_by_time(&repo, &excluded_commits, start_time, end_time, scan, user) + } else { + return Err(Box::new(CustomError::InvalidDateFormat)); + } +} + +/// Handles multiple commits within a specified time range by scanning their content. +/// +/// # Arguments +/// +/// * `repo` - A reference to a `Repository` object representing the Git repository. +/// * `excluded_commits` - An array slice of excluded commit IDs, provided as `git2::Oid`. +/// * `start_time` - The starting time of the commit range, provided as `DateTime`. +/// * `end_time` - The ending time of the commit range, provided as `DateTime`. +/// * `scan` - A `Scan` object representing the scanning configuration. +/// +/// # Returns +/// +/// A `Result` containing the scanning results (`Results`) if successful, +/// otherwise an error (`Box`). 
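+///
+/// # Example
+///
+/// A minimal sketch with an empty exclusion list; the timestamps are
+/// illustrative and `repo` and `scan` are assumed to exist already.
+///
+/// ```ignore
+/// let start = DateTime::parse_from_rfc3339("2023-05-20T00:00:00+00:00")?;
+/// let end = DateTime::parse_from_rfc3339("2023-05-26T00:00:00+00:00")?;
+/// let results = handle_multiple_commits_by_time(&repo, &[], start, end, scan, "")?;
+/// println!("{} leaks found", results.outputs.len());
+/// ```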
+#[allow(deprecated)] +pub fn handle_multiple_commits_by_time( + repo: &Repository, + excluded_commits: &[git2::Oid], + start_time: DateTime, + end_time: DateTime, + scan: Scan, + user: &str, +) -> Result> { + // Get the head commit + let head = repo.head()?; + let obj = head.peel(git2::ObjectType::Commit)?; + let commit = if let Some(commit) = obj.as_commit() { + commit.clone() + } else { + return Err(Box::new(CustomError::ObjectConvertFail)); + }; + + // Create a revision walker and set sorting options + let mut revwalk = repo.revwalk()?; + revwalk.push(commit.id())?; + revwalk.set_sorting(git2::Sort::TOPOLOGICAL)?; + + let mut commits = Vec::new(); + let excluded_commits: HashSet<_> = excluded_commits.iter().cloned().collect(); + + // Iterate over each commit ID in the revision walker + for commit_id in revwalk { + let oid = commit_id?; + if excluded_commits.contains(&oid) { + continue; // Skip excluded commits + } + + let commit = repo.find_commit(oid)?; + + if user.is_empty() || user == commit.author().name().unwrap_or("") { + // Get the commit's time and convert it to the appropriate time zone + let commit_time = Utc.timestamp(commit.time().seconds(), 0); + let commit_offset = FixedOffset::west(commit.time().offset_minutes() * 60); + let commit_date = commit_offset.from_utc_datetime(&commit_time.naive_utc()); + + // Check if the commit is within the specified time range + if commit_date >= start_time && commit_date <= end_time { + let commit_info = config_commit_info(repo, &commit, &scan)?; + commits.push(commit_info); + } + } + } + + // Handle the commit information and perform the scan + handle_commit_info(&commits, scan) +} + +/// Handles branches by name, scanning the commits in the matching branches. +/// +/// # Arguments +/// +/// * `repo` - A `Repository` object representing the Git repository. +/// * `branch_name` - The name or partial name of the branches to match. +/// * `scan` - A `Scan` object representing the scanning configuration. +/// +/// # Returns +/// +/// A `Result` containing the scanning results (`Results`) if successful, +/// otherwise an error (`Box`). +pub fn handle_branches_by_name( + repo: Repository, + branch_name: &str, + scan: Scan, +) -> Result> { + let branches = repo.branches(Some(BranchType::Local))?; + + let mut commits = Vec::new(); + + // Iterate over each branch in the repository + for branch in branches { + let (branch, _) = branch?; + let branch_reference = branch.into_reference(); + let branch_name_str = branch_reference.name().unwrap_or(""); + + // Check if the branch name contains the provided name or partial name + if branch_name_str.contains(branch_name) { + let commit_oid = branch_reference + .target() + .ok_or_else(|| git2::Error::from_str("Failed to get branch commit"))?; + + let commit = repo.find_commit(commit_oid)?; + let commit_info = config_commit_info(&repo, &commit, &scan)?; + + commits.push(commit_info); + } + } + + // Handle the commit information and perform the scan + handle_commit_info(&commits, scan) +} + +/// Handles a commit range, scanning the commits between the specified commit IDs. +/// +/// # Arguments +/// +/// * `repo` - A `Repository` object representing the Git repository. +/// * `commit_from` - An optional string representing the starting commit ID. +/// * `commit_to` - An optional string representing the ending commit ID. +/// * `scan` - A `Scan` object representing the scanning configuration. 
+/// +/// # Returns +/// +/// A `Result` containing the scanning results (`Results`) if successful, +/// otherwise an error (`Box`). +pub fn handle_commit_range( + repo: Repository, + commit_from: Option, + commit_to: Option, + scan: Scan, + user: &str, +) -> Result> { + // Load all commits in the repository + let all_commits = match load_all_commits(&repo) { + Ok(all_commits) => all_commits, + Err(_e) => { + return Err(Box::new(CustomError::ObjectConvertFail)); + } + }; + + // Load the commits within the specified commit range + let results = load_commits_by_conditions(commit_from, commit_to, &all_commits); + let commit_ids: Vec<&str> = results.iter().map(|s| s.as_str()).collect(); + + // Handle multiple commits and perform the scan + handle_multiple_commits(repo, &commit_ids, scan, user) +} + +/// Handles uncommitted files in the repository and performs a scan for potential leaks. +/// +/// # Arguments +/// +/// * `repo` - A `Repository` object representing the repository. +/// * `repo_path` - The path to the repository. +/// * `scan` - A `Scan` object containing the rules, keywords, and allowlist for the scan. +/// +/// # Returns +/// +/// Returns a `Result` containing a `Results` object if the operation is successful, or an error if an error occurs during the process. +/// +/// # Errors +/// +/// This function may return an error if any of the following operations fail: +/// +/// * Opening a file for reading. +/// * Reading the contents of a file. +/// * Detecting uncommitted files using `detect_uncommitted_file` function. +/// +pub fn handle_uncommitted_files( + repo: Repository, + repo_path: &str, + scan: Scan, +) -> Result> { + let mut options = StatusOptions::new(); + options.include_untracked(true); + options.include_unmodified(false); + options.exclude_submodules(true); + + let statuses = repo.statuses(Some(&mut options))?; + + let mut uncommitted_files = Vec::new(); + for entry in statuses.iter() { + if let Some(path) = entry.path() { + let ab_path = format!("{}/{}", repo_path, path); + let mut file = File::open(ab_path)?; + let mut contents = String::new(); + file.read_to_string(&mut contents)?; + uncommitted_files.push((path.to_string(), contents)); + } + } + let mut results = Vec::new(); + for (path, content) in uncommitted_files.iter() { + let result = detect_uncommitted_file( + content, + path, + &scan.ruleslist, + &scan.allowlist, + scan.threads, + ); + if let Ok(output) = result { + if !output.is_empty() { + results.push(output); + } + } else if let Err(err) = result { + return Err(err); + } + } + let flattened: Vec = results.into_iter().flatten().collect(); + let returns = Results { + commits_number: 0, + outputs: flattened, + }; + Ok(returns) +} + +/// Handles all commits in the repository and performs a scan for potential leaks. +/// +/// # Arguments +/// +/// * `repo` - A `Repository` object representing the repository. +/// * `scan` - A `Scan` object containing the rules, keywords, and allowlist for the scan. +/// * `user` - A string representing the user performing the scan. +/// +/// # Returns +/// +/// Returns a `Result` containing a `Results` object if the operation is successful, or an error if an error occurs during the process. +/// +/// # Errors +/// +/// This function may return an error if any of the following operations fail: +/// +/// * Loading all commits in the repository using the `load_all_commits` function. +/// * Handling multiple commits using the `handle_multiple_commits` function. 
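+///
+/// # Example
+///
+/// A minimal sketch, assuming `scan` has been loaded; the repository path is a
+/// placeholder and the empty user string scans commits from all authors.
+///
+/// ```ignore
+/// let repo = Repository::open("workplace/repo")?;
+/// let results = handle_all_commits(repo, scan, "")?;
+/// for leak in &results.outputs {
+///     println!("{}:{} {}", leak.file, leak.line_number, leak.rule);
+/// }
+/// ```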
+/// +pub fn handle_all_commits( + repo: Repository, + scan: Scan, + user: &str, +) -> Result> { + // Load all commits in the repository + let all_commits = match load_all_commits(&repo) { + Ok(all_commits) => all_commits, + Err(_) => { + return Err(Box::new(CustomError::ObjectConvertFail)); + } + }; + let commit_ids: Vec<&str> = all_commits.iter().map(|s| s.as_str()).collect(); + handle_multiple_commits(repo, &commit_ids, scan, user) +} + +/// Handle the commit information by searching for secrets in the commit files. +/// +/// +/// # Arguments +/// +/// * `commit_info_list` - A slice of `CommitInfo` objects representing the commit information. +/// * `scan` - A `Scan` object containing the rules, keywords, and allowlist for secret detection. +/// +/// # Errors +/// +/// This function returns an `Err` variant if any error occurs during the secret detection process. +/// The error type is a boxed `dyn Error`, which allows for returning different types of error objects. +/// +pub fn handle_commit_info( + commit_info_list: &[CommitInfo], + scan: Scan, +) -> Result> { + let ruleslist = scan.ruleslist; + let allowlist = scan.allowlist; + let threads = scan.threads; + let chunk=scan.chunk.unwrap_or(10); + let results: Arc>> = Arc::new(Mutex::new(Vec::new())); + + commit_info_list.par_iter().for_each(|commit_info| { + let commit_results: Vec = commit_info + .files + .par_chunks(chunk) + .flat_map(|files_chunk| { + files_chunk + .iter() + .filter_map(|(file, content)| { + match detect_file(content, file, &ruleslist, &allowlist, commit_info, threads) { + Ok(output) => Some(output), + Err(_) => None, + } + }) + .flatten() + .collect::>() + }) + .collect(); + + let mut results = results.lock().unwrap(); + results.extend(commit_results); + }); + + let flattened: Vec = results + .lock() + .unwrap() + .clone(); + + let returns = Results { + commits_number: commit_info_list.len(), + outputs: flattened, + }; + + Ok(returns) +} + +// NOTE: The commented-out function can be tested after specifying the repo file +// #[cfg(test)] +// mod tests { +// use super::*; +// static VALID_PATH: &str = "tests/TestGitOperation"; + +// // Helper function to create a mock repository +// fn create_mock_repository() -> Repository { +// let repo = match load_repository(VALID_PATH) { +// Ok(repo) => repo, +// Err(e) => { +// panic!("Failed to load repository"); +// } +// }; +// repo +// } + +// // Helper function to create a mock scan +// fn create_mock_scan() -> Scan { +// let rule = Rule { +// description: String::from("Stripe Access Token"), +// id: String::from("stripe-access-token"), +// regex: String::from(r"(?i)(sk|pk)_(test|live)_[0-9a-z]{10,32}"), + +// keywords: vec![String::from("sk_test"), String::from("pk_test"),String::from("sk_live"), String::from("pk_live")], +// allowlist: None, +// }; +// let ruleslist:Vec=vec![rule]; + +// let keywords = vec![ +// String::from("pk_live"), +// String::from("sk_live"), +// String::from("sk_test"), +// String::from("pk_test"),]; + +// let allowlist = Allowlist { +// paths: vec![], +// commits: vec![ ], +// regex_target: String::from("match"), +// regexes: vec![ ], +// stopwords: vec![], +// }; + +// let scan=Scan{ +// allowlist, +// ruleslist, +// keywords +// }; +// scan +// } + +// // test handle_single_commit +// #[test] +// fn test_handle_single_commit() { +// let repo = create_mock_repository(); +// let scan = create_mock_scan(); +// let result = handle_single_commit(repo, "8bdca802af0514ce29947e20c6be1719974ad866", scan,""); +// assert!(result.is_ok()); +// match result 
{ +// Ok(output_items) => { +// assert_eq!(5, output_items.outputs[0].line_number); +// } +// Err(err) => { +// println!("Error: {}", err); +// assert!(false); +// } +// } +// } + +// // test handle_multiple_commits +// #[test] +// fn test_handle_multiple_commits() { + +// let repo = create_mock_repository(); +// let commit_ids = vec!["8bdca802af0514ce29947e20c6be1719974ad866", "25bc64b31ee8920e1cb1f4ea287b174df5cd9782",]; +// let scan = create_mock_scan(); +// let result = handle_multiple_commits(repo, &commit_ids, scan,""); + +// assert!(result.is_ok()); +// match result { +// Ok(output_items) => { +// assert_eq!(2, output_items.commits_number); +// } +// Err(err) => { +// println!("Error: {}", err); +// assert!(false); +// } +// } +// } + +// // test handle_commits_file +// #[test] +// fn test_handle_commits_file() { + +// let repo = create_mock_repository(); +// let file_name = "tests/files/commits.txt"; +// let scan = create_mock_scan(); + +// // Perform the handle_commits_file function +// let result = handle_commits_file(repo , file_name, scan,""); + +// assert!(result.is_ok()); +// match result { +// Ok(output_items) => { +// assert_eq!(2, output_items.commits_number); +// } +// Err(err) => { +// println!("Error: {}", err); +// assert!(false); +// } +// } +// } + +// // test handle_commit_range_by_time +// #[test] +// fn test_handle_commit_range_by_time() { +// let repo = create_mock_repository(); +// let since = "2023-05-20T00:00:00Z"; +// let until = "2023-05-26T00:00:00Z"; +// let scan = create_mock_scan(); +// let result = handle_commit_range_by_time(repo, since, until, scan,""); + +// // Assert the result +// assert!(result.is_ok()); +// match result { +// Ok(output_items) => { +// assert_eq!(8, output_items.commits_number); +// } +// Err(err) => { +// println!("Error: {}", err); +// assert!(false); +// } +// } +// } + +// // test test_handle_branches_by_name +// #[test] +// fn test_handle_branches_by_name() { +// let repo = create_mock_repository(); +// let branch_name = "secret"; +// let scan = create_mock_scan(); +// let result = handle_branches_by_name(repo, branch_name, scan); +// assert!(result.is_ok()); +// match result { +// Ok(output_items) => { +// assert_eq!(1, output_items.commits_number); +// } +// Err(err) => { +// println!("Error: {}", err); +// assert!(false); +// } +// } +// } + +// // rest handle_commit_range +// #[test] +// fn test_handle_commit_range() { + +// let repo = create_mock_repository(); +// let commit_from = Some("547b550d3ec4d1f24c12f7a4d4c8c0aaa045bd7b".to_string()); +// let commit_to = Some("42c8c6a9c48bc4d9406750f4d15b0d0cd5ab7597".to_string()); +// let scan = create_mock_scan(); +// let result = handle_commit_range(repo, commit_from, commit_to, scan,""); + +// assert!(result.is_ok()); +// match result { +// Ok(output_items) => { +// assert_eq!(4, output_items.commits_number); +// } +// Err(err) => { +// println!("Error: {}", err); +// assert!(false); +// } +// } +// } +// #[test] +// fn test_handle_all_commits() { + +// let repo = create_mock_repository(); +// let scan = create_mock_scan(); +// let user = "sonichen"; + +// let result = handle_all_commits(repo, scan, user); +// assert!(result.is_ok()); + +// } +// } diff --git a/sensleak-rs/src/service/mod.rs b/sensleak-rs/src/service/mod.rs new file mode 100644 index 00000000..737c5fd7 --- /dev/null +++ b/sensleak-rs/src/service/mod.rs @@ -0,0 +1,2 @@ +pub mod detect_service; +pub mod git_service; \ No newline at end of file diff --git a/sensleak-rs/src/utils/detect_utils.rs 
b/sensleak-rs/src/utils/detect_utils.rs new file mode 100644 index 00000000..15408d57 --- /dev/null +++ b/sensleak-rs/src/utils/detect_utils.rs @@ -0,0 +1,1071 @@ +use crate::errors::CustomError; +use crate::models::{Allowlist, Config, CsvResult, Leak, Rule, Scan}; +use csv::Writer; +use git2::Repository; +use regex::Regex; +use serde_json::json; +use std::collections::HashSet; +use std::error::Error; +use std::fs; +use std::fs::{File, OpenOptions}; +use std::io::{Seek, SeekFrom, Write}; +use toml::{to_string_pretty, Value}; + +/// Loads the scan configuration based on the specified repository and configuration settings. +/// +/// # Arguments +/// +/// * `repo` - A reference to the `Repository` object representing the target repository. +/// * `config` - A reference to the `Config` object containing the scan configuration settings. +/// +/// # Returns +/// +/// Returns a `Result` containing the loaded `Scan` object if successful, or an error of type `Box` if any issues occur. +/// +pub fn load_config(repo: &Repository, config: &Config) -> Result> { + let scan_result = if config.repo_config { + // Load config from target repo. Config file must be ".gitleaks.toml" or "gitleaks.toml" + let content = load_config_content_from_target_repo(repo)?; + match content { + Some(content) => load_config_from_target_repo(&content), + None => { + return Err(Box::new(CustomError::EmptyFileError)); + } + } + } else { + // Specify the search rule file. + load_config_file(&config.config) + }?; + + Ok(scan_result) +} + +/// Loads the content of a configuration file (`.gitleaks.toml` or `gitleaks.toml`) from the target repository. +/// +/// # Arguments +/// +/// * `repo` - A reference to a `Repository` object representing the target repository. +/// +/// # Returns +/// +/// Returns a `Result` containing an `Option` with the content of the configuration file if found, or `None` if the configuration file is not found in any commit. +/// +/// # Errors +/// +/// This function may return an error if any error occurs during the repository traversal or object retrieval. +/// +fn load_config_content_from_target_repo( + repo: &Repository, +) -> Result, Box> { + let head_commit = repo.head()?.peel_to_commit()?; + let mut walker = repo.revwalk()?; + walker.push(head_commit.id())?; + + // Iterate over all commits in the repository + for commit_id in walker { + let commit = repo.find_commit(commit_id?)?; + let tree = commit.tree()?; + + // Iterate over all entries in the tree + for entry in tree.iter() { + let file_name = entry.name().unwrap_or(""); + if file_name == ".gitleaks.toml" || file_name == "gitleaks.toml" { + let blob = entry.to_object(repo)?.peel_to_blob()?; + let content = String::from_utf8_lossy(blob.content()); + return Ok(Some(content.into())); + } + } + } + + Ok(None) +} + +/// Loads the configuration file and extracts the allowlist, ruleslist. +/// +/// # Arguments +/// +/// * `config_file_path` - The path to the configuration file. +/// * `repo_file_path` - The path of the repository file. +/// +/// # Returns +/// +/// Returns an `Ok` variant containing a tuple with the extracted allowlist, ruleslist, and keywords. +/// +/// # Errors +/// +/// Returns an `Err` variant if the configuration file cannot be loaded or if there are any errors during parsing. 
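+///
+/// # Example
+///
+/// A minimal sketch; the file name mirrors the bundled rule file and may be
+/// replaced with any compatible TOML configuration.
+///
+/// ```ignore
+/// let scan = load_config_file("gitleaks.toml")?;
+/// println!("loaded {} rules", scan.ruleslist.len());
+/// ```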
+/// +pub fn load_config_file(config_file_path: &str) -> Result> { + // Load config file + let toml_str = fs::read_to_string(config_file_path) + .map_err(|_| Box::new(CustomError::EmptyConfigFileError))?; + + // Parse config file + let config_file_content: Value = toml::from_str(&toml_str)?; + + // Config allowlist + let allowlist = config_allowlist(&config_file_content)?; + + // Config ruleslist and keywords + let ruleslist= config_ruleslist_and_keywords(&config_file_content)?; + + let scan = Scan { + allowlist, + ruleslist, + threads: None, + chunk: None, + }; + + Ok(scan) +} + +/// Loads the configuration from the target repository. +/// +/// # Arguments +/// +/// * `toml_str` - A TOML string representing the configuration file from the target repository. +/// +/// # Returns +/// +/// Returns an `Ok` variant containing a tuple with the extracted allowlist, ruleslist, and keywords. +/// +/// # Errors +/// +/// Returns an `Err` variant if there are any errors during parsing or extraction. +/// +fn load_config_from_target_repo(toml_str: &str) -> Result> { + // Load config file + let config_file_content: Value = toml::from_str(toml_str)?; + + // Config allowlist + let allowlist = config_allowlist(&config_file_content)?; + + // Config ruleslist and keywords + let ruleslist= config_ruleslist_and_keywords(&config_file_content)?; + + let scan = Scan { + allowlist, + ruleslist, + threads: None, + chunk: None, + }; + + Ok(scan) +} + +/// Extracts the allowlist from the config file. +/// +/// # Arguments +/// +/// * `config_file_content` - The TOML content of the configuration file. +/// * `repo_file_path` - The path of the repository file. +/// +/// # Returns +/// +/// Returns an `Ok` variant containing the extracted `Allowlist` object. +/// +fn config_allowlist(config_file_content: &Value) -> Result> { + let mut allowlist = Allowlist { + paths: Vec::new(), + commits: Vec::new(), + regex_target: String::from(""), + regexes: Vec::new(), + stopwords: Vec::new(), + }; + + // Get paths + if let Some(file_list) = config_file_content + .get("allowlist") + .and_then(|v| v.get("paths").and_then(|v| v.as_array())) + { + for path in file_list.iter() { + let path_str = path + .as_str() + .ok_or_else(|| Box::::from(CustomError::InvalidTomlFile))? + .to_string(); + allowlist.paths.push(path_str); + } + } + + // Get commit + if let Some(regex_list) = config_file_content + .get("allowlist") + .and_then(|v| v.get("commits").and_then(|v| v.as_array())) + { + allowlist.commits = regex_list + .iter() + .filter_map(|r| r.as_str()) + .map(|s| s.to_string()) + .collect(); + } + + // Get regex target (default to "match") + if let Some(target) = config_file_content + .get("allowlist") + .and_then(|v| v.get("regexTarget").and_then(|v| v.as_str())) + { + allowlist.regex_target = target.to_string(); + } + + // Get regexes + if let Some(regex_list) = config_file_content + .get("allowlist") + .and_then(|v| v.get("regexes").and_then(|v| v.as_array())) + { + allowlist.regexes = regex_list + .iter() + .filter_map(|r| r.as_str()) + .map(|s| s.to_string()) + .collect(); + } + + // Get stopwords + if let Some(stopwords_list) = config_file_content + .get("allowlist") + .and_then(|v| v.get("stopwords").and_then(|v| v.as_array())) + { + allowlist.stopwords = stopwords_list + .iter() + .filter_map(|r| r.as_str()) + .map(|s| s.to_string()) + .collect(); + } + + Ok(allowlist) +} + +/// Extracts the rules list and keywords from the config file. 
+/// +/// # Arguments +/// +/// * `config_file_content` - The TOML content of the configuration file. +/// * `repo_file_path` - The path of the repository file. +/// +/// # Returns +/// +/// Returns a tuple containing the extracted `ruleslist` and `keywords`. +/// * `ruleslist` - A vector of `Rule` objects representing the rules for detection. +/// * `keywords` - A vector of strings representing the keywords used for detection. +/// +fn config_ruleslist_and_keywords( + config_file_content: &Value, +) -> Result, Box> { + let mut ruleslist = vec![]; + + let regex_array = config_file_content + .get("rules") + .and_then(|v| v.as_array()) + .ok_or_else(|| Box::::from(CustomError::InvalidTomlFile))?; + + for rule in regex_array { + let description = rule + .get("description") + .and_then(|v| v.as_str().map(|s| s.to_string())) + .ok_or_else(|| Box::::from(CustomError::InvalidTomlFile))?; + let id = rule + .get("id") + .and_then(|v| v.as_str().map(|s| s.to_string())) + .ok_or_else(|| Box::::from(CustomError::InvalidTomlFile))?; + let regex = rule + .get("regex") + .and_then(|v| v.as_str().map(|s| s.to_string())) + .ok_or_else(|| Box::::from(CustomError::InvalidTomlFile))?; + // let entropy: Option = rule.get("entropy").map(|e| e.as_float().unwrap()); + let keywords_array = rule + .get("keywords") + .and_then(|v| v.as_array()) + .ok_or_else(|| Box::::from(CustomError::InvalidTomlFile))?; + + let mut rules_allowlist = Allowlist { + commits: vec![], + paths: vec![], + regex_target: String::new(), + regexes: vec![], + stopwords: vec![], + }; + + if rule.get("allowlist").is_none() { + let rule = Rule { + description, + id, + regex, + keywords: keywords_array + .iter() + .map(|kw| kw.as_str().unwrap().to_string()) + .collect(), + allowlist: None, + }; + ruleslist.push(rule); + continue; + } + + if let Some(allowlist_table) = rule.get("allowlist") { + if let Some(commits_array) = allowlist_table.get("commits").and_then(|v| v.as_array()) { + for commit in commits_array { + if let Some(commit_str) = commit.as_str() { + rules_allowlist.commits.push(commit_str.to_string()); + } + } + } + + if let Some(paths_array) = allowlist_table.get("paths").and_then(|v| v.as_array()) { + for path in paths_array { + if let Some(path_str) = path.as_str() { + rules_allowlist.paths.push(path_str.to_string()); + } + } + } + + rules_allowlist.regex_target = allowlist_table + .get("regexTarget") + .and_then(|v| v.as_str()) + .unwrap_or("") + .to_string(); + + if let Some(regexes_array) = allowlist_table.get("regexes").and_then(|v| v.as_array()) { + for regex in regexes_array { + if let Some(regex_str) = regex.as_str() { + rules_allowlist.regexes.push(regex_str.to_string()); + } + } + } + + if let Some(stopwords_array) = + allowlist_table.get("stopwords").and_then(|v| v.as_array()) + { + for stopword in stopwords_array { + if let Some(stopword_str) = stopword.as_str() { + rules_allowlist.stopwords.push(stopword_str.to_string()); + } + } + } + } + + let rule = Rule { + description, + id, + regex, + keywords: keywords_array + .iter() + .map(|kw| kw.as_str().unwrap().to_string()) + .collect(), + allowlist: Some(rules_allowlist), + }; + ruleslist.push(rule); + } + + Ok(ruleslist) +} + +/// Appends a rule to a TOML file. +/// +/// # Arguments +/// +/// * `rule` - A reference to the `Rule` object to be appended to the TOML file. +/// * `filename` - The name of the TOML file to which the rule should be appended. 
+/// +/// # Returns +/// +/// Returns `Ok(())` if the rule is successfully appended to the TOML file, or an error of type `Box` +/// if any issues occur. +/// +/// # Errors +/// +/// This function can return an error if there are any issues during the file operations, such as opening the file, +/// moving the file pointer, or writing the rule contents. +/// +pub fn append_rule_to_toml(rule: &Rule, filename: &str) -> Result<(), Box> { + // Open the file with read, write, and append options + let mut file = OpenOptions::new() + .read(true) + .write(true) + .append(true) + .open(filename)?; + + // Move the file pointer to the end of the file + file.seek(SeekFrom::End(0))?; + + // Write the start marker for a new [[rules]] section + file.write_all(b"[[rules]]\n")?; + + // Serialize the Rule struct to a TOML string + let toml_string = toml::to_string(rule)?; + + // Write the contents of the Rule + file.write_all(toml_string.as_bytes())?; + + // Write a newline character to separate different [[rules]] + file.write_all(b"\n")?; + + Ok(()) +} + +/// Deletes a rule with the specified ID from a TOML file. +/// +/// # Arguments +/// +/// * `file_path` - A string slice representing the path to the TOML file. +/// * `rule_id` - A string slice representing the ID of the rule to be deleted. +/// +/// # Returns +/// +/// Returns `Ok(())` if the rule with the specified ID is successfully deleted from the TOML file, or an error of +/// type `Box` if any issues occur. +/// +/// # Errors +/// +/// This function can return an error if there are any issues during the file operations, such as reading the file, +/// parsing the TOML content, modifying the data, or writing the modified TOML to the file. +/// +pub fn delete_rule_by_id(file_path: &str, rule_id: &str) -> Result<(), Box> { + // Read the content of the TOML file + let toml_content = fs::read_to_string(file_path)?; + + // Parse the TOML content + let mut toml_data: Value = toml::from_str(&toml_content)?; + + // Delete rules with the specified id + if let Some(rules) = toml_data.get_mut("rules") { + if let Some(rules_array) = rules.as_array_mut() { + rules_array.retain(|rule| { + if let Some(id) = rule.get("id") { + // Delete the rule based on the id + let rule_id_value = id.as_str().unwrap(); + rule_id_value != rule_id + } else { + true + } + }); + } + } + + // Convert the modified TOML data back to a string + let modified_toml = to_string_pretty(&toml_data)?; + + // Write the modified TOML to the file + fs::write(file_path, modified_toml)?; + + Ok(()) +} + + +/// Updates a rule with the specified ID in a TOML file. +/// +/// # Arguments +/// +/// * `file_path` - A string slice representing the path to the TOML file. +/// * `rule_id` - A string slice representing the ID of the rule to be updated. +/// * `new_rule` - A reference to the updated `Rule` object. +/// +/// # Returns +/// +/// Returns `Ok(())` if the rule with the specified ID is successfully updated in the TOML file, or an error of +/// type `Box` if any issues occur. 
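+///
+/// # Example
+///
+/// A minimal sketch; the rule contents below are illustrative placeholders.
+///
+/// ```ignore
+/// let new_rule = Rule {
+///     description: "Stripe Access Token (updated)".to_string(),
+///     id: "stripe-access-token".to_string(),
+///     regex: r"(?i)(sk|pk)_(test|live)_[0-9a-z]{10,32}".to_string(),
+///     keywords: vec!["sk_live".to_string(), "pk_live".to_string()],
+///     allowlist: None,
+/// };
+/// update_rule_by_id("gitleaks.toml", "stripe-access-token", &new_rule)?;
+/// ```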
+/// +pub fn update_rule_by_id(file_path: &str, rule_id: &str, new_rule: &Rule) -> Result<(), Box> { + + let toml_content = fs::read_to_string(file_path)?; + + let mut toml_data: toml::Value = toml::from_str(&toml_content)?; + + // Update rules with the specified ID + if let Some(rules) = toml_data.get_mut("rules") { + if let Some(rules_array) = rules.as_array_mut() { + for rule in rules_array.iter_mut() { + if let Some(id) = rule.get("id") { + let rule_id_value = id.as_str().unwrap(); + if rule_id_value == rule_id { + // Update the rule with the new values + *rule = toml::value::Value::try_from(new_rule)?; + break; + } + } + } + } + } + + + let modified_toml = toml::to_string_pretty(&toml_data)?; + + + fs::write(file_path, modified_toml)?; + + Ok(()) +} + + + + +/// Writes a JSON report with the provided `Leak` results to the specified file path. +/// +/// # Arguments +/// +/// * `file_path` - The file path where the JSON report will be written. +/// * `results` - A slice containing the `Leak` results to be included in the report. +/// +/// # Returns +/// +/// * `Result<(), Box>` - Returns `Ok(())` if the JSON report is written successfully, +/// or an `Err` variant containing the error information. +/// +pub fn write_json_report(file_path: &str, results: &[Leak]) -> Result<(), Box> { + let json_result = serde_json::to_string_pretty(results)?; + let mut file = File::create(file_path)?; + file.write_all(json_result.as_bytes())?; + Ok(()) +} + +/// Writes a SARIF report with the provided `Leak` results to the specified file path. +/// +/// # Arguments +/// +/// * `file_path` - The file path where the SARIF report will be written. +/// * `results` - A slice containing the `Leak` results to be included in the report. +/// +/// # Returns +/// +/// * `Result<(), Box>` - Returns `Ok(())` if the SARIF report is written successfully, +/// or an `Err` variant containing the error information. +/// +pub fn write_sarif_report(file_path: &str, results: &[Leak]) -> Result<(), Box> { + let sarif_result = convert_to_sarif(results)?; + let mut file = File::create(file_path)?; + file.write_all(sarif_result.as_bytes())?; + Ok(()) +} + +/// Converts the provided `Leak` results into a SARIF JSON string. +/// +/// # Arguments +/// +/// * `results` - A slice containing the `Leak` results to be converted. +/// +/// # Returns +/// +/// * `Result` - Returns a `String` containing the SARIF JSON if the conversion is +/// successful, or an `Error` if the conversion fails. 
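+///
+/// # Example
+///
+/// A minimal sketch, assuming `leaks` is a slice of previously detected `Leak` values:
+///
+/// ```ignore
+/// let sarif = convert_to_sarif(&leaks)?;
+/// assert!(sarif.contains("\"version\": \"2.1.0\""));
+/// ```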
+/// +fn convert_to_sarif(results: &[Leak]) -> Result { + let mut run_results = vec![]; + for result in results { + let location = json!({ + "physicalLocation": { + "artifactLocation": { + "uri": result.file + }, + "region": { + "startLine": result.line_number, + "snippet": { + "text": result.line + } + } + } + }); + + let run_result = json!({ + "message": { + "text": format!("{} {}", result.rule,"detected!") + }, + "properties": { + "commit": result.commit, + "offender": result.offender, + "date": result.date, + "author": result.author, + "email": result.email, + "commitMessage": result.commit_message, + + "repo": result.repo + }, + "locations": [location] + }); + + run_results.push(run_result); + } + + let sarif_json = json!({ + "$schema": "https://schemastore.azurewebsites.net/schemas/json/sarif-2.1.0-rtm.5.json", + "version": "2.1.0", + "runs": [ + { + "tool": { + "driver": { + "name": "Gitleaks", + "semanticVersion": "v6.2.0", + "rules": [] + } + }, + "results": run_results + } + ] + }); + + serde_json::to_string_pretty(&sarif_json) +} + +/// Writes a CSV report with the provided results to the specified file path. +/// +/// # Arguments +/// +/// * `file_path` - The file path where the CSV report will be written. +/// * `results` - A slice containing the `Leak` results to be written to the CSV. +/// +/// # Returns +/// +/// * `Result<(), Box>` - Returns `Ok(())` if the CSV report is written successfully, +/// or an `Err` variant containing the error information. +pub fn write_csv_report(file_path: &str, results: &[Leak]) -> Result<(), Box> { + let mut data: Vec = vec![]; + for leak in results { + let item = CsvResult { + repo: leak.repo.clone(), + line_number: leak.line_number, + line: leak.line.clone(), + offender: leak.offender.clone(), + commit: leak.commit.clone(), + rule: leak.rule.clone(), + commit_message: leak.commit_message.clone(), + author: leak.author.clone(), + email: leak.email.clone(), + file: leak.file.clone(), + date: leak.date.clone(), + }; + data.push(item); + } + let file = File::create(file_path)?; + let mut writer = Writer::from_writer(file); + for item in data { + writer.serialize(item)?; + } + writer.flush()?; + + Ok(()) +} + +/// Check if the provided `path` is in the allowlist of paths. +/// +/// +/// # Arguments +/// +/// * `path` - The path to check against the allowlist paths. +/// * `allowlist_paths` - A slice of strings representing the allowlist paths. +/// +/// # Returns +/// +/// Returns `true` if the `path` is found in the allowlist paths, otherwise `false`. +/// +pub fn is_path_in_allowlist(path: &str, allowlist_paths: &[String]) -> bool { + for allowlist_path in allowlist_paths { + if is_regex(allowlist_path) { + let allowlist_regex = Regex::new(allowlist_path).unwrap(); + if allowlist_regex.is_match(path) { + return true; + } + } else { + for allowlist_path in allowlist_paths { + if allowlist_path == path { + return true; + } + } + } + } + false +} + +/// Checks if a commit is present in the allowlist of commits. +/// +/// # Arguments +/// +/// * `commit` - The commit to check. +/// * `allow_commits` - A slice containing the allowlist of commits. +/// +/// # Returns +/// +/// * `bool` - Returns `true` if the commit is found in the allowlist, otherwise `false`. 
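+///
+/// # Example
+///
+/// A minimal sketch with placeholder commit IDs:
+///
+/// ```ignore
+/// let allowed = vec!["abcd1234".to_string()];
+/// assert!(is_commit_in_allowlist("abcd1234", &allowed));
+/// assert!(!is_commit_in_allowlist("ffff0000", &allowed));
+/// ```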
+///
+pub fn is_commit_in_allowlist(commit: &str, allow_commits: &[String]) -> bool {
+    for allowlist_commit in allow_commits {
+        if commit == allowlist_commit {
+            return true;
+        }
+    }
+    false
+}
+
+/// Checks whether the provided `test_string` matches any of the regular expressions in the `regex_array`.
+///
+/// # Arguments
+///
+/// * `regex_array` - A vector of regular expression strings to check against the `test_string`.
+/// * `test_string` - The string to test against the regular expressions in `regex_array`.
+///
+/// # Returns
+///
+/// Returns `true` if the `test_string` matches any of the regular expressions in `regex_array`, otherwise `false`.
+///
+pub fn is_string_matched(regex_array: &[String], test_string: &str) -> bool {
+    for regex_str in regex_array.iter() {
+        let regex = Regex::new(regex_str).unwrap();
+        if regex.is_match(test_string) {
+            return true;
+        }
+    }
+    false
+}
+
+/// Checks whether the provided `content` contains any of the strings in the given `array`. It is used to find stopwords.
+///
+/// # Arguments
+///
+/// * `array` - A vector of strings to check against the `content`.
+/// * `content` - The string to check for the presence of any of the strings in `array`.
+///
+/// # Returns
+///
+/// Returns `true` if any of the strings in `array` is found in the `content`, otherwise `false`.
+///
+pub fn is_contains_strs(array: &[String], content: &str) -> bool {
+    for item in array.iter() {
+        if content.contains(item) {
+            return true;
+        }
+    }
+    false
+}
+
+/// Checks whether a given text contains a link.
+///
+/// # Arguments
+///
+/// * `text` - The text to check for links.
+///
+/// # Returns
+///
+/// * `bool` - Returns `true` if the text contains a link, otherwise `false`.
+///
+pub fn is_link(text: &str) -> bool {
+    let re = Regex::new(r"(?i)\b((?:https?://|www\.)\S+)\b").unwrap();
+    re.is_match(text)
+}
+
+/// Checks whether the given string looks like a regular expression pattern.
+///
+/// # Arguments
+///
+/// * `s` - The string to check for regular expression syntax.
+///
+/// # Returns
+///
+/// Returns `true` if the string is a regular expression, otherwise `false`.
+///
+fn is_regex(s: &str) -> bool {
+    // TODO: Improve the regular expression check
+    s.starts_with('(') && s.ends_with('$') && !s.starts_with('/')
+}
+
+/// Removes duplicates from `array1` based on the elements in `array2`.
+///
+/// # Arguments
+///
+/// * `array1` - The first vector containing elements to remove duplicates from.
+/// * `array2` - The second vector used to determine the duplicates.
+///
+/// # Type Constraints
+///
+/// `T` must implement the `Eq`, `std::hash::Hash`, and `Clone` traits.
+///
+/// # Returns
+///
+/// Returns a new vector that contains the elements from `array1` without the duplicates
+/// that are present in `array2`.
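+///
+/// # Example
+///
+/// A small sketch with illustrative integer vectors:
+///
+/// ```ignore
+/// let kept = remove_duplicates(vec![1, 1, 2, 3], vec![3, 4]);
+/// assert_eq!(kept, vec![1, 1, 2]);
+/// ```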
+/// +pub fn remove_duplicates( + array1: Vec, + array2: Vec, +) -> Vec { + let set: HashSet<_> = array2.into_iter().collect(); + array1.into_iter().filter(|x| !set.contains(x)).collect() +} + +#[cfg(test)] +mod tests { + use super::*; + static CONFIG_FILE_PATH: &str = "examples/test_gitleaks.toml"; + + fn mock_config_content() -> Value { + toml::from_str::( + r#" + [[rules]] + description = "Rule 1" + id = "rule1" + regex = "\\d+" + entropy = 0.5 + keywords = ["keyword1", "keyword2"] + + [[rules]] + description = "Rule 2" + id = "rule2" + regex = "[A-Z]+" + entropy = 0.3 + keywords = ["keyword3"] + + [[rules]] + description = "Rule 3" + id = "rule3" + regex = "[a-z]+" + entropy = 0.2 + keywords = ["keyword4", "keyword5"] + + [[rules]] + description = "Rule 4" + id = "rule4" + regex = "\\w+" + entropy = 0.4 + keywords = ["keyword6"] + "#, + ) + .unwrap() + } + + fn mock_leaks() -> Vec { + vec![Leak { + line: "Sensitive information".to_string(), + line_number: 42, + offender: "John Doe".to_string(), + commit: "abcd1234".to_string(), + repo: "my-repo".to_string(), + rule: "password_leak".to_string(), + commit_message: "Fix security issue".to_string(), + author: "John Doe".to_string(), + email: "john@example.com".to_string(), + file: "path/to/file.txt".to_string(), + date: "2023-05-30".to_string(), + }] + } + #[test] + fn test_load_config() { + let result = load_config_file(CONFIG_FILE_PATH); + assert!(result.is_ok()); + } + + #[test] + fn test_config_allowlist_valid_config() { + let result = config_allowlist(&mock_config_content()); + assert!(result.is_ok()); + } + #[test] + fn test_config_ruleslist_and_keywords() { + let result = config_ruleslist_and_keywords(&mock_config_content()); + + assert!(result.is_ok()); + let ruleslist = result.unwrap(); + + assert_eq!(ruleslist.len(), 4); + + let rule1 = &ruleslist[0]; + assert_eq!(rule1.description, "Rule 1"); + assert_eq!(rule1.id, "rule1"); + assert_eq!(rule1.regex, "\\d+"); + assert_eq!(rule1.keywords, vec!["keyword1", "keyword2"]); + assert!(rule1.allowlist.is_none()); + + let rule2 = &ruleslist[1]; + assert_eq!(rule2.description, "Rule 2"); + assert_eq!(rule2.id, "rule2"); + assert_eq!(rule2.regex, "[A-Z]+"); + assert_eq!(rule2.keywords, vec!["keyword3"]); + assert!(rule2.allowlist.is_none()); + + let rule3 = &ruleslist[2]; + assert_eq!(rule3.description, "Rule 3"); + assert_eq!(rule3.id, "rule3"); + assert_eq!(rule3.regex, "[a-z]+"); + assert_eq!(rule3.keywords, vec!["keyword4", "keyword5"]); + assert!(rule3.allowlist.is_none()); + + let rule4 = &ruleslist[3]; + assert_eq!(rule4.description, "Rule 4"); + assert_eq!(rule4.id, "rule4"); + assert_eq!(rule4.regex, "\\w+"); + assert_eq!(rule4.keywords, vec!["keyword6"]); + assert!(rule4.allowlist.is_none()); + } + + #[test] + fn test_write_rule_to_toml() { + let rule = Rule { + description: "Adafruit API Key".to_string(), + id: "adafruit-api-key".to_string(), + regex: r#"(?i)(?:adafruit)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9_-]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)"#.to_string(), + keywords: vec!["adafruit".to_string()], + allowlist: None, + }; + let result = append_rule_to_toml(&rule, CONFIG_FILE_PATH); + assert!(result.is_ok()); + } + + #[test] + fn test_delete_rule_by_id() { + if let Err(err) = delete_rule_by_id(CONFIG_FILE_PATH, "adafruit-api-key") { + eprintln!("Error: {}", err); + } + } + + #[test] + fn test_update_rule_by_id() { + let rule = Rule { + description: "11111111111".to_string(), + id: "stripe-access-token".to_string(), 
+ regex: r#"(?i)(?:adafruit)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([a-z0-9_-]{32})(?:['|\"|\n|\r|\s|\x60|;]|$)"#.to_string(), + keywords: vec!["adafruit".to_string()], + allowlist: None, + }; + let result = update_rule_by_id( CONFIG_FILE_PATH,&rule.id,&rule,); + + assert!(result.is_ok()); + } + #[test] + fn test_is_path_in_allowlist_regex_not_match() { + let path = "/path/to/file.txt"; + let allowlist_paths = vec!["/other/.*\\.txt".to_string()]; + let result = is_path_in_allowlist(path, &allowlist_paths); + assert_eq!(result, false); + } + + #[test] + fn test_is_path_in_allowlist_exact_match() { + let path = "tests/files/gitleaks.toml"; + let allowlist_paths = vec!["tests/files/gitleaks.toml".to_string()]; + let result = is_path_in_allowlist(path, &allowlist_paths); + assert_eq!(result, true); + } + + #[test] + fn test_is_string_matched_match() { + let regex_array = vec!["^hello".to_string(), "world$".to_string()]; + let test_string = "hello, world!"; + let result = is_string_matched(®ex_array, test_string); + assert_eq!(result, true); + } + + #[test] + fn test_is_string_matched_not_match() { + let regex_array = vec!["^hello".to_string(), "world$".to_string()]; + let test_string = "goodbye"; + let result = is_string_matched(®ex_array, test_string); + assert_eq!(result, false); + } + + #[test] + fn test_is_contains_strs_contains() { + let array = vec![ + "apple".to_string(), + "banana".to_string(), + "orange".to_string(), + ]; + let content = "I like to eat bananas"; + let result = is_contains_strs(&array, content); + assert_eq!(result, true); + } + + #[test] + fn test_is_contains_strs_not_contains() { + let array = vec![ + "apple".to_string(), + "banana".to_string(), + "orange".to_string(), + ]; + let content = "I like to eat grapes"; + let result = is_contains_strs(&array, content); + assert_eq!(result, false); + } + + #[test] + fn test_is_regex_valid_case() { + let input = "(regex$"; + let result = is_regex(input); + assert_eq!(result, true); + } + + #[test] + fn test_is_regex_invalid_case() { + let input = "(regex"; + let result = is_regex(input); + assert_eq!(result, false); + } + + #[test] + fn test_is_regex_empty_string() { + let input = ""; + let result = is_regex(input); + assert_eq!(result, false); + } + + #[test] + fn test_remove_duplicates() { + // Test case 1 + let array1 = vec![1, 1, 2, 3, 4, 5]; + let array2 = vec![3, 4, 5, 6, 7]; + let result = remove_duplicates(array1, array2); + assert_eq!(result, vec![1, 1, 2]); + } + + #[test] + fn test_is_link_with_valid_links() { + assert!(is_link("https://www.example.com")); + assert!(is_link("http://example.com")); + assert!(is_link("www.example.com")); + assert!(is_link("www.example.com/path")); + assert!(is_link("www.example.com?q=query")); + } + + #[test] + fn test_is_link_with_invalid_links() { + assert!(!is_link("example.com")); + assert!(!is_link("example.com/path")); + assert!(!is_link("example.com?q=query")); + assert!(!is_link("not a link")); + } + + // test report functions + #[test] + fn test_write_json_report() { + let temp_file = tempfile::NamedTempFile::new().unwrap(); + let file_path = temp_file.path().to_str().unwrap(); + + write_json_report(file_path, &&mock_leaks()).unwrap(); + + let json_content = fs::read_to_string(file_path).unwrap(); + + assert!(json_content.contains("Sensitive information")); + assert!(json_content.contains("path/to/file.txt")); + } + + #[test] + fn test_write_sarif_report() { + let temp_file = tempfile::NamedTempFile::new().unwrap(); + 
+        let file_path = temp_file.path().to_str().unwrap();
+
+        write_sarif_report(file_path, &mock_leaks()).unwrap();
+
+        let sarif_content = fs::read_to_string(file_path).unwrap();
+
+        assert!(sarif_content.contains("Sensitive information"));
+        assert!(sarif_content.contains("path/to/file.txt"));
+    }
+
+    #[test]
+    fn test_write_csv_report() {
+        let temp_file = tempfile::NamedTempFile::new().unwrap();
+        let file_path = temp_file.path().to_str().unwrap();
+
+        write_csv_report(file_path, &mock_leaks()).unwrap();
+
+        let csv_content = fs::read_to_string(file_path).unwrap();
+
+        assert!(csv_content.contains("Sensitive information"));
+        assert!(csv_content.contains("path/to/file.txt"));
+    }
+}
diff --git a/sensleak-rs/src/utils/git_util.rs b/sensleak-rs/src/utils/git_util.rs
new file mode 100644
index 00000000..7d7009a4
--- /dev/null
+++ b/sensleak-rs/src/utils/git_util.rs
@@ -0,0 +1,661 @@
+extern crate chrono;
+extern crate git2;
+use crate::errors::CustomError;
+use crate::models::{CommitInfo, Config, Scan};
+use crate::utils::detect_utils::{is_commit_in_allowlist, is_link, is_path_in_allowlist};
+use chrono::Local;
+use chrono::{DateTime, FixedOffset, NaiveDate, NaiveDateTime, NaiveTime, TimeZone, Utc};
+use git2::Repository;
+use regex::Regex;
+use std::error::Error;
+use std::fs;
+
+/// Loads a repository from the specified path.
+///
+/// # Arguments
+///
+/// * repo_path - A string slice that represents the path to the repository.
+///
+/// # Returns
+///
+/// Returns a Result containing a Repository if the repository is loaded successfully, or an error if the repository fails to load.
+///
+pub fn load_repository(repo_path: &str) -> Result<Repository, Box<dyn Error>> {
+    let repo = match Repository::open(repo_path) {
+        Ok(repo) => repo,
+        Err(_) => {
+            return Err(Box::new(CustomError::FailLoadRepo));
+        }
+    };
+
+    Ok(repo)
+}
+
+/// Retrieves the name of the repository from the provided Repository object.
+///
+/// # Arguments
+///
+/// * repo - A reference to a Repository object.
+///
+/// # Returns
+///
+/// Returns a Result containing the name of the repository as a String if successful, or an error if the repository name is invalid or cannot be determined.
+///
+/// # Errors
+///
+/// This function may return the following errors:
+///
+/// * CustomError::InvalidRepoName - Indicates that the repository name is invalid.
+///
+pub fn config_repo_name(repo: &Repository) -> Result<String, Box<dyn Error>> {
+    let repo_path = repo.path();
+    let repo_dir = repo_path.parent().ok_or(CustomError::InvalidRepoName)?;
+
+    let repo_name = repo_dir
+        .file_name()
+        .and_then(|s| s.to_str())
+        .unwrap_or("")
+        .to_string();
+    if repo_name.ends_with(".git") {
+        Ok(repo_name[..repo_name.len() - 4].to_string())
+    } else {
+        Ok(repo_name)
+    }
+}
+
+/// Traverses the tree in batches and collects file paths and contents.
+///
+/// Parameters:
+/// - repo: Reference to the repository.
+/// - tree: Reference to the tree object.
+/// - path: Path of the current tree.
+/// - files: Mutable vector to store the file paths and contents.
+/// - scan: Reference to the Scan object.
+/// - commit_id: Commit ID.
+///
+/// Returns:
+/// - Ok(()): If the traversal is successful.
+/// - Err(Box<dyn Error>): If an error occurs during traversal.
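+///
+/// A minimal usage sketch (illustrative only, not compiled as a doctest; it assumes
+/// a repo, a scan, and a commit oid are already in scope):
+///
+/// ```ignore
+/// let commit = repo.find_commit(oid)?;
+/// let tree = commit.tree()?;
+/// let mut files: Vec<(String, String)> = Vec::new();
+/// traverse_tree(&repo, &tree, "", &mut files, &scan, commit.id())?;
+/// // `files` now holds (path, content) pairs for every blob that is not allowlisted.
+/// ```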
+pub fn traverse_tree(
+    repo: &Repository,
+    tree: &git2::Tree,
+    path: &str,
+    files: &mut Vec<(String, String)>,
+    scan: &Scan,
+    commit_id: git2::Oid,
+) -> Result<(), Box<dyn Error>> {
+    for entry in tree.iter() {
+        let entry_path = format!("{}/{}", path, entry.name().unwrap());
+        // Skip entry if it is in the allowlist paths, in the allowlist commits, or is an ignored path
+        if (is_path_in_allowlist(&entry_path, &scan.allowlist.paths))
+            || (is_commit_in_allowlist(&commit_id.to_string(), &scan.allowlist.commits))
+            || is_ignored_path(&entry_path)
+        {
+            continue;
+        }
+        if entry.kind() == Some(git2::ObjectType::Blob) {
+            let blob = repo
+                .find_blob(entry.id())
+                .map_err(|_| CustomError::ObjectNotFound)?;
+            let content = String::from_utf8_lossy(blob.content());
+            files.push((entry_path, content.to_string()));
+        } else if entry.kind() == Some(git2::ObjectType::Tree) {
+            let subtree = repo
+                .find_tree(entry.id())
+                .map_err(|_| CustomError::RepoInternalError)?;
+            traverse_tree(repo, &subtree, &entry_path, files, scan, commit_id)?;
+        }
+    }
+    Ok(())
+}
+
+/// Skips files or directories whose names begin with ".".
+fn is_ignored_path(path: &str) -> bool {
+    let path_segments: Vec<&str> = path.split('/').collect();
+    for item in path_segments {
+        if item.starts_with('.') {
+            return true;
+        }
+    }
+    false
+}
+
+/// Retrieves commit information from the given Repository and Commit.
+///
+/// # Arguments
+///
+/// * repo - A reference to a Repository object.
+/// * commit - A reference to a Commit object representing the commit to retrieve information from.
+///
+/// # Returns
+///
+/// Returns a Result containing a CommitInfo struct if the retrieval is successful, or an error if an error occurs during the retrieval.
+///
+/// # Errors
+///
+/// This function may return the following errors:
+///
+/// * CustomError::InvalidRepoName - Indicates that the repository name is invalid.
+/// * CustomError::ObjectNotFound - Indicates that an object in the repository is not found.
+/// * CustomError::RepoInternalError - Indicates an internal error in the repository.
+///
+#[allow(deprecated)]
+pub fn config_commit_info(
+    repo: &Repository,
+    commit: &git2::Commit,
+    scan: &Scan,
+) -> Result<CommitInfo, Box<dyn Error>> {
+    // Config info
+    let commit_id = commit.id();
+    let author = commit.author();
+    let email = author.email().unwrap_or("").to_string();
+    let commit_message = commit.message().unwrap_or("").to_string();
+    let date = Utc.timestamp(commit.time().seconds(), 0);
+    let offset = FixedOffset::west(commit.time().offset_minutes() * 60);
+    let date = offset.from_utc_datetime(&date.naive_utc());
+    let mut files = Vec::new();
+
+    let repo_name = match config_repo_name(repo) {
+        Ok(repo_name) => repo_name,
+        Err(_) => {
+            return Err(Box::new(CustomError::InvalidRepoName));
+        }
+    };
+
+    // Retrieve the tree of the commit
+    let tree = commit.tree().map_err(|_| CustomError::ObjectNotFound)?;
+
+    // Traverse the tree to get the file paths and content
+    traverse_tree(repo, &tree, "", &mut files, scan, commit_id)
+        .map_err(|_| CustomError::RepoInternalError)?;
+
+    let commit_info = CommitInfo {
+        repo: repo_name,
+        commit: commit_id,
+        author: author.name().unwrap_or("").to_string(),
+        email,
+        commit_message,
+        date,
+        files,
+    };
+
+    Ok(commit_info)
+}
+
+/// Loads all commit IDs from the repository in topological order.
+///
+/// # Arguments
+///
+/// * repo - A reference to a Repository object representing the repository.
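+///
+/// # Example
+///
+/// A minimal sketch (illustrative only, not compiled as a doctest; the path is hypothetical):
+///
+/// ```ignore
+/// let repo = load_repository("path/to/repo")?;
+/// let commit_ids = load_all_commits(&repo)?;
+/// println!("walked {} commits", commit_ids.len());
+/// ```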
+///
+/// # Returns
+///
+/// Returns a Result containing a vector of commit IDs (Vec<String>) if the operation is successful, or an error if an error occurs during the process.
+///
+/// # Errors
+///
+/// This function may return the following errors:
+///
+/// * CustomError::AccessWalkerError - Indicates an error occurred while accessing the revision walker.
+/// * CustomError::PushWalkerHeadError - Indicates an error occurred while pushing the head commit to the revision walker or setting the sorting order.
+/// * CustomError::WalkerSortError - Indicates an error occurred while sorting the revision walker.
+/// * CustomError::RepoCommitError - Indicates an error occurred while finding a commit in the repository.
+///
+pub fn load_all_commits(repo: &Repository) -> Result<Vec<String>, Box<dyn Error>> {
+    let mut revwalk = repo.revwalk().map_err(|_| CustomError::AccessWalkerError)?;
+
+    revwalk
+        .push_head()
+        .map_err(|_| CustomError::PushWalkerHeadError)?;
+    revwalk
+        .set_sorting(git2::Sort::TOPOLOGICAL)
+        .map_err(|_| CustomError::PushWalkerHeadError)?;
+
+    let mut commits = Vec::new();
+
+    for oid in revwalk {
+        let oid = oid.map_err(|_| CustomError::WalkerSortError)?;
+        let commit = repo
+            .find_commit(oid)
+            .map_err(|_| CustomError::RepoCommitError)?;
+        let commit_id = commit.id().to_string();
+        commits.push(commit_id);
+    }
+
+    Ok(commits)
+}
+
+/// Loads a subset of commits based on specified conditions.
+///
+/// # Arguments
+///
+/// * commit_from - An optional string representing the start commit ID.
+/// * commit_to - An optional string representing the end commit ID.
+/// * commits - A slice of strings representing the available commit IDs.
+///
+/// # Returns
+///
+/// Returns a vector of commit IDs as strings, representing the subset of commits based on
+/// the specified conditions. If the start commit is after the end commit or if either commit
+/// is not found in the input commits, an empty vector is returned.
+///
+pub fn load_commits_by_conditions(
+    commit_from: Option<String>,
+    commit_to: Option<String>,
+    commits: &[String],
+) -> Vec<String> {
+    match (commit_from, commit_to) {
+        (Some(start_commit), Some(end_commit)) => {
+            let start_index = commits.iter().position(|commit| *commit == start_commit);
+            let end_index = commits.iter().position(|commit| *commit == end_commit);
+
+            if let (Some(start), Some(end)) = (start_index, end_index) {
+                if start <= end {
+                    commits[start..=end].to_vec()
+                } else {
+                    Vec::new()
+                }
+            } else {
+                Vec::new()
+            }
+        }
+        _ => Vec::new(),
+    }
+}
+
+/// Loads all object IDs from the given Repository.
+///
+/// # Arguments
+///
+/// * repo - A reference to a Repository object.
+///
+/// # Returns
+///
+/// Returns a Result containing a vector of object IDs (Vec<git2::Oid>) if the loading is successful, or an error if an error occurs during the loading.
+///
+/// # Errors
+///
+/// This function may return the following errors:
+///
+/// * CustomError::ObjectNotAccess - Indicates that the object database cannot be accessed.
+/// * CustomError::RepoInternalError - Indicates an internal error while iterating the object database.
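+///
+/// # Example
+///
+/// A minimal sketch (illustrative only, not compiled as a doctest; the path is hypothetical):
+///
+/// ```ignore
+/// let repo = load_repository("path/to/repo")?;
+/// let object_ids = load_all_object_ids(&repo)?;
+/// println!("{} objects in the object database", object_ids.len());
+/// ```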
+///
+pub fn load_all_object_ids(repo: &Repository) -> Result<Vec<git2::Oid>, Box<dyn Error>> {
+    let mut object_ids = Vec::new();
+    let odb = repo.odb().map_err(|_| CustomError::ObjectNotAccess)?;
+
+    odb.foreach(|id| {
+        object_ids.push(*id);
+        true
+    })
+    .map_err(|_| CustomError::RepoInternalError)?;
+
+    Ok(object_ids)
+}
+
+/// Parses a date string into a DateTime<Utc> object.
+///
+/// # Arguments
+///
+/// * input - A string slice representing the date to parse. The expected format is "%Y-%m-%d".
+/// * mytype - A string slice indicating the type of datetime to create. It can be either "start" or any other value.
+///
+/// # Returns
+///
+/// Returns a Result containing a DateTime<Utc> object if the parsing is successful, or an error if an error occurs during the parsing.
+///
+/// # Errors
+///
+/// This function may return the following errors:
+///
+/// * CustomError::InvalidDateFormat - Indicates that the input date format is invalid.
+/// * CustomError::InvalidTimeFormat - Indicates that the time format is invalid.
+///
+pub fn parse_date_to_datetime(input: &str, mytype: &str) -> Result<DateTime<Utc>, Box<dyn Error>> {
+    let date =
+        NaiveDate::parse_from_str(input, "%Y-%m-%d").map_err(|_| CustomError::InvalidDateFormat)?;
+
+    let time: NaiveTime;
+    if mytype == "start" {
+        if let Some(t) = NaiveTime::from_hms_opt(0, 0, 0) {
+            time = t;
+        } else {
+            return Err(Box::new(CustomError::InvalidTimeFormat));
+        }
+    } else if let Some(t) = NaiveTime::from_hms_opt(23, 59, 59) {
+        time = t;
+    } else {
+        return Err(Box::new(CustomError::InvalidTimeFormat));
+    }
+
+    let datetime = NaiveDateTime::new(date, time);
+    let datetime_utc = DateTime::from_utc(datetime, Utc);
+    Ok(datetime_utc)
+}
+
+/// Checks if the input string has a valid date format of "YYYY-MM-DD".
+///
+/// # Arguments
+///
+/// * input - The string to be checked for date format validity.
+///
+/// # Returns
+///
+/// Returns true if the input string has a valid date format, otherwise false.
+pub fn is_valid_date_format(input: &str) -> bool {
+    if let Ok(date) = NaiveDate::parse_from_str(input, "%Y-%m-%d") {
+        let formatted = date.format("%Y-%m-%d").to_string();
+        return formatted == input;
+    }
+    false
+}
+
+/// Loads the content of a configuration file (.gitleaks.toml or gitleaks.toml) from the target repository.
+///
+/// # Arguments
+///
+/// * repo - A reference to a Repository object representing the target repository.
+///
+/// # Returns
+///
+/// Returns a Result containing an Option<String> with the content of the configuration file if found, or None if the configuration file is not found in any commit.
+///
+/// # Errors
+///
+/// This function may return an error if any error occurs during the repository traversal or object retrieval.
+///
+pub fn load_config_content_from_target_repo(
+    repo: &Repository,
+) -> Result<Option<String>, Box<dyn Error>> {
+    let head_commit = repo.head()?.peel_to_commit()?;
+    let mut walker = repo.revwalk()?;
+    walker.push(head_commit.id())?;
+
+    // Iterate over all commits in the repository
+    for commit_id in walker {
+        let commit = repo.find_commit(commit_id?)?;
+        let tree = commit.tree()?;
+
+        // Iterate over all entries in the tree
+        for entry in tree.iter() {
+            let file_name = entry.name().unwrap_or("");
+            if file_name == ".gitleaks.toml" || file_name == "gitleaks.toml" {
+                let blob = entry.to_object(repo)?.peel_to_blob()?;
+                let content = String::from_utf8_lossy(blob.content());
+                return Ok(Some(content.into()));
+            }
+        }
+    }
+
+    Ok(None)
+}
+
+/// Extracts the repository name from a given URL.
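+///
+/// Example (illustrative only, not compiled as a doctest; mirrors the unit tests at the bottom of this file):
+///
+/// ```ignore
+/// assert_eq!(extract_repo_name("https://github.com/user/repo.git"), Some("repo".to_string()));
+/// assert_eq!(extract_repo_name("https://github.com/user/repo"), None);
+/// ```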
+///
+/// # Arguments
+///
+/// * url - A string slice representing the URL of the repository.
+///
+/// # Returns
+///
+/// Returns an Option<String> containing the extracted repository name if it matches the expected format, or None if the extraction fails.
+///
+pub fn extract_repo_name(url: &str) -> Option<String> {
+    let re = Regex::new(r"/([^/]+)\.git$").unwrap();
+    if let Some(captures) = re.captures(url) {
+        if let Some(repo_name) = captures.get(1) {
+            return Some(repo_name.as_str().to_string());
+        }
+    }
+    None
+}
+
+/// Clones or loads a repository based on the provided configuration.
+///
+/// # Arguments
+///
+/// * config - A reference to a Config object containing the repository information.
+///
+/// # Returns
+///
+/// Returns a Result containing a Repository object if the operation is successful, or an error if an error occurs during cloning or loading.
+///
+/// # Errors
+///
+/// This function may return the following errors:
+///
+/// * CustomError::FailDeleteDir - Indicates that the directory removal operation failed.
+/// * CustomError::FailCreateDir - Indicates that the directory creation operation failed.
+/// * CustomError::FailCloneRepo - Indicates that the repository cloning operation failed.
+/// * CustomError::FailLoadRepo - Indicates that the repository loading operation failed.
+///
+#[warn(clippy::needless_return)]
+pub fn clone_or_load_repository(config: &Config) -> Result<Repository, Box<dyn Error>> {
+    if is_link(&config.repo) {
+        let repo_path = match &config.disk {
+            Some(disk) => disk.to_string(),
+            None => {
+                let dest = "workplace/";
+                let mut repo_path = String::new();
+                if let Some(name) = extract_repo_name(&config.repo) {
+                    repo_path = format!("{}{}", dest, name);
+                }
+
+                if fs::metadata(&repo_path).is_ok() {
+                    match fs::remove_dir_all(&repo_path) {
+                        Ok(_) => {}
+                        Err(_) => {
+                            return Err(Box::new(CustomError::FailDeleteDir));
+                        }
+                    }
+                }
+
+                match fs::create_dir(&repo_path) {
+                    Ok(_) => {}
+                    Err(_) => {
+                        return Err(Box::new(CustomError::FailCreateDir));
+                    }
+                }
+                repo_path
+            }
+        };
+        match Repository::clone(&config.repo, repo_path) {
+            Ok(repo) => {
+                println!(
+                    "\x1b[34m[INFO]\x1b[0m[{}] Clone repo ...",
+                    Local::now().format("%Y-%m-%d %H:%M:%S"),
+                );
+
+                Ok(repo)
+            }
+            Err(_) => Err(Box::new(CustomError::FailCloneRepo)),
+        }
+    } else {
+        match load_repository(&config.repo) {
+            Ok(repo) => {
+                println!(
+                    "\x1b[34m[INFO]\x1b[0m[{}] Load local repo ...",
+                    Local::now().format("%Y-%m-%d %H:%M:%S"),
+                );
+
+                Ok(repo)
+            }
+
+            Err(_) => Err(Box::new(CustomError::FailLoadRepo)),
+        }
+    }
+}
+
+// NOTE: The commented-out tests below can be run after specifying a local repository path.
+#[cfg(test)]
+mod tests {
+
+    use super::*;
+    // static VALID_PATH: &str = "D:/Workplace/Git/TestGitOperation";
+    // static INVALID_PATH: &str = "D:/Workplace/Git/TestGitOperation222";
+
+    // // test load_repository
+    // #[test]
+    // fn test_load_repository_valid_path() {
+    //     let result = load_repository(VALID_PATH);
+    //     assert!(result.is_ok());
+    // }
+
+    // #[test]
+    // fn test_load_repository_invalid_path() {
+    //     let result = load_repository(INVALID_PATH);
+    //     assert!(result.is_err());
+    // }
+
+    // // test config_repo_name
+    // #[test]
+    // fn test_config_repo_name_valid_repo() {
+    //     let repo = match load_repository(VALID_PATH) {
+    //         Ok(repo) => repo,
+    //         Err(_) => {
+    //             panic!("Failed to load repository");
+    //         }
+    //     };
+    //     let result = match config_repo_name(&repo) {
+    //         Ok(result) => result,
+    //         Err(e) => {
+    //             panic!("Error:{}", e);
+    //         }
+    //     };
+    //     assert_eq!(result, "TestGitOperation");
+    // }
+
+    // // test load_all_commits
+    // #[test]
+    // fn test_load_all_commits_valid_repository() {
+    //     let repo = match Repository::init(VALID_PATH) {
+    //         Ok(repo) => repo,
+    //         Err(e) => {
+    //             eprintln!("{}", e);
+    //             panic!("Failed to initialize repository");
+    //         }
+    //     };
+
+    //     let result = load_all_commits(&repo);
+
+    //     assert!(result.is_ok());
+    //     let commits = result.unwrap();
+    //     assert!(commits.contains(&"9e2fe5fc27b1bb8bd4de5574f8d9010164427051".to_string()));
+    // }
+
+    // // test load_commits_by_conditions
+    // #[test]
+    // fn test_load_commits_by_conditions_valid_conditions() {
+    //     let commits = vec![
+    //         "commit1".to_string(),
+    //         "commit2".to_string(),
+    //         "commit3".to_string(),
+    //         "commit4".to_string(),
+    //         "commit5".to_string(),
+    //     ];
+    //     let commit_from = Some("commit2".to_string());
+    //     let commit_to = Some("commit4".to_string());
+
+    //     let result = load_commits_by_conditions(commit_from, commit_to, &commits);
+
+    //     assert_eq!(
+    //         result,
+    //         vec![
+    //             "commit2".to_string(),
+    //             "commit3".to_string(),
+    //             "commit4".to_string(),
+    //         ]
+    //     );
+    // }
+
+    // // test load_all_object_ids
+    // #[test]
+    // fn test_load_all_object_ids_valid_repository() {
+    //     let repo = match Repository::init(VALID_PATH) {
+    //         Ok(repo) => repo,
+    //         Err(e) => {
+    //             eprintln!("{}", e);
+    //             panic!("Failed to initialize repository");
+    //         }
+    //     };
+
+    //     let oid1 = repo.blob("Content 1".as_bytes()).unwrap();
+    //     let oid2 = repo.blob("Content 2".as_bytes()).unwrap();
+    //     let oid3 = repo.blob("Content 3".as_bytes()).unwrap();
+
+    //     let result = load_all_object_ids(&repo);
+
+    //     assert!(result.is_ok());
+    //     let object_ids = result.unwrap();
+    //     assert!(object_ids.contains(&oid1));
+    //     assert!(object_ids.contains(&oid2));
+    //     assert!(object_ids.contains(&oid3));
+    // }
+
+    // test parse_date_to_datetime
+    #[test]
+    fn test_parse_date_to_datetime_valid_input_start() {
+        let valid_input = "2023-05-25";
+        let mytype = "start";
+        let result = parse_date_to_datetime(valid_input, mytype);
+        assert!(result.is_ok());
+        assert_eq!(result.unwrap().to_rfc3339(), "2023-05-25T00:00:00+00:00");
+    }
+
+    #[test]
+    fn test_parse_date_to_datetime_valid_input_end() {
+        let valid_input = "2023-05-25";
+        let mytype = "end";
+        let result = parse_date_to_datetime(valid_input, mytype);
+        assert!(result.is_ok());
+        assert_eq!(result.unwrap().to_rfc3339(), "2023-05-25T23:59:59+00:00");
+    }
+
+    #[test]
+    fn test_parse_date_to_datetime_invalid_input() {
+        let invalid_input = "2023-05-32";
+        let mytype = "start";
+        let result = parse_date_to_datetime(invalid_input, mytype);
+        assert!(result.is_err());
+    }
+
+    // test is_valid_date_format
+    #[test]
+    fn test_is_valid_date_format_valid_input() {
+        let valid_input = "2023-05-25";
+        let result = is_valid_date_format(valid_input);
+        assert!(result);
+    }
+
+    #[test]
+    fn test_is_valid_date_format_invalid_input() {
+        let invalid_input = "2023-05-32";
+        let result = is_valid_date_format(invalid_input);
+        assert!(!result);
+    }
+
+    // test extract_repo_name
+    #[test]
+    fn test_extract_repo_name() {
+        // Test with a valid URL
+        let url = "https://github.com/user/repo.git";
+        let result = extract_repo_name(url);
+        assert_eq!(result, Some("repo".to_owned()));
+
+        // Test with a URL without ".git" extension
+        let url = "https://github.com/user/repo";
+        let result = extract_repo_name(url);
+        assert_eq!(result, None);
+    }
+}
diff --git a/sensleak-rs/src/utils/mod.rs b/sensleak-rs/src/utils/mod.rs
new file mode 100644
index 00000000..1d1e4ccb
--- /dev/null
+++ b/sensleak-rs/src/utils/mod.rs
@@ -0,0 +1,2 @@
+pub mod detect_utils;
+pub mod git_util;
\ No newline at end of file
-- Gitee