diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 1084f7f02..676591fdd 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -22,6 +22,6 @@ assignees: '' ### Basic Information -- py42 version: +- pycpg version: - python version: - operating system: diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index 6a92dd725..599ead4e5 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,6 +1,6 @@ --- name: Feature request -about: Suggest an idea for py42 +about: Suggest an idea for pycpg title: "[Enhancement] YOUR IDEA!" labels: enhancement assignees: '' diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 5cad2fbc8..8984fafc4 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -7,6 +7,7 @@ on: tags: - v* pull_request: + workflow_dispatch: jobs: build: @@ -14,12 +15,16 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: [3.9, "3.10", 3.11] + python: ["3.9", "3.10", "3.11"] + env: + CPG_HOST: ${{ vars.CPG_HOST }} + CPG_USER: ${{ vars.CPG_USER }} + CPG_API_CLIENT_ID: ${{ vars.CPG_API_CLIENT_ID }} steps: - uses: actions/checkout@v2 with: - path: py42 + path: pycpg - name: Setup Python uses: actions/setup-python@v1 with: @@ -27,11 +32,11 @@ jobs: - name: Install tox run: pip install tox==3.24.1 - name: Run Unit tests - run: cd py42; tox -e py # Run tox using the version of Python in `PATH` + run: cd pycpg; tox -e py # Run tox using the version of Python in `PATH` - name: Submit coverage report uses: codecov/codecov-action@v1.0.7 with: - file: py42/coverage.xml + file: pycpg/coverage.xml - name: Checkout mock servers uses: actions/checkout@v2 with: @@ -42,19 +47,11 @@ jobs: sudo tee -a /etc/hosts <=3.7 for your virtual environment. Use `pyenv --versions` to see all versions available for install. There are some known issues installing python 3.6 with pyenv on certain OS. +**Note**: Pycpg for end users supports Pythons versions <3.6 and <4 - However due to some of the build dependencies, you'll need a version >=3.7 for your virtual environment. Use `pyenv --versions` to see all versions available for install. There are some known issues installing python 3.6 with pyenv on certain OS. If running into issues on Big Sur(Version 11) while installing python 3.6 the below may work @@ -60,7 +60,7 @@ If running into issues on Big Sur(Version 11) while installing python 3.6 the be pyenv install --patch 3.6.14 < <(curl -sSL https://github.com/python/cpython/commit/8ea6353.patch) ``` -Use `source deactivate` to exit the virtual environment and `pyenv activate py42` to reactivate it. +Use `source deactivate` to exit the virtual environment and `pyenv activate pycpg` to reactivate it. ### Windows/Linux @@ -68,11 +68,11 @@ Install a version of python 3.6 or higher from [python.org](https://python.org). Next, in a directory somewhere outside the project, create and activate your virtual environment: ```bash -python -m venv py42 +python -m venv pycpg # macOS/Linux -source py42/bin/activate +source pycpg/bin/activate # Windows -.\py42\Scripts\Activate +.\pycpg\Scripts\Activate ``` To leave the virtual environment, simply use: @@ -82,7 +82,7 @@ deactivate ## Installation -Next, with your virtual environment activated, install py42 and its development dependencies. 
The `-e` option installs py42 in +Next, with your virtual environment activated, install pycpg and its development dependencies. The `-e` option installs pycpg in ["editable mode"](https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs). ```bash @@ -159,8 +159,8 @@ def test_add_one_and_one_equals_two(): #### Integration tests -If not using the mock server, set the environment variables `C42_HOST`, `C42_USER`, -and `C42_PW` with CCA credentials. Otherwise, the integration tests default to using +If not using the mock server, set the environment variables `CPG_HOST`, `CPG_USER`, +and `CPG_PW` with CCA credentials. Otherwise, the integration tests default to using `http://127.0.0.1:4200`, which is the same address that the mock server is set to run on. To execute integration tests: @@ -176,7 +176,7 @@ Follow [Google's format](https://google.github.io/styleguide/pyguide.html#38-com ### Generating documentation -py42 uses [Sphinx](http://www.sphinx-doc.org/) to generate documentation. +pycpg uses [Sphinx](http://www.sphinx-doc.org/) to generate documentation. #### Performing a test build @@ -186,7 +186,7 @@ To simply test that the documentation build without errors, you can run: tox -e docs ``` -Note that the `myst-parser` dependency that contributes to building the docs requires python 3.7+. +Note that the `myst-parser` and `sphinx` dependency that contributes to building the docs requires python 3.11. #### Running the docs locally @@ -217,8 +217,4 @@ Document all notable consumer-affecting changes in CHANGELOG.md per principles a When you're satisfied with your changes, open a PR and fill out the pull request template file. We recommend prefixing the name of your branch and/or PR title with `bugfix`, `chore`, or `feature` to help quickly categorize your change. Your unit tests and other checks will run against all supported python versions when you do this. -For contributions from non-Code42 employees, we require you to agree to our [Contributor License Agreement](https://code42.github.io/code42-cla/Code42_Individual_Contributor_License_Agreement). - -On submission of your first PR, a GitHub action will run requiring you to reply in a comment with your affirmation of the CLA before the PR will be able to be merged. - A team member should get in contact with you shortly to help merge your PR to completion and get it ready for a release! 
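For reference, the renamed integration-test variables above (`CPG_HOST`, `CPG_USER`, `CPG_PW`) would typically be read from the environment before building a client. The snippet below is an illustrative sketch only, not part of the patch: the exact fixture wiring in pycpg's test suite may differ, and the fallback values are placeholders (the address comes from the mock-server default mentioned in CONTRIBUTING.md).

```python
# Sketch: build an SDK client for integration tests from the CPG_* environment
# variables referenced above. Fallback values are placeholders; the default host
# is the mock server address from CONTRIBUTING.md.
import os

import pycpg.sdk

host = os.environ.get("CPG_HOST", "http://127.0.0.1:4200")
username = os.environ.get("CPG_USER", "test.user@example.com")
password = os.environ.get("CPG_PW", "")

sdk = pycpg.sdk.from_local_account(host, username, password)
```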
diff --git a/LICENSE.md b/LICENSE.md index f8f3e46e5..dd15d4ff7 100644 --- a/LICENSE.md +++ b/LICENSE.md @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2020 Code42 Software +Copyright (c) 2025 CrashPlan Software Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/README.md b/README.md index c45331956..8fa161dff 100644 --- a/README.md +++ b/README.md @@ -1,32 +1,33 @@ +# CrashPlan fork of py42, this will become the official CrashPlan Python SDK -# py42, the official Code42 Python SDK -![Build status](https://github.com/code42/py42/workflows/build/badge.svg) -[![codecov.io](https://codecov.io/github/code42/py42/coverage.svg?branch=main)](https://codecov.io/github/code42/py42?branch=main) -[![versions](https://img.shields.io/pypi/pyversions/py42.svg)](https://pypi.org/project/py42/) + +![Build status](https://github.com/CrashPlan-Labs/pycpg/workflows/build/badge.svg) +[![codecov.io](https://codecov.io/github/CrashPlan-Labs/pycpg/coverage.svg?branch=main)](https://codecov.io/github/CrashPlan-Labs/pycpg?branch=main) +[![versions](https://img.shields.io/pypi/pyversions/pycpg.svg)](https://pypi.org/project/pycpg/) [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) -[![Documentation Status](https://readthedocs.org/projects/py42/badge/?version=latest)](https://py42docs.code42.com/en/latest/?badge=latest) +[![Documentation Status](https://readthedocs.org/projects/pycpg/badge/?version=latest)](https://pycpgdocs.crashplan.com/en/latest/?badge=latest) -`py42` is a Python wrapper around the Code42 REST APIs that also provides several other useful utility methods. -It is designed to be used for developing your own tools for working with Code42 data while avoiding the overhead +`pycpg` is a Python wrapper around the CrashPlan REST APIs that also provides several other useful utility methods. +It is designed to be used for developing your own tools for working with CrashPlan data while avoiding the overhead of session / authentication management. ## Requirements - Python 3.6.0+ -- Code42 Server 6.8.x+ or cloud environment (e.g. console.us.code42.com or crashplan.com) +- CrashPlan Cloud environment (e.g. console.us1.crashplan.com or console.us2.crashplan.com) ## Installation -Run the `setup.py` script to install the py42 package and its dependencies on your system. +Run the `setup.py` script to install the pycpg package and its dependencies on your system. You will likely need administrative privileges for this. ```bash $ python setup.py install ``` -## Hello, py42 +## Hello, pycpg Here's a simple example to verify the installation and your server/account. @@ -39,18 +40,18 @@ $ python Import a couple essentials ```python ->>> import py42.sdk ->>> import py42.util as util +>>> import pycpg.sdk +>>> import pycpg.util as util ``` Initialize the client. ```python ->>> sdk = py42.sdk.from_local_account("https://console.us.code42.com", "john.doe", "password") +>>> sdk = pycpg.sdk.from_local_account("https://console.us1.crashplan.com", "john.doe", "password") ``` or alternatively ``` ->>> sdk = py42.sdk.from_jwt_provider("https://console.us.code42.com", jwt_provider_function) +>>> sdk = pycpg.sdk.from_jwt_provider("https://console.us1.crashplan.com", jwt_provider_function) ``` Get and print your user information. @@ -103,12 +104,12 @@ There are a few default settings that affect the behavior of the client. 
| debug.logger | Controls logger used | `logging.Logger` with `StreamHandler` sending to `sys.stderr` | items_per_page | Controls how many items are retrieved per request for methods that loops over several "pages" of items in order to collect them all. | 500 -To override these settings, import `py42.settings` and override values as necessary before creating the client. +To override these settings, import `pycpg.settings` and override values as necessary before creating the client. For example, to disable certificate validation in a dev environment: ```python -import py42.sdk -import py42.settings as settings +import pycpg.sdk +import pycpg.settings as settings import logging settings.verify_ssl_certs = False @@ -120,17 +121,17 @@ custom_logger.addHandler(handler) settings.debug.logger = custom_logger settings.debug.level = logging.DEBUG -sdk = py42.sdk.from_local_account("https://console.us.code42.com", "my_username", "my_password") +sdk = pycpg.sdk.from_local_account("https://console.us1.crashplan.com", "my_username", "my_password") ``` ## Usage -The SDK object opens availability to APIs across the Code42 environment, including storage nodes. +The SDK object opens availability to APIs across the CrashPlan environment, including storage nodes. ```python -import py42.sdk +import pycpg.sdk -sdk = py42.sdk.from_local_account("https://console.us.code42.com", "my_username", "my_password") +sdk = pycpg.sdk.from_local_account("https://console.us1.crashplan.com", "my_username", "my_password") # clients are organized by feature groups and accessible under the sdk object @@ -154,9 +155,9 @@ with open("/path/to/my/file", 'wb') as f: if chunk: f.write(chunk) -# search file events -from py42.sdk.queries.fileevents.file_event_query import FileEventQuery -from py42.sdk.queries.fileevents.filters import * +# search file events -- deprecated, to be replaced with another example +from pycpg.sdk.queries.fileevents.file_event_query import FileEventQuery +from pycpg.sdk.queries.fileevents.filters import * query = FileEventQuery.all(MD5.eq("e804d1eb229298b04522c5504b8131f0")) file_events = sdk.securitydata.search_file_events(query) @@ -164,7 +165,7 @@ file_events = sdk.securitydata.search_file_events(query) ## Additional Resources -For complete documentation on the Code42 web API that backs this SDK, here are some helpful resources: +For complete documentation on the CrashPlan web API that backs this SDK, here are some helpful resources: -- [Introduction to the Code42 API](https://support.code42.com/Administrator/Cloud/Monitoring_and_managing/Introduction_to_the_Code42_API) -- [Code42 API documentation viewers](https://support.code42.com/Administrator/Cloud/Monitoring_and_managing/Introduction_to_the_Code42_API/Code42_API_documentation_viewer) +- [Introduction to the CrashPlan API](https://support.crashplan.com/hc/en-us/articles/9057001723917--CrashPlan-API-syntax-and-usage) +- [CrashPlan API documentation viewers](https://support.crashplan.com/hc/en-us/articles/9057096803469--CrashPlan-API-documentation-viewer-reference) diff --git a/docs/conf.py b/docs/conf.py index d1653fa6a..e2f992531 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -12,25 +12,25 @@ import os import sys -import py42.__version__ as meta +import pycpg.__version__ as meta # -- Project information ----------------------------------------------------- -project = "py42" -copyright = "2022, Code42 Software" -author = "Code42 Software" +project = "pycpg" +copyright = "2025, CrashPlanGroup" +author = "CrashPlanGroup" # The short X.Y version -version 
= f"py42 v{meta.__version__}" +version = f"pycpg v{meta.__version__}" # The full version, including alpha/beta/rc tags -release = f"py42 v{meta.__version__}" +release = f"pycpg v{meta.__version__}" # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.4.0" +needs_sphinx = "8.2.3" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom diff --git a/docs/favicon.ico b/docs/favicon.ico index 9a23a8e7d..6d522c4fd 100644 Binary files a/docs/favicon.ico and b/docs/favicon.ico differ diff --git a/docs/index.md b/docs/index.md index 6c378a493..6db7a2862 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,4 +1,4 @@ -# py42, the Code42 Python SDK +# pycpg, the CrashPlan Python SDK ```{eval-rst} .. toctree:: @@ -17,19 +17,18 @@ ``` -[![license](https://img.shields.io/pypi/l/py42.svg)](https://pypi.org/project/py42/) -[![versions](https://img.shields.io/pypi/pyversions/py42.svg)](https://pypi.org/project/py42/) +[![license](https://img.shields.io/pypi/l/pycpg.svg)](https://pypi.org/project/pycpg/) +[![versions](https://img.shields.io/pypi/pyversions/pycpg.svg)](https://pypi.org/project/pycpg/) -`py42` is a Python wrapper around the Code42 REST APIs that also provides several utility methods. Use `py42` to -develop your own tools for working with Code42 data while avoiding the overhead +`pycpg` is a Python wrapper around the CrashPlan REST APIs that also provides several utility methods. Use `pycpg` to +develop your own tools for working with CrashPlan data while avoiding the overhead of session / authentication management. ## Features * Managing users, organizations, and devices. -* Searching file events, alerts and auditlogs. -* Adding/Removing employees from detection lists. -* Managing cases. +* Restoring files +* Managing and searching legal holds ## Content diff --git a/docs/logo.png b/docs/logo.png index 8443612d9..318dbe338 100644 Binary files a/docs/logo.png and b/docs/logo.png differ diff --git a/docs/methoddocs/alertrules.md b/docs/methoddocs/alertrules.md deleted file mode 100644 index 58d8702a3..000000000 --- a/docs/methoddocs/alertrules.md +++ /dev/null @@ -1,31 +0,0 @@ -# Alert Rules - -```{eval-rst} -.. autoclass:: py42.clients.alertrules.AlertRulesClient - :members: - :show-inheritance: -``` - -## Exfiltration rules - -```{eval-rst} -.. autoclass:: py42.services.alertrules.ExfiltrationService - :members: - :show-inheritance: -``` - -## Cloud share rules - -```{eval-rst} -.. autoclass:: py42.services.alertrules.CloudShareService - :members: - :show-inheritance: -``` - -## File type mismatch rules - -```{eval-rst} -.. autoclass:: py42.services.alertrules.FileTypeMismatchService - :members: - :show-inheritance: -``` diff --git a/docs/methoddocs/alerts.md b/docs/methoddocs/alerts.md deleted file mode 100644 index d46b8b809..000000000 --- a/docs/methoddocs/alerts.md +++ /dev/null @@ -1,27 +0,0 @@ -# Alerts - -```{eval-rst} -.. autoclass:: py42.clients.alerts.AlertsClient - :members: - :show-inheritance: -``` - -## Filter Classes - -The following classes construct filters for alert queries. Each filter class corresponds to an alert detail. 
-Call the appropriate classmethod on your desired filter class with the `value` you want to match and it will return a -`FilterGroup` object that can be passed to `AlertQuery`'s `all()` or `any()` methods to create complex queries -that match multiple filter rules. - -```{eval-rst} -.. automodule:: py42.sdk.queries.alerts.filters.alert_filter - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.alerts.alert_query.AlertQuery - :members: - :show-inheritance: -``` diff --git a/docs/methoddocs/archive.md b/docs/methoddocs/archive.md index 239cafa01..9927a039c 100644 --- a/docs/methoddocs/archive.md +++ b/docs/methoddocs/archive.md @@ -2,12 +2,12 @@ ```{eval-rst} .. important:: - You must be using py42 to version 1.25.1 or higher to use the `archive.stream_from_backup()` or `archive.stream_to_device()` methods. + You must be using pycpg to version 1.25.1 or higher to use the `archive.stream_from_backup()` or `archive.stream_to_device()` methods. ``` ```{eval-rst} -.. autoclass:: py42.clients.archive.ArchiveClient +.. autoclass:: pycpg.clients.archive.ArchiveClient :members: :show-inheritance: ``` diff --git a/docs/methoddocs/auditlogs.md b/docs/methoddocs/auditlogs.md index 968421f82..40e6ef015 100644 --- a/docs/methoddocs/auditlogs.md +++ b/docs/methoddocs/auditlogs.md @@ -1,7 +1,7 @@ # Audit Logs ```{eval-rst} -.. autoclass:: py42.clients.auditlogs.AuditLogsClient +.. autoclass:: pycpg.clients.auditlogs.AuditLogsClient :members: :show-inheritance: ``` diff --git a/docs/methoddocs/backupset.md b/docs/methoddocs/backupset.md index cfa7e00f0..bcecd4538 100644 --- a/docs/methoddocs/backupset.md +++ b/docs/methoddocs/backupset.md @@ -1,7 +1,7 @@ # Backup Sets ```{eval-rst} -.. autoclass:: py42.clients.settings.device_settings.BackupSet +.. autoclass:: pycpg.clients.settings.device_settings.BackupSet :members: :show-inheritance: ``` diff --git a/docs/methoddocs/cases.md b/docs/methoddocs/cases.md deleted file mode 100644 index 617d02541..000000000 --- a/docs/methoddocs/cases.md +++ /dev/null @@ -1,21 +0,0 @@ -# Cases - -```{eval-rst} -.. autoclass:: py42.clients.cases.CaseStatus - :members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.clients.cases.CasesClient - :members: - :show-inheritance: -``` - -## Cases File Events - -```{eval-rst} -.. autoclass:: py42.services.casesfileevents.CasesFileEventsService - :members: - :show-inheritance: -``` diff --git a/docs/methoddocs/constants.md b/docs/methoddocs/constants.md index a9ceb603f..b34bc4b50 100644 --- a/docs/methoddocs/constants.md +++ b/docs/methoddocs/constants.md @@ -1,25 +1,7 @@ # Shared Constants ```{eval-rst} -.. autoclass:: py42.constants.SortDirection - :members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.constants.CaseStatus - :members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.constants.TrustedActivityType - :members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.constants.WatchlistType +.. autoclass:: pycpg.constants.SortDirection :members: :show-inheritance: ``` diff --git a/docs/methoddocs/devices.md b/docs/methoddocs/devices.md index c822816e8..381618575 100644 --- a/docs/methoddocs/devices.md +++ b/docs/methoddocs/devices.md @@ -1,7 +1,7 @@ # Devices ```{eval-rst} -.. autoclass:: py42.services.devices.DeviceService +.. 
autoclass:: pycpg.services.devices.DeviceService :members: :show-inheritance: ``` diff --git a/docs/methoddocs/devicesettings.md b/docs/methoddocs/devicesettings.md index 375833275..a96c3a3f8 100644 --- a/docs/methoddocs/devicesettings.md +++ b/docs/methoddocs/devicesettings.md @@ -1,13 +1,7 @@ # Device Settings ```{eval-rst} -.. autoclass:: py42.clients.settings.device_settings.IncydrDeviceSettings - :members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.clients.settings.device_settings.DeviceSettings +.. autoclass:: pycpg.clients.settings.device_settings.DeviceSettings :members: :show-inheritance: :inherited-members: UserDict diff --git a/docs/methoddocs/exceptions.md b/docs/methoddocs/exceptions.md index e17b22133..2b3b02fed 100644 --- a/docs/methoddocs/exceptions.md +++ b/docs/methoddocs/exceptions.md @@ -1,7 +1,7 @@ # Exceptions ```{eval-rst} -.. automodule:: py42.exceptions +.. automodule:: pycpg.exceptions :members: :inherited-members: :show-inheritance: diff --git a/docs/methoddocs/fileeventqueries.md b/docs/methoddocs/fileeventqueries.md deleted file mode 100644 index 148483888..000000000 --- a/docs/methoddocs/fileeventqueries.md +++ /dev/null @@ -1,405 +0,0 @@ -# File Event Queries - V1 (DEPRECATED) - -```{eval-rst} -.. warning:: V1 file events, saved searches, and queries are **deprecated**. -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.file_event_query.FileEventQuery - :members: - :show-inheritance: -``` - -## Saved Searches - -```{eval-rst} -.. autoclass:: py42.services.savedsearch.SavedSearchService - :members: - :show-inheritance: -``` - -## Filter Classes - -The following classes construct filters for file event queries. Each filter class corresponds to a file event detail. -Call the appropriate classmethod on your desired filter class with the `value` you want to match and it will return a -`FilterGroup` object that can be passed to `FileEventQuery`'s `all()` or `any()` methods to create complex queries -that match multiple filter rules. - -Example: - -To search for events observed for certain set of documents, you can use the `FileName` and `MD5` filter classes to -construct `FilterGroup`s that will search for matching filenames or (in case someone renamed the sensitive file) the -known MD5 hashes of the files: - - filename_filter = FileName.is_in(['confidential_plans.docx', 'confidential_plan_projections.xlsx']) - md5_filter = MD5.is_in(['133765f4fff5e3038b9352a4d14e1532', 'ea16f0cbfc76f6eba292871f8a8c794b']) - -### Event Filters - -```{eval-rst} -.. automethod:: py42.sdk.queries.fileevents.util.create_exists_filter_group -``` - -```{eval-rst} -.. automethod:: py42.sdk.queries.fileevents.util.create_not_exists_filter_group -``` - -```{eval-rst} -.. automethod:: py42.sdk.queries.fileevents.util.create_greater_than_filter_group -``` - -```{eval-rst} -.. automethod:: py42.sdk.queries.fileevents.util.create_less_than_filter_group -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.event_filter.EventTimestamp - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.event_filter.EventType - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.event_filter.InsertionTimestamp - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. 
autoclass:: py42.sdk.queries.fileevents.filters.event_filter.Source - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.event_filter.MimeTypeMismatch - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.event_filter.OutsideActiveHours - :members: - :inherited-members: - :show-inheritance: -``` - -### File Filters - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.file_filter.FileCategory - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.file_filter.FileName - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.file_filter.FileOwner - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.file_filter.FilePath - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.file_filter.FileSize - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.file_filter.MD5 - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.file_filter.SHA256 - :members: - :inherited-members: - :show-inheritance: -``` - -### Device Filters - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.device_filter.DeviceUsername - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.device_filter.OSHostname - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.device_filter.PrivateIPAddress - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.device_filter.PublicIPAddress - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.device_filter.DeviceSignedInUserName - :members: - :inherited-members: - :show-inheritance: -``` - -### Cloud Filters - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.cloud_filter.Actor - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.cloud_filter.DirectoryID - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.cloud_filter.Shared - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.cloud_filter.SharedWith - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.cloud_filter.SharingTypeAdded - :members: - :inherited-members: - :show-inheritance: -``` - -### Exposure Filters - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.exposure_filter.ExposureType - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.exposure_filter.ProcessName - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. 
autoclass:: py42.sdk.queries.fileevents.filters.exposure_filter.ProcessOwner - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.exposure_filter.RemovableMediaName - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.exposure_filter.RemovableMediaVendor - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.exposure_filter.RemovableMediaMediaName - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.exposure_filter.RemovableMediaVolumeName - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.exposure_filter.RemovableMediaPartitionID - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.exposure_filter.RemovableMediaSerialNumber - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.exposure_filter.SyncDestination - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.exposure_filter.SyncDestinationUsername - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.exposure_filter.TabURL - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.exposure_filter.WindowTitle - :members: - :inherited-members: - :show-inheritance: -``` - -### Email Filters - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.email_filter.EmailPolicyName - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.email_filter.EmailSubject - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.email_filter.EmailRecipients - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.email_filter.EmailSender - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.email_filter.EmailFrom - :members: - :inherited-members: - :show-inheritance: -``` - -### Activity Filters - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.activity_filter.TrustedActivity - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.activity_filter.RemoteActivity - :members: - :inherited-members: - :show-inheritance: -``` - -### Printer Filters - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.print_filter.Printer - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.print_filter.PrintJobName - :members: - :inherited-members: - :show-inheritance: -``` - -### Risk Filters - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.risk_filter.RiskIndicator - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. 
autoclass:: py42.sdk.queries.fileevents.filters.risk_filter.RiskSeverity - :members: - :inherited-members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.filters.risk_filter.RiskScore - :members: - :inherited-members: - :show-inheritance: -``` diff --git a/docs/methoddocs/fileeventqueriesv2.md b/docs/methoddocs/fileeventqueriesv2.md deleted file mode 100644 index b81ccb9ec..000000000 --- a/docs/methoddocs/fileeventqueriesv2.md +++ /dev/null @@ -1,122 +0,0 @@ -# File Event Queries - V2 - -```{eval-rst} -.. autoclass:: py42.sdk.queries.fileevents.v2.file_event_query.FileEventQuery - :members: - :show-inheritance: - :noindex: -``` - -## Saved Searches - -```{eval-rst} -.. important:: - Make sure to set the optional argument `use_v2=True` on saved search functions to get V2 file event data and queries. - -``` - -```{eval-rst} -.. autoclass:: py42.services.savedsearch.SavedSearchService - :members: - :show-inheritance: - :noindex: -``` - - -## Filter Classes - -The following classes construct filters for file event queries. Each filter class corresponds to a file event detail. -Call the appropriate class method on your desired filter class with the `value` you want to match and it will return a -`FilterGroup` object that can be passed to `FileEventQuery`'s `all()` or `any()` methods to create complex queries -that match multiple filter rules. - -Example: - -To search for events observed for certain set of documents, you can use the `file.Name` and `file.MD5` filter classes to -construct `FilterGroup`s that will search for matching filenames or (in case someone renamed the sensitive file) the -known MD5 hashes of the files: - - from py42.sdk.queries.fileevents.v2 import * - filename_filter = File.Name.is_in(['confidential_plans.docx', 'confidential_plan_projections.xlsx']) - md5_filter = File.MD5.is_in(['133765f4fff5e3038b9352a4d14e1532', 'ea16f0cbfc76f6eba292871f8a8c794b']) - -### Destination Filters - -```{eval-rst} -.. automodule:: py42.sdk.queries.fileevents.v2.filters.destination - :members: - :inherited-members: - :show-inheritance: -``` - -### Event Filters - -```{eval-rst} -.. automodule:: py42.sdk.queries.fileevents.v2.filters.event - :members: - :inherited-members: - :show-inheritance: -``` - -### File Filters - -```{eval-rst} -.. automodule:: py42.sdk.queries.fileevents.v2.filters.file - :members: - :inherited-members: - :show-inheritance: -``` - -### Process Filters - -```{eval-rst} -.. automodule:: py42.sdk.queries.fileevents.v2.filters.process - :members: - :inherited-members: - :show-inheritance: -``` - -### Report Filters - -```{eval-rst} -.. automodule:: py42.sdk.queries.fileevents.v2.filters.report - :members: - :inherited-members: - :show-inheritance: -``` - -### Risk Filters - -```{eval-rst} -.. automodule:: py42.sdk.queries.fileevents.v2.filters.risk - :members: - :inherited-members: - :show-inheritance: -``` - -### Source Filters - -```{eval-rst} -.. automodule:: py42.sdk.queries.fileevents.v2.filters.source - :members: - :inherited-members: - :show-inheritance: -``` - -### Timestamp Filters - -```{eval-rst} -.. automodule:: py42.sdk.queries.fileevents.v2.filters.timestamp - :members: - :inherited-members: - :show-inheritance: -``` - -### User Filters - -```{eval-rst} -.. 
automodule:: py42.sdk.queries.fileevents.v2.filters.user - :members: - :inherited-members: - :show-inheritance: -``` diff --git a/docs/methoddocs/legalhold.md b/docs/methoddocs/legalhold.md index 53ef46ff6..79256ac81 100644 --- a/docs/methoddocs/legalhold.md +++ b/docs/methoddocs/legalhold.md @@ -3,7 +3,7 @@ *For legal hold commands compatible with API Client authentication, see [Legal Hold - Api Clients](legalholdapiclient.md).* ```{eval-rst} -.. autoclass:: py42.services.legalhold.LegalHoldService +.. autoclass:: pycpg.services.legalhold.LegalHoldService :members: :show-inheritance: ``` diff --git a/docs/methoddocs/legalholdapiclient.md b/docs/methoddocs/legalholdapiclient.md index 0ce094eb7..e04c6ddb7 100644 --- a/docs/methoddocs/legalholdapiclient.md +++ b/docs/methoddocs/legalholdapiclient.md @@ -3,7 +3,7 @@ *These endpoints are only compatible with Api Client authentication. For legal hold commands compatible with other forms of authentication, see [Legal Hold](legalholdapiclient.md).* ```{eval-rst} -.. autoclass:: py42.services.legalholdapiclient.LegalHoldApiClientService +.. autoclass:: pycpg.services.legalholdapiclient.LegalHoldApiClientService :members: :show-inheritance: ``` diff --git a/docs/methoddocs/orgs.md b/docs/methoddocs/orgs.md index 821d2e95e..3320fb8c2 100644 --- a/docs/methoddocs/orgs.md +++ b/docs/methoddocs/orgs.md @@ -1,7 +1,7 @@ # Orgs ```{eval-rst} -.. autoclass:: py42.services.orgs.OrgService +.. autoclass:: pycpg.services.orgs.OrgService :members: :show-inheritance: ``` diff --git a/docs/methoddocs/orgsettings.md b/docs/methoddocs/orgsettings.md index 0ee829ccf..53e8c47d2 100644 --- a/docs/methoddocs/orgsettings.md +++ b/docs/methoddocs/orgsettings.md @@ -1,11 +1,11 @@ # Org Settings ```{eval-rst} -.. autoclass:: py42.clients.settings.org_settings.OrgSettings +.. autoclass:: pycpg.clients.settings.org_settings.OrgSettings :members: :show-inheritance: -.. autoclass:: py42.clients.settings.device_settings.DeviceSettingsDefaults +.. autoclass:: pycpg.clients.settings.device_settings.DeviceSettingsDefaults :members: :show-inheritance: ``` diff --git a/docs/methoddocs/response.md b/docs/methoddocs/response.md deleted file mode 100644 index b18677bfe..000000000 --- a/docs/methoddocs/response.md +++ /dev/null @@ -1,7 +0,0 @@ -# Response - -```{eval-rst} -.. autoclass:: py42.response.Py42Response - :members: - :show-inheritance: -``` diff --git a/docs/methoddocs/securitydata.md b/docs/methoddocs/securitydata.md deleted file mode 100644 index afb9f9a59..000000000 --- a/docs/methoddocs/securitydata.md +++ /dev/null @@ -1,11 +0,0 @@ -# Security Data - -```{eval-rst} -.. warning:: V1 file events, saved searches, and queries are **deprecated**. -``` - -```{eval-rst} -.. autoclass:: py42.clients.securitydata.SecurityDataClient - :members: - :show-inheritance: -``` diff --git a/docs/methoddocs/sharedqueryfilters.md b/docs/methoddocs/sharedqueryfilters.md deleted file mode 100644 index e7e028d47..000000000 --- a/docs/methoddocs/sharedqueryfilters.md +++ /dev/null @@ -1,8 +0,0 @@ -# Shared Query Filters - -```{eval-rst} -.. automodule:: py42.sdk.queries.query_filter - :members: - :inherited-members: - :show-inheritance: -``` diff --git a/docs/methoddocs/trustedactivities.md b/docs/methoddocs/trustedactivities.md deleted file mode 100644 index cf2595f14..000000000 --- a/docs/methoddocs/trustedactivities.md +++ /dev/null @@ -1,13 +0,0 @@ -# Trusted Activities - -```{eval-rst} -.. 
autoclass:: py42.clients.trustedactivities.TrustedActivityType - :members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.clients.trustedactivities.TrustedActivitiesClient - :members: - :show-inheritance: -``` diff --git a/docs/methoddocs/userriskprofile.md b/docs/methoddocs/userriskprofile.md deleted file mode 100644 index 54e6dff95..000000000 --- a/docs/methoddocs/userriskprofile.md +++ /dev/null @@ -1,7 +0,0 @@ -# User Risk Profiles - -```{eval-rst} -.. autoclass:: py42.clients.userriskprofile.UserRiskProfileClient - :members: - :show-inheritance: -``` diff --git a/docs/methoddocs/users.md b/docs/methoddocs/users.md index bc7a258b5..c3e42f3fd 100644 --- a/docs/methoddocs/users.md +++ b/docs/methoddocs/users.md @@ -1,13 +1,13 @@ # Users ```{eval-rst} -.. autoclass:: py42.services.users.UserService +.. autoclass:: pycpg.services.users.UserService :members: :show-inheritance: ``` ```{eval-rst} -.. autoclass:: py42.usercontext.UserContext +.. autoclass:: pycpg.usercontext.UserContext :members: :show-inheritance: ``` diff --git a/docs/methoddocs/util.md b/docs/methoddocs/util.md index f14ec2954..c7e25dbfa 100644 --- a/docs/methoddocs/util.md +++ b/docs/methoddocs/util.md @@ -1,6 +1,6 @@ # Util ```{eval-rst} -.. automodule:: py42.util +.. automodule:: pycpg.util :members: ``` diff --git a/docs/methoddocs/watchlists.md b/docs/methoddocs/watchlists.md deleted file mode 100644 index 9f4085071..000000000 --- a/docs/methoddocs/watchlists.md +++ /dev/null @@ -1,13 +0,0 @@ -# Watchlists - -```{eval-rst} -.. autoclass:: py42.clients.watchlists.WatchlistType - :members: - :show-inheritance: -``` - -```{eval-rst} -.. autoclass:: py42.clients.watchlists.WatchlistsClient - :members: - :show-inheritance: -``` diff --git a/docs/methods.md b/docs/methods.md index 0ae6a51d2..e1922f5c4 100644 --- a/docs/methods.md +++ b/docs/methods.md @@ -11,53 +11,42 @@ ``` The main SDK object by which all other methods are accessed is created by -calling `py42.sdk.from_local_account` or `py42.sdk.from_jwt_provider`. For example: +calling `pycpg.sdk.from_local_account` or `pycpg.sdk.from_jwt_provider`. For example: ```python -import py42.sdk +import pycpg.sdk -sdk = py42.sdk.from_local_account("console.us.code42.com", "john.doe@example.com", "my_pw") +sdk = pycpg.sdk.from_local_account("console.us1.crashplan.com", "john.doe@example.com", "my_pw") # access properties on 'sdk' to explore all the available methods ``` ```{eval-rst} .. important:: - `py42` only supports token-based authentication. + `pycpg` only supports token-based authentication. ``` -Explore the complete public documentation for `py42` below. +Explore the complete public documentation for `pycpg` below. 
-* [Alerts](methoddocs/alerts.md) -* [Alert Rules](methoddocs/alertrules.md) * [Archive](methoddocs/archive.md) * [Audit Logs](methoddocs/auditlogs.md) * [Backup Sets](methoddocs/backupset.md) -* [Cases](methoddocs/cases.md) * [Constants](methoddocs/constants.md) * [Devices](methoddocs/devices.md) * [Device Settings](methoddocs/devicesettings.md) * [Exceptions](methoddocs/exceptions.md) -* [(DEPRECATED) File Event Queries - V1](methoddocs/fileeventqueries.md) -* [File Event Queries - V2](methoddocs/fileeventqueriesv2.md) * [Legal Hold](methoddocs/legalhold.md) * [Legal Hold - API Clients](methoddocs/legalholdapiclient.md) * [Orgs](methoddocs/orgs.md) * [Org Settings](methoddocs/orgsettings.md) -* [Response](methoddocs/response.md) -* [Security Data](methoddocs/securitydata.md) -* [Shared Query Filters](methoddocs/sharedqueryfilters.md) -* [Trusted Activities](methoddocs/trustedactivities.md) * [Users](methoddocs/users.md) -* [User Risk Profiles](methoddocs/userriskprofile.md) * [Util](methoddocs/util.md) -* [Watchlists](methoddocs/watchlists.md) ```{eval-rst} -.. automodule:: py42.sdk +.. automodule:: pycpg.sdk :members: :show-inheritance: -.. autoclass:: py42.sdk.SDKClient +.. autoclass:: pycpg.sdk.SDKClient :members: :show-inheritance: ``` diff --git a/docs/userguides/backupsets.md b/docs/userguides/backupsets.md index 7fed86452..c97cc0007 100644 --- a/docs/userguides/backupsets.md +++ b/docs/userguides/backupsets.md @@ -1,11 +1,11 @@ # Configuring Backup Sets -Code42 devices' backup configurations are managed by "Backup Sets", which can be configured either at the individual +CrashPlan devices' backup configurations are managed by "Backup Sets", which can be configured either at the individual device level, or set as default configurations at the org level. -The py42 `BackupSet` class can be used to view and change the settings of a given backup set. +The pycpg `BackupSet` class can be used to view and change the settings of a given backup set. -`BackupSet` instances are automatically constructed by py42 and attached to their corresponding `DeviceSettings` or +`BackupSet` instances are automatically constructed by pycpg and attached to their corresponding `DeviceSettings` or `OrgSettings` objects, and stored in the `.backup_sets` properties (`DeviceSettings.backup_sets` or `OrgSettings.device_defaults.backup_sets`). diff --git a/docs/userguides/basics.md b/docs/userguides/basics.md index 574063c28..8e7315f92 100644 --- a/docs/userguides/basics.md +++ b/docs/userguides/basics.md @@ -1,31 +1,31 @@ -# py42 Basics +# pycpg Basics -This guide explains the basic concepts of py42. Learning these basics can help you gain confidence in writing your own +This guide explains the basic concepts of pycpg. Learning these basics can help you gain confidence in writing your own scripts. -- [py42 Basics](#py42-basics) +- [pycpg Basics](#pycpg-basics) - [Initialization](#initialization) - [Paging](#paging) - - [Py42Response](#py42response) + - [PycpgResponse](#pycpgresponse) - [Dates](#dates) - [Exceptions](#exceptions) -The examples from this guide are intended as blanket concepts that apply to other areas in py42. For example, paging +The examples from this guide are intended as blanket concepts that apply to other areas in pycpg. For example, paging over users and devices works the same way as over departing employees and alerts. 
## Initialization -To use py42, you must initialize the SDK: +To use pycpg, you must initialize the SDK: ```python -import py42.sdk +import pycpg.sdk -sdk = py42.sdk.from_local_account("https://console.us.code42.com", "my_username", "my_password") +sdk = pycpg.sdk.from_local_account("https://console.us1.crashplan.com", "my_username", "my_password") ``` -If your account uses [two-factor authentication](https://support.code42.com/Administrator/Cloud/Configuring/Two-factor_authentication_for_local_users), include the time-based one-time password: +If your account uses [two-factor authentication](https://support.crashplan.com/hc/en-us/articles/8720828072717-Two-Factor-Authentication-for-CrashPlan), include the time-based one-time password: ```python -sdk = py42.sdk.from_local_account("https://console.us.code42.com", "my_username", "my_password", totp="123456") +sdk = pycpg.sdk.from_local_account("https://console.us1.crashplan.com", "my_username", "my_password", totp="123456") ``` Alternatively, define a function that returns the time-based one-time password: @@ -34,7 +34,7 @@ Alternatively, define a function that returns the time-based one-time password: def promptForPassword(): return input("Please input your authentication code: ") -sdk = py42.sdk.from_local_account("https://console.us.code42.com", "my_username", "my_password", totp=promptForPassword) +sdk = pycpg.sdk.from_local_account("https://console.us1.crashplan.com", "my_username", "my_password", totp=promptForPassword) ``` Alternatively, define a function that returns the auth token based on user's authentication approach @@ -45,49 +45,50 @@ import requests from requests.auth import HTTPBasicAuth def jwt_provider(): res = requests.get( - 'https://console.us.code42.com/api/v3/auth/jwt?useBody=true', + 'https://console.us1.crashplan.com/api/v3/auth/jwt?useBody=true', auth=HTTPBasicAuth('username', 'password') ) res_json = json.loads(res.text) return res_json['data']['v3_user_token'] -sdk_client = py42.sdk.from_jwt_provider("https://console.us.code42.com", jwt_provider) +sdk_client = pycpg.sdk.from_jwt_provider("https://console.us1.crashplan.com", jwt_provider) ``` ## Paging -py42 clients often have a method with the name (or name prefix) `get_all` which handles iterating over pages of +pycpg clients often have a method with the name (or name prefix) `get_all` which handles iterating over pages of response items. Here are some examples: -* `py42.sdk.devices.get_all()` -* `py42.sdk.users.get_all()` -* `py42.sdk.legalhold.get_all_matters()` -* `py42.sdk.orgs.get_all()` +* `pycpg.sdk.devices.get_all()` +* `pycpg.sdk.users.get_all()` +* `pycpg.sdk.legalhold.get_all_matters()` +* `pycpg.sdk.orgs.get_all()` These methods each return a [python generator](https://wiki.python.org/moin/Generators). Looping over the pages returned by the generator gives you access to the actual list of items.
Use the code snippet below as an example -for working with generators and paging in py42: +for working with generators and paging in pycpg: ```python -# Prints the username and user ID for all employees included on a watchlist - -pages = sdk.watchlists.get_all_included_users(WATCHLIST_ID) # pages has 'generator' type -for page in pages: # page has 'Py42Response' type - users = page["includedUsers"] - for user in users: - username = user["username"] - user_id = user["userId"] - print(f"{username}: {user_id}") +# Prints the userUid and device name for all active devices + +pages = sdk.devices.get_all(active=True, include_backup_usage=True) # pages has 'generator' type +for page in pages: # page has 'PycpgResponse' type + devices = page["computers"] + for device in devices: + userUid = device["userUid"] + name = device["name"] + print(f"{userUid}: {name}") + ``` -Each page is a typical py42 response. The next section covers what you can do with `Py42Response` objects. +Each page is a typical pycpg response. The next section covers what you can do with `PycpgResponse` objects. -## Py42Response +## PycpgResponse -py42 clients return `Py42Response` objects which are intentionally similar to `requests.Response` objects. -The `Py42Response` class hides unneeded metadata found on the raw `requests.Response.text` (which is available as -`Py42Response.raw_text`), making it easier to get the most useful parts of the response. Also, the object is -subscriptable, meaning you can access it with keys or indices (depending on the JSON type underneath `data` on Code42 API responses): +pycpg clients return `PycpgResponse` objects which are intentionally similar to `requests.Response` objects. +The `PycpgResponse` class hides unneeded metadata found on the raw `requests.Response.text` (which is available as +`PycpgResponse.raw_text`), making it easier to get the most useful parts of the response. Also, the object is +subscriptable, meaning you can access it with keys or indices (depending on the JSON type underneath `data` on CrashPlan API responses): ```python user = response["users"][0] @@ -100,16 +101,16 @@ essentially print its text property: ```python -# Prints details about the response from a getting a detection list user. -response = sdk.detectionlists.get_user("test.user@example.com") +# Prints details about the response from getting a device. +response = sdk.devices.get_by_guid(908765043021) print(response) # JSON as Dictionary - same as print(response.text) print(response.raw_text) # Raw API response print(response.status_code) # 200 -cloud_usernames = response["cloudUsernames"] +alert_state = response["alertStates"] # if the response might not contain the property you're looking for, # check to see if it exists with data.get -cloud_usernames = response.data.get("cloudUsernames") -if cloud_usernames: - print(cloud_usernames) +alert_state = response.data.get("alertStates") +if alert_state: + print(alert_state) ``` ```{eval-rst} @@ -118,58 +119,52 @@ if cloud_usernames: ## Dates -Most dates in py42 support [POSIX timestamps](https://en.wikipedia.org/wiki/Unix_time) for date parameters. As an -example, see :class:`sdk.queries.filevents.filters.event_filter.EventTimestamp` which is used for querying file events +Most dates in pycpg support [POSIX timestamps](https://en.wikipedia.org/wiki/Unix_time) for date parameters. As an +example, see :meth:`sdk.legalhold.get_all_events` which is used for querying legal hold events by their event timestamp.
```python from datetime import datetime, timedelta -import py42.sdk -import py42.util -from py42.sdk.queries.fileevents.file_event_query import FileEventQuery -from py42.sdk.queries.fileevents.filters.event_filter import EventTimestamp +import pycpg.sdk +import pycpg.util -sdk = py42.sdk.from_local_account("https://console.us.code42.com", "my_username", "my_password") +sdk = pycpg.sdk.from_local_account("https://console.us1.crashplan.com", "my_username", "my_password") # Get the epoch date 14 days in the past -query_date = datetime.utcnow() - timedelta(days=14) -query_epoch = (query_date - datetime.utcfromtimestamp(0)).total_seconds() - -query = FileEventQuery(EventTimestamp.on_or_after(query_epoch)) - -response = sdk.securitydata.search_file_events(query) +event_date = datetime.utcnow() - timedelta(days=14) +event_epoch = (event_date - datetime.utcfromtimestamp(0)).total_seconds() -# Print all the md5 Checksums from every file event within the past 14 days. -file_events = response["fileEvents"] -for event in file_events: - print(event["md5Checksum"]) +# Print all the event types for all events in the past 14 days +hold_events = sdk.legalhold.get_all_events(min_event_date=event_epoch) +for event in hold_events: + print(event['eventType']) ``` ## Exceptions -py42 throws some of its own exceptions when failures occur. py42 exceptions are found in the `py42.sdk.exceptions` +pycpg throws some of its own exceptions when failures occur. pycpg exceptions are found in the `pycpg.exceptions` module. Some of the available exceptions are: -* `Py42ForbiddenError`: (403) With your currently signed-in account, you don't have the necessary permissions +* `PycpgForbiddenError`: (403) With your currently signed-in account, you don't have the necessary permissions to perform the action you were trying to do. -* `Py42UnauthorizedError`: (401) The username or password is incorrect. -* `Py42InternalServerError`: (500) Likely an unhandled issue on our servers. +* `PycpgUnauthorizedError`: (401) The username or password is incorrect. +* `PycpgInternalServerError`: (500) Likely an unhandled issue on our servers. For example, you are making a `create_sdk()` function and want to print a more user-friendly message when the provided username or password are incorrect: ```python import keyring -import py42.sdk -from py42.exceptions import Py42UnauthorizedError +import pycpg.sdk +from pycpg.exceptions import PycpgUnauthorizedError def create_sdk(username): """Tries to initialize SDK. If unauthorized, prints message and exits.""" try: password = keyring.get_password("my_program", username) - return py42.sdk.from_local_account("www.authority.example.com", username, password) - except Py42UnauthorizedError: + return pycpg.sdk.from_local_account("www.authority.example.com", username, password) + except PycpgUnauthorizedError: print("Invalid username or password.") exit(1) ``` diff --git a/docs/userguides/devicesettings.md b/docs/userguides/devicesettings.md index 578988394..3dd3910b6 100644 --- a/docs/userguides/devicesettings.md +++ b/docs/userguides/devicesettings.md @@ -1,11 +1,11 @@ # View or Modify device settings -Use py42 to easily view and update the settings for devices with the `DeviceSettings` and `IncydrDeviceSettings` objects for Crashplan and Incydr customers, respectively. +Use pycpg to easily view and update the settings for devices with the `DeviceSettings` object for CrashPlan customers.
-The [Device Settings](../methoddocs/devicesettings.md) objects are wrappers around the complex nested dict that the Code42 `Computer` API endpoint expects, +The [Device Settings](../methoddocs/devicesettings.md) objects are wrappers around the complex nested dict that the CrashPlan `Computer` API endpoint expects, providing helper properties that can be used to get/set values, without having to know the underlying nested structure. -To get started, create a `DeviceSettings` or `IncydrDeviceSettings` object for a given device guid. The `get_settings()` method will create the appropriate object automatically based on the corresponding service running on the device: +To get started, create a `DeviceSettings` object for a given device guid. The `get_settings()` method will create the appropriate object automatically based on the corresponding service running on the device: ```python device_settings = sdk.devices.get_settings(908765043021) @@ -43,12 +43,12 @@ For convenience and logging purposes, all changes are tracked in the `.changes` {'destinations': "{'43': 'PROe Cloud, US '} -> {'43': 'PROe Cloud, US ', '632540230984925185': 'PROe Cloud, US - West'}"} ``` -Once you've made all the desired changes to a `DeviceSettings` object, you can post the changes by passing it to the `sdk.devices.update_settings` method, which returns a `Py42Response` object +Once you've made all the desired changes to a `DeviceSettings` object, you can post the changes by passing it to the `sdk.devices.update_settings` method, which returns a `PycpgResponse` object with the server response: ```python >>> sdk.devices.update_settings(device_settings) - + ``` @@ -78,7 +78,7 @@ Because `DeviceSettings` is a subclass of `UserDict` with added attributes/metho the underlying dict that ultimately gets posted to the server is stored on the `.data` attribute of `DeviceSettings` instances, and a `DeviceSettings` object otherwise behaves like a normal dict. -If there is a setting that is not yet implemented by py42 as a helper method/attribute, those values can be manually managed +If there is a setting that is not yet implemented by pycpg as a helper method/attribute, those values can be manually managed by treating the `DeviceSettings` object as a normal dict. For example, setting the "backup status email frequency" value to only send every 10 days, via the helper attribute: @@ -93,9 +93,9 @@ And doing the same thing by setting the value manually on the underlying dict: >>> device_settings["settings"]["serviceBackupConfig"]["backupStatusEmailFreqInMinutes"] = "14400" ``` -The benefits of the py42 helper attributes/methods is that the values mimic what the Console UI uses (in this case days +The benefits of the pycpg helper attributes/methods is that the values mimic what the Console UI uses (in this case days vs the minutes expected by the API endpoint), so you don't have to worry about doing conversions yourself. But since -the underlying dict is accessible, you aren't constrained to only what py42 has so far implemented. +the underlying dict is accessible, you aren't constrained to only what pycpg has so far implemented. ```{eval-rst} .. 
warning:: diff --git a/docs/userguides/gettingstarted.md b/docs/userguides/gettingstarted.md index 94191bf70..1ed867d10 100644 --- a/docs/userguides/gettingstarted.md +++ b/docs/userguides/gettingstarted.md @@ -1,4 +1,4 @@ -# Getting started with py42 +# Getting started with pycpg * [Licensing](#licensing) * [Installation](#installation) @@ -7,35 +7,35 @@ ## Licensing -This project uses the [MIT License](https://github.com/code42/py42/blob/main/LICENSE.md). +This project uses the [MIT License](https://github.com/CrashPlan-Labs/pycpg/blob/main/LICENSE.md). ## Installation -You can install py42 from PyPI, from source, or from distribution. +You can install pycpg from PyPI, from source, or from distribution. ### From PyPI The easiest and most common way is to use `pip`: ```bash -pip install py42 +pip install pycpg ``` -To install a previous version of py42 via `pip`, add the version number. For example, to install version +To install a previous version of pycpg via `pip`, add the version number. For example, to install version 0.4.1, you would enter: ```bash -pip install py42==0.4.1 +pip install pycpg==0.4.1 ``` -Visit the [project history](https://pypi.org/project/py42/#history) on PyPI to see all published versions. +Visit the [project history](https://pypi.org/project/pycpg/#history) on PyPI to see all published versions. ### From source -Alternatively, you can install py42 directly from [source code](https://github.com/code42/py42): +Alternatively, you can install pycpg directly from [source code](https://github.com/CrashPlan-Labs/pycpg): ```bash -git clone https://github.com/code42/py42.git +git clone https://github.com/CrashPlan-Labs/pycpg.git ``` When it finishes downloading, from the root project directory, run: @@ -55,40 +55,40 @@ python setup.py sdist After it finishes building, the `.tar` ball will be located in the newly created `dist` directory. To install it, enter: ```bash -pip install py42-[VERSION].tar.gz +pip install pycpg-[VERSION].tar.gz ``` ## Authentication ```{eval-rst} -.. important:: py42 currently only supports token-based authentication. +.. important:: pycpg currently only supports token-based authentication. ``` -To initialize the `py42.sdk.SDKClient`, you must provide your credentials. If you are writing a script, +To initialize the `pycpg.sdk.SDKClient`, you must provide your credentials. If you are writing a script, we recommend using a secure password storage library, such as `keyring`, for retrieving passwords and secrets. However, subsequent requests use JWT authentication. ### Basic Authentication -Py42 supports basic auth with your Code42 username and password. +Pycpg supports basic auth with your CrashPlan username and password. -If your account uses [two-factor authentication](https://support.code42.com/Administrator/Cloud/Configuring/Two-factor_authentication_for_local_users), include the time-based one-time password (TOTP) when you initialize the `py42.sdk.SDKClient`. +If your account uses [two-factor authentication](https://support.crashplan.com/Administrator/Cloud/Configuring/Two-factor_authentication_for_local_users), include the time-based one-time password (TOTP) when you initialize the `pycpg.sdk.SDKClient`. You can also provide a callable object that returns a TOTP. If you pass a callable, it will be called whenever a new TOTP is required to renew the authentication token. 
```python -import py42.sdk +import pycpg.sdk -sdk = py42.sdk.from_local_account("https://console.code42.com", "username@code42.com", "password") +sdk = pycpg.sdk.from_local_account("https://console.us1.crashplan.com", "username@crashplan.com", "password") ``` -### Code42 API Clients +### CrashPlan API Clients -Py42 also supports api clients. You can use the client ID and secret generated through the Code42 console to initiate the `SDKClient`. +Pycpg also supports API clients. You can use the client ID and secret generated through the CrashPlan console to initialize the `SDKClient`. ```python -import py42.sdk +import pycpg.sdk -sdk = py42.sdk.from_api_client("https://console.code42.com", "key-123-42", "my%secret!") +sdk = pycpg.sdk.from_api_client("https://console.us1.crashplan.com", "key-123-42", "my%secret!") ``` ## Troubleshooting and support @@ -96,37 +96,37 @@ sdk = py42.sdk.from_api_client("https://console.code42.com", "key-123-42", "my%s ### Debug mode Debug mode may be useful if you are trying to determine if you are experiencing permissions issues. When debug mode is -on, py42 logs HTTP request data to the console's stderr. Use the following as a guide for how to turn on debug mode in -py42: +on, pycpg logs HTTP request data to the console's stderr. Use the following as a guide for how to turn on debug mode in +pycpg: ```python -import py42.sdk -import py42.settings +import pycpg.sdk +import pycpg.settings import logging -py42.settings.debug.level = logging.DEBUG +pycpg.settings.debug.level = logging.DEBUG ``` -To provide your own logger, just replace `py42.settings.debug.logger`: +To provide your own logger, just replace `pycpg.settings.debug.logger`: ``` custom_logger = logging.getLogger("my_app") handler = logging.FileHandler("my_app.log") custom_logger.addHandler(handler) -py42.settings.debug.logger = custom_logger +pycpg.settings.debug.logger = custom_logger ``` ### File an issue on GitHub -If you are experiencing an issue with py42, you can create a *New issue* at the -[project repository](https://github.com/code42/py42/issues). See the Github [guide on creating an issue](https://help.github.com/en/github/managing-your-work-on-github/creating-an-issue) for more information. +If you are experiencing an issue with pycpg, you can create a *New issue* at the +[project repository](https://github.com/CrashPlan-Labs/pycpg/issues). See the GitHub [guide on creating an issue](https://help.github.com/en/github/managing-your-work-on-github/creating-an-issue) for more information. -### Contact Code42 Support +### Contact CrashPlan Support If you don't have a GitHub account and are experiencing issues, contact -[Code42 support](https://support.code42.com/). +[CrashPlan support](https://support.crashplan.com/). ## What's next? -Learn the basics by following the py42 [Basics guide](basics.md). +Learn the basics by following the pycpg [Basics guide](basics.md). diff --git a/docs/userguides/orgdevices.md b/docs/userguides/orgdevices.md index f05b91520..49cba74fa 100644 --- a/docs/userguides/orgdevices.md +++ b/docs/userguides/orgdevices.md @@ -1,16 +1,16 @@ # Get Active Devices From an Organization -Using py42, you can retrieve information about the active devices in your organization for various use cases. For example, you might want to create a simple report that illustrates how many devices are running each operating system in your Code42 environment. Your user role determines which devices you have access to.
+Using pycpg, you can retrieve information about the active devices in your organization for various use cases. For example, you might want to create a simple report that illustrates how many devices are running each operating system in your CrashPlan environment. Your user role determines which devices you have access to. To begin, initialize the SDK: ```python -import py42.sdk -sdk = py42.sdk.from_local_account("https://console.us.code42.com", "my_username", "my_password") +import pycpg.sdk +sdk = pycpg.sdk.from_local_account("https://console.us1.crashplan.com", "my_username", "my_password") ``` ### The `DeviceClient.get_all()` Function -Next, use `py42.sdk.clients.devices.DeviceClient` to search for active devices in your +Next, use `pycpg.sdk.clients.devices.DeviceClient` to search for active devices in your organization. Use the `active` parameter on the `get_all()` method. The `active` parameter has three different states: diff --git a/docs/userguides/orgsettings.md b/docs/userguides/orgsettings.md index 8149a559b..45016d9c0 100644 --- a/docs/userguides/orgsettings.md +++ b/docs/userguides/orgsettings.md @@ -1,8 +1,8 @@ # View or Modify organization settings -Use py42 to easily view and update the settings for organizations with the `OrgSettings` object. +Use pycpg to easily view and update the settings for organizations with the `OrgSettings` object. -The `OrgSettings` object is a wrapper around the complex dicts that the Code42 `Org` and `OrgSettings` API endpoints expect, +The `OrgSettings` object is a wrapper around the complex dicts that the CrashPlan `Org` and `OrgSettings` API endpoints expect, providing helper properties that can be used to get/set values, without having to know the underlying complexity of the APIs. To get started, create a `OrgSettings` object for a given org_id: @@ -56,10 +56,10 @@ Because there are two endpoints that manage different organization settings valu method might make up to two requests to the server, depending on what `OrgSetting` values were actually modified. Because of the potential for two response values, `orgs.update_settings()` returns a `OrgSettingsResponse` namedtuple with the responses from both endpoints (if applicable), along with an `error` flag that indicates if any errors occurred. If an error occurred, the `org_response` or `org_settings_response` attributes will contain the -`Py42Exception` that was raised instead of the `Py42Response`. +`PycpgException` that was raised instead of the `PycpgResponse`. 
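As an illustration of working with that namedtuple, here is a sketch based only on the field names given above (`error`, `org_response`, and `org_settings_response`); it checks the flag before trusting either response:

```python
# Assumes `sdk` and `org_settings` were set up as shown earlier in this guide.
response = sdk.orgs.update_settings(org_settings)

if response.error:
    # Whichever endpoint failed holds the PycpgException instead of a PycpgResponse;
    # the other attribute may be None if that endpoint was not called.
    print("Org endpoint result:", response.org_response)
    print("OrgSettings endpoint result:", response.org_settings_response)
else:
    print("Organization settings updated.")
```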
```python >>> sdk.orgs.update_settings(org_settings) -OrgSettingsResponse(error=False, org_response=, org_settings_response=None) +OrgSettingsResponse(error=False, org_response=, org_settings_response=None) ``` diff --git a/setup.cfg b/setup.cfg index c46dd8622..6b719a295 100644 --- a/setup.cfg +++ b/setup.cfg @@ -33,11 +33,5 @@ ignore = max-line-length = 80 per-file-ignores = # these export names - src/py42/sdk/queries/fileevents/filters/__init__.py: F401, F403 - src/py42/sdk/queries/fileevents/v2/filters/__init__.py: F401, F403 - src/py42/sdk/queries/fileevents/v2/__init__.py: F401, F403 - src/py42/sdk/queries/fileevents/v1/__init__.py: F401, F403 - src/py42/sdk/queries/fileevents/file_event_query.py: F401, F403 - src/py42/sdk/queries/alerts/filters/__init__.py: F401, F403 - src/py42/constants/__init__.py: F401 + src/pycpg/constants/__init__.py: F401 docs/conf.py: F401 diff --git a/setup.py b/setup.py index ce7f0c926..1f8771039 100644 --- a/setup.py +++ b/setup.py @@ -7,22 +7,22 @@ here = path.abspath(path.dirname(__file__)) about = {} -with open(path.join(here, "src", "py42", "__version__.py"), encoding="utf8") as fh: +with open(path.join(here, "src", "pycpg", "__version__.py"), encoding="utf8") as fh: exec(fh.read(), about) with open(path.join(here, "README.md"), "r", "utf-8") as f: readme = f.read() setup( - name="py42", + name="pycpg", version=about["__version__"], - url="https://github.com/code42/py42", + url="https://github.com/CrashPlan-Labs/pycpg", project_urls={ - "Issue Tracker": "https://github.com/code42/py42/issues", - "Documentation": "https://py42docs.code42.com/", - "Source Code": "https://github.com/code42/py42", + "Issue Tracker": "https://github.com/CrashPlan-Labs/pycpg/issues", + "Documentation": "https://pycpgdocs.CrashPlan.com/", + "Source Code": "https://github.com/CrashPlan-Labs/pycpg", }, - description="The Official Code42 Python API Client", + description="The Official CrashPlan Python API Client", long_description=readme, long_description_content_type="text/markdown", packages=find_packages("src"), @@ -43,8 +43,8 @@ "tox==3.24.0", ], "docs": [ - "sphinx==8.1.3", - "myst-parser==4.0.0", + "sphinx==8.2.3", + "myst-parser==4.0.1", "sphinx_rtd_theme==3.0.2", "docutils == 0.21.2", ], diff --git a/src/py42/__version__.py b/src/py42/__version__.py deleted file mode 100644 index 3b3b99dd6..000000000 --- a/src/py42/__version__.py +++ /dev/null @@ -1,3 +0,0 @@ -# py42 - -__version__ = "1.27.3" diff --git a/src/py42/clients/alertrules.py b/src/py42/clients/alertrules.py deleted file mode 100644 index ab87b70f8..000000000 --- a/src/py42/clients/alertrules.py +++ /dev/null @@ -1,175 +0,0 @@ -from py42 import settings -from py42.exceptions import Py42InternalServerError -from py42.exceptions import Py42InvalidRuleOperationError - - -class AlertRulesClient: - """`Rest Documentation `__""" - - def __init__(self, alerts_service, alert_rules_service): - self._alerts_service = alerts_service - self._alert_rules_service = alert_rules_service - - @property - def exfiltration(self): - """A collection of methods for managing exfiltration alert rules. - - Returns: - :class:`py42.services.alertrules.exfiltration.ExfiltrationService` - """ - return self._alert_rules_service.exfiltration - - @property - def cloudshare(self): - """A collection of methods for managing cloud sharing alert rules. 
- - Returns: - :class:`py42.services.alertrules.cloud_share.CloudShareService` - """ - return self._alert_rules_service.cloudshare - - @property - def filetypemismatch(self): - """A collection of methods for managing file type mismatch alert rules. - - Returns: - :class:`py42.services.alertrules.file_type_mismatch.FileTypeMismatchService` - """ - return self._alert_rules_service.filetypemismatch - - def add_user(self, rule_id, user_id): - """Update alert rule criteria to add a user and all their aliases to an alert rule. A rule's - user list can either be inclusive (only the users on the list can generate alerts) or exclusive - (everyone can generate alerts, except users on the list). This method will include or - exclude based on the rule configuration. - - `Rest Documentation `__ - - Args: - rule_id (str): Observer Id of a rule to be updated. - user_id (str): The Code42 userUid of the user to add to the alert - - Returns - :class:`py42.response.Py42Response` - """ - try: - return self._alert_rules_service.add_user(rule_id, user_id) - except Py42InternalServerError as err: - rules = self.get_by_observer_id(rule_id)["ruleMetadata"] - _check_if_system_rule(err, rules) - raise - - def remove_user(self, rule_id, user_id): - """Update alert rule criteria to remove a user and all their aliases from an alert rule. A rule's - user list can either be inclusive (only the users on the list can generate alerts) or exclusive - (everyone can generate alerts, except users on the list). This method will include or - exclude based on the rule configuration. - - `Rest Documentation `__ - - Args: - rule_id (str): Observer rule Id of a rule to be updated. - user_id (str): The Code42 userUid of the user to remove from the alert - - Returns - :class:`py42.response.Py42Response` - """ - try: - return self._alert_rules_service.remove_user(rule_id, user_id) - except Py42InternalServerError as err: - rules = self.get_by_observer_id(rule_id)["ruleMetadata"] - _check_if_system_rule(err, rules) - raise - - def remove_all_users(self, rule_id): - """Update alert rule criteria to remove all users the from the alert rule. - - `Rest Documentation `__ - - Args: - rule_id (str): Observer rule Id of a rule to be updated. - - Returns - :class:`py42.response.Py42Response` - """ - try: - return self._alert_rules_service.remove_all_users(rule_id) - except Py42InternalServerError as err: - rules = self.get_by_observer_id(rule_id)["ruleMetadata"] - _check_if_system_rule(err, rules) - raise - - def get_page( - self, sort_key="CreatedAt", sort_direction="DESC", page_num=1, page_size=None - ): - """Gets a page of alert rules. Note that you can use page_size here the same - way as other methods that have a `page_size` parameter in py42. However, under - the hood, it subtracts one from the given page size in the implementation as - the Code42 alerts API expected the start page to be zero while the rest of the - Code42 APIs expect the start page to be one. - - Args: - sort_key (str, optional): Sort results based by field. Defaults to "CreatedAt". - sort_direction (str, optional): ``ASC`` or ``DESC``. Constants available at - :class:`py42.constants.SortDirection`. Defaults to "DESC". - page_num (int, optional): The page number to get. Defaults to 1. - page_size (int, optional): The number of items per page. Defaults to `py42.settings.items_per_page`. 
- - Returns: - :class:`py42.response.Py42Response` - """ - page_size = page_size or settings.items_per_page - return self._alerts_service.get_rules_page( - sort_key=sort_key, - sort_direction=sort_direction, - page_num=page_num, - page_size=page_size, - ) - - def get_all(self, sort_key="CreatedAt", sort_direction="DESC"): - """Fetch all available rules. - - Args: - sort_key (str, optional): Sort results based by field. Defaults to 'CreatedAt'. - sort_direction (str, optional): ``ASC`` or ``DESC``. Constants available at - :class:`py42.constants.SortDirection`. Defaults to "DESC" - - Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects - that each contain a page of rules. - """ - return self._alerts_service.get_all_rules( - sort_key=sort_key, sort_direction=sort_direction - ) - - def get_all_by_name(self, rule_name): - """Search for matching rules by name. - - Args: - rule_name (str): Rule name to search for, case insensitive search. - - Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects - that each contain a page of rules with the given name. - """ - return self._alerts_service.get_all_rules_by_name(rule_name) - - def get_by_observer_id(self, observer_id): - """Get the rule with the matching observer ID. - - Args: - observer_id (str): The observer ID of the rule to return. - - Returns - :class:`py42.response.Py42Response` - """ - return self._alerts_service.get_rule_by_observer_id(observer_id) - - -def _check_if_system_rule(base_err, rules): - """You cannot add or remove users from system rules this way; use the specific - feature behind the rule, such as the Departing Employee list.""" - if rules and rules[0]["isSystem"]: - observer_id = rules[0]["observerRuleId"] - source = rules[0]["ruleSource"] - raise Py42InvalidRuleOperationError(base_err, observer_id, source) diff --git a/src/py42/clients/alerts.py b/src/py42/clients/alerts.py deleted file mode 100644 index 914961563..000000000 --- a/src/py42/clients/alerts.py +++ /dev/null @@ -1,168 +0,0 @@ -from py42.sdk.queries.alerts.filters import AlertState - - -class AlertsClient: - """A client to expose alert API. - - `Rest Documentation `__ - """ - - def __init__(self, alert_service, alert_rules_client): - self._alert_service = alert_service - self._alert_rules_client = alert_rules_client - - @property - def rules(self): - """A collection of methods for managing alert rules. - - Returns: - :class:`py42.services.alertrules.AlertRulesClient` - """ - return self._alert_rules_client - - def search(self, query, page_num=1, page_size=None): - """Searches alerts using the given :class:`py42.sdk.queries.alerts.alert_query.AlertQuery`. - - `Rest Documentation `__ - - Args: - query (:class:`py42.sdk.queries.alerts.alert_query.AlertQuery`): An alert query. - page_num (int, optional): The page number to get. Defaults to 1. - page_size (int, optional): The number of items per page. Defaults to `py42.settings.items_per_page`. - - - Returns: - :class:`py42.response.Py42Response`: A response containing the alerts that match the given - query. - """ - return self._alert_service.search(query, page_num, page_size) - - def search_all_pages(self, query): - """Searches alerts using the given :class:`py42.sdk.queries.alerts.alert_query.AlertQuery`. - - `Rest Documentation `__ - - Args: - query (:class:`py42.sdk.queries.alerts.alert_query.AlertQuery`): An alert query. 
- - Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects - that each contain a page of alerts that match the given query. - """ - return self._alert_service.search_all_pages(query) - - def get_details(self, alert_ids): - """Gets the details for the alerts with the given IDs, including the file event query that, - when passed into a search, would result in events that could have triggered the alerts. - - `Rest Documentation `__ - - Args: - alert_ids (str or list[str]): The identification number(s) of the alerts for which you want to - get details for. Note: The alerts backend accepts a maximum of 100 alerts per request. - - Returns: - :class:`py42.response.Py42Response`: A response containing the alert details. - """ - return self._alert_service.get_details(alert_ids) - - def resolve(self, alert_ids, reason=None): - """Resolves the alerts with the given IDs. - - Args: - alert_ids (str or list[str]): The identification number(s) for the alerts to resolve. - Note: The alerts backend accepts a maximum of 100 alerts per request. - reason (str, optional): The reason the alerts are now resolved. Defaults to None. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._alert_service.update_state( - AlertState.DISMISSED, alert_ids, note=reason - ) - - def reopen(self, alert_ids, reason=None): - """Reopens the resolved alerts with the given IDs. - - Args: - alert_ids (str or list[str]): The identification number(s) for the alerts to reopen. - Note: The alerts backend accepts a maximum of 100 alerts per request. - reason (str, optional): The reason the alerts are reopened. Defaults to None. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._alert_service.update_state(AlertState.OPEN, alert_ids, note=reason) - - def update_state(self, status, alert_ids, note=None): - """Updates the status of alerts. - - Args: - status (str): Status to set from OPEN, RESOLVED, PENDING, IN_PROGRESS - alert_ids (str or list[str]): The identification number(s) for the alerts to reopen. - Note: The alerts backend accepts a maximum of 100 alerts per request. - note (str, optional): A note to attach to the alerts. Must be less than 2000 - characters. Defaults to None. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._alert_service.update_state(status, alert_ids, note=note) - - def update_note(self, alert_id, note): - """Updates an alert's note. - - Args: - alert_id (str): The identification number of an alert to add a note to. - note (str): A note to attach to the alert. Must be less than 2000 - characters. Defaults to None. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._alert_service.update_note(alert_id, note) - - def get_aggregate_data(self, alert_id): - """Gets alert summary with details about observations. - - Args: - alert_id (str): Gets the details for the alert with the given ID. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._alert_service.get_aggregate_data(alert_id) - - def get_all_alert_details(self, query): - """ - Helper method that combines :func:`.search_all_pages()` and :func:`.get_details()` - methods to get alert objects with alert "observations" details populated. - Returns an iterator of alert detail objects. - - Note: automatically overrides the `page_size` property on the query object to limit - search to 100 results per page, as that is the max that :func:`.get_details()` can - request at a time. 
- - Args: - query (:class:`py42.sdk.queries.alerts.alert_query.AlertQuery`): An alert query. - - Returns: - generator: An object that iterates over alert detail items. - """ - query.page_size = 25 - sort_key = query.sort_key[0].lower() + query.sort_key[1:] - if sort_key == "alertId": - sort_key = "id" - reverse = query.sort_direction == "desc" - pages = self._alert_service.search_all_pages(query) - for page in pages: - alert_ids = [alert["id"] for alert in page["alerts"]] - if alert_ids: - alert_details = self._alert_service.get_details(alert_ids) - yield from sorted( - alert_details["alerts"], - key=lambda x: x.get(sort_key), - reverse=reverse, - ) - else: - yield from [] diff --git a/src/py42/clients/cases.py b/src/py42/clients/cases.py deleted file mode 100644 index 625997877..000000000 --- a/src/py42/clients/cases.py +++ /dev/null @@ -1,250 +0,0 @@ -from datetime import datetime - -from py42.choices import Choices -from py42.util import parse_timestamp_to_milliseconds_precision - - -class CaseStatus(Choices): - """Constants available for setting the status of a case. - - * ``OPEN`` - * ``CLOSED`` - """ - - OPEN = "OPEN" - CLOSED = "CLOSED" - - -class CasesClient: - """A client to expose cases API. - - `Rest documentation `__ - """ - - def __init__(self, cases_service, cases_file_event_service): - self._cases_service = cases_service - self._file_events = cases_file_event_service - - @property - def file_events(self): - """A collection of methods for managing file events associated with a given case. - - Returns: - :class:`py42.services.casesfileevents.CasesFileEventsService` - """ - return self._file_events - - def create( - self, name, subject=None, assignee=None, description=None, findings=None - ): - """Creates a new case. - `Rest documentation `__ - - Args: - name (str): Name of the case. - subject (str, optional): User UID of a subject of a case. - assignee (str, optional): User UID of the assignee. - description (str, optional): Description of the case - findings (str, optional): Observations of the case. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._cases_service.create( - name, - subject=subject, - assignee=assignee, - description=description, - findings=findings, - ) - - def get_page( - self, - page_num, - name=None, - status=None, - min_create_time=None, - max_create_time=None, - min_update_time=None, - max_update_time=None, - subject=None, - assignee=None, - page_size=100, - sort_direction="asc", - sort_key="number", - **kwargs, - ): - """Gets individual page of cases. - `Rest documentation `__ - - Args: - page_num (int): The page number to request. - name (str, optional): Filter results by case name, matches partial names. Defaults to None. - status (str, optional): Filter results by case status. ``OPEN`` or ``CLOSED``. Defaults to None. Constants available at :class:`py42.constants.CaseStatus`. - min_create_time (str or int or float or datetime, optional): Filter results by case creation time, start time. - str format %Y-%m-%d %H:%M:%S. Defaults to None. - max_create_time (str or int or float or datetime, optional): Filter results by case creation time, end time. - str format %Y-%m-%d %H:%M:%S. Defaults to None. - min_update_time (str or int or float or datetime, optional): Filter results by last updated time, start time. - str format %Y-%m-%d %H:%M:%S. Defaults to None. - max_update_time (str or int or float or datetime, optional): Filter results by last updated time, end time. - str format %Y-%m-%d %H:%M:%S. Defaults to None. 
- subject (str, optional): Filter results based on User UID of a subject of a case. Defaults to None. - assignee (str, optional): Filter results based on User UID of an assignee of a case. Defaults to None. - page_size (int, optional): Number of results to return per page. Defaults to 100. - sort_direction (str, optional): The direction on which to sort the response, - based on the corresponding sort key. `asc` or `desc`. Defaults to `asc`. - sort_key (str, optional): Values on which the response will be sorted. Defaults to "number". - Available options are `name`, `number`, `createdAt`, `updatedAt`, `status`, `assigneeUsername`, `subjectUsername`. - - Returns: - :class:`py42.response.Py42Response` - """ - - created_at = _make_range(min_create_time, max_create_time) - updated_at = _make_range(min_update_time, max_update_time) - - return self._cases_service.get_page( - page_num, - name=name, - status=status, - created_at=created_at, - updated_at=updated_at, - subject=subject, - assignee=assignee, - page_size=page_size, - sort_direction=sort_direction, - sort_key=sort_key, - **kwargs, - ) - - def get_all( - self, - name=None, - status=None, - min_create_time=None, - max_create_time=None, - min_update_time=None, - max_update_time=None, - subject=None, - assignee=None, - page_size=100, - sort_direction="asc", - sort_key="number", - **kwargs, - ): - """Gets all cases. - `Rest documentation `__ - - Args: - name (str, optional): Filter results by case name, matches partial names. Defaults to None. - status (str, optional): Filter results by case status. ``OPEN`` or ``CLOSED``. Defaults to None. Constants available at :class:`py42.constants.CaseStatus`. - min_create_time (str or int or float or datetime, optional): Filter results by case creation time, start time. - str format %Y-%m-%d %H:%M:%S. Defaults to None. - max_create_time (str or int or float or datetime, optional): Filter results by case creation time, end time. - str format %Y-%m-%d %H:%M:%S. Defaults to None. - min_update_time (str or int or float or datetime, optional): Filter results by last updated time, start time. - str format %Y-%m-%d %H:%M:%S. Defaults to None. - max_update_time (str or int or float or datetime, optional): Filter results by last updated time, end time. - str format %Y-%m-%d %H:%M:%S. Defaults to None. - subject (str, optional): Filter results based on User UID of a subject of a case. Defaults to None. - assignee (str, optional): Filter results based on User UID of an assignee of a case. Defaults to None. - page_size (int, optional): Number of results to return per page. Defaults to 100. - sort_direction (str, optional): The direction on which to sort the response, - based on the corresponding sort key. `asc` or `desc`. Defaults to `asc`. - sort_key (str, optional): Values on which the response will be sorted. Defaults to "number". - Available options are `name`, `number`, `createdAt`, `updatedAt`, `status`, `assigneeUsername`, `subjectUsername`. - - Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects - that each contain a page of cases. 
- """ - - created_at = _make_range(min_create_time, max_create_time) - updated_at = _make_range(min_update_time, max_update_time) - - return self._cases_service.get_all( - name=name, - status=status, - created_at=created_at, - updated_at=updated_at, - subject=subject, - assignee=assignee, - page_size=page_size, - sort_direction=sort_direction, - sort_key=sort_key, - **kwargs, - ) - - def get(self, case_number): - """Retrieve case details by case number. - `Rest documentation `__ - - Args: - case_number (int): Case number of the case. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._cases_service.get(case_number) - - def export_summary(self, case_number): - """Provides case summary to download as a PDF file. - `Rest documentation `__ - - - Args: - case_number (int): Case number of the case. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._cases_service.export_summary(case_number) - - def update( - self, - case_number, - name=None, - subject=None, - assignee=None, - description=None, - findings=None, - status=None, - ): - """Updates case details for the given case number. - `Rest documentation `__ - - - Args: - case_number (int): Case number of the case. - name (str, optional): Name of the case. Defaults to None. - subject (str, optional): A subject of the case. Defaults to None. - assignee (str, optional): User UID of the assignee. Defaults to None. - description (str, optional): Description of the case. Defaults to None. - findings (str, optional): Notes on the case. Defaults to None. - status (str, optional): Status of the case. ``OPEN`` or ``CLOSED``. Defaults to None. Constants available at :class:`py42.constants.CaseStatus`. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._cases_service.update( - case_number, - name=name, - subject=subject, - assignee=assignee, - description=description, - findings=findings, - status=status, - ) - - -def _make_range(begin_time, end_time): - if not begin_time and not end_time: - return None - if not begin_time: - begin_time = datetime.utcfromtimestamp(0) - if not end_time: - end_time = datetime.utcnow() - end = parse_timestamp_to_milliseconds_precision(end_time) - start = parse_timestamp_to_milliseconds_precision(begin_time) - return f"{start}/{end}" diff --git a/src/py42/clients/securitydata.py b/src/py42/clients/securitydata.py deleted file mode 100644 index d7ab1ffb7..000000000 --- a/src/py42/clients/securitydata.py +++ /dev/null @@ -1,218 +0,0 @@ -from py42.exceptions import Py42ChecksumNotFoundError -from py42.exceptions import Py42Error -from py42.sdk.queries.fileevents.v2.file_event_query import FileEventQuery -from py42.sdk.queries.fileevents.v2.filters.file import MD5 -from py42.sdk.queries.fileevents.v2.filters.file import SHA256 -from py42.services.util import escape_quote_chars - - -class SecurityDataClient: - def __init__( - self, - file_event_service, - preservation_data_service, - saved_search_service, - storage_service_factory, - ): - self._file_event_service = file_event_service - self._preservation_data_service = preservation_data_service - self._saved_search_service = saved_search_service - self._storage_service_factory = storage_service_factory - - @property - def savedsearches(self): - """A collection of methods related to retrieving forensic search data. 
- - Returns: - :class: `py42.services.savedsearch.SavedSearchService` - """ - return self._saved_search_service - - def search_file_events(self, query): - """Searches for file events, returns up to the first 10,000 events. - `REST Documentation `__ - - Args: - query (str or :class:`py42.sdk.queries.fileevents.v2.file_event_query.FileEventQuery`): - The file event query to filter search results. - - Returns: - :class:`py42.response.Py42Response`: A response containing the first 10,000 - events. - """ - return self._file_event_service.search(query) - - def search_all_file_events(self, query, page_token=""): - """Searches for all file events, returning a page of events with a token in the response to retrieve next page. - `REST Documentation `__ - - Args: - query (str or :class:`py42.sdk.queries.fileevents.v2.file_event_query.FileEventQuery`): - The file event query to filter search results. - page_token (str, optional): A token used to indicate the starting point for - additional page results. For the first page, do not pass ``page_token``. For - all consecutive pages, pass the token from the previous response from - field ``nextPgToken``. Defaults to empty string. - - Returns: - :class:`py42.response.Py42Response`: A response containing a page of events. - """ - - query.page_token = escape_quote_chars(page_token) - response = self._file_event_service.search(query) - return response - - def stream_file_by_sha256(self, checksum): - """Stream file based on SHA256 checksum. - - Args: - checksum (str): SHA256 hash of the file. - - Returns: - Returns a stream of the requested file. - """ - response = self._search_by_hash(checksum, SHA256) - events = response["fileEvents"] - info = _get_version_lookup_info(events) - if not len(events) or not info: - raise Py42ChecksumNotFoundError(response, "SHA256", checksum) - return self._stream_file(checksum, info) - - def stream_file_by_md5(self, checksum): - """Stream file based on MD5 checksum. - - Args: - checksum (str): MD5 hash of the file. - - Returns: - Returns a stream of the requested file. 
- """ - response = self._search_by_hash(checksum, MD5) - events = response["fileEvents"] - info = _get_version_lookup_info(events) - if not len(events) or not info: - raise Py42ChecksumNotFoundError(response, "MD5", checksum) - return self._stream_file(checksum, info) - - def _search_by_hash(self, checksum, checksum_type): - query = FileEventQuery.all(checksum_type.eq(checksum)) - query.sort_key = "@timestamp" - query.sort_direction = "desc" - response = self.search_file_events(query) - return response - - def _stream_file(self, checksum, version_info): - (device_guid, md5_hash, sha256_hash, path) = version_info - version = self._get_file_version_for_stream( - device_guid, md5_hash, sha256_hash, path - ) - if version: - return self._get_file_stream(version) - - raise Py42Error(f"No file with hash {checksum} available for download.") - - def _get_file_version_for_stream(self, device_guid, md5_hash, sha256_hash, path): - version = self._get_device_file_version( - device_guid, md5_hash, sha256_hash, path - ) - if not version: - version = self._get_other_file_location_version(md5_hash, sha256_hash) - return version - - def _get_device_file_version(self, device_guid, md5_hash, sha256_hash, path): - response = self._preservation_data_service.get_file_version_list( - device_guid, md5_hash, sha256_hash, path - ) - versions = ( - response.data.get("securityEventVersionsMatchingChecksum") - or response.data.get("securityEventVersionsAtPath") - or response.data.get("preservationVersions") - ) - - if versions: - if not response.data.get("securityEventVersionsAtPath"): - exact_match = _get_first_matching_version(versions, md5_hash) - if exact_match: - return exact_match - - most_recent = sorted( - versions, key=lambda i: i["versionTimestamp"], reverse=True - ) - return most_recent[0] - - def _get_other_file_location_version(self, md5_hash, sha256_hash): - response = self._file_event_service.get_file_location_detail_by_sha256( - sha256_hash - ) - locations = response["locations"] - if locations: - paths = _parse_file_location_response(locations) - version = self._preservation_data_service.find_file_version( - md5_hash, sha256_hash, paths - ) - if version.status_code == 200: - return version.data - - def _get_file_stream(self, version): - if version.get("edsUrl"): - return self._get_exfiltrated_file(version) - - return self._get_stored_file(version) - - def _get_exfiltrated_file(self, version): - eds = self._storage_service_factory.create_exfiltrated_data_service( - version["edsUrl"] - ) - token = eds.get_download_token( - version["eventId"], - version["deviceUid"], - version["filePath"], - version["fileSHA256"], - version["versionTimestamp"], - ) - return eds.get_file(str(token)) - - def _get_stored_file(self, version): - pds = self._storage_service_factory.create_preservation_data_service( - version["storageNodeURL"] - ) - token = pds.get_download_token( - version["archiveGuid"], - version["fileId"], - version["versionTimestamp"], - ) - return pds.get_file(str(token)) - - -def _parse_file_location_response(locations): - devices = {} - for location in locations: - file_name = location["fileName"] - file_path = f'{location["filePath"]}{file_name}' - device_id = location["deviceUid"] - device_entry = devices.get(device_id) - if device_entry: - devices[device_id]["paths"].append(file_path) - else: - devices[device_id] = {"deviceGuid": device_id, "paths": [file_path]} - - return [devices[key] for key in devices] - - -def _get_version_lookup_info(events): - for event in events: - device_guid = 
event["user"]["deviceUid"] - md5 = event["file"]["hash"]["md5"] - sha256 = event["file"]["hash"]["sha256"] - fileName = event["file"]["name"] - filePath = event["file"]["directory"] - - if device_guid and md5 and sha256 and fileName and filePath: - path = f"{filePath}{fileName}" - return device_guid, md5, sha256, path - - -def _get_first_matching_version(versions, md5_hash): - exact_match = next((x for x in versions if x["fileMD5"] == md5_hash), None) - if exact_match: - return exact_match diff --git a/src/py42/clients/settings/org_settings.py b/src/py42/clients/settings/org_settings.py deleted file mode 100644 index 407b39684..000000000 --- a/src/py42/clients/settings/org_settings.py +++ /dev/null @@ -1,365 +0,0 @@ -from collections import UserDict - -from py42.clients.settings import SettingProperty -from py42.clients.settings import TSettingProperty -from py42.clients.settings._converters import bool_to_str -from py42.clients.settings._converters import bytes_to_gb -from py42.clients.settings._converters import comma_separated_to_list -from py42.clients.settings._converters import gb_to_bytes -from py42.clients.settings._converters import str_to_bool -from py42.clients.settings._converters import to_comma_separated -from py42.clients.settings._converters import to_list -from py42.clients.settings.device_settings import DeviceSettingsDefaults - - -class OrgSettings(UserDict): - """Class used to manage an Organization's settings.""" - - def __init__(self, org_settings, t_settings): - self.data = org_settings - self._t_settings = t_settings - self._packets = {} - self.changes = {} - try: - self.device_defaults = DeviceSettingsDefaults( - self.data["deviceDefaults"], org_settings=self - ) - except KeyError: - self.device_defaults = None - - @property - def packets(self): - """The setting packets for any modifications to be posted to the /api/v1/OrgSettings - endpoint. - """ - return list(self._packets.values()) - - @property - def org_id(self): - """The identifier for the org.""" - return self.data["orgId"] - - @property - def registration_key(self): - """The registration key for the org.""" - return self.data["registrationKey"] - - org_name = SettingProperty("org_name", ["orgName"]) - """Name for this Org.""" - - external_reference = SettingProperty("external_reference", ["orgExtRef"]) - """External reference field for this Org.""" - - notes = SettingProperty("notes", ["notes"]) - """Notes field for this Org.""" - - quota_settings_inherited = SettingProperty( - "quota_settings_inherited", - ["settings", "isUsingQuotaDefaults"], - ) - """Determines if Org Quota settings (`maximum_user_subscriptions`, `org_backup_quota`, - `user_backup_quota`, `archive_hold_days`) are inherited from parent organization. - - Modifying one of the Org Quota attributes automatically sets this attribute to `False`. - """ - - archive_hold_days = SettingProperty( - "archive_hold_days", - ["settings", "archiveHoldDays"], - inheritance_attr="quota_settings_inherited", - ) - """Number of days backup archives are held in cold storage after deactivation or - destination removal from any devices in this Org. - """ - - maximum_user_subscriptions = SettingProperty( - "maximum_user_subscriptions", - ["settings", "maxSeats"], - inheritance_attr="quota_settings_inherited", - ) - """Number of users allowed to consume a license in this Org. 
Set to -1 for unlimited.""" - - org_backup_quota = SettingProperty( - "org_backup_quota", - ["settings", "maxBytes"], - get_converter=bytes_to_gb, - set_converter=gb_to_bytes, - inheritance_attr="quota_settings_inherited", - ) - """Backup storage quota (in GB) for this organization. Set to -1 for unlimited.""" - - user_backup_quota = SettingProperty( - "user_backup_quota", - ["settings", "defaultUserMaxBytes"], - get_converter=bytes_to_gb, - set_converter=gb_to_bytes, - inheritance_attr="quota_settings_inherited", - ) - """Backup storage quota (in GB) for each user in this organization. Set to -1 for - unlimited.""" - - web_restore_admin_limit = SettingProperty( - "web_restore_admin_limit", ["settings", "webRestoreAdminLimitMb"] - ) - """Limit (in MB) to amount of data restorable by admin users via web restore.""" - - web_restore_user_limit = SettingProperty( - "web_restore_user_limit", ["settings", "webRestoreUserLimitMb"] - ) - """Limit (in MB) to amount of data restorable by non-admin users via web restore.""" - - reporting_settings_inherited = SettingProperty( - "reporting_settings_inherited", - ["settings", "isUsingReportingDefaults"], - ) - """Determines if Org Reporting settings (`backup_warning_email_days`, - `backup_critical_email_days', `backup_alert_recipient_emails`) are inherited from - parent organization. - - Modifying one of the Org Reporting attributes automatically sets this attribute to - `False`. - """ - - backup_warning_email_days = SettingProperty( - "backup_warning_email_days", - ["settings", "warnInDays"], - inheritance_attr="reporting_settings_inherited", - ) - """The number of days devices in this org can go without any backup before "warning" - alerts get sent to org admins. - """ - - backup_critical_email_days = SettingProperty( - "backup_critical_email_days", - ["settings", "alertInDays"], - inheritance_attr="reporting_settings_inherited", - ) - """The number of days devices in this org can go without any backup before "critical" - alerts get sent to org admins. - """ - - backup_alert_recipient_emails = SettingProperty( - "backup_alert_recipient_emails", - ["settings", "recipients"], - set_converter=to_list, - inheritance_attr="reporting_settings_inherited", - ) - """List of email addresses that organization backup alert emails get sent to (org - admin users get these automatically). 
- """ - - _endpoint_monitoring_enabled = TSettingProperty( - "endpoint_monitoring_enabled", - "org-securityTools-enable", - get_converter=str_to_bool, - set_converter=bool_to_str, - ) - _aed_enabled = TSettingProperty( - "aed_enabled", - "device_advancedExfiltrationDetection_enabled", - get_converter=str_to_bool, - set_converter=bool_to_str, - ) - _removable_media_enabled = TSettingProperty( - "removable_media_enabled", - "org-securityTools-device-detection-enable", - get_converter=str_to_bool, - set_converter=bool_to_str, - ) - _cloud_sync_enabled = TSettingProperty( - "cloud_sync_enabled", - "org-securityTools-cloud-detection-enable", - get_converter=str_to_bool, - set_converter=bool_to_str, - ) - _browser_and_applications_enabled = TSettingProperty( - "browser_and_applications_enabled", - "org-securityTools-open-file-detection-enable", - get_converter=str_to_bool, - set_converter=bool_to_str, - ) - _file_metadata_collection_enabled = TSettingProperty( - "file_metadata_collection_enabled", - "device_fileForensics_enabled", - get_converter=str_to_bool, - set_converter=bool_to_str, - ) - _printer_detection_enabled = TSettingProperty( - "printer_detection_enabled", - "org_securityTools_printer_detection_enable", - get_converter=str_to_bool, - set_converter=bool_to_str, - ) - - @property - def endpoint_monitoring_enabled(self): - """Determines if endpoint monitoring settings are enabled for this org. - - Disabling this property also disables "removable media", "cloud sync", - "browser and application monitoring" and "printer detection" properties. - """ - return self._endpoint_monitoring_enabled - - @endpoint_monitoring_enabled.setter - def endpoint_monitoring_enabled(self, val): - self._endpoint_monitoring_enabled = val - self._aed_enabled = val - if not val: - self._cloud_sync_enabled = val - self._browser_and_applications_enabled = val - self._removable_media_enabled = val - self._printer_detection_enabled = val - - @property - def endpoint_monitoring_removable_media_enabled(self): - """Determines if removable media endpoint monitoring event capturing is enabled - for this org. - """ - return self._removable_media_enabled - - @endpoint_monitoring_removable_media_enabled.setter - def endpoint_monitoring_removable_media_enabled(self, value): - if value: - self.endpoint_monitoring_enabled = value - self._removable_media_enabled = value - - @property - def endpoint_monitoring_cloud_sync_enabled(self): - """Determines if cloud sync endpoint monitoring event capturing is enabled - for this org. - """ - return self._cloud_sync_enabled - - @endpoint_monitoring_cloud_sync_enabled.setter - def endpoint_monitoring_cloud_sync_enabled(self, value): - if value: - self.endpoint_monitoring_enabled = value - self._cloud_sync_enabled = value - - @property - def endpoint_monitoring_browser_and_applications_enabled(self): - """Determines if browser and other application activity endpoint monitoring - event capturing is enabled for this org. - """ - return self._browser_and_applications_enabled - - @endpoint_monitoring_browser_and_applications_enabled.setter - def endpoint_monitoring_browser_and_applications_enabled(self, value): - if value: - self.endpoint_monitoring_enabled = value - self._browser_and_applications_enabled = value - - @property - def endpoint_monitoring_printer_detection_enabled(self): - """Determines if printer endpoint monitoring event capturing is enabled for this - org. 
- """ - return self._printer_detection_enabled - - @endpoint_monitoring_printer_detection_enabled.setter - def endpoint_monitoring_printer_detection_enabled(self, value): - if value: - self.endpoint_monitoring_enabled = value - self._printer_detection_enabled = value - - @property - def endpoint_monitoring_file_metadata_collection_enabled(self): - """Determines if file metadata collection is enabled for this org.""" - return self._file_metadata_collection_enabled - - @endpoint_monitoring_file_metadata_collection_enabled.setter - def endpoint_monitoring_file_metadata_collection_enabled(self, value): - if value: - self.endpoint_monitoring_enabled = value - self._file_metadata_collection_enabled = value - - endpoint_monitoring_file_metadata_scan_enabled = TSettingProperty( - "file_metadata_scan_enabled", - "device_fileForensics_scan_enabled", - get_converter=str_to_bool, - set_converter=bool_to_str, - ) - """Determines if file metadata collection regular full scans are enabled for this - org. - """ - - endpoint_monitoring_file_metadata_ingest_scan_enabled = TSettingProperty( - "file_metadata_ingest_scan_enabled", - "device_fileForensics_enqueue_scan_events_during_ingest", - get_converter=str_to_bool, - set_converter=bool_to_str, - ) - """Determines if file metadata collection does an initial full scan when first - enabled on devices. - """ - - endpoint_monitoring_background_priority_enabled = TSettingProperty( - "background_priority_enabled", - "device_background_priority_enabled", - get_converter=str_to_bool, - set_converter=bool_to_str, - ) - """Determines if devices in this org have reduced priority in some IO bound tasks. - If enabled, devices may see improved general device performance at the expense of - some Code42 backup/security tasks taking longer. - """ - - endpoint_monitoring_custom_applications_win = TSettingProperty( - "custom_monitored_applications_win", - "device_org_winAppActivity_binaryWhitelist", - get_converter=comma_separated_to_list, - set_converter=to_comma_separated, - ) - """List of additional applications the Code42 client monitors for file exfiltration - activity. - - See `Support Documentation `__ - for more details. - """ - - endpoint_monitoring_custom_applications_mac = TSettingProperty( - "custom_monitored_applications_mac", - "device_org_macAppActivity_binaryWhitelist", - get_converter=comma_separated_to_list, - set_converter=to_comma_separated, - ) - """List of additional applications the Code42 client monitors for file exfiltration - activity. - - See `Support Documentation `__ - for more details. - """ - - endpoint_monitoring_file_metadata_collection_exclusions = TSettingProperty( - "file_metadata_collection_exclusions", - "device_fileForensics_fileExclusions_org", - ) - """File types and file paths to exclude from file metadata collection. - - See `Support Documentation `__ - for more details on the shape of the body this setting expects. - """ - - endpoint_monitoring_file_exfiltration_detection_exclusions = TSettingProperty( - "file_exfiltration_detection_exclusions", - "org_securityTools_detection_monitoring_exclusions", - ) - """File types and file paths to exclude from file exfiltration detection. - - See `Support Documentation `__ - for more details on the shape of the body this setting expects. 
- """ - - web_restore_enabled = TSettingProperty( - "web_restore_enabled", - "device_webRestore_enabled", - get_converter=str_to_bool, - set_converter=bool_to_str, - ) - """Determines if web restores are enabled for devices in this org.""" - - def __repr__(self): - return f"" - - def __str__(self): - return str(self.data) diff --git a/src/py42/clients/trustedactivities.py b/src/py42/clients/trustedactivities.py deleted file mode 100644 index 5a5dc3982..000000000 --- a/src/py42/clients/trustedactivities.py +++ /dev/null @@ -1,90 +0,0 @@ -from py42.choices import Choices - - -class TrustedActivityType(Choices): - """Constants available for setting the type of a trusted activity. - - * ``DOMAIN`` - * ``SLACK`` - """ - - DOMAIN = "DOMAIN" - SLACK = "SLACK" - - -class TrustedActivitiesClient: - """A client to expose the trusted activities/data preferences API - - `Rest documentation `__ - """ - - def __init__(self, trusted_activities_service): - self._trusted_activities_service = trusted_activities_service - - def get_all(self, type=None, page_size=None): - """Gets all trusted activities. - `Rest documentation `__ - - Args: - type (str, optional): Type of the trusted activity. Defaults to None. Constants available at :class:`py42.constants.TrustedActivityType`. - page_size (int, optional): Number of results to return per page. Defaults to 100. - - Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects - that each contain a page of cases. - """ - return self._trusted_activities_service.get_all(type, page_size) - - def create(self, type, value, description=None): - """Gets all trusted activities with the given type. - `Rest documentation `__ - - Args: - type (str): Type of the trusted activity. Constants available at :class:`py42.constants.TrustedActivityType`. - value (str): The URL of the domain or name of the Slack workspace. - description (str, optional): Description of the trusted activity. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._trusted_activities_service.create(type, value, description) - - def get(self, id): - """Retrieve trusted activity details by given resource number. - `Rest documentation `__ - - Args: - id (int): Resource number of the trusted activity or domain. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._trusted_activities_service.get(id) - - def update(self, id, value=None, description=None): - """Updates trusted activity details by given resource number. - `Rest documentation `__ - - Args: - id (int): Resource number of the trusted activity. - value (str, optional): The URL of the domain or name of the Slack workspace. - description (str, optional): Description of the trusted activity. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._trusted_activities_service.update( - id=id, value=value, description=description - ) - - def delete(self, id): - """Deletes a trusted activity by given resource number. - `Rest documentation `__ - - Args: - id (int): Resource number of the trusted activity or domain. 
- - Returns: - :class:`py42.response.Py42Response` - """ - return self._trusted_activities_service.delete(id) diff --git a/src/py42/clients/userriskprofile.py b/src/py42/clients/userriskprofile.py deleted file mode 100644 index dc7d5b34d..000000000 --- a/src/py42/clients/userriskprofile.py +++ /dev/null @@ -1,183 +0,0 @@ -from py42.exceptions import Py42Error -from py42.exceptions import Py42NotFoundError - - -class UserRiskProfileClient: - """A client to expose the user risk profile API. - - `Rest Documentation `__ - """ - - def __init__(self, user_risk_profile_service, user_service): - self._user_risk_profile_service = user_risk_profile_service - self._user_service = user_service - - def get_by_id(self, user_id): - """Get a user risk profile by a user UID. - - Args: - user_id (str): A unique user UID. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._user_risk_profile_service.get_by_id(user_id) - - def get_by_username(self, username): - """Get a user risk profile by username. - - Args: - username (str): A username. - - Returns: - :class:`py42.response.Py42Response` - """ - user_response = self._user_service.get_by_username(username) - if len(user_response.data["users"]) == 0: - err = Py42Error() - err.response = user_response - raise Py42NotFoundError(err, message=f"Username '{username}' not found.") - user_id = user_response.data["users"][0]["userUid"] - return self.get_by_id(user_id) - - def update(self, user_id, start_date=None, end_date=None, notes=None): - """Update a user risk profile. - - For each arg, if None is provided, the value will not be updated. Pass an empty string if you want to clear that value from the profile. - - Args: - user_id (str): The UID of the user to update. - start_date (str or datetime, optional): The start date of the user risk profile to be updated. Expects format of 'YYYY-MM-DD' or instance of datetime. Defaults to None. - end_date (str or datetime, optional): The departure date of the user risk profile to be updated. Expects format of 'YYYY-MM-DD' or instance of datetime. Defaults to None. - notes (str, optional): The notes field of the user risk profile to be updated. Defaults to None - - Returns: - :class:`py42.response.Py42Response` - """ - return self._user_risk_profile_service.update( - user_id, start_date, end_date, notes - ) - - def get_page( - self, - page_num=1, - page_size=None, - manager_id=None, - title=None, - division=None, - department=None, - employment_type=None, - country=None, - region=None, - locality=None, - active=None, - deleted=None, - support_user=None, - ): - """Get a page of user risk profiles. - - Args: - page_num (integer, optional): The desired page of user risk profile results to retrieve. Defaults to None - page_size (integer, optional): The desired number of results per page. Defaults to None - manager_id (str, optional): Matches users whose manager has the given Code42 user UID. Defaults to None - title (str, optional): Matches users with the given job title. Defaults to None - division (str, optional): Matches users in the given division. Defaults to None - department (str, optional): Matches users in the given department. Defaults to None - employment_type (str, optional): Matches users with the given employment type. Defaults to None - country (str, optional): Matches users in the given country. Defaults to None - region (str, optional): Matches users the given region (state). Defaults to None - locality (str, optional): Matches users in the given locality (city). 
Defaults to None - active (boolean, optional): Matches users by whether the user is active. Defaults to None - deleted (boolean, optional): Matches users by whether the user is deleted. Defaults to None - support_user (boolean, optional): Matches users by whether the user is a support user. Defaults to None - - Returns: - :class:`py42.response.Py42Response` - """ - return self._user_risk_profile_service.get_page( - page_num, - page_size, - manager_id, - title, - division, - department, - employment_type, - country, - region, - locality, - active, - deleted, - support_user, - ) - - def get_all( - self, - manager_id=None, - title=None, - division=None, - department=None, - employment_type=None, - country=None, - region=None, - locality=None, - active=None, - deleted=None, - support_user=None, - ): - """Get all user risk profiles. - - Args: - manager_id (str, optional): Matches users whose manager has the given Code42 user UID. Defaults to None - title (str, optional): Matches users with the given job title. Defaults to None - division (str, optional): Matches users in the given division. Defaults to None - department (str, optional): Matches users in the given department. Defaults to None - employment_type (str, optional): Matches users with the given employment type. Defaults to None - country (str, optional): Matches users in the given country. Defaults to None - region (str, optional): Matches users the given region (state). Defaults to None - locality (str, optional): Matches users in the given locality (city). Defaults to None - active (boolean, optional): Matches users by whether the user is active. Defaults to None - deleted (boolean, optional): Matches users by whether the user is deleted. Defaults to None - support_user (boolean, optional): Matches users by whether the user is a support user. Defaults to None - - Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects that each contain a page of user risk profiles. - """ - return self._user_risk_profile_service.get_all( - manager_id, - title, - division, - department, - employment_type, - country, - region, - locality, - active, - deleted, - support_user, - ) - - def add_cloud_aliases(self, user_id, cloud_alias): - """Add cloud aliases to a user risk profile. - - Args: - user_id (str): The user UID. - cloud_alias (str or list(str)): The alias(es) to add to the user risk profile. Each user starts with a default alias of their code42 username and can have one additional cloud alias. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._user_risk_profile_service.add_cloud_aliases(user_id, cloud_alias) - - def delete_cloud_aliases(self, user_id, cloud_aliases): - """Delete cloud aliases from a user risk profile. - - Args: - user_id (str): The user UID. - cloud_aliases (str or list(str)): The alias(es) to remove from the user risk profile. Each user starts with a default alias of their code42 username and can have one additional cloud alias. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._user_risk_profile_service.delete_cloud_aliases( - user_id, cloud_aliases - ) diff --git a/src/py42/clients/watchlists.py b/src/py42/clients/watchlists.py deleted file mode 100644 index 7af7f741c..000000000 --- a/src/py42/clients/watchlists.py +++ /dev/null @@ -1,183 +0,0 @@ -from py42.choices import Choices - - -class WatchlistType(Choices): - """Constants available for setting the type of watchlist. 
- - * ``CONTRACT_EMPLOYEE`` - * ``DEPARTING_EMPLOYEE`` - * ``ELEVATED_ACCESS_PRIVILEGES`` - * ``FLIGHT_RISK`` - * ``HIGH_IMPACT_EMPLOYEE`` - * ``NEW_EMPLOYEE`` - * ``PERFORMANCE_CONCERNS`` - * ``POOR_SECURITY_PRACTICES`` - * ``SUSPICIOUS_SYSTEM_ACTIVITY`` - * ``CUSTOM`` - """ - - CONTRACTOR = "CONTRACT_EMPLOYEE" - DEPARTING = "DEPARTING_EMPLOYEE" - ELEVATED_ACCESS = "ELEVATED_ACCESS_PRIVILEGES" - FLIGHT_RISK = "FLIGHT_RISK" - HIGH_IMPACT = "HIGH_IMPACT_EMPLOYEE" - NEW_HIRE = "NEW_EMPLOYEE" - PERFORMANCE_CONCERNS = "PERFORMANCE_CONCERNS" - POOR_SECURITY_PRACTICES = "POOR_SECURITY_PRACTICES" - SUSPICIOUS_SYSTEM_ACTIVITY = "SUSPICIOUS_SYSTEM_ACTIVITY" - CUSTOM = "CUSTOM" - - -class WatchlistsClient: - """A client to expose the watchlists API. - - `Rest Documentation `__ - """ - - def __init__(self, watchlists_service): - self._watchlists_service = watchlists_service - - def get(self, watchlist_id): - """Get a watchlist. - - Args: - watchlist_id (str): A unique watchlist ID. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._watchlists_service.get(watchlist_id) - - def delete(self, watchlist_id): - """Delete a watchlist. - - Args: - watchlist_id (str): A unique watchlist ID. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._watchlists_service.delete(watchlist_id) - - def get_all(self): - """Get all watchlists. - - Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects that each contain a page of watchlists. - """ - return self._watchlists_service.get_all() - - def create(self, watchlist_type, title=None, description=None): - """Create a new watchlist. - - Args: - watchlist_type (str): Type of watchlist. Constants available at :class:`py42.constants.WatchlistType`. - title (str, optional): Name of watchlist (for `CUSTOM` watchlists only). - description (str, optional): Description of watchlist (for `CUSTOM` watchlists only). - - Returns: - :class:`py42.response.Py42Response` - """ - if watchlist_type == "CUSTOM" and not title: - raise ValueError("`title` value is required for custom watchlists.") - return self._watchlists_service.create( - watchlist_type, title=title, description=description - ) - - def get_all_included_users(self, watchlist_id): - """Get all users explicitly included on a watchlist. - - Args: - watchlist_id (str): A unique watchlist ID. - - Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects that each contain a page of included users that match the given query. - """ - return self._watchlists_service.get_all_included_users(watchlist_id) - - def add_included_users_by_watchlist_id(self, user_ids, watchlist_id): - """Explicitly include users on a watchlist. - - Args: - user_ids (list(str): A list of user IDs to add to the watchlist - watchlist_id (str): A unique watchlist ID. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._watchlists_service.add_included_users_by_watchlist_id( - user_ids, watchlist_id - ) - - def add_included_users_by_watchlist_type(self, user_ids, watchlist_type): - """Explicitly include users on a watchlist. - - Args: - user_ids (list(str): A list of user IDs to add to the watchlist - watchlist_type (str): Type of watchlist. Constants available at :class:`py42.constants.WatchlistType`. - - Returns: - :class:`py42.response.Py42Response` - """ - if watchlist_type == "CUSTOM": - raise ValueError( - "Users can only be added to CUSTOM watchlists by watchlist ID." 
- ) - return self._watchlists_service.add_included_users_by_watchlist_type( - user_ids, watchlist_type - ) - - def remove_included_users_by_watchlist_id(self, user_ids, watchlist_id): - """Remove users that are explicitly included on a watchlist. - - Args: - user_ids (list(str): A list of user IDs to remove from the watchlist - watchlist_id (str): A unique watchlist ID. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._watchlists_service.delete_included_users_by_watchlist_id( - user_ids, watchlist_id - ) - - def remove_included_users_by_watchlist_type(self, user_ids, watchlist_type): - """Remove users that are explicitly included on a watchlist. - - Args: - user_ids (list(str): A list of user IDs to remove from the watchlist - watchlist_type (str): Type of watchlist. Constants available at :class:`py42.constants.WatchlistType`. - - Returns: - :class:`py42.response.Py42Response` - """ - if watchlist_type == "CUSTOM": - raise ValueError( - "Users can only be removed from CUSTOM watchlists by watchlist ID." - ) - return self._watchlists_service.delete_included_users_by_watchlist_type( - user_ids, watchlist_type - ) - - def get_all_watchlist_members(self, watchlist_id): - """Get all members of a watchlist. - - Args: - watchlist_id (str): A unique watchlist ID. - - Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects that each contain a page of watchlist members. - """ - return self._watchlists_service.get_all_watchlist_members(watchlist_id) - - def get_watchlist_member(self, watchlist_id, user_id): - """Get a member of a watchlist. - - Args: - watchlist_id (str): A unique watchlist ID. - user_id (str): A unique user ID. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._watchlists_service.get_watchlist_member(watchlist_id, user_id) diff --git a/src/py42/constants/__init__.py b/src/py42/constants/__init__.py deleted file mode 100644 index 6394aa174..000000000 --- a/src/py42/constants/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -from py42.choices import Choices -from py42.clients.cases import CaseStatus -from py42.clients.trustedactivities import TrustedActivityType -from py42.clients.watchlists import WatchlistType - - -class SortDirection(Choices): - """Constants available to set Code42 request `sort_direction` when sorting returned lists in responses. 
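
Tying the watchlist methods above together, a short sketch of the two inclusion paths the docstrings distinguish: built-in watchlists can be targeted by type, while `CUSTOM` watchlists need a title at creation and are addressed by ID. The user UIDs, title, and the `watchlistId` response field are assumptions for illustration.

```python
from py42.constants import WatchlistType

# Sketch only: `sdk` is assumed to be an initialized SDK client; user UIDs are placeholders.
user_uids = ["912338976", "912338977"]

# Built-in watchlist types can be targeted directly by type...
sdk.watchlists.add_included_users_by_watchlist_type(user_uids, WatchlistType.DEPARTING)

# ...while CUSTOM watchlists require a title and are addressed by watchlist ID.
response = sdk.watchlists.create(
    WatchlistType.CUSTOM, title="Contractors - Q3", description="Short-term staff"
)
watchlist_id = response.data["watchlistId"]  # assumed response field name
sdk.watchlists.add_included_users_by_watchlist_id(user_uids, watchlist_id)
```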
- - * ``ASC`` - * ``DESC`` - """ - - DESC = "DESC" - ASC = "ASC" diff --git a/src/py42/exceptions.py b/src/py42/exceptions.py deleted file mode 100644 index d8fdcecca..000000000 --- a/src/py42/exceptions.py +++ /dev/null @@ -1,639 +0,0 @@ -from py42.settings import debug - - -class Py42Error(Exception): - """A generic, Py42 custom base exception.""" - - -class Py42ResponseError(Py42Error): - """A base custom class to manage all errors raised because of an HTTP response.""" - - def __init__(self, response, message, *args): - super().__init__(message, *args) - self._response = response - - @property - def response(self): - """The response prior to the error.""" - return self._response - - -class Py42ArchiveFileNotFoundError(Py42ResponseError): - """An exception raised when a resource file is not found or the path is invalid.""" - - def __init__(self, response, device_guid, file_path): - message = ( - f"File not found in archive for device {device_guid} at path {file_path}" - ) - super().__init__(response, message, device_guid, file_path) - self._device_guid = device_guid - self._file_path = file_path - - @property - def device_guid(self): - """The device GUID provided.""" - return self._device_guid - - @property - def file_path(self): - """The file path provided.""" - return self._file_path - - -class Py42ChecksumNotFoundError(Py42ResponseError): - """An exception raised when a user-supplied hash could not successfully locate its corresponding resource.""" - - def __init__(self, response, checksum_name, checksum_value): - message = f"No files found with {checksum_name} checksum {checksum_value}." - super().__init__(response, message, checksum_name, checksum_value) - self._checksum_name = checksum_name - self._checksum_value = checksum_value - - @property - def checksum_name(self): - """The checksum name.""" - return self._checksum_name - - @property - def checksum_value(self): - """The checksum value.""" - return self.checksum_value - - -class Py42FeatureUnavailableError(Py42ResponseError): - """An exception raised when a requested feature is not supported in your Code42 environment.""" - - def __init__(self, response): - super().__init__( - response, - "You may be trying to use a feature that is unavailable in your environment.", - ) - - -class Py42HTTPError(Py42ResponseError): - """A base custom class to manage all HTTP errors raised by an API endpoint.""" - - def __init__(self, exception, message=None, *args): - if not message: - response_content = f"Response content: {exception.response.text}" - message = f"Failure in HTTP call {exception}. {response_content}" - debug.logger.debug(message) - - super().__init__(exception.response, message, *args) - - -class Py42DeviceNotConnectedError(Py42ResponseError): - """An exception raised when trying to push a restore to a device that is not - connected to an Authority server.""" - - def __init__(self, response, device_guid): - message = ( - f"Device with GUID '{device_guid}' is not currently connected to the Authority " - "server." - ) - super().__init__(response, message, device_guid) - self._device_guid = device_guid - - @property - def device_guid(self): - """The device GUID.""" - return self._device_guid - - -class Py42InvalidArchivePassword(Py42HTTPError): - """An exception raised when the password for unlocking an archive is invalid.""" - - def __init__(self, exception): - message = "Invalid archive password." 
- super().__init__(exception, message) - - -class Py42InvalidArchiveEncryptionKey(Py42HTTPError): - """An exception raised the encryption key for an archive is invalid.""" - - def __init__(self, exception): - message = "Invalid archive encryption key." - super().__init__(exception, message) - - -class Py42StorageSessionInitializationError(Py42HTTPError): - """An exception raised when the user is not authorized to initialize a storage session. This - may occur when trying to restore a file or trying to get events for file activity on removable - media, in cloud sync folders, and browser uploads.""" - - def __init__(self, exception, message): - super().__init__(exception, message) - - -class Py42SessionInitializationError(Py42Error): - """An exception raised when a user connection is invalid. A connection might be invalid due to - connection timeout, invalid token, etc. - """ - - def __init__(self, exception): - message = ( - "An error occurred while requesting " - f"server environment information, caused by {exception}" - ) - super().__init__(exception, message) - - -class Py42BadRequestError(Py42HTTPError): - """A wrapper to represent an HTTP 400 error.""" - - -class Py42UnauthorizedError(Py42HTTPError): - """A wrapper to represent an HTTP 401 error.""" - - -class Py42ForbiddenError(Py42HTTPError): - """A wrapper to represent an HTTP 403 error.""" - - -class Py42NotFoundError(Py42HTTPError): - """A wrapper to represent an HTTP 404 error.""" - - -class Py42ConflictError(Py42HTTPError): - """A wrapper to represent an HTTP 409 error.""" - - -class Py42InternalServerError(Py42HTTPError): - """A wrapper to represent an HTTP 500 error.""" - - -class Py42TooManyRequestsError(Py42HTTPError): - """A wrapper to represent an HTTP 429 error.""" - - -class Py42OrgNotFoundError(Py42BadRequestError): - """An exception raised when a 400 HTTP error message indicates that an - organization was not found.""" - - def __init__(self, exception, org_uid): - msg = f"The organization with UID '{org_uid}' was not found." - super().__init__(exception, msg, org_uid) - self._org_uid = org_uid - - @property - def org_uid(self): - """ " The org UID.""" - return self._org_uid - - -class Py42ActiveLegalHoldError(Py42BadRequestError): - """An exception raised when attempting to deactivate a user or device that is in an - active legal hold.""" - - def __init__(self, exception, resource, resource_id): - msg = f"Cannot deactivate the {resource} with ID {resource_id} as the {resource} is involved in a legal hold matter." - super().__init__(exception, msg, resource, resource_id) - self._resource = resource - self._resource_id = resource_id - - @property - def resource(self): - """The user or device resource.""" - return self._resource - - @property - def resource_id(self): - """The resource ID.""" - return self._resource_id - - -class Py42UserAlreadyAddedError(Py42BadRequestError): - """An exception raised when the user is already added to group or list, such as the - Departing Employee list.""" - - def __init__(self, exception, user_id, list_name): - msg = f"User with ID {user_id} is already on the {list_name}." 
- super().__init__(exception, msg, user_id, list_name) - self._user_id = user_id - - @property - def user_id(self): - """The user ID.""" - return self._user_id - - -class Py42LegalHoldNotFoundOrPermissionDeniedError(Py42ForbiddenError): - """An exception raised when a legal hold matter is inaccessible from your account or - the matter UID is not valid.""" - - def __init__(self, exception, resource_uid, legal_hold_resource="matter"): - message = f"{legal_hold_resource.capitalize()} with UID '{resource_uid}' can not be found. Your account may not have permission to view the {legal_hold_resource.lower()}." - super().__init__(exception, message, resource_uid) - self._resource_uid = resource_uid - - @property - def uid(self): - """The UID of the legal hold resource.""" - return self._resource_uid - - -class Py42LegalHoldCriteriaMissingError(Py42BadRequestError): - """An exception raised when a bad request was made to a Legal Hold endpoint.""" - - def __init__(self, exception): - super().__init__( - exception, - "At least one criteria must be specified: legal_hold_membership_uid, " - "legal_hold_matter_uid, user_uid, or user.", - ) - - -class Py42LegalHoldAlreadyDeactivatedError(Py42BadRequestError): - """An exception raised when trying to deactivate a Legal Hold Matter that is already inactive.""" - - def __init__(self, exception, legal_hold_matter_uid): - message = f"Legal Hold Matter with UID '{legal_hold_matter_uid}' has already been deactivated." - super().__init__(exception, message, legal_hold_matter_uid) - self._legal_hold_matter_uid = legal_hold_matter_uid - - @property - def legal_hold_matter_uid(self): - """The legal hold matter UID.""" - return self._legal_hold_matter_uid - - -class Py42LegalHoldAlreadyActiveError(Py42BadRequestError): - """An exception raised when trying to activate a Legal Hold Matter that is already active.""" - - def __init__(self, exception, legal_hold_matter_uid): - message = ( - f"Legal Hold Matter with UID '{legal_hold_matter_uid}' is already active." - ) - super().__init__(exception, message, legal_hold_matter_uid) - self._legal_hold_matter_uid = legal_hold_matter_uid - - @property - def legal_hold_matter_uid(self): - """The legal hold matter UID.""" - return self._legal_hold_matter_uid - - -class Py42InvalidRuleOperationError(Py42HTTPError): - """An exception raised when trying to add or remove users to a system rule.""" - - def __init__(self, exception, rule_id, source): - msg = "Only alert rules with a source of 'Alerting' can be targeted by this command. " - msg += f"Rule {rule_id} has a source of '{source}'." - super().__init__(exception, msg, rule_id, source) - self._rule_id = rule_id - self._source = source - - @property - def rule_id(self): - """The rule ID.""" - return self._rule_id - - @property - def source(self): - """The rule source.""" - return self._source - - -class Py42MFARequiredError(Py42UnauthorizedError): - """Deprecated: An exception raised when a request requires multi-factor authentication""" - - def __init__(self, exception, message=None): - message = message or "User requires multi-factor authentication." - super().__init__(exception, message) - - -class Py42UserAlreadyExistsError(Py42InternalServerError): - """An exception raised when a user already exists""" - - def __init__(self, exception, message=None): - message = message or "User already exists." 
- super().__init__(exception, message) - - -class Py42UsernameMustBeEmailError(Py42InternalServerError): - """An exception raised when trying to set a non-email as a user's username - in a cloud environment.""" - - def __init__(self, exception): - message = "Username must be an email address." - super().__init__(exception, message) - - -class Py42InvalidEmailError(Py42InternalServerError): - """An exception raised when trying to set an invalid email as a user's email.""" - - def __init__(self, email, exception): - message = f"'{email}' is not a valid email." - super().__init__(exception, message, email) - self._email = email - - @property - def email(self): - """The email being assigned to a user.""" - return self._email - - -class Py42InvalidPasswordError(Py42InternalServerError): - """An exception raised when trying to set an invalid password as a user's password.""" - - def __init__(self, exception): - message = "Invalid password." - super().__init__(exception, message) - - -class Py42InvalidUsernameError(Py42InternalServerError): - """An exception raised when trying to set an invalid username as a user's username.""" - - def __init__(self, exception): - message = "Invalid username." - super().__init__(exception, message) - - -class Py42CloudAliasLimitExceededError(Py42BadRequestError): - """An Exception raised when trying to add a cloud alias to a user when that user - already has the max amount of supported cloud aliases.""" - - def __init__(self, exception, message=None): - message = ( - message - or "Cloud alias limit exceeded. A max of 2 cloud aliases are allowed." - ) - super(Py42BadRequestError, self).__init__(exception, message) - - -class Py42CloudAliasCharacterLimitExceededError(Py42Error): - """An exception raised when trying to add a cloud alias to a user that exceeds the max character limit.""" - - def __init__(self): - message = "Cloud alias character limit exceeded. Max 50 characters." - super().__init__(message) - - -class Py42BadRestoreRequestError(Py42BadRequestError): - """An error raised when the given restore arguments are not compatible and cause - a bad request.""" - - def __init__(self, exception): - message = "Unable to create restore session." - super().__init__(exception, message) - - -class Py42InvalidPageTokenError(Py42BadRequestError): - """An error raised when the page token given is invalid.""" - - def __init__(self, exception, page_token): - message = ( - f'Invalid page token: "{page_token}".\n' - "Page tokens match the last event ID received in a previous query. " - "Your page token may be invalid if the original query has changed " - "such that the corresponding event is being filtered out of the results, " - "or if the event has expired according to your data retention policy." - ) - super().__init__(exception, message, page_token) - self._page_token = page_token - - @property - def page_token(self): - """The page token.""" - return self._page_token - - -class Py42UserNotOnListError(Py42NotFoundError): - """An exception raised when the user is not on a detection list.""" - - def __init__(self, exception, user_id, list_name): - message = f"User with ID '{user_id}' is not currently on the {list_name} list." 
- super(Py42NotFoundError, self).__init__(exception, message, user_id, list_name) - self._user_id = user_id - self._list_name = list_name - - @property - def user_id(self): - """The user ID.""" - return self._user_id - - @property - def list_name(self): - """The list name.""" - return self._list_name - - -class Py42UnableToCreateProfileError(Py42BadRequestError): - """An error raised when trying to call the method for creating a detection-list - user when the user does not exist or is currently awaiting the profile to get - created on the back-end. Note: you are no longer able to create detection-list - profiles using the API; py42 only returns already existing profiles.""" - - def __init__(self, exception, username): - message = ( - "Detection-list profiles are now created automatically on the server. " - f"Unable to find a detection-list profile for '{username}'. " - "It is possibly still being created if you just recently created the " - "Code42 user." - ) - super().__init__(exception, message, username) - self._username = username - - @property - def username(self): - """The username of the user.""" - return self._username - - -class Py42InvalidRuleError(Py42NotFoundError): - """An exception raised when the observer rule ID does not exist.""" - - def __init__(self, exception, rule_id): - message = f"Invalid Observer Rule ID '{rule_id}'." - super(Py42NotFoundError, self).__init__(exception, message, rule_id) - self._rule_id = rule_id - - @property - def rule_id(self): - """The observer rule ID.""" - return self._rule_id - - -class Py42UpdateClosedCaseError(Py42BadRequestError): - """An error raised when trying to update a closed case.""" - - def __init__(self, exception): - msg = "Cannot update a closed case." - super().__init__(exception, msg) - - -class Py42CaseNameExistsError(Py42BadRequestError): - """An error raised when trying to create a case with a name that already exists.""" - - def __init__(self, exception, case_name): - msg = f"Case name '{case_name}' already exists, please set another name." - super().__init__(exception, msg, case_name) - self._case_name = case_name - - @property - def case_name(self): - """The case name.""" - return self._case_name - - -class Py42DescriptionLimitExceededError(Py42BadRequestError): - """An error raised when description of a case exceeds the allowed char length limit.""" - - def __init__(self, exception): - msg = "Description limit exceeded, max 250 characters allowed." - super().__init__(exception, msg) - - -class Py42InvalidCaseUserError(Py42BadRequestError): - """An error raised when a case subject or assignee is not a valid user.""" - - def __init__(self, exception, user_uid): - msg = f"The provided {user_uid} is not a valid user." - super().__init__(exception, msg) - self._user_uid = user_uid - - @property - def user(self): - """The user UID.""" - return self._user_uid - - -class Py42CaseAlreadyHasEventError(Py42BadRequestError): - """An error raised when event is already associated to the case.""" - - def __init__(self, exception): - msg = "Event is already associated to the case." - super().__init__(exception, msg) - - -class Py42TrustedActivityInvalidChangeError(Py42BadRequestError): - """An error raised when an invalid change is being made to a trusted activity.""" - - def __init__(self, exception): - msg = "Invalid change to trusted activity. Trusted activity type cannot be changed." 
- super().__init__(exception, msg) - - -class Py42TrustedActivityConflictError(Py42ConflictError): - """An error raised when theres a conflict with a trusted activity domain URL.""" - - def __init__(self, exception, value): - msg = ( - f"Duplicate URL or workspace name, '{value}' already exists on your trusted list. " - "Please provide a unique value" - ) - super().__init__(exception, msg, value) - self._value = value - - @property - def value(self): - """The domain, URL or workspace name.""" - return self._value - - -class Py42TrustedActivityInvalidCharacterError(Py42BadRequestError): - """An error raised when an invalid character is in a trusted activity value.""" - - def __init__(self, exception): - msg = "Invalid character in domain or Slack workspace name" - super().__init__(exception, msg) - - -class Py42TrustedActivityIdNotFound(Py42NotFoundError): - """An exception raised when the trusted activity ID does not exist.""" - - def __init__(self, exception, resource_id): - message = f"Resource ID '{resource_id}' not found." - super().__init__(exception, message, resource_id) - self._resource_id = resource_id - - @property - def resource_id(self): - """The resource ID.""" - return self._resource_id - - -class Py42WatchlistNotFound(Py42NotFoundError): - """An exception raised when the watchlist with the given ID was not found.""" - - def __init__(self, exception, resource_id): - message = f"Watchlist ID '{resource_id}' not found." - super().__init__(exception, message, resource_id) - self._watchlist_id = resource_id - - @property - def watchlist_id(self): - """The watchlist ID.""" - return self._watchlist_id - - -class Py42WatchlistOrUserNotFound(Py42NotFoundError): - """An exception raised when the watchlist ID or the User ID does not exist.""" - - def __init__(self, exception, watchlist_id, user_id): - message = f"Watchlist ID '{watchlist_id}' or User ID '{user_id}' not found." - super().__init__(exception, message, watchlist_id, user_id) - self._watchlist_id = watchlist_id - self._user_id = user_id - - @property - def watchlist_id(self): - """The watchlist ID.""" - return self._watchlist_id - - @property - def user_id(self): - """The user ID.""" - return self._user_id - - -class Py42InvalidWatchlistType(Py42BadRequestError): - """An exception raised when an invalid watchlist type is specified.""" - - def __init__(self, exception, watchlist_type): - message = f"'{watchlist_type}' cannot be converted to a valid watchlist type. Please look at the WatchlistType class for valid types." - super().__init__(exception, message, watchlist_type) - self._watchlist_type = watchlist_type - - @property - def watchlist_type(self): - """The specified watchlist type.""" - return self._watchlist_type - - -class Py42UserRiskProfileNotFound(Py42NotFoundError): - """An exception raised when the user with the given ID for a user risk profile was not found.""" - - def __init__(self, exception, user_id, identifier="ID"): - message = ( - f"User risk profile for user with the {identifier} '{user_id}' not found." - ) - super().__init__(exception, message, user_id) - self._user_id = user_id - - @property - def user(self): - """The user identifier.""" - return self._user_id - - -def raise_py42_error(raised_error): - """Raises the appropriate :class:`py42.exceptions.Py42HttpError` based on the given - HTTPError's response status code. 
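
Because every error in this module ultimately derives from `Py42HTTPError` (itself a `Py42ResponseError`), callers can handle failures at whatever granularity they need. A minimal, illustrative sketch, assuming an initialized `sdk` object and a placeholder watchlist ID:

```python
from py42.exceptions import Py42ForbiddenError, Py42HTTPError, Py42NotFoundError

# Sketch only: `sdk` is assumed to be an initialized SDK client; the ID is a placeholder.
try:
    watchlist = sdk.watchlists.get("hypothetical-watchlist-id")
except Py42NotFoundError as err:
    # Response-based errors expose the failed HTTP response via `err.response`.
    print(f"Not found: {err.response.status_code}")
except Py42ForbiddenError:
    print("The authenticated account lacks permission for this resource.")
except Py42HTTPError as err:
    # Catch-all for any other non-2xx response raised through this module.
    print(f"Request failed: {err}")
```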
- """ - if raised_error.response.status_code == 400: - raise Py42BadRequestError(raised_error) - elif raised_error.response.status_code == 401: - raise Py42UnauthorizedError(raised_error) - elif raised_error.response.status_code == 403: - raise Py42ForbiddenError(raised_error) - elif raised_error.response.status_code == 404: - raise Py42NotFoundError(raised_error) - elif raised_error.response.status_code == 409: - raise Py42ConflictError(raised_error) - elif raised_error.response.status_code == 429: - raise Py42TooManyRequestsError(raised_error) - elif 500 <= raised_error.response.status_code < 600: - raise Py42InternalServerError(raised_error) - else: - raise Py42HTTPError(raised_error) diff --git a/src/py42/sdk/__init__.py b/src/py42/sdk/__init__.py deleted file mode 100644 index 9f306c347..000000000 --- a/src/py42/sdk/__init__.py +++ /dev/null @@ -1,455 +0,0 @@ -import warnings - -from requests.auth import HTTPBasicAuth - -from py42.exceptions import Py42Error -from py42.exceptions import Py42UnauthorizedError -from py42.services._auth import ApiClientAuth -from py42.services._auth import BearerAuth -from py42.services._auth import CustomJWTAuth -from py42.services._connection import Connection -from py42.usercontext import UserContext - -warnings.simplefilter("always", DeprecationWarning) -warnings.simplefilter("always", UserWarning) - - -def from_api_client(host_address, client_id, secret): - """Creates a :class:`~py42.sdk.SDKClient` object for accessing the Code42 REST APIs using - an API client ID and secret. - - Args: - host_address (str): The domain name of the Code42 instance being authenticated to, e.g. - console.us.code42.com - client_id (str): The client ID of the API client to authenticate with. - secret (str): The secret of the API client to authenticate with. - - Returns: - :class:`py42.sdk.SDKClient` - """ - - return SDKClient.from_api_client(host_address, client_id, secret) - - -def from_local_account(host_address, username, password, totp=None): - """Creates a :class:`~py42.sdk.SDKClient` object for accessing the Code42 REST APIs using the - supplied credentials. This method supports only accounts created within the Code42 console or using the - APIs (including py42). Username/passwords that are based on Active Directory, - Okta, or other Identity providers cannot be used with this method. - - Args: - host_address (str): The domain name of the Code42 instance being authenticated to, e.g. - console.us.code42.com - username (str): The username of the authenticating account. - password (str): The password of the authenticating account. - totp (callable or str, optional): The time-based one-time password of the authenticating account. Include only - if the account uses Code42's two-factor authentication. Defaults to None. - - Returns: - :class:`py42.sdk.SDKClient` - """ - client = SDKClient.from_local_account(host_address, username, password, totp) - - # test credentials - try: - client.users.get_current() - except Py42UnauthorizedError as err: - login_type = client.loginconfig.get_for_user(username)["loginType"] - if login_type == "CLOUD_SSO": - raise Py42Error("SSO users are not supported in `from_local_account()`.") - msg = f"SDK initialization failed, double-check username/password, and provide two-factor TOTP token if Multi-Factor Auth configured for your user. 
User LoginConfig: {login_type}" - err.args = (msg,) - raise - return client - - -def from_jwt_provider(host_address, jwt_provider): - """Creates a :class:`~py42.sdk.SDKClient` object for accessing the Code42 REST APIs using a custom - auth mechanism. User can use any authentication mechanism like that returns a JSON Web token on authentication - which would then be used for all subsequent requests. - - Args: - host_address (str): The domain name of the Code42 instance being authenticated to, e.g. - console.us.code42.com - jwt_provider (function): A function that accepts no parameters and on execution returns a JSON web token string. - - Returns: - :class:`py42.sdk.SDKClient` - """ - - client = SDKClient.from_jwt_provider(host_address, jwt_provider) - client.usercontext.get_current_tenant_id() - return client - - -class SDKClient: - def __init__(self, main_connection, auth, auth_flag=None): - services, user_ctx = _init_services(main_connection, auth, auth_flag) - self._clients = _init_clients(services, main_connection) - self._user_ctx = user_ctx - self._auth_flag = auth_flag - - @classmethod - def from_api_client(cls, host_address, client_id, secret): - """Creates a :class:`~py42.sdk.SDKClient` object for accessing the Code42 REST APIs using - an API client ID and secret. - - Args: - host_address (str): The domain name of the Code42 instance being authenticated to, e.g. - console.us.code42.com - client_id (str): The client ID of the API client to authenticate with. - secret (str): The secret of the API client to authenticate with. - - Returns: - :class:`py42.sdk.SDKClient` - """ - - basic_auth = HTTPBasicAuth(client_id, secret) - auth_connection = Connection.from_host_address(host_address, auth=basic_auth) - api_client_auth = ApiClientAuth(auth_connection) - main_connection = Connection.from_host_address( - host_address, auth=api_client_auth - ) - api_client_auth.get_credentials() - return cls(main_connection, api_client_auth, auth_flag=1) - - @classmethod - def from_local_account(cls, host_address, username, password, totp=None): - """Creates a :class:`~py42.sdk.SDKClient` object for accessing the Code42 REST APIs using - the supplied credentials. This method supports only accounts created within the Code42 console or - using the APIs (including py42). Username/passwords that are based on Active - Directory, Okta, or other Identity providers should use the `from_jwt_provider` method. - - Args: - host_address (str): The domain name of the Code42 instance being authenticated to, e.g. - console.us.code42.com - username (str): The username of the authenticating account. - password (str): The password of the authenticating account. - totp (callable or str, optional): The time-based one-time password of the authenticating account. Include only - if the account uses Code42's two-factor authentication. Defaults to None. - Returns: - :class:`py42.sdk.SDKClient` - """ - basic_auth = None - if username and password: - basic_auth = HTTPBasicAuth(username, password) - auth_connection = Connection.from_host_address(host_address, auth=basic_auth) - bearer_auth = BearerAuth(auth_connection, totp) - main_connection = Connection.from_host_address(host_address, auth=bearer_auth) - - return cls(main_connection, bearer_auth) - - @classmethod - def from_jwt_provider(cls, host_address, jwt_provider): - """Creates a :class:`~py42.sdk.SDKClient` object for accessing the Code42 REST APIs using a custom - auth mechanism. 
User can use any authentication mechanism like that returns a JSON Web token - on authentication which would then be used for all subsequent requests. - - Args: - host_address (str): The domain name of the Code42 instance being authenticated to, e.g. - console.us.code42.com - jwt_provider (function): A function that accepts no parameters and on execution returns a - JSON web token string. - - Returns: - :class:`py42.sdk.SDKClient` - """ - custom_auth = CustomJWTAuth(jwt_provider) - main_connection = Connection.from_host_address(host_address, auth=custom_auth) - custom_auth.get_credentials() - return cls(main_connection, custom_auth) - - @property - def loginconfig(self): - """A collection of methods related to getting information about the login configuration - of user accounts. - - Returns: - :class:`py42.clients.loginconfig.LoginConfigurationClient.` - """ - return self._clients.loginconfig - - @property - def serveradmin(self): - """A collection of methods for getting server information for on-premise environments - and tenant information for cloud environments. - - Returns: - :class:`py42.services.administration.AdministrationService` - """ - return self._clients.authority.administration - - @property - def archive(self): - """A collection of methods for accessing Code42 storage archives. Useful for doing - web-restores or finding a file on an archive. - - Returns: - :class:`py42.clients.archive.ArchiveClient` - """ - return self._clients.archive - - @property - def users(self): - """A collection of methods for retrieving or updating data about users in the Code42 - environment. - - Returns: - :class:`py42.services.users.UserService` - """ - return self._clients.authority.users - - @property - def devices(self): - """A collection of methods for retrieving or updating data about devices in the Code42 - environment. - - Returns: - :class:`py42.services.devices.DeviceService` - """ - return self._clients.authority.devices - - @property - def orgs(self): - """A collection of methods for retrieving or updating data about organizations in the - Code42 environment. - - Returns: - :class:`py42.services.orgs.OrgService` - """ - return self._clients.authority.orgs - - @property - def legalhold(self): - """A collection of methods for retrieving and updating legal-hold matters, policies, and - custodians. - - Returns: - :class:`py42.services.legalhold.LegalHoldService` - """ - return self._clients.authority.legalhold - - @property - def usercontext(self): - """A collection of methods related to getting information about the currently logged in - user, such as the tenant ID. - - Returns: - :class:`py42.usercontext.UserContext` - """ - return self._user_ctx - - @property - def securitydata(self): - """A collection of methods and properties for getting security data such as: - * File events - * Security plan information - - Returns: - :class:`py42.clients.securitydata.SecurityDataClient` - """ - return self._clients.securitydata - - @property - def alerts(self): - """A collection of methods related to retrieving and updating alerts rules. - - Returns: - :class:`py42.clients.alertrules.AlertRulesClient` - """ - return self._clients.alerts - - @property - def auditlogs(self): - """A collection of methods for retrieving audit logs. - - Returns: - :class:`py42.clients.auditlogs.AuditLogsClient` - """ - return self._clients.auditlogs - - @property - def cases(self): - """A collection of methods and properties for managing cases and file events - associated with the case. 
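
The factory functions in this module (`from_api_client`, `from_local_account`, `from_jwt_provider`) all return an `SDKClient` whose properties hand back the individual feature clients. A minimal sketch of the API-client flow, with placeholder host and credentials:

```python
import py42.sdk

# Sketch only: host, client ID, and secret are placeholders; in practice they
# would come from your environment or a secrets manager.
sdk = py42.sdk.from_api_client(
    "console.us.code42.com",
    "key-xxxxxxxx",          # hypothetical API client ID
    "my-api-client-secret",  # hypothetical secret
)

# The returned SDKClient exposes the feature clients as properties, e.g.:
current_user = sdk.users.get_current()
tenant_id = sdk.usercontext.get_current_tenant_id()
watchlist_pages = sdk.watchlists.get_all()
```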
- - Returns: - :class:`py42.clients.cases.CaseClient` - """ - return self._clients.cases - - @property - def trustedactivities(self): - """A collection of methods and properties for managing trusted domains. - - Returns: - :class:`py42.clients.trustedactivities.TrustedActivitiesClient` - """ - return self._clients.trustedactivities - - @property - def userriskprofile(self): - """A collection of methods and properties for managing user risk profiles. - - Returns: - :class:`py42.clients.userriskprofile.UserRiskProfileClient` - """ - return self._clients.userriskprofile - - @property - def watchlists(self): - """A collection of methods and properties for managing watchlists. - - Returns: - :class:`py42.clients.watchlists.WatchlistsClient` - """ - return self._clients.watchlists - - -def _init_services(main_connection, main_auth, auth_flag=None): - # services are imported within function to prevent circular imports when a service - # imports anything from py42.sdk.queries - from py42.services import Services - from py42.services._keyvaluestore import KeyValueStoreService - from py42.services.administration import AdministrationService - from py42.services.alertrules import AlertRulesService - from py42.services.alerts import AlertService - from py42.services.archive import ArchiveService - from py42.services.auditlogs import AuditLogsService - from py42.services.cases import CasesService - from py42.services.casesfileevents import CasesFileEventsService - from py42.services.devices import DeviceService - from py42.services.fileevent import FileEventService - from py42.services.legalhold import LegalHoldService - from py42.services.legalholdapiclient import LegalHoldApiClientService - from py42.services.orgs import OrgService - from py42.services.preservationdata import PreservationDataService - from py42.services.savedsearch import SavedSearchService - from py42.services.trustedactivities import TrustedActivitiesService - from py42.services.users import UserService - from py42.services.watchlists import WatchlistsService - from py42.services.userriskprofile import UserRiskProfileService - - alert_rules_key = "FedObserver-API_URL" - alerts_key = "AlertService-API_URL" - file_events_key = "FORENSIC_SEARCH-API_URL" - preservation_data_key = "PRESERVATION-DATA-SERVICE_API-URL" - kv_prefix = "simple-key-value-store" - audit_logs_key = "AUDIT-LOG_API-URL" - cases_key = "CASES_API-URL" - trusted_activities_key = "TRUSTED-DOMAINS_API-URL" - watchlists_key = "watchlists-API_URL" - - kv_connection = Connection.from_microservice_prefix(main_connection, kv_prefix) - kv_service = KeyValueStoreService(kv_connection) - - alert_rules_conn = Connection.from_microservice_key( - kv_service, alert_rules_key, auth=main_auth - ) - alerts_conn = Connection.from_microservice_key( - kv_service, alerts_key, auth=main_auth - ) - file_events_conn = Connection.from_microservice_key( - kv_service, file_events_key, auth=main_auth - ) - pds_conn = Connection.from_microservice_key( - kv_service, preservation_data_key, auth=main_auth - ) - audit_logs_conn = Connection.from_microservice_key( - kv_service, audit_logs_key, auth=main_auth - ) - administration_svc = AdministrationService(main_connection) - file_event_svc = FileEventService(file_events_conn) - user_ctx = UserContext(administration_svc) - cases_conn = Connection.from_microservice_key(kv_service, cases_key, auth=main_auth) - trusted_activities_conn = Connection.from_microservice_key( - kv_service, trusted_activities_key, auth=main_auth - ) - watchlists_conn = 
Connection.from_microservice_key( - kv_service, watchlists_key, auth=main_auth - ) - user_risk_profile_svc = UserRiskProfileService(watchlists_conn) - - services = Services( - administration=administration_svc, - archive=ArchiveService(main_connection), - devices=DeviceService(main_connection), - # Only use updated legal hold client if initialized with API Client authorization - legalhold=LegalHoldApiClientService(main_connection) - if auth_flag - else LegalHoldService(main_connection), - orgs=OrgService(main_connection), - users=UserService(main_connection), - alertrules=AlertRulesService(alert_rules_conn, user_ctx, user_risk_profile_svc), - alerts=AlertService(alerts_conn, user_ctx), - fileevents=file_event_svc, - savedsearch=SavedSearchService(file_events_conn, file_event_svc), - preservationdata=PreservationDataService(pds_conn), - auditlogs=AuditLogsService(audit_logs_conn), - cases=CasesService(cases_conn), - casesfileevents=CasesFileEventsService(cases_conn), - trustedactivities=TrustedActivitiesService(trusted_activities_conn), - userriskprofile=user_risk_profile_svc, - watchlists=WatchlistsService(watchlists_conn), - ) - - return services, user_ctx - - -def _init_clients(services, connection): - # clients are imported within function to prevent circular imports when a client - # imports anything from py42.sdk.queries - from py42.clients import Clients - from py42.clients._archiveaccess.accessorfactory import ArchiveAccessorFactory - from py42.clients.alertrules import AlertRulesClient - from py42.clients.alerts import AlertsClient - from py42.clients.archive import ArchiveClient - from py42.clients.auditlogs import AuditLogsClient - from py42.clients.authority import AuthorityClient - from py42.clients.cases import CasesClient - from py42.clients.loginconfig import LoginConfigurationClient - from py42.clients.securitydata import SecurityDataClient - from py42.clients.trustedactivities import TrustedActivitiesClient - from py42.services.storage._service_factory import StorageServiceFactory - from py42.clients.userriskprofile import UserRiskProfileClient - from py42.clients.watchlists import WatchlistsClient - - authority = AuthorityClient( - administration=services.administration, - archive=services.archive, - devices=services.devices, - legalhold=services.legalhold, - orgs=services.orgs, - users=services.users, - ) - storage_service_factory = StorageServiceFactory(connection, services.devices) - alertrules = AlertRulesClient(services.alerts, services.alertrules) - securitydata = SecurityDataClient( - services.fileevents, - services.preservationdata, - services.savedsearch, - storage_service_factory, - ) - alerts = AlertsClient(services.alerts, alertrules) - archive_accessor_factory = ArchiveAccessorFactory( - services.archive, storage_service_factory - ) - archive = ArchiveClient(archive_accessor_factory, services.archive) - auditlogs = AuditLogsClient(services.auditlogs) - loginconfig = LoginConfigurationClient(connection) - trustedactivities = TrustedActivitiesClient(services.trustedactivities) - userriskprofile = UserRiskProfileClient(services.userriskprofile, services.users) - watchlists = WatchlistsClient(services.watchlists) - clients = Clients( - authority=authority, - alerts=alerts, - securitydata=securitydata, - archive=archive, - auditlogs=auditlogs, - cases=CasesClient(services.cases, services.casesfileevents), - loginconfig=loginconfig, - trustedactivities=trustedactivities, - userriskprofile=userriskprofile, - watchlists=watchlists, - ) - return clients diff --git 
a/src/py42/sdk/queries/__init__.py b/src/py42/sdk/queries/__init__.py deleted file mode 100644 index 8463d02ec..000000000 --- a/src/py42/sdk/queries/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -from py42 import settings -from py42.sdk.queries.query_filter import FilterGroup - - -class BaseQuery: - def __init__(self, *args, **kwargs): - self._filter_group_list = list(args) - self._group_clause = kwargs.get("group_clause", "AND") - self.page_number = kwargs.get("page_number") or 1 - self.page_size = kwargs.get("page_size") or settings.security_events_per_page - self.page_token = kwargs.get("page_token") or None - self.sort_direction = "asc" - - # Override - self.sort_key = None - - @classmethod - def from_dict(cls, _dict, group_clause="AND", **kwargs): - filter_groups = [FilterGroup.from_dict(item) for item in _dict["groups"]] - return cls(*filter_groups, group_clause=group_clause, **kwargs) - - @classmethod - def any(cls, *args): - return cls(*args, group_clause="OR") - - @classmethod - def all(cls, *args): - return cls(*args) diff --git a/src/py42/sdk/queries/alerts/alert_query.py b/src/py42/sdk/queries/alerts/alert_query.py deleted file mode 100644 index 567e5111f..000000000 --- a/src/py42/sdk/queries/alerts/alert_query.py +++ /dev/null @@ -1,51 +0,0 @@ -from py42.sdk.queries import BaseQuery - - -class AlertQuery(BaseQuery): - """Helper class for building Code42 Alert queries. - - An AlertQuery instance's ``all()`` and ``any()`` take one or more - :class:`~py42.sdk.queries.query_filter.FilterGroup` objects to construct a query that - can be passed to the :meth:`AlertService.search()` method. ``all()`` returns results - that match all of the provided filter criteria, ``any()`` will return results that - match any of the filters. - - For convenience, the :class:`AlertQuery` constructor does the same as ``all()``. 
- - Usage example:: - - state_filter = AlertState.eq(AlertState.OPEN) - rule_name_filter = RuleName.contains("EmailRule") - query = AlertQuery.all(state_filter, rule_name_filter) - """ - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.sort_key = "CreatedAt" - self.page_number = 0 - self.page_size = 500 - self.sort_direction = "desc" - - def __str__(self): - groups_string = ",".join( - str(group_item) for group_item in self._filter_group_list - ) - json = ( - f'{{"tenantId": null, "groupClause":"{self._group_clause}", "groups":[{groups_string}], "pgNum":{self.page_number}, ' - f'"pgSize":{self.page_size}, "srtDirection":"{self.sort_direction}", "srtKey":"{self.sort_key}"}}' - ) - return json - - def __iter__(self): - filter_group_list = [dict(item) for item in self._filter_group_list] - output_dict = { - "tenantId": None, - "groupClause": self._group_clause, - "groups": filter_group_list, - "pgNum": self.page_number, - "pgSize": self.page_size, - "srtDirection": self.sort_direction, - "srtKey": self.sort_key, - } - for key in output_dict: - yield key, output_dict[key] diff --git a/src/py42/sdk/queries/alerts/filters/__init__.py b/src/py42/sdk/queries/alerts/filters/__init__.py deleted file mode 100644 index 6c29593d5..000000000 --- a/src/py42/sdk/queries/alerts/filters/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from py42.sdk.queries.alerts.filters.alert_filter import * diff --git a/src/py42/sdk/queries/alerts/filters/alert_filter.py b/src/py42/sdk/queries/alerts/filters/alert_filter.py deleted file mode 100644 index 2e3961692..000000000 --- a/src/py42/sdk/queries/alerts/filters/alert_filter.py +++ /dev/null @@ -1,110 +0,0 @@ -from py42.choices import Choices as _Choices -from py42.sdk.queries.alerts.util import ( - AlertQueryFilterStringField as _AlertQueryFilterStringField, -) -from py42.sdk.queries.alerts.util import ( - AlertQueryFilterTimestampField as _AlertQueryFilterTimestampField, -) -from py42.sdk.queries.query_filter import ( - QueryFilterStringField as _QueryFilterStringField, -) - - -class DateObserved(_AlertQueryFilterTimestampField): - """Class that filters alerts based on the timestamp the alert was triggered.""" - - _term = "createdAt" - - -class Actor(_AlertQueryFilterStringField): - """Class that filters alerts based on the username that originated the event(s) that - triggered the alert.""" - - _term = "actor" - - -class RuleName(_AlertQueryFilterStringField): - """Class that filters alerts based on rule name.""" - - _term = "name" - - -class RuleId(_QueryFilterStringField): - """Class that filters alerts based on rule identifier.""" - - _term = "ruleId" - - -class RuleSource(_QueryFilterStringField, _Choices): - """Class that filters alerts based on rule source. - - Available options are: - - :attr:`RuleSource.ALERTING` - - :attr:`RuleSource.DEPARTING_EMPLOYEE` - - :attr:`RuleSource.HIGH_RISK_EMPLOYEE` - """ - - _term = "ruleSource" - - ALERTING = "Alerting" - DEPARTING_EMPLOYEE = "Departing Employee" - HIGH_RISK_EMPLOYEE = "High Risk Employee" - - -class RuleType(_QueryFilterStringField, _Choices): - """Class that filters alerts based on rule type. 
- - Available options are: - - :attr:`RuleType.ENDPOINT_EXFILTRATION` - - :attr:`RuleType.CLOUD_SHARE_PERMISSIONS` - - :attr:`RuleType.FILE_TYPE_MISMATCH` - """ - - _term = "type" - - ENDPOINT_EXFILTRATION = "FedEndpointExfiltration" - CLOUD_SHARE_PERMISSIONS = "FedCloudSharePermissions" - FILE_TYPE_MISMATCH = "FedFileTypeMismatch" - - -class Description(_AlertQueryFilterStringField): - """Class that filters alerts based on rule description text.""" - - _term = "description" - - -class Severity(_QueryFilterStringField, _Choices): - """Class that filters alerts based on severity. - - Available options are: - - :attr:`Severity.CRITICAL` - - :attr:`Severity.HIGH` - - :attr:`Severity.MODERATE` - - :attr:`Severity.LOW` - """ - - _term = "riskSeverity" - - CRITICAL = "CRITICAL" - HIGH = "HIGH" - MEDIUM = "MODERATE" - MODERATE = "MODERATE" - LOW = "LOW" - - -class AlertState(_QueryFilterStringField, _Choices): - """Class that filters alerts based on alert state. - - Available options are: - - :attr:`AlertState.OPEN` - - :attr:`AlertState.DISMISSED` - - :attr:`AlertState.PENDING` - - :attr:`AlertState.IN_PROGRESS` - """ - - _term = "state" - - OPEN = "OPEN" - DISMISSED = "RESOLVED" - PENDING = "PENDING" - IN_PROGRESS = "IN_PROGRESS" diff --git a/src/py42/sdk/queries/alerts/util.py b/src/py42/sdk/queries/alerts/util.py deleted file mode 100644 index 5ed14066e..000000000 --- a/src/py42/sdk/queries/alerts/util.py +++ /dev/null @@ -1,87 +0,0 @@ -from py42.sdk.queries.query_filter import create_filter_group -from py42.sdk.queries.query_filter import create_query_filter -from py42.sdk.queries.query_filter import QueryFilterStringField -from py42.sdk.queries.query_filter import QueryFilterTimestampField -from py42.util import MICROSECOND_FORMAT -from py42.util import parse_timestamp_to_microseconds_precision - - -def create_contains_filter_group(term, value): - """Creates a :class:`~py42.sdk.queries.query_filter.FilterGroup` for filtering results - where the value with key ``term`` contains the given value. Useful for creating ``CONTAINS`` - filters that are not yet supported in py42 or programmatically crafting filter groups. - - Args: - term: (str): The term of the filter, such as ``actor``. - value (str): The value used to match on. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - - filter_list = [create_query_filter(term, "CONTAINS", value)] - return create_filter_group(filter_list, "AND") - - -def create_not_contains_filter_group(term, value): - """Creates a :class:`~py42.sdk.queries.query_filter.FilterGroup` for filtering results - where the value with key ``term`` does not contain the given value. Useful for creating - ``DOES_NOT_CONTAIN`` filters that are not yet supported in py42 or programmatically - crafting filter groups. - - Args: - term: (str): The term of the filter, such as ``actor``. - value (str): The value used to exclude on. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - - filter_list = [create_query_filter(term, "DOES_NOT_CONTAIN", value)] - return create_filter_group(filter_list, "AND") - - -class AlertQueryFilterStringField(QueryFilterStringField): - @classmethod - def contains(cls, value): - """Creates a :class:`~py42.sdk.queries.query_filter.FilterGroup` for filtering - results where the value with key ``self._term`` contains the given value. Useful - for creating ``CONTAINS`` filters that are not yet supported in py42 or programmatically - crafting filter groups. - - Args: - value (str): The value used to match on. 
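
Putting the alert filter classes above together, a short sketch of building a query from them; the rule-name substring is a placeholder, and the `sdk.alerts.search()` call is an assumption based on the docstring's reference to `AlertService.search()`:

```python
from py42.sdk.queries.alerts.alert_query import AlertQuery
from py42.sdk.queries.alerts.filters import AlertState, RuleName, Severity

# Sketch only: `sdk` is an assumed, already-initialized SDK client.
query = AlertQuery.all(
    AlertState.eq(AlertState.OPEN),
    Severity.is_in([Severity.CRITICAL, Severity.HIGH]),
    RuleName.contains("Exfiltration"),  # CONTAINS via AlertQueryFilterStringField
)
response = sdk.alerts.search(query)     # assumed search entry point
```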
- - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - - return create_contains_filter_group(cls._term, value) - - @classmethod - def not_contains(cls, value): - """Creates a :class:`~py42.sdk.queries.query_filter.FilterGroup` for filtering - results where the value with key ``self._term`` does not contain the given value. - Useful for creating ``DOES_NOT_CONTAIN`` filters that are not yet supported in py42 - or programmatically crafting filter groups. - - Args: - value (str): The value used to exclude on. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - - return create_not_contains_filter_group(cls._term, value) - - -class AlertQueryFilterTimestampField(QueryFilterTimestampField): - """Helper class for creating alert filters where the search value is a timestamp.""" - - @staticmethod - def _parse_timestamp(value): - return parse_timestamp_to_microseconds_precision(value) - - @staticmethod - def _convert_datetime_to_timestamp(value): - return value.strftime(MICROSECOND_FORMAT) diff --git a/src/py42/sdk/queries/fileevents/__init__.py b/src/py42/sdk/queries/fileevents/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/py42/sdk/queries/fileevents/file_event_query.py b/src/py42/sdk/queries/fileevents/file_event_query.py deleted file mode 100644 index b4465c5f0..000000000 --- a/src/py42/sdk/queries/fileevents/file_event_query.py +++ /dev/null @@ -1,71 +0,0 @@ -from warnings import warn - -from py42.sdk.queries import BaseQuery -from py42.sdk.queries.fileevents.util import FileEventFilterComparableField -from py42.sdk.queries.fileevents.util import FileEventFilterStringField -from py42.sdk.queries.fileevents.util import FileEventFilterTimestampField - -# import from util for backwards-compatibility - - -class FileEventQuery(BaseQuery): - """Helper class for building V1 Code42 Forensic Search queries. - - A FileEventQuery instance's ``all()`` and ``any()`` take one or more - :class:`~py42.sdk.queries.query_filter.FilterGroup` objects to construct a query that - can be passed to the :meth:`FileEventService.search()` method. ``all()`` returns results - that match all of the provided filter criteria, ``any()`` will return results that - match any of the filters. - - For convenience, the :class:`FileEventQuery` constructor does the same as ``all()``. - - Usage example:: - - email_filter = EmailSender.is_in(["test.user@example.com", "test.sender@example.com"]) - exposure_filter = ExposureType.exists() - query = FileEventQuery.all(email_filter, exposure_filter) - - """ - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - warn( - "V1 file events and saved searches are deprecated. 
Use `from py42.sdk.queries.fileevents.v2 import *` to build V2 queries instead..", - DeprecationWarning, - stacklevel=2, - ) - - self.sort_key = "eventId" - - @property - def version(self): - return "v1" - - def __str__(self): - groups_string = ",".join( - str(group_item) for group_item in self._filter_group_list - ) - if self.page_token is not None: - paging_prop = f'"srtDir":"{self.sort_direction}", "srtKey":"{self.sort_key}", "pgToken":"{self.page_token}"' - else: - paging_prop = f'"srtDir":"{self.sort_direction}", "srtKey":"{self.sort_key}", "pgNum":{self.page_number}' - json = f'{{"groupClause":"{self._group_clause}", "groups":[{groups_string}], {paging_prop}, "pgSize":{self.page_size}}}' - return json - - def __iter__(self): - filter_group_list = [dict(item) for item in self._filter_group_list] - output_dict = { - "groupClause": self._group_clause, - "groups": filter_group_list, - "pgSize": self.page_size, - "srtDir": self.sort_direction, - "srtKey": self.sort_key, - } - - if self.page_token is not None: - output_dict["pgToken"] = self.page_token - else: - output_dict["pgNum"] = self.page_number - - for key in output_dict: - yield key, output_dict[key] diff --git a/src/py42/sdk/queries/fileevents/filters/__init__.py b/src/py42/sdk/queries/fileevents/filters/__init__.py deleted file mode 100644 index f7558c956..000000000 --- a/src/py42/sdk/queries/fileevents/filters/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from py42.sdk.queries.fileevents.filters.activity_filter import * -from py42.sdk.queries.fileevents.filters.cloud_filter import * -from py42.sdk.queries.fileevents.filters.device_filter import * -from py42.sdk.queries.fileevents.filters.email_filter import * -from py42.sdk.queries.fileevents.filters.event_filter import * -from py42.sdk.queries.fileevents.filters.exposure_filter import * -from py42.sdk.queries.fileevents.filters.file_filter import * -from py42.sdk.queries.fileevents.filters.print_filter import * -from py42.sdk.queries.fileevents.filters.risk_filter import * diff --git a/src/py42/sdk/queries/fileevents/filters/activity_filter.py b/src/py42/sdk/queries/fileevents/filters/activity_filter.py deleted file mode 100644 index 1dca0cd0b..000000000 --- a/src/py42/sdk/queries/fileevents/filters/activity_filter.py +++ /dev/null @@ -1,16 +0,0 @@ -from py42.sdk.queries.query_filter import ( - QueryFilterBooleanField as _QueryFilterBooleanField, -) - - -class TrustedActivity(_QueryFilterBooleanField): - """V1 filter class that filters events based on whether activity can be trusted.""" - - _term = "trusted" - - -class RemoteActivity(_QueryFilterBooleanField): - """V1 filter class that filters events based on whether the activity was remote - (took place outside of corporate IP range).""" - - _term = "remoteActivity" diff --git a/src/py42/sdk/queries/fileevents/filters/cloud_filter.py b/src/py42/sdk/queries/fileevents/filters/cloud_filter.py deleted file mode 100644 index 979b2b0c4..000000000 --- a/src/py42/sdk/queries/fileevents/filters/cloud_filter.py +++ /dev/null @@ -1,54 +0,0 @@ -from py42.choices import Choices as _Choices -from py42.sdk.queries.fileevents.util import ( - FileEventFilterStringField as _FileEventFilterStringField, -) -from py42.sdk.queries.query_filter import QueryFilterBooleanField - - -class Actor(_FileEventFilterStringField): - """V1 filter class that filters events by the cloud service username of the event originator - (applies to cloud data source events only). 
- """ - - _term = "actor" - - -class DirectoryID(_FileEventFilterStringField): - """V1 filter class that filters events by unique identifier of the cloud drive or folder where the event - occurred (applies to cloud data source events only). - """ - - _term = "directoryId" - - -class Shared(QueryFilterBooleanField): - """V1 filter class that filters events by the shared status of the file at the time the event occurred - (applies to cloud data source events only). - """ - - _term = "shared" - - -class SharedWith(_FileEventFilterStringField): - """V1 filter class that filters events by the list of users who had been granted access to the file at the - time of the event (applies to cloud data source events only). - """ - - _term = "sharedWith" - - -class SharingTypeAdded(_FileEventFilterStringField, _Choices): - """V1 filter class that filters results to include events where a file's sharing permissions were - changed to a value that increases exposure (applies to cloud data source events only). - - Available options provided as class attributes: - - :attr:`SharingTypeAdded.SHARED_VIA_LINK` - - :attr:`SharingTypeAdded.IS_PUBLIC` - - :attr:`SharingTypeAdded.OUTSIDE_TRUSTED_DOMAIN` - """ - - _term = "sharingTypeAdded" - - SHARED_VIA_LINK = "SharedViaLink" - IS_PUBLIC = "IsPublic" - OUTSIDE_TRUSTED_DOMAIN = "OutsideTrustedDomains" diff --git a/src/py42/sdk/queries/fileevents/filters/device_filter.py b/src/py42/sdk/queries/fileevents/filters/device_filter.py deleted file mode 100644 index 524e4c9ba..000000000 --- a/src/py42/sdk/queries/fileevents/filters/device_filter.py +++ /dev/null @@ -1,31 +0,0 @@ -from py42.sdk.queries.fileevents.util import FileEventFilterStringField - - -class DeviceUsername(FileEventFilterStringField): - """V1 filter class that filters events by the Code42 username of the device that observed the event.""" - - _term = "deviceUserName" - - -class OSHostname(FileEventFilterStringField): - """V1 filter class that filters events by hostname of the device that observed the event.""" - - _term = "osHostName" - - -class PrivateIPAddress(FileEventFilterStringField): - """V1 filter class that filters events by private (LAN) IP address of the device that observed the event.""" - - _term = "privateIpAddresses" - - -class PublicIPAddress(FileEventFilterStringField): - """V1 filter class that filters events by public (WAN) IP address of the device that observed the event.""" - - _term = "publicIpAddress" - - -class DeviceSignedInUserName(FileEventFilterStringField): - """V1 filter class that filters events by signed in user of the device that observed the event.""" - - _term = "operatingSystemUser" diff --git a/src/py42/sdk/queries/fileevents/filters/email_filter.py b/src/py42/sdk/queries/fileevents/filters/email_filter.py deleted file mode 100644 index d448d2ba6..000000000 --- a/src/py42/sdk/queries/fileevents/filters/email_filter.py +++ /dev/null @@ -1,37 +0,0 @@ -from py42.sdk.queries.query_filter import ( - QueryFilterStringField as _QueryFilterStringField, -) - - -class EmailPolicyName(_QueryFilterStringField): - """V1 filter class that filters events based on the email DLP policy that detected this file (applies to - emails sent via Microsoft Office 365 only). 
- """ - - _term = "emailDlpPolicyNames" - - -class EmailSubject(_QueryFilterStringField): - """V1 filter class that filters events based on the email's subject (applies to email events only).""" - - _term = "emailSubject" - - -class EmailRecipients(_QueryFilterStringField): - """V1 filter class that filters events based on the email's recipient list (applies to email events only).""" - - _term = "emailRecipients" - - -class EmailSender(_QueryFilterStringField): - """V1 filter class that filters events based on the email's sender (applies to email events only).""" - - _term = "emailSender" - - -class EmailFrom(_QueryFilterStringField): - """V1 filter class that filters events based on the display name of the email's sender, as it appears in - the \"From:\" field in the email (applies to email events only). - """ - - _term = "emailFrom" diff --git a/src/py42/sdk/queries/fileevents/filters/event_filter.py b/src/py42/sdk/queries/fileevents/filters/event_filter.py deleted file mode 100644 index b2e0cc90c..000000000 --- a/src/py42/sdk/queries/fileevents/filters/event_filter.py +++ /dev/null @@ -1,121 +0,0 @@ -from py42.choices import Choices as _Choices -from py42.sdk.queries.fileevents.util import ( - FileEventFilterStringField as _FileEventFilterStringField, -) -from py42.sdk.queries.fileevents.util import ( - FileEventFilterTimestampField as _FileEventFilterTimestampField, -) -from py42.sdk.queries.query_filter import ( - QueryFilterBooleanField as _QueryFilterBooleanField, -) - - -class EventTimestamp(_FileEventFilterTimestampField, _Choices): - """V1 filter class that filters events based on the timestamp of the event that occurred. - - Available event timestamp constants are provided as class attributes, These - constants should be used only with class method `within_the_last`: - - - :attr:`EventTimestamp.FIFTEEN_MINUTES` - - :attr:`EventTimestamp.ONE_HOUR` - - :attr:`EventTimestamp.THREE_HOURS` - - :attr:`EventTimestamp.TWELVE_HOURS` - - :attr:`EventTimestamp.ONE_DAY` - - :attr:`EventTimestamp.THREE_DAYS` - - :attr:`EventTimestamp.SEVEN_DAYS` - - :attr:`EventTimestamp.FOURTEEN_DAYS` - - :attr:`EventTimestamp.THIRTY_DAYS` - - Example:: - filter = EventTimestamp.within_the_last(EventTimestamp.SEVEN_DAYS) - """ - - _term = "eventTimestamp" - - FIFTEEN_MINUTES = "PT15M" - ONE_HOUR = "PT1H" - THREE_HOURS = "PT3H" - TWELVE_HOURS = "PT12H" - ONE_DAY = "P1D" - THREE_DAYS = "P3D" - SEVEN_DAYS = "P7D" - FOURTEEN_DAYS = "P14D" - THIRTY_DAYS = "P30D" - - -class EventType(_FileEventFilterStringField, _Choices): - """V1 filter class that filters file events based on event type. - - Available event types are provided as class attributes: - - - :attr:`EventType.CREATED` - - :attr:`EventType.DELETED` - - :attr:`EventType.EMAILED` - - :attr:`EventType.MODIFIED` - - :attr:`EventType.READ_BY_APP` - - :attr:`EventType.PRINTED` - - Example:: - - filter = EventType.isin([EventType.READ_BY_APP, EventType.EMAILED]) - - """ - - _term = "eventType" - - CREATED = "CREATED" - MODIFIED = "MODIFIED" - DELETED = "DELETED" - READ_BY_APP = "READ_BY_APP" - EMAILED = "EMAILED" - PRINTED = "PRINTED" - - -class InsertionTimestamp(_FileEventFilterTimestampField): - """V1 filter class that filters events based on the timestamp of when the event was actually added to the - event store (which can be after the event occurred on the device itself). - - `value` must be a POSIX timestamp. 
(see the :ref:`Dates ` section of the Basics - user guide for details on timestamp arguments in py42) - """ - - _term = "insertionTimestamp" - - -class Source(_FileEventFilterStringField, _Choices): - """V1 filter class that filters events by event source. - - Available source types are provided as class attributes: - - :attr:`Source.ENDPOINT` - - :attr:`Source.GOOGLE_DRIVE` - - :attr:`Source.ONE_DRIVE` - - :attr:`Source.BOX` - - :attr:`Source.GMAIL` - - :attr:`Source.OFFICE_365` - - Example:: - - filter = Source.is_in([Source.ENDPOINT, Source.BOX]) - - """ - - _term = "source" - - ENDPOINT = "Endpoint" - GOOGLE_DRIVE = "GoogleDrive" - ONE_DRIVE = "OneDrive" - BOX = "Box" - GMAIL = "Gmail" - OFFICE_365 = "Office365" - - -class MimeTypeMismatch(_QueryFilterBooleanField): - """V1 filter class that filters events by whether or not a file's mime type matches its extension type.""" - - _term = "mimeTypeMismatch" - - -class OutsideActiveHours(_QueryFilterBooleanField): - """V1 filter class that filters events by whether or not they occurred outside a user's typical working hours""" - - _term = "outsideActiveHours" diff --git a/src/py42/sdk/queries/fileevents/filters/exposure_filter.py b/src/py42/sdk/queries/fileevents/filters/exposure_filter.py deleted file mode 100644 index 3b18d9786..000000000 --- a/src/py42/sdk/queries/fileevents/filters/exposure_filter.py +++ /dev/null @@ -1,177 +0,0 @@ -from py42.choices import Choices as _Choices -from py42.sdk.queries.fileevents.util import ( - FileEventFilterStringField as _FileEventFilterStringField, -) - - -class ExposureType(_FileEventFilterStringField, _Choices): - """V1 filter class that filters events based on exposure type. - - Available options are provided as class attributes: - - :attr:`ExposureType.SHARED_VIA_LINK` - - :attr:`ExposureType.SHARED_TO_DOMAIN` - - :attr:`ExposureType.APPLICATION_READ` - - :attr:`ExposureType.CLOUD_STORAGE` - - :attr:`ExposureType.REMOVABLE_MEDIA` - - :attr:`ExposureType.IS_PUBLIC` - """ - - _term = "exposure" - - SHARED_VIA_LINK = "SharedViaLink" - SHARED_TO_DOMAIN = "SharedToDomain" - APPLICATION_READ = "ApplicationRead" - CLOUD_STORAGE = "CloudStorage" - REMOVABLE_MEDIA = "RemovableMedia" - IS_PUBLIC = "IsPublic" - OUTSIDE_TRUSTED_DOMAINS = "OutsideTrustedDomains" - - -class ProcessName(_FileEventFilterStringField): - """V1 filter class that filters events based on the process name involved in the exposure (applies to - ``read by browser or other app`` events only). - """ - - _term = "processName" - - -class ProcessOwner(_FileEventFilterStringField): - """V1 filter class that filters events based on the process owner that was involved in the exposure - (applies to ``read by browser or other app`` events only). - """ - - _term = "processOwner" - - -class RemovableMediaName(_FileEventFilterStringField): - """V1 filter class that filters events based on the name of the removable media involved in the exposure - (applies to ``removable media`` events only). - """ - - _term = "removableMediaName" - - -class RemovableMediaVendor(_FileEventFilterStringField): - """V1 filter class that filters events based on the vendor of the removable media device involved in the - exposure (applies to ``removable media`` events only). 
- """ - - _term = "removableMediaVendor" - - -class RemovableMediaMediaName(_FileEventFilterStringField): - """V1 filter class that filters events based on the name of the removable media (as reported by the - vendor/device, usually very similar to RemovableMediaName) involved in the exposure (applies to - ``removable media`` events only). - """ - - _term = "removableMediaMediaName" - - -class RemovableMediaVolumeName(_FileEventFilterStringField): - """V1 filter class that filters events based on the name of the formatted volume (as reported by the - operating system) of the removable media device involved in the exposure (applies to - ``removable media`` events only). - """ - - _term = "removableMediaVolumeName" - - -class RemovableMediaPartitionID(_FileEventFilterStringField): - """V1 filter class that filters events based on the unique identifier assigned (by the operating system) - to the removable media involved in the exposure (applies to ``removable media`` events only). - """ - - _term = "removableMediaPartitionId" - - -class RemovableMediaSerialNumber(_FileEventFilterStringField): - """V1 filter class that filters events based on the serial number of the connected hardware as reported - by the operating system (applies to ``removable media`` events only). - """ - - _term = "removableMediaSerialNumber" - - -class SyncDestination(_FileEventFilterStringField, _Choices): - """V1 filter class that filters events based on the name of the cloud service the file is synced with - (applies to ``synced to cloud service`` events only). - - Available options are provided as class attributes: - - :attr:`SyncDestination.ICLOUD` - - :attr:`SyncDestination.BOX` - - :attr:`SyncDestination.BOX_DRIVE` - - :attr:`SyncDestination.GOOGLE_DRIVE` - - :attr:`SyncDestination.GOOGLE_BACKUP_AND_SYNC` - - :attr:`SyncDestination.DROPBOX` - - :attr:`SyncDestination.ONEDRIVE` - """ - - _term = "syncDestination" - - ICLOUD = "ICloud" - BOX = "Box" - BOX_DRIVE = "BoxDrive" - GOOGLE_DRIVE = "GoogleDrive" - GOOGLE_BACKUP_AND_SYNC = "GoogleBackupAndSync" - DROPBOX = "Dropbox" - ONEDRIVE = "OneDrive" - - -class SyncDestinationUsername(_FileEventFilterStringField): - """V1 filter class that filters events based on the username associated with the cloud service - the file is synced with (applies to ``synced to cloud service`` events only). - """ - - _term = "syncDestinationUsername" - - -class TabURL(_FileEventFilterStringField): - """V1 filter class that filters events based on all the URLs of the browser tabs at the time the file - contents were read by the browser (applies to ``read by browser or other app`` events only). - """ - - _term = "tabUrls" - - -class WindowTitle(_FileEventFilterStringField): - """V1 filter class that filters events based on the name of all the browser tabs or application windows that were - open when a browser or other app event occurred (applies to ``read by browser or other app`` - events only). - """ - - _term = "tabTitles" - - -class DestinationCategory(_FileEventFilterStringField, _Choices): - """V1 filter class that filters events based on the category of the file event destination. 
- - Available options are provided as class attributes: - - :attr:`DestinationCategory.CLOUD_STORAGE` - - :attr:`DestinationCategory.DEVICE` - - :attr:`DestinationCategory.EMAIL` - - :attr:`DestinationCategory.MESSAGING` - - :attr:`DestinationCategory.MULTIPLE_POSSIBILITIES` - - :attr:`DestinationCategory.SOCIAL_MEDIA` - - :attr:`DestinationCategory.SOURCE_CODE_REPOSITORY` - - :attr:`DestinationCategory.UNCATEGORIZED` - - :attr:`DestinationCategory.UNKNOWN` - """ - - _term = "destinationCategory" - - CLOUD_STORAGE = "Cloud Storage" - DEVICE = "Device" - EMAIL = "Email" - MESSAGING = "Messaging" - MULTIPLE_POSSIBILITIES = "Multiple Possibilities" - SOCIAL_MEDIA = "Social Media" - SOURCE_CODE_REPOSITORY = "Source Code Repository" - UNCATEGORIZED = "Uncategorized" - UNKNOWN = "Unknown" - - -class DestinationName(_FileEventFilterStringField): - """V1 filter class that filters events based on the name of the file event destination.""" - - _term = "destinationName" diff --git a/src/py42/sdk/queries/fileevents/filters/file_filter.py b/src/py42/sdk/queries/fileevents/filters/file_filter.py deleted file mode 100644 index 59ae2c807..000000000 --- a/src/py42/sdk/queries/fileevents/filters/file_filter.py +++ /dev/null @@ -1,81 +0,0 @@ -from py42.choices import Choices as _Choices -from py42.sdk.queries.fileevents.util import ( - FileEventFilterComparableField as _FileEventFilterComparableField, -) -from py42.sdk.queries.fileevents.util import ( - FileEventFilterStringField as _FileEventFilterStringField, -) - - -class FileCategory(_FileEventFilterStringField, _Choices): - """V1 filter class that filters events by category of the file observed. - - Available file categories are provided as class attributes: - - :attr:`FileCategory.AUDIO` - - :attr:`FileCategory.DOCUMENT` - - :attr:`FileCategory.EXECUTABLE` - - :attr:`FileCategory.IMAGE` - - :attr:`FileCategory.PDF` - - :attr:`FileCategory.PRESENTATION` - - :attr:`FileCategory.SCRIPT` - - :attr:`FileCategory.SOURCE_CODE` - - :attr:`FileCategory.SPREADSHEET` - - :attr:`FileCategory.VIDEO` - - :attr:`FileCategory.VIRTUAL_DISK_IMAGE` - - :attr:`FileCategory.ZIP` - - """ - - _term = "fileCategory" - - AUDIO = "Audio" - DOCUMENT = "Document" - EXECUTABLE = "Executable" - IMAGE = "Image" - PDF = "Pdf" - PRESENTATION = "Presentation" - SCRIPT = "Script" - SOURCE_CODE = "SourceCode" - SPREADSHEET = "Spreadsheet" - VIDEO = "Video" - VIRTUAL_DISK_IMAGE = "VirtualDiskImage" - ZIP = "Archive" - - -class FileName(_FileEventFilterStringField): - """V1 filter class that filters events by the name of the file observed.""" - - _term = "fileName" - - -class FileOwner(_FileEventFilterStringField): - """V1 filter class that filters events by the owner of the file observed.""" - - _term = "fileOwner" - - -class FilePath(_FileEventFilterStringField): - """V1 filter class that filters events by path of the file observed.""" - - _term = "filePath" - - -class FileSize(_FileEventFilterComparableField): - """V1 filter class that filters events by size of the file observed. - - Size ``value`` must be bytes. 
- """ - - _term = "fileSize" - - -class MD5(_FileEventFilterStringField): - """V1 filter class that filters events by the MD5 hash of the file observed.""" - - _term = "md5Checksum" - - -class SHA256(_FileEventFilterStringField): - """V1 filter class that filters events by SHA256 hash of the file observed.""" - - _term = "sha256Checksum" diff --git a/src/py42/sdk/queries/fileevents/filters/print_filter.py b/src/py42/sdk/queries/fileevents/filters/print_filter.py deleted file mode 100644 index f1f33f883..000000000 --- a/src/py42/sdk/queries/fileevents/filters/print_filter.py +++ /dev/null @@ -1,15 +0,0 @@ -from py42.sdk.queries.fileevents.util import ( - FileEventFilterStringField as _FileEventFilterStringField, -) - - -class Printer(_FileEventFilterStringField): - """V1 filter class that filters events by printer name.""" - - _term = "printerName" - - -class PrintJobName(_FileEventFilterStringField): - """V1 filter class that filters events by print job name.""" - - _term = "printJobName" diff --git a/src/py42/sdk/queries/fileevents/filters/risk_filter.py b/src/py42/sdk/queries/fileevents/filters/risk_filter.py deleted file mode 100644 index edc6c2eb9..000000000 --- a/src/py42/sdk/queries/fileevents/filters/risk_filter.py +++ /dev/null @@ -1,206 +0,0 @@ -from py42.choices import Choices as _Choices -from py42.sdk.queries.fileevents.util import ( - FileEventFilterComparableField as _FileEventFilterComparableField, -) -from py42.sdk.queries.fileevents.util import ( - FileEventFilterStringField as _FileEventFilterStringField, -) -from py42.sdk.queries.query_filter import ( - QueryFilterStringField as _QueryFilterStringField, -) - - -class RiskIndicator(_FileEventFilterStringField): - """V1 filter class that filters events by risk indicator. - - Available options are provided as class attributes: - - :attr:`RiskIndicator.CloudDataExposures.PUBLIC_CORPORATE_BOX` - - :attr:`RiskIndicator.CloudDataExposures.PUBLIC_CORPORATE_GOOGLE_DRIVE` - - :attr:`RiskIndicator.CloudDataExposures.PUBLIC_CORPORATE_ONEDRIVE` - - :attr:`RiskIndicator.CloudDataExposures.SENT_CORPORATE_GMAIL` - - :attr:`RiskIndicator.CloudDataExposures.SHARED_CORPORATE_BOX` - - :attr:`RiskIndicator.CloudDataExposures.SHARED_CORPORATE_GOOGLE_DRIVE` - - :attr:`RiskIndicator.CloudDataExposures.SHARED_CORPORATE_ONEDRIVE` - - :attr:`RiskIndicator.CloudStorageUploads.AMAZON_DRIVE` - - :attr:`RiskIndicator.CloudStorageUploads.BOX` - - :attr:`RiskIndicator.CloudStorageUploads.DROPBOX` - - :attr:`RiskIndicator.CloudStorageUploads.GOOGLE_DRIVE` - - :attr:`RiskIndicator.CloudStorageUploads.ICLOUD` - - :attr:`RiskIndicator.CloudStorageUploads.MEGA` - - :attr:`RiskIndicator.CloudStorageUploads.ONEDRIVE` - - :attr:`RiskIndicator.CloudStorageUploads.ZOHO` - - :attr:`RiskIndicator.CodeRepositoryUploads.BITBUCKET` - - :attr:`RiskIndicator.CodeRepositoryUploads.GITHUB` - - :attr:`RiskIndicator.CodeRepositoryUploads.GITLAB` - - :attr:`RiskIndicator.CodeRepositoryUploads.SOURCEFORGE` - - :attr:`RiskIndicator.CodeRepositoryUploads.STASH` - - :attr:`RiskIndicator.EmailServiceUploads.ONESIXTHREE_DOT_COM` - - :attr:`RiskIndicator.EmailServiceUploads.ONETWOSIX_DOT_COM` - - :attr:`RiskIndicator.EmailServiceUploads.AOL` - - :attr:`RiskIndicator.EmailServiceUploads.COMCAST` - - :attr:`RiskIndicator.EmailServiceUploads.GMAIL` - - :attr:`RiskIndicator.EmailServiceUploads.ICLOUD` - - :attr:`RiskIndicator.EmailServiceUploads.MAIL_DOT_COM` - - :attr:`RiskIndicator.EmailServiceUploads.OUTLOOK` - - :attr:`RiskIndicator.EmailServiceUploads.PROTONMAIL` - - 
:attr:`RiskIndicator.EmailServiceUploads.QQMAIL` - - :attr:`RiskIndicator.EmailServiceUploads.SINA_MAIL` - - :attr:`RiskIndicator.EmailServiceUploads.SOHU_MAIL` - - :attr:`RiskIndicator.EmailServiceUploads.YAHOO` - - :attr:`RiskIndicator.EmailServiceUploads.ZOHO_MAIL` - - :attr:`RiskIndicator.ExternalDevices.AIRDROP` - - :attr:`RiskIndicator.ExternalDevices.REMOVABLE_MEDIA` - - :attr:`RiskIndicator.FileCategories.AUDIO` - - :attr:`RiskIndicator.FileCategories.DOCUMENT` - - :attr:`RiskIndicator.FileCategories.EXECUTABLE` - - :attr:`RiskIndicator.FileCategories.IMAGE` - - :attr:`RiskIndicator.FileCategories.PDF` - - :attr:`RiskIndicator.FileCategories.PRESENTATION` - - :attr:`RiskIndicator.FileCategories.SCRIPT` - - :attr:`RiskIndicator.FileCategories.SOURCE_CODE` - - :attr:`RiskIndicator.FileCategories.SPREADSHEET` - - :attr:`RiskIndicator.FileCategories.VIDEO` - - :attr:`RiskIndicator.FileCategories.VIRTUAL_DISK_IMAGE` - - :attr:`RiskIndicator.FileCategories.ZIP` - - :attr:`RiskIndicator.MessagingServiceUploads.FACEBOOK_MESSENGER` - - :attr:`RiskIndicator.MessagingServiceUploads.MICROSOFT_TEAMS` - - :attr:`RiskIndicator.MessagingServiceUploads.SLACK` - - :attr:`RiskIndicator.MessagingServiceUploads.WHATSAPP` - - :attr:`RiskIndicator.Other.OTHER` - - :attr:`RiskIndicator.Other.UNKNOWN` - - :attr:`RiskIndicator.SocialMediaUploads.FACEBOOK` - - :attr:`RiskIndicator.SocialMediaUploads.LINKEDIN` - - :attr:`RiskIndicator.SocialMediaUploads.REDDIT` - - :attr:`RiskIndicator.SocialMediaUploads.TWITTER` - - :attr:`RiskIndicator.UserBehavior.FILE_MISMATCH` - - :attr:`RiskIndicator.UserBehavior.OFF_HOURS` - - :attr:`RiskIndicator.UserBehavior.REMOTE` - - :attr:`RiskIndicator.UserBehavior.FIRST_DESTINATION_USE` - - :attr:`RiskIndicator.UserBehavior.RARE_DESTINATION_USE` - """ - - _term = "riskIndicatorNames" - - @staticmethod - def choices(): - return ( - RiskIndicator.CloudDataExposures.choices() - + RiskIndicator.CloudStorageUploads.choices() - + RiskIndicator.CodeRepositoryUploads.choices() - + RiskIndicator.EmailServiceUploads.choices() - + RiskIndicator.ExternalDevices.choices() - + RiskIndicator.FileCategories.choices() - + RiskIndicator.MessagingServiceUploads.choices() - + RiskIndicator.Other.choices() - + RiskIndicator.SocialMediaUploads.choices() - + RiskIndicator.UserBehavior.choices() - ) - - class CloudDataExposures(_Choices): - PUBLIC_CORPORATE_BOX = "Public link from corporate Box" - PUBLIC_CORPORATE_GOOGLE_DRIVE = "Public link from corporate Google Drive" - PUBLIC_CORPORATE_ONEDRIVE = "Public link from corporate OneDrive" - SENT_CORPORATE_GMAIL = "Sent from corporate Gmail" - SHARED_CORPORATE_BOX = "Shared from corporate Box" - SHARED_CORPORATE_GOOGLE_DRIVE = "Shared from corporate Google Drive" - SHARED_CORPORATE_ONEDRIVE = "Shared from corporate OneDrive" - - class CloudStorageUploads(_Choices): - AMAZON_DRIVE = "Amazon Drive upload" - BOX = "Box upload" - DROPBOX = "Dropbox upload" - GOOGLE_DRIVE = "Google Drive upload" - ICLOUD = "iCloud upload" - MEGA = "Mega upload" - ONEDRIVE = "OneDrive upload" - ZOHO = "Zoho WorkDrive upload" - - class CodeRepositoryUploads(_Choices): - BITBUCKET = "Bitbucket upload" - GITHUB = "GitHub upload" - GITLAB = "GitLab upload" - SOURCEFORGE = "SourceForge upload" - STASH = "Stash upload" - - class EmailServiceUploads(_Choices): - ONESIXTHREE_DOT_COM = "163.com upload" - ONETWOSIX_DOT_COM = "126.com upload" - AOL = "AOL upload" - COMCAST = "Comcast upload" - GMAIL = "Gmail upload" - ICLOUD = "iCloud Mail upload" - MAIL_DOT_COM = "Mail.com upload" - 
OUTLOOK = "Outlook upload" - PROTONMAIL = "ProtonMail upload" - QQMAIL = "QQMail upload" - SINA_MAIL = "Sina Mail upload" - SOHU_MAIL = "Sohu Mail upload" - YAHOO = "Yahoo upload" - ZOHO_MAIL = "Zoho Mail upload" - - class ExternalDevices(_Choices): - AIRDROP = "AirDrop" - REMOVABLE_MEDIA = "Removable media" - - class FileCategories(_Choices): - AUDIO = "Audio" - DOCUMENT = "Document" - EXECUTABLE = "Executable" - IMAGE = "Image" - PDF = "PDF" - PRESENTATION = "Presentation" - SCRIPT = "Script" - SOURCE_CODE = "Source code" - SPREADSHEET = "Spreadsheet" - VIDEO = "Video" - VIRTUAL_DISK_IMAGE = "Virtual Disk Image" - ZIP = "Zip" - - class MessagingServiceUploads(_Choices): - FACEBOOK_MESSENGER = "Facebook Messenger upload" - MICROSOFT_TEAMS = "Microsoft Teams upload" - SLACK = "Slack upload" - WHATSAPP = "WhatsApp upload" - - class Other(_Choices): - OTHER = "Other destination" - UNKNOWN = "Unknown destination" - - class SocialMediaUploads(_Choices): - FACEBOOK = "Facebook upload" - LINKEDIN = "LinkedIn upload" - REDDIT = "Reddit upload" - TWITTER = "Twitter upload" - - class UserBehavior(_Choices): - FILE_MISMATCH = "File mismatch" - OFF_HOURS = "Off hours" - REMOTE = "Remote" - FIRST_DESTINATION_USE = "First use of destination" - RARE_DESTINATION_USE = "Rare use of destination" - - -class RiskSeverity(_FileEventFilterStringField, _Choices): - """V1 filter class that filters events by risk severity. - - Available options are provided as class attributes: - - :attr:`RiskSeverity.LOW` - - :attr:`RiskSeverity.MODERATE` - - :attr:`RiskSeverity.HIGH` - - :attr:`RiskSeverity.CRITICAL` - - :attr:`RiskSeverity.NO_RISK_INDICATED` - """ - - _term = "riskSeverity" - - CRITICAL = "CRITICAL" - HIGH = "HIGH" - MODERATE = "MODERATE" - LOW = "LOW" - NO_RISK_INDICATED = "NO_RISK_INDICATED" - - -class RiskScore(_QueryFilterStringField, _FileEventFilterComparableField): - """V1 filter class that filters events by risk score.""" - - _term = "riskScore" diff --git a/src/py42/sdk/queries/fileevents/filters/source_filter.py b/src/py42/sdk/queries/fileevents/filters/source_filter.py deleted file mode 100644 index e0063bf08..000000000 --- a/src/py42/sdk/queries/fileevents/filters/source_filter.py +++ /dev/null @@ -1,53 +0,0 @@ -from py42.choices import Choices as _Choices -from py42.sdk.queries.fileevents.util import ( - FileEventFilterStringField as _FileEventFilterStringField, -) - - -class SourceCategory(_FileEventFilterStringField, _Choices): - """ - V1 filter class that filters events based on source category. 
- - Available options are provided as class attributes: - - :attr:`SourceCategory.BUSINESS_TOOLS` - - :attr:`SourceCategory.CLOUD_STORAGE` - - :attr:`SourceCategory.DEVICE` - - :attr:`SourceCategory.EMAIL` - - :attr:`SourceCategory.MESSAGING` - - :attr:`SourceCategory.MULTIPLE_POSSIBILITIES` - - :attr:`SourceCategory.SOCIAL_MEDIA` - - :attr:`SourceCategory.SOURCE_CODE_REPOSITORY` - - :attr:`SourceCategory.UNCATEGORIZED` - - :attr:`SourceCategory.UNKNOWN` - """ - - _term = "sourceCategory" - - BUSINESS_TOOLS = "Business Tools" - CLOUD_STORAGE = "Cloud Storage" - DEVICE = "Device" - EMAIL = "Email" - MESSAGING = "Messaging" - MULTIPLE_POSSIBILITIES = "Multiple Possibilities" - SOCIAL_MEDIA = "Social Media" - SOURCE_CODE_REPOSITORY = "Source Code Repository" - UNCATEGORIZED = "Uncategorized" - UNKNOWN = "Unknown" - - -class SourceName(_FileEventFilterStringField): - """V1 filter class that filters events based on source name.""" - - _term = "sourceName" - - -class SourceTabTitles(_FileEventFilterStringField): - """V1 filter class that filters events based on source tab titles (for 'browser or other app' events).""" - - _term = "sourceTabTitles" - - -class SourceTabUrls(_FileEventFilterStringField): - """V1 filter class that filters events based on source tab URLs (for 'browser or other app' events).""" - - _term = "sourceTabUrls" diff --git a/src/py42/sdk/queries/fileevents/util.py b/src/py42/sdk/queries/fileevents/util.py deleted file mode 100644 index e13d5b07b..000000000 --- a/src/py42/sdk/queries/fileevents/util.py +++ /dev/null @@ -1,144 +0,0 @@ -from py42.sdk.queries.query_filter import create_filter_group -from py42.sdk.queries.query_filter import create_query_filter -from py42.sdk.queries.query_filter import create_within_the_last_filter_group -from py42.sdk.queries.query_filter import QueryFilterStringField -from py42.sdk.queries.query_filter import QueryFilterTimestampField - - -def create_exists_filter_group(term): - """Creates a :class:`~py42.sdk.queries.query_filter.FilterGroup` to find events where - filter data exists. Useful for creating ``EXISTS`` filters that are not yet supported - in py42 or programmatically crafting filter groups. - - Args: - term (str): The term of the filter. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - filter_list = [create_query_filter(term, "EXISTS")] - return create_filter_group(filter_list, "AND") - - -def create_not_exists_filter_group(term): - """Creates a :class:`~py42.sdk.queries.query_filter.FilterGroup` to find events where - filter data does not exist. Useful for creating ``DOES_NOT_EXIST`` filters that are - not yet supported in py42 or programmatically crafting filter groups. - - Args: - term (str): The term of the filter. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - filter_list = [create_query_filter(term, "DOES_NOT_EXIST")] - return create_filter_group(filter_list, "AND") - - -def create_greater_than_filter_group(term, value): - """Creates a :class:`~py42.sdk.queries.query_filter.FilterGroup` for matching file - events where the value with key ``term`` is greater than the given value. Useful for - creating ``GREATER_THAN`` filters that are not yet supported in py42 or programmatically - crafting filter groups. - - Args: - term (str): The term of the filter. - value (str or int): The value used to filter file events. 
- - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - filter_list = [create_query_filter(term, "GREATER_THAN", value)] - return create_filter_group(filter_list, "AND") - - -def create_less_than_filter_group(term, value): - """Creates a :class:`~py42.sdk.queries.query_filter.FilterGroup` for matching file - events where the value with key ``term`` is less than the given value. Useful for creating - ``LESS_THAN`` filters that are not yet supported in py42 or programmatically crafting - filter groups. - - Args: - term (str): The term of the filter. - value (str or int): The value used to filter file events. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - filter_list = [create_query_filter(term, "LESS_THAN", value)] - return create_filter_group(filter_list, "AND") - - -class FileEventFilterStringField(QueryFilterStringField): - """Helper class for creating filters with the ``EXISTS``/``NOT_EXISTS`` filter clauses.""" - - @classmethod - def exists(cls): - """Returns a :class:`~py42.sdk.queries.query_filter.FilterGroup` to find events - where filter data exists. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - return create_exists_filter_group(cls._term) - - @classmethod - def not_exists(cls): - """Returns a :class:`~py42.sdk.queries.query_filter.FilterGroup` to find events - where filter data does not exist. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - return create_not_exists_filter_group(cls._term) - - -class FileEventFilterComparableField: - """Helper class for creating filters with the ``GREATER_THAN``/``LESS_THAN`` filter clauses.""" - - _term = "override_boolean_field_name" - - @classmethod - def greater_than(cls, value): - """Returns a :class:`~py42.sdk.queries.query_filter.FilterGroup` to find events - where filter data is greater than the provided value. - - Args: - value (str or int or float): The value used to filter file events. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - value = int(value) - return create_greater_than_filter_group(cls._term, value) - - @classmethod - def less_than(cls, value): - """Returns a :class:`~py42.sdk.queries.query_filter.FilterGroup` to find events - where filter data is less than than the provided value. - - Args: - value (str or int or float): The value used to filter file events. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - value = int(value) - return create_less_than_filter_group(cls._term, value) - - -class FileEventFilterTimestampField(QueryFilterTimestampField): - @classmethod - def within_the_last(cls, value): - """Returns a :class:`~py42.sdk.queries.query_filter.FilterGroup` that is useful - for finding results where the key ``self._term`` is a timestamp-related term, - such as ``EventTimestamp._term``, and ``value`` is one of it's accepted values, - such as one of the values in ``EventTimestamp.choices()``. - - Args: - value (str): The value used to filter file events. 
- - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - return create_within_the_last_filter_group(cls._term, value) diff --git a/src/py42/sdk/queries/fileevents/v1/__init__.py b/src/py42/sdk/queries/fileevents/v1/__init__.py deleted file mode 100644 index 466f6f150..000000000 --- a/src/py42/sdk/queries/fileevents/v1/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from py42.sdk.queries.fileevents.file_event_query import FileEventQuery -from py42.sdk.queries.fileevents.filters import * diff --git a/src/py42/sdk/queries/fileevents/v2/__init__.py b/src/py42/sdk/queries/fileevents/v2/__init__.py deleted file mode 100644 index bde69d2b8..000000000 --- a/src/py42/sdk/queries/fileevents/v2/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from py42.sdk.queries.fileevents.v2.file_event_query import FileEventQuery -from py42.sdk.queries.fileevents.v2.filters import * diff --git a/src/py42/sdk/queries/fileevents/v2/file_event_query.py b/src/py42/sdk/queries/fileevents/v2/file_event_query.py deleted file mode 100644 index ff550437c..000000000 --- a/src/py42/sdk/queries/fileevents/v2/file_event_query.py +++ /dev/null @@ -1,58 +0,0 @@ -from py42.sdk.queries import BaseQuery - - -class FileEventQuery(BaseQuery): - """Helper class for building V2 Code42 Forensic Search queries. - - A FileEventQuery instance's ``all()`` and ``any()`` take one or more - :class:`~py42.sdk.queries.query_filter.FilterGroup` objects to construct a query that - can be passed to the :meth:`FileEventService.search()` method. ``all()`` returns results - that match all of the provided filter criteria, ``any()`` will return results that - match any of the filters. - - For convenience, the :class:`FileEventQuery` constructor does the same as ``all()``. - - Usage example:: - - email_filter = EmailSender.is_in(["test.user@example.com", "test.sender@example.com"]) - exposure_filter = ExposureType.exists() - query = FileEventQuery.all(email_filter, exposure_filter) - - """ - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.sort_key = "event.id" - - @property - def version(self): - return "v2" - - def __str__(self): - groups_string = ",".join( - str(group_item) for group_item in self._filter_group_list - ) - if self.page_token is not None: - paging_prop = f'"srtDir":"{self.sort_direction}", "srtKey":"{self.sort_key}", "pgToken":"{self.page_token}"' - else: - paging_prop = f'"srtDir":"{self.sort_direction}", "srtKey":"{self.sort_key}", "pgNum":{self.page_number}' - json = f'{{"groupClause":"{self._group_clause}", "groups":[{groups_string}], {paging_prop}, "pgSize":{self.page_size}}}' - return json - - def __iter__(self): - filter_group_list = [dict(item) for item in self._filter_group_list] - output_dict = { - "groupClause": self._group_clause, - "groups": filter_group_list, - "pgSize": self.page_size, - "srtDir": self.sort_direction, - "srtKey": self.sort_key, - } - - if self.page_token is not None: - output_dict["pgToken"] = self.page_token - else: - output_dict["pgNum"] = self.page_number - - for key in output_dict: - yield key, output_dict[key] diff --git a/src/py42/sdk/queries/fileevents/v2/filters/__init__.py b/src/py42/sdk/queries/fileevents/v2/filters/__init__.py deleted file mode 100644 index 2555d162b..000000000 --- a/src/py42/sdk/queries/fileevents/v2/filters/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from py42.sdk.queries.fileevents.v2.filters import destination -from py42.sdk.queries.fileevents.v2.filters import event -from py42.sdk.queries.fileevents.v2.filters import file -from 
py42.sdk.queries.fileevents.v2.filters import process -from py42.sdk.queries.fileevents.v2.filters import report -from py42.sdk.queries.fileevents.v2.filters import risk -from py42.sdk.queries.fileevents.v2.filters import source -from py42.sdk.queries.fileevents.v2.filters import timestamp -from py42.sdk.queries.fileevents.v2.filters import user diff --git a/src/py42/sdk/queries/fileevents/v2/filters/destination.py b/src/py42/sdk/queries/fileevents/v2/filters/destination.py deleted file mode 100644 index 0e27ebf91..000000000 --- a/src/py42/sdk/queries/fileevents/v2/filters/destination.py +++ /dev/null @@ -1,246 +0,0 @@ -from py42.choices import Choices as _Choices -from py42.sdk.queries.fileevents.util import ( - FileEventFilterStringField as _FileEventFilterStringField, -) -from py42.sdk.queries.query_filter import ( - QueryFilterStringField as _QueryFilterStringField, -) - - -class Name(_QueryFilterStringField): - """V2 filter class that filters events based on the destination name.""" - - _term = "destination.name" - - -class EmailSubject(_QueryFilterStringField): - """V2 filter class that filters events based on the email's subject (applies to email events only).""" - - _term = "destination.email.subject" - - -class EmailRecipients(_QueryFilterStringField): - """V2 filter class that filters events based on the email's recipient list (applies to email events only).""" - - _term = "destination.email.recipients" - - -class PrivateIpAddress(_FileEventFilterStringField): - """V2 filter class that filters events by private (LAN) IP address of the destination device.""" - - _term = "destination.privateIp" - - -class IpAddress(_FileEventFilterStringField): - """V2 filter class that filters events by public (WAN) IP address of the destination device.""" - - _term = "destination.ip" - - -class UserEmail(_FileEventFilterStringField): - """V2 filter class that filters events by the signed in user email of the destination device.""" - - _term = "destination.user.email" - - -class TabUrls(_FileEventFilterStringField): - """V2 filter class that filters events based on all the URLs of the browser tabs at the time the file - contents were read by the browser (applies to ``read by browser or other app`` events only). - """ - - _term = "destination.tabs.url" - - -class TabTitles(_FileEventFilterStringField): - """V2 filter class that filters events based on the name of all the browser tabs or application windows that were - open when a browser or other app event occurred (applies to ``read by browser or other app`` - events only). - """ - - _term = "destination.tabs.title" - - -class Category(_FileEventFilterStringField, _Choices): - """V2 filter class that filters events based on the category of the file event destination. 
- - Available options are provided as class attributes: - - :attr:`destination.category.CLOUD_STORAGE` - - :attr:`destination.category.DEVICE` - - :attr:`destination.category.EMAIL` - - :attr:`destination.category.MESSAGING` - - :attr:`destination.category.MULTIPLE_POSSIBILITIES` - - :attr:`destination.category.SOCIAL_MEDIA` - - :attr:`destination.category.SOURCE_CODE_REPOSITORY` - - :attr:`destination.category.UNCATEGORIZED` - - :attr:`destination.category.UNKNOWN` - - :attr:`destination.category.BUSINESS_INTELLIGENCE_TOOLS` - - :attr:`destination.category.CIVIL_SERVICES` - - :attr:`destination.category.CLOUD_COMPUTING` - - :attr:`destination.category.CODING_TOOLS` - - :attr:`destination.category.CONTRACT_MANAGEMENT` - - :attr:`destination.category.CRM_TOOLS` - - :attr:`destination.category.DESIGN_TOOLS` - - :attr:`destination.category.E_COMMERCE` - - :attr:`destination.category.FILE_CONVERSION_TOOLS` - - :attr:`destination.category.FINANCIAL_SERVICES` - - :attr:`destination.category.HEALTHCARE_AND_INSURANCE` - - :attr:`destination.category.HR_TOOLS` - - :attr:`destination.category.IMAGE_HOSTING` - - :attr:`destination.category.IT_SERVICES` - - :attr:`destination.category.JOB_LISTINGS` - - :attr:`destination.category.LEARNING_PLATFORMS` - - :attr:`destination.category.MARKETING_TOOLS` - - :attr:`destination.category.PDF_MANAGER` - - :attr:`destination.category.PHOTO_PRINTING` - - :attr:`destination.category.PRODUCTIVITY_TOOLS` - - :attr:`destination.category.PROFESSIONAL_SERVICES` - - :attr:`destination.category.REAL_ESTATE` - - :attr:`destination.category.SALES_TOOLS` - - :attr:`destination.category.SEARCH_ENGINE` - - :attr:`destination.category.SHIPPING` - - :attr:`destination.category.SOFTWARE` - - :attr:`destination.category.TRAVEL` - - :attr:`destination.category.WEB_HOSTING` - """ - - _term = "destination.category" - - CLOUD_STORAGE = "Cloud Storage" - DEVICE = "Device" - EMAIL = "Email" - MESSAGING = "Messaging" - MULTIPLE_POSSIBILITIES = "Multiple Possibilities" - SOCIAL_MEDIA = "Social Media" - SOURCE_CODE_REPOSITORY = "Source Code Repository" - UNCATEGORIZED = "Uncategorized" - UNKNOWN = "Unknown" - BUSINESS_INTELLIGENCE_TOOLS = "Business Intelligence Tools" - CIVIL_SERVICES = "Civil Services" - CLOUD_COMPUTING = "Cloud Computing" - CODING_TOOLS = "Coding Tools" - CONTRACT_MANAGEMENT = "Contract Management" - CRM_TOOLS = "CRM Tools" - DESIGN_TOOLS = "Design Tools" - E_COMMERCE = "E-commerce" - FILE_CONVERSION_TOOLS = "File Conversion Tools" - FINANCIAL_SERVICES = "Financial Services" - HEALTHCARE_AND_INSURANCE = "Healthcare & Insurance" - HR_TOOLS = "HR Tools" - IMAGE_HOSTING = "Image Hosting" - IT_SERVICES = "IT Services" - JOB_LISTINGS = "Job Listings" - LEARNING_PLATFORMS = "Learning Platforms" - MARKETING_TOOLS = "Marketing Tools" - PDF_MANAGER = "PDF Manager" - PHOTO_PRINTING = "Photo Printing" - PRODUCTIVITY_TOOLS = "Productivity Tools" - PROFESSIONAL_SERVICES = "Professional Services" - REAL_ESTATE = "Real Estate" - SALES_TOOLS = "Sales Tools" - SEARCH_ENGINE = "Search Engine" - SHIPPING = "Shipping" - SOFTWARE = "Software" - TRAVEL = "Travel" - WEB_HOSTING = "Web Hosting" - - -class PrinterName(_FileEventFilterStringField): - """V2 filter class that filters events by printer name.""" - - _term = "destination.printerName" - - -class PrintJobName(_FileEventFilterStringField): - """V2 filter class that filters events by print job name.""" - - _term = "destination.printJobName" - - -class OperatingSystem(_FileEventFilterStringField): - """V2 filter class that filters events 
by the operating system of the destination device.""" - - _term = "destination.operatingSystem" - - -class PrintedFilesBackupPath(_FileEventFilterStringField): - """V2 filter class that filters events by the printed file backup path.""" - - _term = "destination.printedFilesBackupPath" - - -class TabTitleErrors(_FileEventFilterStringField): - """V2 filter class that filters events based on destination tab title errors (for 'browser or other app' events).""" - - _term = "destination.tabs.titleError" - - -class TabUrlErrors(_FileEventFilterStringField): - """V2 filter class that filters events based on destination tab URL Errors (for 'browser or other app' events).""" - - _term = "destination.tabs.urlError" - - -class RemovableMediaName(_FileEventFilterStringField): - """V2 filter class that filters events based on the name of the removable media involved in the exposure - (applies to ``removable media`` events only). - """ - - _term = "destination.removableMedia.name" - - -class RemovableMediaVendor(_FileEventFilterStringField): - """V2 filter class that filters events based on the vendor of the removable media device involved in the - exposure (applies to ``removable media`` events only). - """ - - _term = "destination.removableMedia.vendor" - - -class RemovableMediaMediaName(_FileEventFilterStringField): - """V2 filter class that filters events based on the name of the removable media (as reported by the - vendor/device, usually very similar to RemovableMediaName) involved in the exposure (applies to - ``removable media`` events only). - """ - - _term = "destination.removableMedia.mediaName" - - -class RemovableMediaVolumeName(_FileEventFilterStringField): - """V2 filter class that filters events based on the name of the formatted volume (as reported by the - operating system) of the removable media device involved in the exposure (applies to - ``removable media`` events only). - """ - - _term = "destination.removableMedia.volumeName" - - -class RemovableMediaPartitionID(_FileEventFilterStringField): - """V2 filter class that filters events based on the unique identifier assigned (by the operating system) - to the removable media involved in the exposure (applies to ``removable media`` events only). - """ - - _term = "destination.removableMedia.partitionId" - - -class RemovableMediaSerialNumber(_FileEventFilterStringField): - """V2 filter class that filters events based on the serial number of the connected hardware as reported - by the operating system (applies to ``removable media`` events only). - """ - - _term = "destination.removableMedia.serialNumber" - - -class RemovableMediaCapacity(_FileEventFilterStringField): - """V2 filter class that filters events based on the capacity of the connected hardware as reported - by the operating system (applies to ``removable media`` events only). - """ - - _term = "destination.removableMedia.capacity" - - -class RemovableMediaBusType(_FileEventFilterStringField): - """V2 filter class that filters events based on the bus type of the connected hardware as reported - by the operating system (applies to ``removable media`` events only). 
- """ - - _term = "destination.removableMedia.busType" diff --git a/src/py42/sdk/queries/fileevents/v2/filters/event.py b/src/py42/sdk/queries/fileevents/v2/filters/event.py deleted file mode 100644 index 8c6252a85..000000000 --- a/src/py42/sdk/queries/fileevents/v2/filters/event.py +++ /dev/null @@ -1,84 +0,0 @@ -from py42.choices import Choices as _Choices -from py42.sdk.queries.fileevents.util import ( - FileEventFilterStringField as _FileEventFilterStringField, -) -from py42.sdk.queries.fileevents.util import ( - FileEventFilterTimestampField as _FileEventFilterTimestampField, -) - - -class Observer(_FileEventFilterStringField, _Choices): - """V2 filter class that filters events by event observer. - - Available event observer types are provided as class attributes: - - :attr:`event.Observer.ENDPOINT` - - :attr:`event.Observer.GOOGLE_DRIVE` - - :attr:`event.Observer.ONE_DRIVE` - - :attr:`event.Observer.BOX` - - :attr:`event.Observer.GMAIL` - - :attr:`event.Observer.OFFICE_365` - - Example:: - filter = Event.Observer.is_in([event.Observer.ENDPOINT, event.Observer.BOX]) - - """ - - _term = "event.observer" - - ENDPOINT = "Endpoint" - GOOGLE_DRIVE = "GoogleDrive" - ONE_DRIVE = "OneDrive" - BOX = "Box" - GMAIL = "Gmail" - OFFICE_365 = "Office365" - - -class Inserted(_FileEventFilterTimestampField): - """V2 filter class that filters events based on the timestamp of when the event was actually added to the - event store (which can be after the event occurred on the device itself). - - `value` must be a POSIX timestamp. (see the :ref:`Dates ` section of the Basics - user guide for details on timestamp arguments in py42) - """ - - _term = "event.inserted" - - -class Action(_FileEventFilterStringField, _Choices): - """V2 filter class that filters events based on event action.""" - - _term = "event.action" - - # Exposure Type in v1 - REMOVABLE_MEDIA_CREATED = "removable-media-created" - REMOVABLE_MEDIA_MODIFIED = "removable-media-modified" - REMOVABLE_MEDIA_DELETED = "removable-media-deleted" - SYNC_APP_CREATED = "sync-app-created" - SYNC_APP_MODIFIED = "sync-app-modified" - SYNC_APP_DELETED = "sync-app-deleted" - FILE_SHARED = "file-shared" - - # Event Type in v1 - FILE_CREATED = "file-created" - FILE_DELETED = "file-deleted" - FILE_DOWNLOADED = "file-downloaded" - FILE_EMAILED = "file-emailed" - FILE_MODIFIED = "file-modified" - FILE_PRINTED = "file-printed" - APPLICATION_READ = "application-read" - - -class Id(_FileEventFilterStringField): - """V2 filter class that filters events by event ID.""" - - _term = "event.id" - - -class ShareType(_FileEventFilterStringField): - """V2 filter class that filters events by share type.""" - - _term = "event.shareType" - - PUBLIC_LINK_SHARE = "Anyone with the link" - DOMAIN_SHARE = "Anyone in your organization" - DIRECT_USER_SHARE = "Shared with specific people" diff --git a/src/py42/sdk/queries/fileevents/v2/filters/file.py b/src/py42/sdk/queries/fileevents/v2/filters/file.py deleted file mode 100644 index 01e2249dc..000000000 --- a/src/py42/sdk/queries/fileevents/v2/filters/file.py +++ /dev/null @@ -1,147 +0,0 @@ -from py42.choices import Choices as _Choices -from py42.sdk.queries.fileevents.util import ( - FileEventFilterStringField as _FileEventFilterStringField, -) -from py42.sdk.queries.fileevents.util import ( - FileEventFilterTimestampField as _FileEventFilterTimestampField, -) - - -class Category(_FileEventFilterStringField, _Choices): - """V2 filter class that filters events by category of the observed file. 
- - Available file categories are provided as class attributes: - - :attr:`file.Category.AUDIO` - - :attr:`file.Category.DOCUMENT` - - :attr:`file.Category.EXECUTABLE` - - :attr:`file.Category.IMAGE` - - :attr:`file.Category.PDF` - - :attr:`file.Category.PRESENTATION` - - :attr:`file.Category.SCRIPT` - - :attr:`file.Category.SOURCE_CODE` - - :attr:`file.Category.SPREADSHEET` - - :attr:`file.Category.VIDEO` - - :attr:`file.Category.VIRTUAL_DISK_IMAGE` - - :attr:`file.Category.ZIP` - - """ - - AUDIO = "Audio" - DOCUMENT = "Document" - EXECUTABLE = "Executable" - IMAGE = "Image" - PDF = "Pdf" - PRESENTATION = "Presentation" - SCRIPT = "Script" - SOURCE_CODE = "SourceCode" - SPREADSHEET = "Spreadsheet" - VIDEO = "Video" - VIRTUAL_DISK_IMAGE = "VirtualDiskImage" - ZIP = "Archive" - - _term = "file.category" - - -class Name(_FileEventFilterStringField): - """V2 filter class that filters events by the name of the observed file.""" - - _term = "file.name" - - -class Owner(_FileEventFilterStringField): - """V2 filter class that filters events by the owner of the observed file.""" - - _term = "file.owner" - - -class Directory(_FileEventFilterStringField): - """V2 filter class that filters events by directory of the observed file.""" - - _term = "file.directory" - - -class Size(_FileEventFilterTimestampField): - """V2 filter class that filters events by size in bytes of the observed file. - - Size ``value`` must be bytes. - """ - - _term = "file.sizeInBytes" - - -class MD5(_FileEventFilterStringField): - """V2 filter class that filters events by the MD5 hash of the observed file.""" - - _term = "file.hash.md5" - - -class SHA256(_FileEventFilterStringField): - """V2 filter class that filters events by SHA256 hash of the observed file.""" - - _term = "file.hash.sha256" - - -class DirectoryId(_FileEventFilterStringField): - """V2 filter class that filters events by the directory ID of the observed file.""" - - _term = "file.directoryId" - - -class CloudDriveId(_FileEventFilterStringField): - """V2 filter class that filters event by the cloud drive ID of the observed file.""" - - _term = "file.cloudDriveId" - - -class MimeTypeByBytes(_FileEventFilterStringField): - """V2 filter class that filters event by the mime type (by bytes) of the observed file""" - - _term = "file.mimeTypeByBytes" - - -class CategoryByBytes(_FileEventFilterStringField): - """V2 filter class that filters event by the category (by bytes) of the observed file""" - - _term = "file.categoryByBytes" - - -class MimeTypeByExtension(_FileEventFilterStringField): - """V2 filter class that filters event by the mime type (by extension) of the observed file""" - - _term = "file.mimeTypeByExtension" - - -class CategoryByExtension(_FileEventFilterStringField): - """V2 filter class that filters event by the category (by bytes) of the observed file""" - - _term = "file.categoryByExtension" - - -class Created(_FileEventFilterTimestampField): - """V2 filter class that filters events by the creation timestamp of the observed file.""" - - _term = "file.created" - - -class Modified(_FileEventFilterTimestampField): - """V2 filter class that filters events by the modification timestamp of the observed file.""" - - _term = "file.modified" - - -class Id(_FileEventFilterStringField): - """V2 filter class that filters events by the ID of the observed file.""" - - _term = "file.id" - - -class Url(_FileEventFilterStringField): - """V2 filter class that filters events by the URL of the observed file.""" - - _term = "file.url" - - -class 
Classification(_FileEventFilterStringField): - """V2 filter class that filters events by the classification of the observed file.""" - - _term = "file.classifications" diff --git a/src/py42/sdk/queries/fileevents/v2/filters/process.py b/src/py42/sdk/queries/fileevents/v2/filters/process.py deleted file mode 100644 index 66b1e9b29..000000000 --- a/src/py42/sdk/queries/fileevents/v2/filters/process.py +++ /dev/null @@ -1,19 +0,0 @@ -from py42.sdk.queries.fileevents.util import ( - FileEventFilterStringField as _FileEventFilterStringField, -) - - -class Executable(_FileEventFilterStringField): - """V2 filter class that filters events based on the process name involved in the exposure (applies to - ``read by browser or other app`` events only). - """ - - _term = "process.executable" - - -class Owner(_FileEventFilterStringField): - """V2 filter class that filters events based on the process owner that was involved in the exposure - (applies to ``read by browser or other app`` events only). - """ - - _term = "process.owner" diff --git a/src/py42/sdk/queries/fileevents/v2/filters/report.py b/src/py42/sdk/queries/fileevents/v2/filters/report.py deleted file mode 100644 index f8c53def1..000000000 --- a/src/py42/sdk/queries/fileevents/v2/filters/report.py +++ /dev/null @@ -1,50 +0,0 @@ -from py42.sdk.queries.fileevents.util import ( - FileEventFilterComparableField as _FileEventFilterComparableField, -) -from py42.sdk.queries.fileevents.util import ( - FileEventFilterStringField as _FileEventFilterStringField, -) - - -class ID(_FileEventFilterStringField): - """V2 filter class that filters events by the ID of the report.""" - - _term = "report.id" - - -class Name(_FileEventFilterStringField): - """V2 filter class that filters events by the name of the report.""" - - _term = "report.name" - - -class Description(_FileEventFilterStringField): - """V2 filter class that filters events by the description of the report.""" - - _term = "report.description" - - -class Headers(_FileEventFilterStringField): - """V2 filter class that filters events by the header(s) of the report.""" - - _term = "report.headers" - - -class Count(_FileEventFilterStringField, _FileEventFilterComparableField): - """V2 filter class that filters events by the record count of the report.""" - - _term = "report.count" - - -class Type(_FileEventFilterStringField): - """V2 filter class that filters events by the type of the report. 
- - Available options are provided as class attributes: - - :attr: `report.Type.AD_HOC` - - :attr: `report.Type.SAVED` - """ - - _term = "report.type" - - AD_HOC = "REPORT_TYPE_AD_HOC" - SAVED = "REPORT_TYPE_SAVED" diff --git a/src/py42/sdk/queries/fileevents/v2/filters/risk.py b/src/py42/sdk/queries/fileevents/v2/filters/risk.py deleted file mode 100644 index d6d51018c..000000000 --- a/src/py42/sdk/queries/fileevents/v2/filters/risk.py +++ /dev/null @@ -1,446 +0,0 @@ -from py42.choices import Choices as _Choices -from py42.sdk.queries.fileevents.util import ( - FileEventFilterComparableField as _FileEventFilterComparableField, -) -from py42.sdk.queries.fileevents.util import ( - FileEventFilterStringField as _FileEventFilterStringField, -) -from py42.sdk.queries.fileevents.v2.filters.risk_indicator_terms import Destinations -from py42.sdk.queries.query_filter import ( - QueryFilterBooleanField as _QueryFilterBooleanField, -) -from py42.sdk.queries.query_filter import ( - QueryFilterStringField as _QueryFilterStringField, -) - - -class Indicators(_FileEventFilterStringField): - """V2 filter class that filters events by the type of risk indicator. - - Available options are provided as class attributes: - - :attr:`risk.Indicators.FileCategories.AUDIO` - - :attr:`risk.Indicators.FileCategories.DOCUMENT` - - :attr:`risk.Indicators.FileCategories.EXECUTABLE` - - :attr:`risk.Indicators.FileCategories.IMAGE` - - :attr:`risk.Indicators.FileCategories.PDF` - - :attr:`risk.Indicators.FileCategories.PRESENTATION` - - :attr:`risk.Indicators.FileCategories.SCRIPT` - - :attr:`risk.Indicators.FileCategories.SOURCE_CODE` - - :attr:`risk.Indicators.FileCategories.SPREADSHEET` - - :attr:`risk.Indicators.FileCategories.VIDEO` - - :attr:`risk.Indicators.FileCategories.VIRTUAL_DISK_IMAGE` - - :attr:`risk.Indicators.FileCategories.ZIP` - - :attr:`risk.Indicators.UserBehavior.FILE_MISMATCH` - - :attr:`risk.Indicators.UserBehavior.OFF_HOURS` - - :attr:`risk.Indicators.UserBehavior.REMOTE` - - :attr:`risk.Indicators.UserBehavior.FIRST_DESTINATION_USE` - - :attr:`risk.Indicators.UserBehavior.RARE_DESTINATION_USE` - - :attr:`risk.Indicators.UserBehavior.CONTRACT` - - :attr:`risk.Indicators.UserBehavior.DEPARTING` - - :attr:`risk.Indicators.UserBehavior.ELEVATED_ACCESS` - - :attr:`risk.Indicators.UserBehavior.FLIGHT_RISK` - - :attr:`risk.Indicators.UserBehavior.HIGH_IMPACT` - - :attr:`risk.Indicators.UserBehavior.HIGH_RISK` - - :attr:`risk.Indicators.UserBehavior.PERFORMANCE_CONCERNS` - - :attr:`risk.Indicators.UserBehavior.POOR_SECURITY_PRACTICES` - - :attr:`risk.Indicators.UserBehavior.SUSPICIOUS_SYSTEM_ACTIVITY` - - :attr:`risk.Indicators.CloudStorageUploads.AMAZON_DRIVE` - - :attr:`risk.Indicators.CloudStorageUploads.BAIDU_NET_DISK_UPLOAD` - - :attr:`risk.Indicators.CloudStorageUploads.BOX` - - :attr:`risk.Indicators.CloudStorageUploads.CRASHPLAN_UPLOAD` - - :attr:`risk.Indicators.CloudStorageUploads.DRAKE_PORTALS_UPLOAD` - - :attr:`risk.Indicators.CloudStorageUploads.DROPBOX` - - :attr:`risk.Indicators.CloudStorageUploads.FILE_DOT_IO_UPLOAD` - - :attr:`risk.Indicators.CloudStorageUploads.FILESTACK_UPLOAD` - - :attr:`risk.Indicators.CloudStorageUploads.GOOGLE_DRIVE` - - :attr:`risk.Indicators.CloudStorageUploads.OPEN_TEXT_HIGHTAIL_UPLOAD` - - :attr:`risk.Indicators.CloudStorageUploads.ICLOUD` - - :attr:`risk.Indicators.CloudStorageUploads.MEGA` - - :attr:`risk.Indicators.CloudStorageUploads.ONEDRIVE` - - :attr:`risk.Indicators.CloudStorageUploads.SECURE_FIRM_PORTAL_UPLOAD` - - 
:attr:`risk.Indicators.CloudStorageUploads.SHAREFILE_UPLOAD` - - :attr:`risk.Indicators.CloudStorageUploads.SMART_VAULT_UPLOAD` - - :attr:`risk.Indicators.CloudStorageUploads.SUGAR_SYNC_UPLOAD` - - :attr:`risk.Indicators.CloudStorageUploads.WE_TRANSFER_UPLOAD` - - :attr:`risk.Indicators.CloudStorageUploads.ZOHO` - - :attr:`risk.Indicators.EmailServiceUploads.ONESIXTHREE_DOT_COM` - - :attr:`risk.Indicators.EmailServiceUploads.ONETWOSIX_DOT_COM` - - :attr:`risk.Indicators.EmailServiceUploads.AOL` - - :attr:`risk.Indicators.EmailServiceUploads.COMCAST` - - :attr:`risk.Indicators.EmailServiceUploads.FASTMAIL_UPLOAD` - - :attr:`risk.Indicators.EmailServiceUploads.GMAIL` - - :attr:`risk.Indicators.EmailServiceUploads.GMX_UPLOAD` - - :attr:`risk.Indicators.EmailServiceUploads.ICLOUD` - - :attr:`risk.Indicators.EmailServiceUploads.LYCOS_UPLOAD` - - :attr:`risk.Indicators.EmailServiceUploads.MAIL_DOT_COM_UPLOAD` - - :attr:`risk.Indicators.EmailServiceUploads.OUTLOOK` - - :attr:`risk.Indicators.EmailServiceUploads.PROTONMAIL` - - :attr:`risk.Indicators.EmailServiceUploads.QQMAIL` - - :attr:`risk.Indicators.EmailServiceUploads.SINA_MAIL` - - :attr:`risk.Indicators.EmailServiceUploads.SOHU_MAIL` - - :attr:`risk.Indicators.EmailServiceUploads.TUTANOTA_UPLOAD` - - :attr:`risk.Indicators.EmailServiceUploads.YAHOO` - - :attr:`risk.Indicators.EmailServiceUploads.ZIX_UPLOAD` - - :attr:`risk.Indicators.EmailServiceUploads.ZOHO_MAIL` - - :attr:`risk.Indicators.ExternalDevices.AIRDROP` - - :attr:`risk.Indicators.ExternalDevices.SALESFORCE_DOWNLOAD` - - :attr:`risk.Indicators.ExternalDevices.REMOVABLE_MEDIA` - - :attr:`Indicators.CloudDataExposures.PUBLIC_CORPORATE_BOX` - - :attr:`Indicators.CloudDataExposures.PUBLIC_CORPORATE_GOOGLE_DRIVE` - - :attr:`Indicators.CloudDataExposures.PUBLIC_CORPORATE_ONEDRIVE` - - :attr:`Indicators.CloudDataExposures.SENT_CORPORATE_GMAIL` - - :attr:`Indicators.CloudDataExposures.SENT_CORPORATE_OFFICE365` - - :attr:`Indicators.CloudDataExposures.SHARED_CORPORATE_BOX` - - :attr:`Indicators.CloudDataExposures.SHARED_CORPORATE_GOOGLE_DRIVE` - - :attr:`Indicators.CloudDataExposures.SHARED_CORPORATE_ONEDRIVE` - - :attr:`risk.Indicators.FileConversionToolUploads.CLOUD_CONVERT_UPLOAD` - - :attr:`risk.Indicators.FileConversionToolUploads.COMPRESS_JPEG_UPLOAD` - - :attr:`risk.Indicators.FileConversionToolUploads.FREE_CONVERT_UPLOAD` - - :attr:`risk.Indicators.FileConversionToolUploads.HEIC_TO_JPEG_UPLOAD` - - :attr:`risk.Indicators.FileConversionToolUploads.TINY_PNG_UPLOAD` - - :attr:`risk.Indicators.MessagingServiceUploads.DISCORD_UPLOAD` - - :attr:`risk.Indicators.MessagingServiceUploads.FACEBOOK_MESSENGER` - - :attr:`risk.Indicators.MessagingServiceUploads.GOOGLE_MESSAGES_UPLOAD` - - :attr:`risk.Indicators.MessagingServiceUploads.GOOGLE_CHAT_UPLOAD` - - :attr:`risk.Indicators.MessagingServiceUploads.GOOGLE_HANGOUTS_UPLOAD` - - :attr:`risk.Indicators.MessagingServiceUploads.MICROSOFT_TEAMS` - - :attr:`risk.Indicators.MessagingServiceUploads.SLACK` - - :attr:`risk.Indicators.MessagingServiceUploads.TELEGRAM_UPLOAD` - - :attr:`risk.Indicators.MessagingServiceUploads.WEBEX_UPLOAD` - - :attr:`risk.Indicators.MessagingServiceUploads.WE_CHAT_UPLOAD` - - :attr:`risk.Indicators.MessagingServiceUploads.WHATSAPP` - - :attr:`risk.Indicators.MessagingServiceUploads.ZOOM_UPLOAD` - - :attr:`risk.Indicators.Other.OTHER_DESTINATION` - - :attr:`risk.Indicators.Other.UNKNOWN_DESTINATION` - - :attr:`risk.Indicators.PdfManagerUploads.ADOBE_ACROBAT_UPLOAD` - - 
:attr:`risk.Indicators.PdfManagerUploads.COMBINE_PDF_UPLOAD` - - :attr:`risk.Indicators.PdfManagerUploads.FREE_PDF_CONVERT_UPLOAD` - - :attr:`risk.Indicators.PdfManagerUploads.I_LOVE_PDF_UPLOAD` - - :attr:`risk.Indicators.PdfManagerUploads.JPG2_PDF_UPLOAD` - - :attr:`risk.Indicators.PdfManagerUploads.PDF24_TOOLS_UPLOAD` - - :attr:`risk.Indicators.PdfManagerUploads.PDF_ESCAPE_UPLOAD` - - :attr:`risk.Indicators.PdfManagerUploads.PDF_FILLER_UPLOAD` - - :attr:`risk.Indicators.PdfManagerUploads.PDF_SIMPLI_UPLOAD` - - :attr:`risk.Indicators.PdfManagerUploads.SEJDA_UPLOAD` - - :attr:`risk.Indicators.PdfManagerUploads.SMALL_PDF_UPLOAD` - - :attr:`risk.Indicators.PdfManagerUploads.SODA_PDF_UPLOAD` - - :attr:`risk.Indicators.ProductivityToolUploads.ADOBE_UPLOAD` - - :attr:`risk.Indicators.ProductivityToolUploads.CANVA_UPLOAD` - - :attr:`risk.Indicators.ProductivityToolUploads.EVERNOTE_UPLOAD` - - :attr:`risk.Indicators.ProductivityToolUploads.FIGMA_UPLOAD` - - :attr:`risk.Indicators.ProductivityToolUploads.GOOGLE_KEEP_UPLOAD` - - :attr:`risk.Indicators.ProductivityToolUploads.GOOGLE_JAMBOARD_UPLOAD` - - :attr:`risk.Indicators.ProductivityToolUploads.IMAGE_COLOR_PICKER_UPLOAD` - - :attr:`risk.Indicators.ProductivityToolUploads.KAPWING_UPLOAD` - - :attr:`risk.Indicators.ProductivityToolUploads.MIRO_UPLOAD` - - :attr:`risk.Indicators.ProductivityToolUploads.MONDAY_UPLOAD` - - :attr:`risk.Indicators.ProductivityToolUploads.MURAL_UPLOAD` - - :attr:`risk.Indicators.ProductivityToolUploads.NOTION_UPLOAD` - - :attr:`risk.Indicators.ProductivityToolUploads.OVERLEAF_UPLOAD` - - :attr:`risk.Indicators.ProductivityToolUploads.PHOTOPEA_UPLOAD` - - :attr:`risk.Indicators.ProductivityToolUploads.PIXLR_UPLOAD` - - :attr:`risk.Indicators.ProductivityToolUploads.REMOVE_DOT_BG_UPLOAD` - - :attr:`risk.Indicators.ProductivityToolUploads.TRELLO_UPLOAD` - - :attr:`risk.Indicators.ProductivityToolUploads.VEED_UPLOAD` - - :attr:`risk.Indicators.SocialMediaUploads.FOUR_CHAN_UPLOAD` - - :attr:`risk.Indicators.SocialMediaUploads.FACEBOOK` - - :attr:`risk.Indicators.SocialMediaUploads.IMGUR_UPLOAD` - - :attr:`risk.Indicators.SocialMediaUploads.LINKEDIN` - - :attr:`risk.Indicators.SocialMediaUploads.ODNOKLASSNIKI_UPLOAD` - - :attr:`risk.Indicators.SocialMediaUploads.OK_UPLOAD` - - :attr:`risk.Indicators.SocialMediaUploads.QZONE_UPLOAD` - - :attr:`risk.Indicators.SocialMediaUploads.REDDIT` - - :attr:`risk.Indicators.SocialMediaUploads.STACK_OVERFLOW_UPLOAD` - - :attr:`risk.Indicators.SocialMediaUploads.TUMBLR_UPLOAD` - - :attr:`risk.Indicators.SocialMediaUploads.TWITCH_UPLOAD` - - :attr:`risk.Indicators.SocialMediaUploads.TWITTER` - - :attr:`risk.Indicators.SocialMediaUploads.VIMEO_UPLOAD` - - :attr:`risk.Indicators.SocialMediaUploads.VK_UPLOAD` - - :attr:`risk.Indicators.SocialMediaUploads.WEIBO_UPLOAD` - - :attr:`risk.Indicators.SocialMediaUploads.YOU_TUBE_UPLOAD` - - :attr:`risk.Indicators.CodeRepositoryUploads.BITBUCKET_UPLOAD` - - :attr:`risk.Indicators.CodeRepositoryUploads.COLABORATORY_UPLOAD` - - :attr:`risk.Indicators.CodeRepositoryUploads.GITHUB` - - :attr:`risk.Indicators.CodeRepositoryUploads.GITLAB` - - :attr:`risk.Indicators.CodeRepositoryUploads.GOOGLE_APPS_SCRIPT_UPLOAD` - - :attr:`risk.Indicators.CodeRepositoryUploads.GOOGLE_CLOUD_SHELL_UPLOAD` - - :attr:`risk.Indicators.CodeRepositoryUploads.SOURCE_FORGE` - - :attr:`risk.Indicators.CodeRepositoryUploads.STASH` - - :attr:`risk.Indicators.WebHostingUploads.GIT_HUB_PAGES_UPLOAD` - - :attr:`risk.Indicators.WebHostingUploads.GOOGLE_SITES_UPLOAD` - - 
:attr:`risk.Indicators.WebHostingUploads.WIX_UPLOAD` - - :attr:`risk.Indicators.WebHostingUploads.WORD_PRESS_UPLOAD` - """ - - _term = "risk.indicators.name" - - @staticmethod - def choices(): - return ( - Indicators.CloudDataExposures.choices() - + Indicators.CloudStorageUploads.choices() - + Indicators.CodeRepositoryUploads.choices() - + Indicators.EmailServiceUploads.choices() - + Indicators.ExternalDevices.choices() - + Indicators.FileCategories.choices() - + Indicators.MessagingServiceUploads.choices() - + Indicators.Other.choices() - + Indicators.SocialMediaUploads.choices() - + Indicators.UserBehavior.choices() - + Indicators.FileConversionToolUploads.choices() - + Indicators.PdfManagerUploads.choices() - + Indicators.ProductivityToolUploads.choices() - + Indicators.WebHostingUploads.choices() - ) - - class CloudDataExposures(_Choices): - PUBLIC_CORPORATE_BOX = Destinations.PUBLIC_LINK_FROM_CORPORATE_BOX - PUBLIC_CORPORATE_GOOGLE_DRIVE = ( - Destinations.PUBLIC_LINK_FROM_CORPORATE_GOOGLE_DRIVE - ) - PUBLIC_CORPORATE_ONEDRIVE = Destinations.PUBLIC_LINK_FROM_CORPORATE_ONE_DRIVE - SENT_CORPORATE_GMAIL = Destinations.SENT_FROM_CORPORATE_GMAIL - SENT_CORPORATE_OFFICE365 = Destinations.SENT_FROM_CORPORATE_OFFICE365 - SHARED_CORPORATE_BOX = Destinations.SHARED_FROM_CORPORATE_BOX - SHARED_CORPORATE_GOOGLE_DRIVE = Destinations.SHARED_FROM_CORPORATE_GOOGLE_DRIVE - SHARED_CORPORATE_ONEDRIVE = Destinations.SHARED_FROM_CORPORATE_ONE_DRIVE - - class CloudStorageUploads(_Choices): - AMAZON_DRIVE = Destinations.AMAZON_DRIVE_UPLOAD - BAIDU_NET_DISK_UPLOAD = Destinations.BAIDU_NET_DISK_UPLOAD - BOX = Destinations.BOX_UPLOAD - CRASHPLAN_UPLOAD = Destinations.CRASHPLAN_UPLOAD - DRAKE_PORTALS_UPLOAD = Destinations.DRAKE_PORTALS_UPLOAD - DROPBOX = Destinations.DROPBOX_UPLOAD - FILE_DOT_IO_UPLOAD = Destinations.FILE_DOT_IO_UPLOAD - FILESTACK_UPLOAD = Destinations.FILESTACK_UPLOAD - GOOGLE_DRIVE = Destinations.GOOGLE_DRIVE_UPLOAD - OPEN_TEXT_HIGHTAIL_UPLOAD = Destinations.OPEN_TEXT_HIGHTAIL_UPLOAD - ICLOUD = Destinations.ICLOUD_UPLOAD - MEGA = Destinations.MEGA_UPLOAD - ONEDRIVE = Destinations.ONE_DRIVE_UPLOAD - SECURE_FIRM_PORTAL_UPLOAD = Destinations.SECURE_FIRM_PORTAL_UPLOAD - SHAREFILE_UPLOAD = Destinations.SHAREFILE_UPLOAD - SMART_VAULT_UPLOAD = Destinations.SMART_VAULT_UPLOAD - SUGAR_SYNC_UPLOAD = Destinations.SUGAR_SYNC_UPLOAD - WE_TRANSFER_UPLOAD = Destinations.WE_TRANSFER_UPLOAD - ZOHO = Destinations.ZOHO_WORK_DRIVE_UPLOAD - - class CodeRepositoryUploads(_Choices): - BITBUCKET = Destinations.BITBUCKET_UPLOAD - COLABORATORY_UPLOAD = Destinations.COLABORATORY_UPLOAD - GITHUB = Destinations.GIT_HUB_UPLOAD - GITLAB_UPLOAD = Destinations.GIT_LAB_UPLOAD - GOOGLE_APPS_SCRIPT_UPLOAD = Destinations.GOOGLE_APPS_SCRIPT_UPLOAD - GOOGLE_CLOUD_SHELL_UPLOAD = Destinations.GOOGLE_CLOUD_SHELL_UPLOAD - SOURCEFORGE = Destinations.SOURCE_FORGE_UPLOAD - STASH = Destinations.STASH_UPLOAD - - class EmailServiceUploads(_Choices): - ONESIXTHREE_DOT_COM = Destinations.ONE_SIX_THREE_DOT_COM_UPLOAD - ONETWOSIX_DOT_COM = Destinations.ONE_TWO_SIX_DOT_COM_UPLOAD - AOL = Destinations.AOL_UPLOAD - COMCAST = Destinations.COMCAST_UPLOAD - FASTMAIL_UPLOAD = Destinations.FASTMAIL_UPLOAD - GMAIL = Destinations.GMAIL_UPLOAD - GMX_UPLOAD = Destinations.GMX_UPLOAD - ICLOUD = Destinations.ICLOUD_MAIL_UPLOAD - LYCOS_UPLOAD = Destinations.LYCOS_UPLOAD - MAIL_DOT_COM = Destinations.MAIL_COM_UPLOAD - OUTLOOK = Destinations.OUTLOOK_UPLOAD - PROTONMAIL = Destinations.PROTON_MAIL_UPLOAD - QQMAIL = Destinations.QQMAIL_UPLOAD - SINA_MAIL = 
Destinations.SINA_MAIL_UPLOAD - SOHU_MAIL = Destinations.SOHU_MAIL_UPLOAD - TUTANOTA_UPLOAD = Destinations.TUTANOTA_UPLOAD - YAHOO = Destinations.YAHOO_UPLOAD - ZIX_UPLOAD = Destinations.ZIX_UPLOAD - ZOHO_MAIL = Destinations.ZOHO_MAIL_UPLOAD - - class ExternalDevices(_Choices): - AIRDROP = Destinations.AIR_DROP - SALES_FORCE_DOWNLOAD = Destinations.SALESFORCE_DOWNLOAD - REMOVABLE_MEDIA = Destinations.REMOVABLE_MEDIA - - class MessagingServiceUploads(_Choices): - DISCORD_UPLOAD = Destinations.DISCORD_UPLOAD - FACEBOOK_MESSENGER = Destinations.FACEBOOK_MESSENGER_UPLOAD - GOOGLE_MESSAGES_UPLOAD = Destinations.GOOGLE_MESSAGES_UPLOAD - GOOGLE_CHAT_UPLOAD = Destinations.GOOGLE_CHAT_UPLOAD - GOOGLE_HANGOUTS_UPLOAD = Destinations.GOOGLE_HANGOUTS_UPLOAD - MICROSOFT_TEAMS = Destinations.MICROSOFT_TEAMS_UPLOAD - SLACK = Destinations.SLACK_UPLOAD - TELEGRAM_UPLOAD = Destinations.TELEGRAM_UPLOAD - WEBEX_UPLOAD = Destinations.WEBEX_UPLOAD - WE_CHAT_UPLOAD = Destinations.WE_CHAT_UPLOAD - WHATSAPP = Destinations.WHATS_APP_UPLOAD - ZOOM_UPLOAD = Destinations.ZOOM_UPLOAD - - class FileConversionToolUploads(_Choices): - CLOUD_CONVERT_UPLOAD = Destinations.CLOUD_CONVERT_UPLOAD - COMPRESS_JPEG_UPLOAD = Destinations.COMPRESS_JPEG_UPLOAD - FREE_CONVERT_UPLOAD = Destinations.FREE_CONVERT_UPLOAD - HEIC_TO_JPEG_UPLOAD = Destinations.HEIC_TO_JPEG_UPLOAD - TINY_PNG_UPLOAD = Destinations.TINY_PNG_UPLOAD - - class PdfManagerUploads(_Choices): - ADOBE_ACROBAT_UPLOAD = Destinations.ADOBE_ACROBAT_UPLOAD - COMBINE_PDF_UPLOAD = Destinations.COMBINE_PDF_UPLOAD - FREE_PDF_CONVERT_UPLOAD = Destinations.FREE_PDF_CONVERT_UPLOAD - I_LOVE_PDF_UPLOAD = Destinations.I_LOVE_PDF_UPLOAD - JPG2_PDF_UPLOAD = Destinations.JPG2_PDF_UPLOAD - PDF24_TOOLS_UPLOAD = Destinations.PDF24_TOOLS_UPLOAD - PDF_ESCAPE_UPLOAD = Destinations.PDF_ESCAPE_UPLOAD - PDF_FILLER_UPLOAD = Destinations.PDF_FILLER_UPLOAD - PDF_SIMPLI_UPLOAD = Destinations.PDF_SIMPLI_UPLOAD - SEJDA_UPLOAD = Destinations.SEJDA_UPLOAD - SMALL_PDF_UPLOAD = Destinations.SMALL_PDF_UPLOAD - SODA_PDF_UPLOAD = Destinations.SODA_PDF_UPLOAD - - class ProductivityToolUploads(_Choices): - ADOBE_UPLOAD = Destinations.ADOBE_UPLOAD - CANVA_UPLOAD = Destinations.CANVA_UPLOAD - EVERNOTE_UPLOAD = Destinations.EVERNOTE_UPLOAD - FIGMA_UPLOAD = Destinations.FIGMA_UPLOAD - GOOGLE_KEEP_UPLOAD = Destinations.GOOGLE_KEEP_UPLOAD - GOOGLE_JAMBOARD_UPLOAD = Destinations.GOOGLE_JAMBOARD_UPLOAD - IMAGE_COLOR_PICKER_UPLOAD = Destinations.IMAGE_COLOR_PICKER_UPLOAD - KAPWING_UPLOAD = Destinations.KAPWING_UPLOAD - MIRO_UPLOAD = Destinations.MIRO_UPLOAD - MONDAY_UPLOAD = Destinations.MONDAY_UPLOAD - MURAL_UPLOAD = Destinations.MURAL_UPLOAD - NOTION_UPLOAD = Destinations.NOTION_UPLOAD - OVERLEAF_UPLOAD = Destinations.OVERLEAF_UPLOAD - PHOTOPEA_UPLOAD = Destinations.PHOTOPEA_UPLOAD - PIXLR_UPLOAD = Destinations.PIXLR_UPLOAD - REMOVE_DOT_BG_UPLOAD = Destinations.REMOVE_DOT_BG_UPLOAD - TRELLO_UPLOAD = Destinations.TRELLO_UPLOAD - VEED_UPLOAD = Destinations.VEED_UPLOAD - - class WebHostingUploads(_Choices): - GIT_HUB_PAGES_UPLOAD = Destinations.GIT_HUB_PAGES_UPLOAD - GOOGLE_SITES_UPLOAD = Destinations.GOOGLE_SITES_UPLOAD - WIX_UPLOAD = Destinations.WIX_UPLOAD - WORD_PRESS_UPLOAD = Destinations.WORD_PRESS_UPLOAD - - class Other(_Choices): - OTHER = Destinations.OTHER_DESTINATION - UNKNOWN = Destinations.UNKNOWN_DESTINATION - - class SocialMediaUploads(_Choices): - FOUR_CHAN_UPLOAD = Destinations.FOUR_CHAN_UPLOAD - FACEBOOK = Destinations.FACEBOOK_UPLOAD - IMGUR_UPLOAD = Destinations.IMGUR_UPLOAD - LINKEDIN = 
Destinations.LINKED_IN_UPLOAD - ODNOKLASSNIKI_UPLOAD = Destinations.ODNOKLASSNIKI_UPLOAD - OK_UPLOAD = Destinations.OK_UPLOAD - QZONE_UPLOAD = Destinations.QZONE_UPLOAD - REDDIT = Destinations.REDDIT_UPLOAD - STACK_OVERFLOW_UPLOAD = Destinations.STACK_OVERFLOW_UPLOAD - TUMBLR_UPLOAD = Destinations.TUMBLR_UPLOAD - TWITCH_UPLOAD = Destinations.TWITCH_UPLOAD - TWITTER = Destinations.TWITTER_UPLOAD - VIMEO_UPLOAD = Destinations.VIMEO_UPLOAD - VK_UPLOAD = Destinations.VK_UPLOAD - WEIBO_UPLOAD = Destinations.WEIBO_UPLOAD - YOU_TUBE_UPLOAD = Destinations.YOU_TUBE_UPLOAD - - class FileCategories(_Choices): - AUDIO = "Audio" - DOCUMENT = "Document" - EXECUTABLE = "Executable" - IMAGE = "Image" - PDF = "PDF" - PRESENTATION = "Presentation" - SCRIPT = "Script" - SOURCE_CODE = "Source code" - SPREADSHEET = "Spreadsheet" - VIDEO = "Video" - VIRTUAL_DISK_IMAGE = "Virtual Disk Image" - ZIP = "Zip" - - class UserBehavior(_Choices): - FILE_MISMATCH = "File mismatch" - OFF_HOURS = "Off hours" - REMOTE = "Remote" - FIRST_DESTINATION_USE = "First use of destination" - RARE_DESTINATION_USE = "Rare use of destination" - CONTRACT = "Contract" - DEPARTING = "Departing" - ELEVATED_ACCESS = "Elevated access" - FLIGHT_RISK = "Flight risk" - HIGH_IMPACT = "High impact" - HIGH_RISK = "High risk" - PERFORMANCE_CONCERNS = "Performance concerns" - POOR_SECURITY_PRACTICES = "Poor security practices" - SUSPICIOUS_SYSTEM_ACTIVITY = "Suspicious system activity" - - -class Severity(_FileEventFilterStringField, _Choices): - """V2 filter class that filters events by risk severity. - - Available options are provided as class attributes: - - :attr:`risk.Severity.LOW` - - :attr:`risk.Severity.MODERATE` - - :attr:`risk.Severity.HIGH` - - :attr:`risk.Severity.CRITICAL` - - :attr:`risk.Severity.NO_RISK_INDICATED` - """ - - _term = "risk.severity" - - CRITICAL = "CRITICAL" - HIGH = "HIGH" - MODERATE = "MODERATE" - LOW = "LOW" - NO_RISK_INDICATED = "NO_RISK_INDICATED" - - -class Score(_QueryFilterStringField, _FileEventFilterComparableField): - """V2 filter class that filters events by risk score.""" - - _term = "risk.score" - - -class Trusted(_QueryFilterBooleanField): - """V2 filter class that filters events based on whether activity can be trusted.""" - - _term = "risk.trusted" - - -class TrustReason(_QueryFilterStringField, _Choices): - """V2 filter class that filters events based on the trust reason for the activity. 
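# A minimal sketch of how the removed v2 risk filters were composed. It
# assumes the FileEventFilterStringField base class (defined in
# py42.sdk.queries.fileevents.util, not shown in this diff) exposes the same
# eq()/is_in() helpers as QueryFilterStringField later in this diff.
from py42.sdk.queries.fileevents.v2.filters import risk

# Match only critical- and high-severity events.
severity_group = risk.Severity.is_in([risk.Severity.CRITICAL, risk.Severity.HIGH])

# Match a couple of cloud-storage risk indicators by their display names.
indicator_group = risk.Indicators.is_in(
    [
        risk.Indicators.CloudStorageUploads.DROPBOX,        # "Dropbox upload"
        risk.Indicators.CloudStorageUploads.GOOGLE_DRIVE,   # "Google Drive upload"
    ]
)

# Every supported indicator value, aggregated by Indicators.choices().
print(risk.Indicators.choices())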
- - Available options are provided as class attributes: - - :attr: `risk.TrustReason.TRUSTED_DOMAIN_BROWSER_URL` - - :attr: `risk.TrustReason.TRUSTED_BROWSER_URL_PATH` - - :attr: `risk.TrustReason.TRUSTED_DOMAIN_BROWSER_TAB_TITLE` - - :attr: `risk.TrustReason.TRUSTED_BROWSER_TAB_INFOS` - - :attr: `risk.TrustReason.TRUSTED_DOMAIN_EMAIL_RECIPIENT` - - :attr: `risk.TrustReason.TRUSTED_DOMAIN_CLOUD_SYNC_USERNAME` - - :attr: `risk.TrustReason.TRUSTED_SLACK_WORKSPACE` - - :attr: `risk.TrustReason.EVENT_PAIRING_SERVICE_MATCH` - - :attr: `risk.TrustReason.EVENT_PAIRING_SERVICE_ENDPOINT_MATCH` - - :attr: `risk.TrustReason.DOWNLOAD_TO_A_MANAGED_DEVICE` - - :attr: `risk.TrustReason.SHARED_WITH_TRUSTED_USERS` - """ - - _term = "risk.trustReason" - - TRUSTED_DOMAIN_BROWSER_URL = "Trusted browser URL" - TRUSTED_BROWSER_URL_PATH = "Trusted specific URL path" - TRUSTED_DOMAIN_BROWSER_TAB_TITLE = "Trusted browser tab title" - TRUSTED_BROWSER_TAB_INFOS = "Trusted browser URL and/or tab title" - TRUSTED_DOMAIN_EMAIL_RECIPIENT = "Trusted email recipient" - TRUSTED_DOMAIN_CLOUD_SYNC_USERNAME = "Trusted sync username" - TRUSTED_SLACK_WORKSPACE = "Trusted Slack workspace" - EVENT_PAIRING_SERVICE_MATCH = "Event matched with cloud activity" - EVENT_PAIRING_SERVICE_ENDPOINT_MATCH = "Event matched with endpoint activity" - DOWNLOAD_TO_A_MANAGED_DEVICE = "Download to a managed device" - SHARED_WITH_TRUSTED_USERS = "Shared with trusted users" - - -class IndicatorsWeight(_QueryFilterStringField, _FileEventFilterComparableField): - """V2 filter class that filters events by the risk indicator weight.""" - - _term = "risk.indicators.weight" diff --git a/src/py42/sdk/queries/fileevents/v2/filters/risk_indicator_terms.py b/src/py42/sdk/queries/fileevents/v2/filters/risk_indicator_terms.py deleted file mode 100644 index 0aeae77e0..000000000 --- a/src/py42/sdk/queries/fileevents/v2/filters/risk_indicator_terms.py +++ /dev/null @@ -1,136 +0,0 @@ -class Destinations: - ADOBE_UPLOAD = "Adobe upload" - ADOBE_ACROBAT_UPLOAD = "Adobe Acrobat upload" - AIR_DROP = "AirDrop" - AMAZON_DRIVE_UPLOAD = "Amazon Drive upload" - AOL_UPLOAD = "AOL upload" - BAIDU_NET_DISK_UPLOAD = "Baidu NetDisk upload" - BITBUCKET_UPLOAD = "Bitbucket upload" - BOX_UPLOAD = "Box upload" - CANVA_UPLOAD = "Canva upload" - CLOUD_CONVERT_UPLOAD = "CloudConvert upload" - COLABORATORY_UPLOAD = "Colaboratory upload" - COMBINE_PDF_UPLOAD = "CombinePDF upload" - COMCAST_UPLOAD = "Comcast upload" - COMPRESS_JPEG_UPLOAD = "Compress JPEG upload" - CRASHPLAN_UPLOAD = "Crashplan upload" - DISCORD_UPLOAD = "Discord upload" - DRAKE_PORTALS_UPLOAD = "Drake Portals upload" - DROPBOX_UPLOAD = "Dropbox upload" - EVERNOTE_UPLOAD = "Evernote upload" - FACEBOOK_MESSENGER_UPLOAD = "Facebook Messenger upload" - FACEBOOK_UPLOAD = "Facebook upload" - FASTMAIL_UPLOAD = "Fastmail upload" - FIGMA_UPLOAD = "Figma upload" - FILE_DOT_IO_UPLOAD = "File.io upload" - FILESTACK_UPLOAD = "Filestack upload" - FOUR_CHAN_UPLOAD = "4chan upload" - FREE_CONVERT_UPLOAD = "Free Convert upload" - FREE_PDF_CONVERT_UPLOAD = "Free PDF Convert upload" - GIT_HUB_UPLOAD = "GitHub upload" - GIT_HUB_PAGES_UPLOAD = "GitHub Pages upload" - GIT_LAB_UPLOAD = "GitLab upload" - GMAIL_UPLOAD = "Gmail upload" - GMX_UPLOAD = "GMX upload" - GOOGLE_APPS_SCRIPT_UPLOAD = "Google Apps Script upload" - GOOGLE_CHAT_UPLOAD = "Google Chat upload" - GOOGLE_CLOUD_SHELL_UPLOAD = "Google Cloud Shell upload" - GOOGLE_DRIVE_UPLOAD = "Google Drive upload" - GOOGLE_HANGOUTS_UPLOAD = "Google Hangouts upload" - GOOGLE_JAMBOARD_UPLOAD = 
"Google Jamboard upload" - GOOGLE_KEEP_UPLOAD = "Google Keep upload" - GOOGLE_MESSAGES_UPLOAD = "Google Messages upload" - GOOGLE_SITES_UPLOAD = "Google Sites upload" - HEIC_TO_JPEG_UPLOAD = "HEICtoJPEG upload" - ICLOUD_MAIL_UPLOAD = "iCloud Mail upload" - ICLOUD_UPLOAD = "iCloud upload" - I_LOVE_PDF_UPLOAD = "iLovePDF upload" - IMAGE_COLOR_PICKER_UPLOAD = "Image Color Picker upload" - IMGUR_UPLOAD = "Imgur upload" - JPG2_PDF_UPLOAD = "JPG2PDF upload" - KAPWING_UPLOAD = "Kapwing upload" - LINKED_IN_UPLOAD = "LinkedIn upload" - LYCOS_UPLOAD = "Lycos upload" - MAIL_COM_UPLOAD = "Mail.com upload" - MEGA_UPLOAD = "Mega upload" - MICROSOFT_TEAMS_UPLOAD = "Microsoft Teams upload" - MIRO_UPLOAD = "Miro upload" - MONDAY_UPLOAD = "Monday upload" - MURAL_UPLOAD = "Mural upload" - NOTION_UPLOAD = "Notion upload" - ODNOKLASSNIKI_UPLOAD = "Odnoklassniki upload" - OK_UPLOAD = "OK upload" - ONE_DRIVE_UPLOAD = "OneDrive upload" - ONE_SIX_THREE_DOT_COM_UPLOAD = "163.com upload" - ONE_TWO_SIX_DOT_COM_UPLOAD = "126.com upload" - OPEN_TEXT_HIGHTAIL_UPLOAD = "OpenText Hightail upload" - OTHER_DESTINATION = "Other destination" - OUTLOOK_UPLOAD = "Outlook upload" - OVERLEAF_UPLOAD = "Overleaf upload" - PDF24_TOOLS_UPLOAD = "PDF24 Tools upload" - PDF_ESCAPE_UPLOAD = "PDFescape upload" - PDF_FILLER_UPLOAD = "pdfFiller upload" - PDF_SIMPLI_UPLOAD = "PDFSimpli upload" - PHOTOPEA_UPLOAD = "Photopea upload" - PIXLR_UPLOAD = "Pixlr upload" - PROTON_MAIL_UPLOAD = "ProtonMail upload" - PUBLIC_LINK_FROM_CORPORATE_BOX = "Public link from corporate Box" - PUBLIC_LINK_FROM_CORPORATE_GOOGLE_DRIVE = "Public link from corporate Google Drive" - PUBLIC_LINK_FROM_CORPORATE_ONE_DRIVE = "Public link from corporate OneDrive" - QQMAIL_UPLOAD = "QQMail upload" - QZONE_UPLOAD = "Qzone upload" - REDDIT_UPLOAD = "Reddit upload" - REMOVABLE_MEDIA = "Removable media" - REMOVE_DOT_BG_UPLOAD = "remove.bg upload" - SALESFORCE_DOWNLOAD = "Download to unmonitored device from corporate Salesforce" - SECURE_FIRM_PORTAL_UPLOAD = "Secure Firm Portal upload" - SEJDA_UPLOAD = "Sejda upload" - SENT_FROM_CORPORATE_GMAIL = "Sent from corporate Gmail" - SENT_FROM_CORPORATE_OFFICE365 = "Sent from corporate Microsoft Office 365" - SHARED_FROM_CORPORATE_BOX = "Shared from corporate Box" - SHARED_FROM_CORPORATE_GOOGLE_DRIVE = "Shared from corporate Google Drive" - SHARED_FROM_CORPORATE_ONE_DRIVE = "Shared from corporate OneDrive" - SHAREFILE_UPLOAD = "Sharefile upload" - SINA_MAIL_UPLOAD = "Sina Mail upload" - SLACK_UPLOAD = "Slack upload" - SMALL_PDF_UPLOAD = "SmallPDF upload" - SMART_VAULT_UPLOAD = "SmartVault upload" - SODA_PDF_UPLOAD = "Soda PDF upload" - SOHU_MAIL_UPLOAD = "Sohu Mail upload" - SOURCE_FORGE_UPLOAD = "SourceForge upload" - STACK_OVERFLOW_UPLOAD = "Stack Overflow upload" - STASH_UPLOAD = "Stash upload" - SUGAR_SYNC_UPLOAD = "SugarSync upload" - TELEGRAM_UPLOAD = "Telegram upload" - TINY_PNG_UPLOAD = "TinyPNG upload" - TRELLO_UPLOAD = "Trello upload" - TUMBLR_UPLOAD = "Tumblr upload" - TUTANOTA_UPLOAD = "Tutanota upload" - TWITCH_UPLOAD = "Twitch upload" - TWITTER_UPLOAD = "Twitter upload" - UNKNOWN_DESTINATION = "Unknown destination" - UNMONITORED_DEVICE_DOWNLOAD_BOX = ( - "Download to unmonitored device from corporate Box" - ) - UNMONITORED_DEVICE_DOWNLOAD_GOOGLE_DRIVE = ( - "Download to unmonitored device from corporate Google Drive" - ) - UNMONITORED_DEVICE_DOWNLOAD_ONE_DRIVE = ( - "Download to unmonitored device from corporate OneDrive" - ) - VEED_UPLOAD = "VEED upload" - VIMEO_UPLOAD = "Vimeo upload" - VK_UPLOAD = "Vk upload" - 
WEBEX_UPLOAD = "Webex upload" - WE_CHAT_UPLOAD = "WeChat upload" - WEIBO_UPLOAD = "Weibo upload" - WE_TRANSFER_UPLOAD = "WeTransfer upload" - WHATS_APP_UPLOAD = "WhatsApp upload" - WIX_UPLOAD = "Wix upload" - WORD_PRESS_UPLOAD = "WordPress upload" - YAHOO_UPLOAD = "Yahoo upload" - YOU_TUBE_UPLOAD = "YouTube upload" - ZIX_UPLOAD = "Zix upload" - ZOHO_MAIL_UPLOAD = "Zoho Mail upload" - ZOHO_WORK_DRIVE_UPLOAD = "Zoho WorkDrive upload" - ZOOM_UPLOAD = "Zoom upload" diff --git a/src/py42/sdk/queries/fileevents/v2/filters/source.py b/src/py42/sdk/queries/fileevents/v2/filters/source.py deleted file mode 100644 index 11e0afec3..000000000 --- a/src/py42/sdk/queries/fileevents/v2/filters/source.py +++ /dev/null @@ -1,228 +0,0 @@ -from py42.choices import Choices as _Choices -from py42.sdk.queries.fileevents.util import ( - FileEventFilterStringField as _FileEventFilterStringField, -) -from py42.sdk.queries.query_filter import ( - QueryFilterStringField as _QueryFilterStringField, -) - - -class EmailSender(_QueryFilterStringField): - """V2 filter class that filters events based on the email's sender (applies to email events only).""" - - _term = "source.email.sender" - - -class EmailFrom(_QueryFilterStringField): - """V2 filter class that filters events based on the display name of the email's sender, as it appears in - the \"From:\" field in the email (applies to email events only). - """ - - _term = "source.email.from" - - -class RemovableMediaName(_FileEventFilterStringField): - """V2 filter class that filters events based on the name of the removable media involved in the exposure - (applies to ``removable media`` events only). - """ - - _term = "source.removableMedia.name" - - -class RemovableMediaVendor(_FileEventFilterStringField): - """V2 filter class that filters events based on the vendor of the removable media device involved in the - exposure (applies to ``removable media`` events only). - """ - - _term = "source.removableMedia.vendor" - - -class RemovableMediaMediaName(_FileEventFilterStringField): - """V2 filter class that filters events based on the name of the removable media (as reported by the - vendor/device, usually very similar to RemovableMediaName) involved in the exposure (applies to - ``removable media`` events only). - """ - - _term = "source.removableMedia.mediaName" - - -class RemovableMediaVolumeName(_FileEventFilterStringField): - """V2 filter class that filters events based on the name of the formatted volume (as reported by the - operating system) of the removable media device involved in the exposure (applies to - ``removable media`` events only). - """ - - _term = "source.removableMedia.volumeName" - - -class RemovableMediaPartitionID(_FileEventFilterStringField): - """V2 filter class that filters events based on the unique identifier assigned (by the operating system) - to the removable media involved in the exposure (applies to ``removable media`` events only). - """ - - _term = "source.removableMedia.partitionId" - - -class RemovableMediaSerialNumber(_FileEventFilterStringField): - """V2 filter class that filters events based on the serial number of the connected hardware as reported - by the operating system (applies to ``removable media`` events only). - """ - - _term = "source.removableMedia.serialNumber" - - -class RemovableMediaCapacity(_FileEventFilterStringField): - """V2 filter class that filters events based on the capacity of the connected hardware as reported - by the operating system (applies to ``removable media`` events only). 
- """ - - _term = "source.removableMedia.capacity" - - -class RemovableMediaBusType(_FileEventFilterStringField): - """V2 filter class that filters events based on the bus type of the connected hardware as reported - by the operating system (applies to ``removable media`` events only). - """ - - _term = "source.removableMedia.busType" - - -class Category(_FileEventFilterStringField, _Choices): - """ - V2 filter class that filters events based on source category. - - Available options are provided as class attributes: - - :attr:`source.Category.BUSINESS_TOOLS` - - :attr:`source.Category.CLOUD_STORAGE` - - :attr:`source.Category.DEVICE` - - :attr:`source.Category.EMAIL` - - :attr:`source.Category.MESSAGING` - - :attr:`source.Category.MULTIPLE_POSSIBILITIES` - - :attr:`source.Category.SOCIAL_MEDIA` - - :attr:`source.Category.SOURCE_CODE_REPOSITORY` - - :attr:`source.Category.UNCATEGORIZED` - - :attr:`source.Category.UNKNOWN` - - :attr:`source.category.BUSINESS_INTELLIGENCE_TOOLS` - - :attr:`source.category.CIVIL_SERVICES` - - :attr:`source.category.CLOUD_COMPUTING` - - :attr:`source.category.CODING_TOOLS` - - :attr:`source.category.CONTRACT_MANAGEMENT` - - :attr:`source.category.CRM_TOOLS` - - :attr:`source.category.DESIGN_TOOLS` - - :attr:`source.category.E_COMMERCE` - - :attr:`source.category.FILE_CONVERSION_TOOLS` - - :attr:`source.category.FINANCIAL_SERVICES` - - :attr:`source.category.HEALTHCARE_AND_INSURANCE` - - :attr:`source.category.HR_TOOLS` - - :attr:`source.category.IMAGE_HOSTING` - - :attr:`source.category.IT_SERVICES` - - :attr:`source.category.JOB_LISTINGS` - - :attr:`source.category.LEARNING_PLATFORMS` - - :attr:`source.category.MARKETING_TOOLS` - - :attr:`source.category.PDF_MANAGER` - - :attr:`source.category.PHOTO_PRINTING` - - :attr:`source.category.PRODUCTIVITY_TOOLS` - - :attr:`source.category.PROFESSIONAL_SERVICES` - - :attr:`source.category.REAL_ESTATE` - - :attr:`source.category.SALES_TOOLS` - - :attr:`source.category.SEARCH_ENGINE` - - :attr:`source.category.SHIPPING` - - :attr:`source.category.SOFTWARE` - - :attr:`source.category.TRAVEL` - - :attr:`source.category.WEB_HOSTING` - """ - - _term = "source.category" - - BUSINESS_TOOLS = "Business Tools" - CLOUD_STORAGE = "Cloud Storage" - DEVICE = "Device" - EMAIL = "Email" - MESSAGING = "Messaging" - MULTIPLE_POSSIBILITIES = "Multiple Possibilities" - SOCIAL_MEDIA = "Social Media" - SOURCE_CODE_REPOSITORY = "Source Code Repository" - UNCATEGORIZED = "Uncategorized" - UNKNOWN = "Unknown" - BUSINESS_INTELLIGENCE_TOOLS = "Business Intelligence Tools" - CIVIL_SERVICES = "Civil Services" - CLOUD_COMPUTING = "Cloud Computing" - CODING_TOOLS = "Coding Tools" - CONTRACT_MANAGEMENT = "Contract Management" - CRM_TOOLS = "CRM Tools" - DESIGN_TOOLS = "Design Tools" - E_COMMERCE = "E-commerce" - FILE_CONVERSION_TOOLS = "File Conversion Tools" - FINANCIAL_SERVICES = "Financial Services" - HEALTHCARE_AND_INSURANCE = "Healthcare & Insurance" - HR_TOOLS = "HR Tools" - IMAGE_HOSTING = "Image Hosting" - IT_SERVICES = "IT Services" - JOB_LISTINGS = "Job Listings" - LEARNING_PLATFORMS = "Learning Platforms" - MARKETING_TOOLS = "Marketing Tools" - PDF_MANAGER = "PDF Manager" - PHOTO_PRINTING = "Photo Printing" - PRODUCTIVITY_TOOLS = "Productivity Tools" - PROFESSIONAL_SERVICES = "Professional Services" - REAL_ESTATE = "Real Estate" - SALES_TOOLS = "Sales Tools" - SEARCH_ENGINE = "Search Engine" - SHIPPING = "Shipping" - SOFTWARE = "Software" - TRAVEL = "Travel" - WEB_HOSTING = "Web Hosting" - - -class Name(_FileEventFilterStringField): - 
"""V2 filter class that filters events based on source name.""" - - _term = "source.name" - - -class TabTitles(_FileEventFilterStringField): - """V2 filter class that filters events based on source tab titles (for 'browser or other app' events).""" - - _term = "source.tabs.title" - - -class TabUrls(_FileEventFilterStringField): - """V2 filter class that filters events based on source tab URLs (for 'browser or other app' events).""" - - _term = "source.tabs.url" - - -class TabTitleErrors(_FileEventFilterStringField): - """V2 filter class that filters events based on source tab title errors (for 'browser or other app' events).""" - - _term = "source.tabs.titleError" - - -class TabUrlErrors(_FileEventFilterStringField): - """V2 filter class that filters events based on source tab URL Errors (for 'browser or other app' events).""" - - _term = "source.tabs.urlError" - - -class OperatingSystem(_FileEventFilterStringField): - """V2 filter class that filters events by the operating system of the source device.""" - - _term = "source.operatingSystem" - - -class PrivateIpAddress(_FileEventFilterStringField): - """V2 filter class that filters events by private (LAN) IP address of the source device.""" - - _term = "source.privateIp" - - -class IpAddress(_FileEventFilterStringField): - """V2 filter class that filters events by public (WAN) IP address of the source device.""" - - _term = "source.ip" - - -class Domain(_FileEventFilterStringField): - """V2 filter class that filters events by the domain of the source device.""" - - _term = "source.domain" diff --git a/src/py42/sdk/queries/fileevents/v2/filters/timestamp.py b/src/py42/sdk/queries/fileevents/v2/filters/timestamp.py deleted file mode 100644 index c5d963eb5..000000000 --- a/src/py42/sdk/queries/fileevents/v2/filters/timestamp.py +++ /dev/null @@ -1,37 +0,0 @@ -from py42.choices import Choices as _Choices -from py42.sdk.queries.fileevents.util import ( - FileEventFilterTimestampField as _FileEventFilterTimestampField, -) - - -class Timestamp(_FileEventFilterTimestampField, _Choices): - """V2 filter class that filters events based on the timestamp of the event that occurred. 
- - Available event timestamp constants are provided as class attributes, These - constants should be used only with class method `within_the_last`: - - - :attr:`timestamp.Timestamp.FIFTEEN_MINUTES` - - :attr:`timestamp.Timestamp.ONE_HOUR` - - :attr:`timestamp.Timestamp.THREE_HOURS` - - :attr:`timestamp.Timestamp.TWELVE_HOURS` - - :attr:`timestamp.Timestamp.ONE_DAY` - - :attr:`timestamp.Timestamp.THREE_DAYS` - - :attr:`timestamp.Timestamp.SEVEN_DAYS` - - :attr:`timestamp.Timestamp.FOURTEEN_DAYS` - - :attr:`timestamp.Timestamp.THIRTY_DAYS` - - Example:: - filter = timestamp.Timestamp.within_the_last(EventTimestamp.SEVEN_DAYS) - """ - - _term = "@timestamp" - - FIFTEEN_MINUTES = "PT15M" - ONE_HOUR = "PT1H" - THREE_HOURS = "PT3H" - TWELVE_HOURS = "PT12H" - ONE_DAY = "P1D" - THREE_DAYS = "P3D" - SEVEN_DAYS = "P7D" - FOURTEEN_DAYS = "P14D" - THIRTY_DAYS = "P30D" diff --git a/src/py42/sdk/queries/fileevents/v2/filters/user.py b/src/py42/sdk/queries/fileevents/v2/filters/user.py deleted file mode 100644 index e0aecb064..000000000 --- a/src/py42/sdk/queries/fileevents/v2/filters/user.py +++ /dev/null @@ -1,21 +0,0 @@ -from py42.sdk.queries.fileevents.util import ( - FileEventFilterStringField as _FileEventFilterStringField, -) - - -class Email(_FileEventFilterStringField): - """V2 filter class that filters events by the Code42 user email of the actor.""" - - _term = "user.email" - - -class Id(_FileEventFilterStringField): - """V2 filter class that filters events by the Code42 user ID of the actor.""" - - _term = "user.id" - - -class DeviceUid(_FileEventFilterStringField): - """V2 filter class that filters events by the device UID of the actor.""" - - _term = "user.deviceUid" diff --git a/src/py42/sdk/queries/query_filter.py b/src/py42/sdk/queries/query_filter.py deleted file mode 100644 index 07db5c0d5..000000000 --- a/src/py42/sdk/queries/query_filter.py +++ /dev/null @@ -1,532 +0,0 @@ -from datetime import datetime - -from py42.util import convert_datetime_to_epoch -from py42.util import convert_datetime_to_timestamp_str -from py42.util import DATE_STR_FORMAT -from py42.util import parse_timestamp_to_milliseconds_precision - - -def create_query_filter(term, operator, value=None): - """Creates a :class:`~py42.sdk.queries.query_filter.QueryFilter` object. Useful for - programmatically crafting query filters, such as filters not yet defined in py42. - - Args: - term (str): The term of the filter, such as ``actor`` or ``sharedWith``. - operator (str): The operator between ``term`` and ``value``, such as ``IS`` or `IS_NOT`. - value (str): The value used to filter results. - - Returns: - :class:`~py42.sdk.queries.query_filter.QueryFilter` - """ - - return QueryFilter(term, operator, value) - - -def create_filter_group(query_filter_list, filter_clause): - """Creates a :class:`~py42.sdk.queries.query_filter.FilterGroup` object. Useful for - programmatically crafting query filters, such as filters not yet defined in py42. - Alternatively, if you want to create custom filter groups with already defined - operators (such as `IS` or `IS_IN`), see the other methods in this module, such as - :meth:`~py42.sdk.queries.query_filter.create_eq_filter_group()`. - - Args: - query_filter_list (list): a list of :class:`~py42.sdk.queries.query_filter.QueryFilter` - objects. - filter_clause (str): The clause joining the filters, such as ``AND`` or ``OR``. 
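# A brief illustration of the relative-time filter removed above: the
# Timestamp docstring points callers at within_the_last(), which takes one of
# the ISO-8601 duration constants defined on the class. The helper itself is
# assumed to come from FileEventFilterTimestampField
# (py42.sdk.queries.fileevents.util, not shown in this diff).
from py42.sdk.queries.fileevents.v2.filters import timestamp

# Events from the last seven days ("P7D").
last_week = timestamp.Timestamp.within_the_last(timestamp.Timestamp.SEVEN_DAYS)
print(last_week)  # a FilterGroup built with the WITHIN_THE_LAST operator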
- - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - - return FilterGroup(query_filter_list, filter_clause) - - -def create_eq_filter_group(term, value): - """ "Creates a :class:`~py42.sdk.queries.query_filter.FilterGroup` for filtering results - where the value with key ``term`` equals the given value. Useful for creating ``IS`` - filters that are not yet supported in py42 or programmatically crafting filter groups. - - Args: - term: (str): The term of the filter, such as ``actor`` or ``sharedWith``. - value (str): The value used to match on. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - - filter_list = [create_query_filter(term, "IS", value)] - return create_filter_group(filter_list, "AND") - - -def create_not_eq_filter_group(term, value): - """ "Creates a :class:`~py42.sdk.queries.query_filter.FilterGroup` for filtering results - where the value with key ``term`` does not equal the given value. Useful for creating - ``IS_NOT`` filters that are not yet supported in py42 or programmatically crafting filter - groups. - - Args: - term: (str): The term of the filter, such as ``actor`` or ``sharedWith``. - value (str): The value used to exclude on. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - - filter_list = [create_query_filter(term, "IS_NOT", value)] - return create_filter_group(filter_list, "AND") - - -def create_is_in_filter_group(term, value_list): - """ "Creates a :class:`~py42.sdk.queries.query_filter.FilterGroup` for filtering results - where the value with key ``term`` is one of several values. Useful for creating ``IS_IN`` - filters that are not yet supported in py42 or programmatically crafting filter groups. - - Args: - term: (str): The term of the filter, such as ``actor`` or ``sharedWith``. - value_list (list): The list of values to match on. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - - filter_list = [create_query_filter(term, "IS", value) for value in value_list] - return create_filter_group(filter_list, "OR" if len(filter_list) > 1 else "AND") - - -def create_not_in_filter_group(term, value_list): - """ "Creates a :class:`~py42.sdk.queries.query_filter.FilterGroup` for filtering results - where the value with key ``term`` is not one of several values. Useful for creating - ``NOT_IN`` filters that are not yet supported in py42 or programmatically crafting - filter groups. - - Args: - term: (str): The term of the filter, such as ``actor`` or ``sharedWith``. - value_list (list): The list of values to exclude on. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - - filter_list = [create_query_filter(term, "IS_NOT", value) for value in value_list] - return create_filter_group(filter_list, "AND") - - -def create_on_or_after_filter_group(term, value): - """ "Creates a :class:`~py42.sdk.queries.query_filter.FilterGroup` for filtering results - where the value with key ``term`` is on or after the given value. Examples include - values describing dates. Useful for creating ``ON_OR_AFTER`` filters that are not yet - supported in py42 or programmatically crafting filter groups. - - Args: - term: (str): The term of the filter, such as ``eventTimestamp``. - value (str or int): The value used to filter results. 
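# The helpers above (together with create_query_filter and
# create_filter_group) cover the common cases; a short sketch of how they
# compose, using the example terms from their own docstrings.
from py42.sdk.queries.query_filter import (
    create_eq_filter_group,
    create_filter_group,
    create_is_in_filter_group,
    create_query_filter,
)

# Single-term group: "actor" IS the given value.
eq_group = create_eq_filter_group("actor", "user@example.com")
print(str(eq_group))  # {"filterClause":"AND", "filters":[...]}

# Multi-value group: "actor" IS any of the listed values (joined with OR).
in_group = create_is_in_filter_group(
    "actor", ["first@example.com", "second@example.com"]
)

# Fully custom group built from raw QueryFilter objects.
custom_group = create_filter_group(
    [
        create_query_filter("actor", "IS", "user@example.com"),
        create_query_filter("sharedWith", "IS_NOT", "partner@example.com"),
    ],
    "AND",
)
print(dict(custom_group))  # {'filterClause': 'AND', 'filters': [...]}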
- - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - - filter_list = [create_query_filter(term, "ON_OR_AFTER", value)] - return create_filter_group(filter_list, "AND") - - -def create_on_or_before_filter_group(term, value): - """ "Creates a :class:`~py42.sdk.queries.query_filter.FilterGroup` for filtering results - where the value with key ``term`` is on or before the given value. Examples include - values describing dates. Useful for creating ``ON_OR_BEFORE`` filters that are not - yet supported in py42 or programmatically crafting filter groups. - - Args: - term: (str): The term of the filter, such as ``eventTimestamp``. - value (str or int): The value used to filter results. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - - filter_list = [create_query_filter(term, "ON_OR_BEFORE", value)] - return create_filter_group(filter_list, "AND") - - -def create_in_range_filter_group(term, start_value, end_value): - """ "Creates a :class:`~py42.sdk.queries.query_filter.FilterGroup` for filtering results - where the value with key ``term`` is in the given range. Examples include values describing - dates. Useful for creating a combination of ``ON_OR_AFTER`` and ``ON_OR_BEFORE`` filters - that are not yet supported in py42 or programmatically crafting filter groups. - - Args: - term: (str): The term of the filter, such as ``eventTimestamp``. - start_value (str or int): The start value used to filter results. - end_value (str or int): The end value used to filter results. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - - filter_list = [ - create_query_filter(term, "ON_OR_AFTER", start_value), - create_query_filter(term, "ON_OR_BEFORE", end_value), - ] - return create_filter_group(filter_list, "AND") - - -def create_within_the_last_filter_group(term, value): - """Returns a :class:`~py42.sdk.queries.query_filter.FilterGroup` that is useful - for finding results where the key ``term`` is an ``EventTimestamp._term`` - and the value is one of the `EventTimestamp` attributes as `value`. - - Args: - value (str): `EventTimestamp` attribute. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - filter_list = [create_query_filter(term, "WITHIN_THE_LAST", value)] - return create_filter_group(filter_list, "AND") - - -class QueryFilterStringField: - """Helper class for creating filters where the search value is a string.""" - - _term = "override_string_field_name" - - @classmethod - def eq(cls, value): - """Returns a :class:`~py42.sdk.queries.query_filter.FilterGroup` that is useful - for finding results where the value with key ``self._term`` equals the provided - ``value``. - - Args: - value (str): The value to match on. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - return create_eq_filter_group(cls._term, value) - - @classmethod - def not_eq(cls, value): - """Returns a :class:`~py42.sdk.queries.query_filter.FilterGroup` that is useful - for finding results where the value with key ``self._term`` does not equal the provided ``value``. - - Args: - value (str): The value to exclude on. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - return create_not_eq_filter_group(cls._term, value) - - @classmethod - def is_in(cls, value_list): - """Returns a :class:`~py42.sdk.queries.query_filter.FilterGroup` that is useful - for finding results where the value with the key ``self._term`` is in the provided - ``value_list``. 
- - Args: - value_list (list): The list of values to match on. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - return create_is_in_filter_group(cls._term, value_list) - - @classmethod - def not_in(cls, value_list): - """Returns a :class:`~py42.sdk.queries.query_filter.FilterGroup` that is useful - for finding results where the value with the key ``self._term`` is not in the provided - ``value_list``. - - Args: - value_list (list): The list of values to exclude on. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - return create_not_in_filter_group(cls._term, value_list) - - -class QueryFilterTimestampField: - """Helper class for creating filters where the search value is a timestamp.""" - - _term = "override_timestamp_field_name" - - @staticmethod - def _parse_timestamp(value): - return parse_timestamp_to_milliseconds_precision(value) - - @staticmethod - def _convert_datetime_to_timestamp(value): - return convert_datetime_to_timestamp_str(value) - - @classmethod - def on_or_after(cls, value): - """Returns a :class:`~py42.sdk.queries.query_filter.FilterGroup` that is useful - for finding results where the value with key ``self._term` is on or after the - provided ``value``. - - Args: - value (str or int or float or datetime): The value used to filter results. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - formatted_timestamp = cls._parse_timestamp(value) - return create_on_or_after_filter_group(cls._term, formatted_timestamp) - - @classmethod - def on_or_before(cls, value): - """Returns a :class:`~py42.sdk.queries.query_filter.FilterGroup` that is useful - for finding results where the value with key ``self._term`` is on or before the - provided ``value``. - - Args: - value (str or int or float or datetime): The value used to filter results. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - formatted_timestamp = cls._parse_timestamp(value) - return create_on_or_before_filter_group(cls._term, formatted_timestamp) - - @classmethod - def in_range(cls, start_value, end_value): - """Returns a :class:`~py42.sdk.queries.query_filter.FilterGroup` that is useful - for finding results where the value with key ``self._term`` is in range between - the provided ``start_value`` and ``end_value``. - - Args: - start_value (str or int or float or datetime): The start value used to - filter results. - end_value (str or int or float or datetime): The end value used to - filter results. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - formatted_start_time = cls._parse_timestamp(start_value) - formatted_end_time = cls._parse_timestamp(end_value) - return create_in_range_filter_group( - cls._term, formatted_start_time, formatted_end_time - ) - - @classmethod - def on_same_day(cls, value): - """Returns a :class:`~py42.sdk.queries.query_filter.FilterGroup` that is useful - for finding results where the value with key ``self._term`` is within the same - calendar day as the provided ``value``. - - Args: - value (str or int or float or datetime): The value used to filter results. 
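# The string and timestamp field helpers above are meant to be subclassed
# with a concrete _term, exactly as the removed v2 filter modules do. A
# minimal sketch; "custom.term" and "custom.eventTimestamp" are hypothetical
# terms used only for illustration.
from datetime import datetime, timedelta

from py42.sdk.queries.query_filter import (
    QueryFilterStringField,
    QueryFilterTimestampField,
)


class CustomStringField(QueryFilterStringField):
    _term = "custom.term"  # hypothetical search term


class CustomTimestampField(QueryFilterTimestampField):
    _term = "custom.eventTimestamp"  # hypothetical timestamp term


print(CustomStringField.eq("some value"))
print(CustomStringField.is_in(["a", "b", "c"]))

now = datetime.utcnow()
print(CustomTimestampField.in_range(now - timedelta(days=7), now))
print(CustomTimestampField.on_same_day(now))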
- - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - if isinstance(value, str): - value = convert_datetime_to_epoch(datetime.strptime(value, DATE_STR_FORMAT)) - elif isinstance(value, datetime): - value = convert_datetime_to_epoch(value) - date_from_value = datetime.utcfromtimestamp(value) - start_time = datetime( - date_from_value.year, date_from_value.month, date_from_value.day, 0, 0, 0 - ) - end_time = datetime( - date_from_value.year, date_from_value.month, date_from_value.day, 23, 59, 59 - ) - formatted_start_time = cls._convert_datetime_to_timestamp(start_time) - formatted_end_time = cls._convert_datetime_to_timestamp(end_time) - return create_in_range_filter_group( - cls._term, formatted_start_time, formatted_end_time - ) - - -class QueryFilterBooleanField: - """Helper class for creating filters where the search value is a boolean.""" - - _term = "override_boolean_field_name" - - @classmethod - def is_true(cls): - """Returns a :class:`~py42.sdk.queries.query_filter.FilterGroup` that is useful - for finding results where the value with key ``self._term`` is True. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - return create_eq_filter_group(cls._term, "TRUE") - - @classmethod - def is_false(cls): - """Returns a :class:`~py42.sdk.queries.query_filter.FilterGroup` that is useful - for finding results where the value with key ``self._term`` is False. - - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - return create_eq_filter_group(cls._term, "FALSE") - - -class QueryFilter: - """Class for constructing a single filter object for use in a search query. - - When :func:`str()` is called on a :class:`~py42.sdk.queries.query_filter.QueryFilter` - instance, the (``term``, ``operator``, ``value``) attribute combination is transformed - into a JSON string to be used as part of a Forensic Search or Alert query. - - When :func:`dict()` is called on a :class:`~py42.sdk.queries.query_filter.QueryFilter` - instance, the (``term``, ``operator``, ``value``) attribute combination is transformed - into the Python `dict` equivalent of their JSON representation. This can be useful - for programmatically manipulating a :class:`~py42.sdk.queries.query_filter.QueryFilter` - after it's been created. - """ - - _term = None - - def __init__(self, term, operator, value=None): - self._term = term - self._operator = operator - self._value = value - - @classmethod - def from_dict(cls, _dict): - """Creates an instance of :class:`~py42.sdk.queries.query_filter.QueryFilter` from - the values found in ``_dict``. ``_dict`` must contain keys ``term``, ``operator``, - and ``value``. - - Args: - _dict (dict): A dictionary containing keys ``term``, ``operator``, and ``value``. 
- - Returns: - :class:`~py42.sdk.queries.query_filter.QueryFilter` - """ - - return cls(_dict["term"], _dict["operator"], value=_dict.get("value")) - - @property - def term(self): - """The term of the filter, such as ``actor`` or ``sharedWith``.""" - - return self._term - - @property - def operator(self): - """The operator between ``term`` and ``value``, such as ``IS`` or `IS_NOT`.""" - - return self._operator - - @property - def value(self): - """The value used to filter results.""" - - return self._value - - def __str__(self): - value = "null" if self._value is None else f'"{self._value}"' - return ( - f'{{"operator":"{self._operator}", "term":"{self._term}", "value":{value}}}' - ) - - def __iter__(self): - output_dict = { - "operator": self._operator, - "term": self._term, - "value": self._value, - } - for key in output_dict: - yield key, output_dict[key] - - def __eq__(self, other): - if isinstance(other, (QueryFilter, tuple, list)): - return tuple(self) == tuple(other) - elif isinstance(other, str): - return str(self) == other - else: - return False - - def __hash__(self): - return hash(str(self)) - - -class FilterGroup: - """Class for constructing a logical sub-group of related filters from a list of - :class:`~py42.sdk.queries.query_filter.QueryFilter` objects. Takes a list of - :class:`~py42.sdk.queries.query_filter.QueryFilter` objects and combines them - logically using the passed in filter clause (``AND`` or ``OR``). - - When :func:`str()` is called on a :class:`FilterGroup` instance, the combined filter items are - transformed into a JSON string to be used as part of a Forensic Search or Alert query. - - When :func:`dict()` is called on a :class:`~py42.sdk.queries.query_filter.FilterGroup` - instance, the combined filter items are transformed into the Python `dict` equivalent - of their JSON representation. This can be useful for programmatically manipulating a - :class:`~py42.sdk.queries.query_filter.FilterGroup` after it's been created. - """ - - def __init__(self, filter_list, filter_clause="AND"): - self._filter_list = filter_list - self._filter_clause = filter_clause - - @classmethod - def from_dict(cls, _dict): - """Creates an instance of :class:`~py42.sdk.queries.query_filter.FilterGroup` - from the values found in ``_dict``. ``_dict`` must contain keys ``filters`` and - ``filterClause``. - - Args: - _dict (dict): A dictionary containing keys ``term``, ``operator``, and ``value``. 
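# QueryFilter and FilterGroup above both support str() and dict()
# conversions, and FilterGroup.from_dict() reverses the dict() form. A small
# round-trip using only behavior defined in this module.
from py42.sdk.queries.query_filter import FilterGroup, QueryFilter

flt = QueryFilter("actor", "IS", "user@example.com")
print(str(flt))   # {"operator":"IS", "term":"actor", "value":"user@example.com"}
print(dict(flt))  # {'operator': 'IS', 'term': 'actor', 'value': 'user@example.com'}

group = FilterGroup([flt], filter_clause="AND")
rebuilt = FilterGroup.from_dict(dict(group))
assert rebuilt == group  # equality compares the clause and the filter set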
- - Returns: - :class:`~py42.sdk.queries.query_filter.FilterGroup` - """ - filter_list = [QueryFilter.from_dict(item) for item in _dict["filters"]] - return cls(filter_list, filter_clause=_dict["filterClause"]) - - @property - def filter_list(self): - """The list of :class:`~py42.sdk.queries.query_filter.QueryFilter` objects in this - group.""" - - return self._filter_list - - @property - def filter_clause(self): - """The clause joining the filters, such as ``AND`` or ``OR``.""" - - return self._filter_clause - - @filter_clause.setter - def filter_clause(self, value): - """The clause joining the filters, such as ``AND`` or ``OR``.""" - - self._filter_clause = value - - @property - def _filter_set(self): - return sorted(list(set(self.filter_list)), key=str) - - def __str__(self): - filters_string = ",".join(str(filter_item) for filter_item in self._filter_set) - return ( - f'{{"filterClause":"{self._filter_clause}", "filters":[{filters_string}]}}' - ) - - def __iter__(self): - filter_list = [dict(item) for item in self._filter_set] - output_dict = {"filterClause": self._filter_clause, "filters": filter_list} - for key in output_dict: - yield key, output_dict[key] - - def __eq__(self, other): - if isinstance(other, FilterGroup): - return ( - self.filter_clause == other.filter_clause - and self._filter_set == other._filter_set - ) - elif isinstance(other, (tuple, list)): - return tuple(self) == tuple(other) - elif isinstance(other, str): - return str(self) == other - else: - return False - - def __contains__(self, item): - return item in self._filter_set diff --git a/src/py42/services/alertrules.py b/src/py42/services/alertrules.py deleted file mode 100644 index 0af3944f9..000000000 --- a/src/py42/services/alertrules.py +++ /dev/null @@ -1,130 +0,0 @@ -from py42.exceptions import Py42InvalidRuleError -from py42.exceptions import Py42NotFoundError -from py42.services import BaseService - - -class AlertRulesService(BaseService): - """A service to manage Alert Rules.""" - - _api_prefix = "/svc/api/v1/Rules/" - - def __init__(self, connection, user_context, user_profile_service): - super().__init__(connection) - self._user_context = user_context - self._user_profile_service = user_profile_service - self._exfiltration = None - self._cloud_share = None - self._file_type_mismatch = None - - @property - def exfiltration(self): - if not self._exfiltration: - tenant_id = self._user_context.get_current_tenant_id() - self._exfiltration = ExfiltrationService(self._connection, tenant_id) - return self._exfiltration - - @property - def cloudshare(self): - if not self._cloud_share: - tenant_id = self._user_context.get_current_tenant_id() - self._cloud_share = CloudShareService(self._connection, tenant_id) - return self._cloud_share - - @property - def filetypemismatch(self): - if not self._file_type_mismatch: - tenant_id = self._user_context.get_current_tenant_id() - self._file_type_mismatch = FileTypeMismatchService( - self._connection, tenant_id - ) - return self._file_type_mismatch - - def add_user(self, rule_id, user_id): - tenant_id = self._user_context.get_current_tenant_id() - user_details = self._user_profile_service.get_by_id(user_id) - user_aliases = user_details.data.get("cloudAliases") or [] - data = { - "tenantId": tenant_id, - "ruleId": rule_id, - "userList": [ - {"userIdFromAuthority": user_id, "userAliasList": user_aliases} - ], - } - uri = f"{self._api_prefix}{'add-users'}" - try: - return self._connection.post(uri, json=data) - except Py42NotFoundError as err: - raise 
Py42InvalidRuleError(err, rule_id) - - def remove_user(self, rule_id, user_id): - user_ids = [user_id] - tenant_id = self._user_context.get_current_tenant_id() - data = {"tenantId": tenant_id, "ruleId": rule_id, "userIdList": user_ids} - uri = f"{self._api_prefix}{'remove-users'}" - return self._connection.post(uri, json=data) - - def remove_all_users(self, rule_id): - tenant_id = self._user_context.get_current_tenant_id() - data = {"tenantId": tenant_id, "ruleId": rule_id} - uri = f"{self._api_prefix}{'remove-all-users'}" - return self._connection.post(uri, json=data) - - -class CloudShareService(BaseService): - _endpoint = "/svc/api/v1/Rules/query-cloud-share-permissions-rule" - - def __init__(self, connection, tenant_id): - super().__init__(connection) - self._tenant_id = tenant_id - - def get(self, rule_id): - """Fetch cloud share alert rule by rule id. - - Args: - rule_id (str): Observer rule Id of a rule to be fetched. - - Returns - :class:`py42.response.Py42Response` - """ - data = {"tenantId": self._tenant_id, "ruleIds": [rule_id]} - return self._connection.post(self._endpoint, json=data) - - -class ExfiltrationService(BaseService): - _endpoint = "/svc/api/v1/Rules/query-endpoint-exfiltration-rule" - - def __init__(self, connection, tenant_id): - super().__init__(connection) - self._tenant_id = tenant_id - - def get(self, rule_id): - """Fetch exfiltration alert rule by rule id. - - Args: - rule_id (str): Observer rule Id of a rule to be fetched. - - Returns - :class:`py42.response.Py42Response` - """ - data = {"tenantId": self._tenant_id, "ruleIds": [rule_id]} - return self._connection.post(self._endpoint, json=data) - - -class FileTypeMismatchService(BaseService): - _endpoint = "/svc/api/v1/Rules/query-file-type-mismatch-rule" - - def __init__(self, connection, tenant_id): - super().__init__(connection) - self._tenant_id = tenant_id - - def get(self, rule_id): - """Fetch File type mismatch alert rules by rule id. - - Args: - rule_id (str): Observer rule Id of a rule to be fetched. 
- - Returns - :class:`py42.response.Py42Response` - """ - data = {"tenantId": self._tenant_id, "ruleIds": [rule_id]} - return self._connection.post(self._endpoint, json=data) diff --git a/src/py42/services/alerts.py b/src/py42/services/alerts.py deleted file mode 100644 index d8fc0540b..000000000 --- a/src/py42/services/alerts.py +++ /dev/null @@ -1,152 +0,0 @@ -import json - -from py42 import settings -from py42.sdk.queries.query_filter import create_eq_filter_group -from py42.services import BaseService -from py42.services.util import get_all_pages - - -class AlertService(BaseService): - _uri_prefix = "/svc/api" - - _CREATED_AT = "CreatedAt" - _RULE_METADATA = "ruleMetadata" - _SEARCH_KEY = "alerts" - - def __init__(self, connection, user_context): - super().__init__(connection) - self._user_context = user_context - - def search(self, query, page_num=1, page_size=None): - query.page_number = page_num - 1 - if page_size: - query.page_size = page_size - query = self._add_tenant_id_if_missing(query) - uri = f"{self._uri_prefix}/v1/query-alerts" - return self._connection.post(uri, json=query) - - def get_search_page(self, query, page_num, page_size): - query.page_number = page_num - 1 - query.page_size = page_size - uri = f"{self._uri_prefix}/v1/query-alerts" - query = self._add_tenant_id_if_missing(query) - return self._connection.post(uri, json=query) - - def search_all_pages(self, query): - return get_all_pages( - self.get_search_page, - self._SEARCH_KEY, - query=query, - page_size=query.page_size, - ) - - def get_details(self, alert_ids): - if not isinstance(alert_ids, (list, tuple)): - alert_ids = [alert_ids] - tenant_id = self._user_context.get_current_tenant_id() - uri = f"{self._uri_prefix}/v1/query-details" - data = {"tenantId": tenant_id, "alertIds": alert_ids} - results = self._connection.post(uri, json=data) - return _convert_observation_json_strings_to_objects(results) - - def update_state(self, state, alert_ids, note=None): - if not isinstance(alert_ids, (list, tuple)): - alert_ids = [alert_ids] - tenant_id = self._user_context.get_current_tenant_id() - uri = f"{self._uri_prefix}/v1/update-state" - data = { - "tenantId": tenant_id, - "alertIds": alert_ids, - "note": note, - "state": state, - } - return self._connection.post(uri, json=data) - - def _add_tenant_id_if_missing(self, query): - query_dict = dict(query) - tenant_id = query_dict.get("tenantId", None) - if tenant_id is None: - query_dict["tenantId"] = self._user_context.get_current_tenant_id() - return query_dict - else: - return query_dict - - def get_rules_page( - self, page_num, groups=None, sort_key=None, sort_direction=None, page_size=None - ): - # This API expects the first page to start with zero. 
- page_num = page_num - 1 - page_size = page_size or settings.items_per_page - data = { - "tenantId": self._user_context.get_current_tenant_id(), - "groups": groups or [], - "groupClause": "AND", - "pgNum": page_num, - "pgSize": page_size, - "srtKey": sort_key, - "srtDirection": sort_direction, - } - uri = f"{self._uri_prefix}/v1/rules/query-rule-metadata" - return self._connection.post(uri, json=data) - - def get_all_rules(self, sort_key=_CREATED_AT, sort_direction="DESC"): - return get_all_pages( - self.get_rules_page, - self._RULE_METADATA, - groups=None, - sort_key=sort_key, - sort_direction=sort_direction, - ) - - def get_all_rules_by_name( - self, rule_name, sort_key=_CREATED_AT, sort_direction="DESC" - ): - return get_all_pages( - self.get_rules_page, - self._RULE_METADATA, - groups=[json.loads(str(create_eq_filter_group("Name", rule_name)))], - sort_key=sort_key, - sort_direction=sort_direction, - ) - - def get_rule_by_observer_id( - self, observer_id, sort_key=_CREATED_AT, sort_direction="DESC" - ): - results = get_all_pages( - self.get_rules_page, - self._RULE_METADATA, - groups=[ - json.loads(str(create_eq_filter_group("ObserverRuleId", observer_id))) - ], - sort_key=sort_key, - sort_direction=sort_direction, - ) - return next(results) - - def update_note(self, alert_id, note): - tenant_id = self._user_context.get_current_tenant_id() - uri = f"{self._uri_prefix}/v1/add-note" - data = { - "tenantId": tenant_id, - "alertId": alert_id, - "note": note, - } - return self._connection.post(uri, json=data) - - def get_aggregate_data(self, alert_id): - uri = f"{self._uri_prefix}/v2/query-details-aggregate" - data = {"alertId": alert_id} - response = self._connection.post(uri, json=data) - response.data["alert"]["ffsUrl"] = response.data["alert"].get("ffsUrlEndpoint") - return response - - -def _convert_observation_json_strings_to_objects(results): - for alert in results["alerts"]: - if "observations" in alert: - for observation in alert["observations"]: - try: - observation["data"] = json.loads(observation["data"]) - except Exception: - continue - return results diff --git a/src/py42/services/cases.py b/src/py42/services/cases.py deleted file mode 100644 index bf7ae0922..000000000 --- a/src/py42/services/cases.py +++ /dev/null @@ -1,140 +0,0 @@ -from py42 import settings -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42CaseNameExistsError -from py42.exceptions import Py42DescriptionLimitExceededError -from py42.exceptions import Py42InvalidCaseUserError -from py42.exceptions import Py42UpdateClosedCaseError -from py42.services import BaseService -from py42.services.util import get_all_pages - - -class CasesService(BaseService): - - _uri_prefix = "/api/v1/case" - - def __init__(self, connection): - super().__init__(connection) - - def create( - self, name, subject=None, assignee=None, description=None, findings=None - ): - data = { - "assignee": assignee, - "description": description, - "findings": findings, - "name": name, - "subject": subject, - } - try: - return self._connection.post(self._uri_prefix, json=data) - except Py42BadRequestError as err: - _handle_common_invalid_case_parameters_errors(err, name) - - def get_page( - self, - page_num, - name=None, - status=None, - created_at=None, - updated_at=None, - subject=None, - assignee=None, - page_size=None, - sort_direction="asc", - sort_key="number", - **kwargs, - ): - - page_size = page_size or settings.items_per_page - params = { - "name": name, - "subject": subject, - "assignee": assignee, - 
"createdAt": created_at, - "updatedAt": updated_at, - "status": status, - "pgNum": page_num, - "pgSize": page_size, - "srtDir": sort_direction, - "srtKey": sort_key, - } - params.update(**kwargs) - - return self._connection.get(self._uri_prefix, params=params) - - def get_all( - self, - name=None, - status=None, - created_at=None, - updated_at=None, - subject=None, - assignee=None, - page_size=None, - sort_direction="asc", - sort_key="number", - **kwargs, - ): - return get_all_pages( - self.get_page, - "cases", - name=name, - status=status, - created_at=created_at, - updated_at=updated_at, - assignee=assignee, - subject=subject, - page_size=page_size, - sort_direction=sort_direction, - sort_key=sort_key, - **kwargs, - ) - - def get(self, case_number): - return self._connection.get(f"{self._uri_prefix}/{case_number}") - - def export_summary(self, case_number): - uri_prefix = f"{self._uri_prefix}/{case_number}/export" - return self._connection.get(uri_prefix) - - def update( - self, - case_number, - name=None, - subject=None, - assignee=None, - description=None, - findings=None, - status=None, - ): - current_case_data = self.get(case_number).data - - data = { - "assignee": assignee or current_case_data.get("assignee"), - "description": description or current_case_data.get("description"), - "findings": findings or current_case_data.get("findings"), - "name": name or current_case_data.get("name"), - "subject": subject or current_case_data.get("subject"), - "status": status or current_case_data.get("status"), - } - try: - return self._connection.put(f"{self._uri_prefix}/{case_number}", json=data) - except Py42BadRequestError as err: - if "NO_EDITS_ONCE_CLOSED" in err.response.text: - raise Py42UpdateClosedCaseError(err) - _handle_common_invalid_case_parameters_errors(err, name) - - -def _handle_common_invalid_case_parameters_errors(base_err, name): - if "NAME_EXISTS" in base_err.response.text: - raise Py42CaseNameExistsError(base_err, name) - elif "NO_EDITS_ONCE_CLOSED" in base_err.response.text: - raise Py42UpdateClosedCaseError(base_err) - elif "DESCRIPTION_TOO_LONG" in base_err.response.text: - raise Py42DescriptionLimitExceededError(base_err) - elif "INVALID_USER" in base_err.response.text: - if "subject" in base_err.response.text: - raise Py42InvalidCaseUserError(base_err, "subject") - elif "assignee" in base_err.response.text: - raise Py42InvalidCaseUserError(base_err, "assignee") - raise diff --git a/src/py42/services/casesfileevents.py b/src/py42/services/casesfileevents.py deleted file mode 100644 index 96bd6fa92..000000000 --- a/src/py42/services/casesfileevents.py +++ /dev/null @@ -1,78 +0,0 @@ -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42CaseAlreadyHasEventError -from py42.exceptions import Py42UpdateClosedCaseError -from py42.services import BaseService - - -class CasesFileEventsService(BaseService): - - _uri_prefix = "/api/v1/case/{0}/fileevent" - - def __init__(self, connection): - super().__init__(connection) - - def add(self, case_number, event_id): - """Adds an event to the case. - - Args: - case_number (int): Case number of the case. - event_id (str): Event id to add to the case. 
- - Returns: - :class:`py42.response.Py42Response` - """ - try: - return self._connection.post( - f"{self._uri_prefix.format(case_number)}/{event_id}" - ) - except Py42BadRequestError as err: - if "CASE_IS_CLOSED" in err.response.text: - raise Py42UpdateClosedCaseError(err) - elif "CASE_ALREADY_HAS_EVENT" in err.response.text: - raise Py42CaseAlreadyHasEventError(err) - else: - raise - - def get(self, case_number, event_id): - """Gets information of a specified event from the case. - - Args: - case_number (int): Case number of the case. - event_id (str): Event id to fetch from the case. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._connection.get( - f"{self._uri_prefix.format(case_number)}/{event_id}" - ) - - def get_all(self, case_number): - """Gets all events associated with the given case. - - Args: - case_number (int): Case number of the case. - - Returns: - :class:`py42.response.Py42Response` - """ - return self._connection.get(self._uri_prefix.format(case_number)) - - def delete(self, case_number, event_id): - """Deletes an event from the case. - - Args: - case_number (int): Case number of the case. - event_id (str): Event id to remove from case. - - Returns: - :class:`py42.response.Py42Response` - """ - try: - return self._connection.delete( - f"{self._uri_prefix.format(case_number)}/{event_id}" - ) - except Py42BadRequestError as err: - if "CASE_IS_CLOSED" in err.response.text: - raise Py42UpdateClosedCaseError(err) - raise diff --git a/src/py42/services/fileevent.py b/src/py42/services/fileevent.py deleted file mode 100644 index 98ddaedd1..000000000 --- a/src/py42/services/fileevent.py +++ /dev/null @@ -1,127 +0,0 @@ -import json -from warnings import warn - -from requests.adapters import HTTPAdapter -from urllib3 import Retry - -import py42.settings.debug as debug -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42InvalidPageTokenError -from py42.services import BaseService - - -class FFSQueryRetryStrategy(Retry): - """The forensic search service helpfully responds with a 'retry-after' header, telling us how long until the rate - limiter is reset. We subclass :class:`urllib3.Retry` just to add a bit of logging so the user can tell why the - request might look like it's hanging. - """ - - def get_retry_after(self, response): - retry_after = super().get_retry_after(response) - if retry_after is not None: - debug.logger.info( - f"Forensic search rate limit hit, retrying after: {int(retry_after)} seconds." - ) - return retry_after - - def get_backoff_time(self): - backoff_time = super().get_backoff_time() - debug.logger.info( - f"Forensic search rate limit hit, retrying after: {backoff_time} seconds." - ) - return backoff_time - - -class FileEventService(BaseService): - """A service for searching file events. - - See the :ref:`Executing Searches User Guide ` to learn more about how - to construct a query. - """ - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self._retry_adapter_mounted = False - - def search(self, query): - """Searches for file events matching the query criteria. - `REST Documentation `__ - - The existing data model for file events is deprecated. - To use the updated data model for file events, `update your settings `__. - - Args: - query (:class:`~py42.sdk.queries.fileevents.v2.file_event_query.FileEventQuery` or str or unicode): - A composed :class:`~py42.sdk.queries.fileevents.v2.file_event_query.FileEventQuery` - object or the raw query as a JSON formatted string. 
- - Returns: - :class:`py42.response.Py42Response`: A response containing the query results. - """ - self._mount_retry_adapter() - - # if string query - if isinstance(query, str): - query = json.loads(query) - # v2 fields are accessible via dot notation (exception of "@timestamp") - version = "v2" if "." in query["srtKey"] or "@" in query["srtKey"] else "v1" - uri = f"/forensic-search/queryservice/api/{version}/fileevent" - # else query object - else: - # deprecation warning for v1 file events - if query.version == "v1": - warn( - "V1 file events are deprecated. Use V2 queries instead.", - DeprecationWarning, - stacklevel=2, - ) - - uri = f"/forensic-search/queryservice/api/{query.version}/fileevent" - query = dict(query) - - try: - return self._connection.post(uri, json=query) - except Py42BadRequestError as err: - if "INVALID_PAGE_TOKEN" in str(err.response.text): - page_token = query.get("pgToken") - if page_token: - raise Py42InvalidPageTokenError(err, page_token) - raise - - def get_file_location_detail_by_sha256(self, checksum): - """Get file location details based on SHA256 hash. - - Args: - checksum (str): SHA256 checksum of a file. - - Returns: - :class:`py42.response.Py42Response`: A response containing file details. - """ - self._mount_retry_adapter() - - uri = "/forensic-search/queryservice/api/v1/filelocations" - return self._connection.get(uri, params={"sha256": checksum}) - - def _mount_retry_adapter(self): - """Sets custom Retry strategy for FFS url requests to gracefully handle being rate-limited on FFS queries.""" - if not self._retry_adapter_mounted: - retry_strategy = FFSQueryRetryStrategy( - status=3, # retry up to 3 times - backoff_factor=5, # if `retry-after` header isn't present, use 5 second exponential backoff - allowed_methods=[ - "POST" - ], # POST isn't a default allowed method due to it usually modifying resources - status_forcelist=[ - 429 - ], # this only handles 429 errors, it won't retry on 5xx - ) - file_event_adapter = HTTPAdapter( - pool_connections=200, - pool_maxsize=4, - pool_block=True, - max_retries=retry_strategy, - ) - self._connection._session.mount( - self._connection.host_address, file_event_adapter - ) - self._retry_adapter_mounted = True diff --git a/src/py42/services/preservationdata.py b/src/py42/services/preservationdata.py deleted file mode 100644 index 6b3e620fb..000000000 --- a/src/py42/services/preservationdata.py +++ /dev/null @@ -1,27 +0,0 @@ -from urllib.parse import quote - -from py42.services import BaseService - - -class PreservationDataService(BaseService): - def find_file_version(self, file_md5, file_sha256, paths): - """Fetch file version details. - - Args: - file_md5 (str): MD5 encoded hash of the file. - file_sha256 (str): SHA256 encoded hash of the file. - paths (str): File path with filename to fetch. 
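For reference, the rate-limit handling removed with `fileevent.py` above follows the standard `requests`/`urllib3` adapter pattern: a custom `Retry` limited to `POST` and HTTP 429, mounted on the session for the service's host. A standalone sketch of the same idea (the session and host URL below are placeholders, not part of this patch; `allowed_methods` requires urllib3 1.26 or newer):

```python
import requests
from requests.adapters import HTTPAdapter
from urllib3 import Retry

# Retry POST requests only when the server answers 429, honoring any Retry-After
# header and falling back to exponential backoff otherwise.
retry_strategy = Retry(
    status=3,                  # up to 3 retries on retryable status codes
    backoff_factor=5,          # backoff used when no Retry-After header is present
    allowed_methods=["POST"],  # POST is not retried by default
    status_forcelist=[429],    # only retry on rate limiting
)

session = requests.Session()
session.mount("https://example.com", HTTPAdapter(max_retries=retry_strategy))
```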
- - Returns: - :class:`py42.response.Py42Response` - """ - - data = {"fileSHA256": file_sha256, "fileMD5": file_md5, "devicePaths": paths} - uri = "/api/v1/FindAvailableVersion" - return self._connection.post(uri, json=data) - - def get_file_version_list(self, device_id, file_md5, file_sha256, path): - params = "fileSHA256={}&fileMD5={}&deviceUid={}&filePath={}" - params = params.format(file_sha256, file_md5, device_id, quote(path)) - uri = f"/api/v2/file-version-listing?{params}" - return self._connection.get(uri) diff --git a/src/py42/services/savedsearch.py b/src/py42/services/savedsearch.py deleted file mode 100644 index 4e73d130d..000000000 --- a/src/py42/services/savedsearch.py +++ /dev/null @@ -1,134 +0,0 @@ -from warnings import warn - -from py42.sdk.queries.fileevents.file_event_query import ( - FileEventQuery as FileEventQueryV1, -) -from py42.sdk.queries.fileevents.v2.file_event_query import ( - FileEventQuery as FileEventQueryV2, -) -from py42.services import BaseService - - -class SavedSearchService(BaseService): - """A service to interact with saved search APIs.""" - - def __init__(self, connection, file_event_service): - super().__init__(connection) - self._file_event_service = file_event_service - self._uri = "" - self._version = "v1" - - @property - def uri(self): - # construct uri every call to see if settings changed - self._uri = f"/forensic-search/queryservice/api/{self._version}/saved" - return self._uri - - def get(self, use_v2=False): - """Fetch details of existing saved searches. - - The existing data model for file events and saved searches is deprecated. - To use the updated data model for file events, `update your settings `__. - Retrieving saved searches with V2 settings enabled will convert existing saved search queries to the V2 data model. Existing V1 queries that cannot be properly converted will be excluded from the response. - - Args: - use_v2 (bool): Flag to use v2 file events and saved searches. Defaults to False. - Returns: - :class:`py42.response.Py42Response` - """ - - # deprecation warning for v1 file events - if not use_v2: - warn( - "V1 file events and saved searches are deprecated. Use v2 apis by passing in the optional use_v2=True arg.", - DeprecationWarning, - stacklevel=2, - ) - - self._version = "v2" if use_v2 else "v1" - return self._connection.get(self.uri) - - def get_by_id(self, search_id, use_v2=False): - """Fetch the details of a saved search by its given search Id. - - The existing data model for file events and saved searches is deprecated. - To use the updated data model for file events, `update your settings `__. - Retrieving saved searches with V2 settings enabled will convert existing saved search queries to the V2 data model. Existing V1 queries that cannot be properly converted will be excluded from the response. - - Args: - search_id (str): Unique search Id of the saved search. - use_v2 (bool): Flag to use v2 file events and saved searches. Defaults to False. - Returns: - :class:`py42.response.Py42Response` - """ - - # deprecation warning for v1 file events - if not use_v2: - warn( - "V1 file events and saved searches are deprecated. Use v2 apis by passing in the optional use_v2=True arg.", - DeprecationWarning, - stacklevel=2, - ) - self._version = "v2" if use_v2 else "v1" - return self._connection.get(f"{self.uri}/{search_id}") - - def get_query(self, search_id, page_number=None, page_size=None, use_v2=False): - """Get the saved search in form of a query(`py42.sdk.queries.fileevents.file_event_query`). 
- - Args: - search_id (str): Unique search Id of the saved search. - page_number (int, optional): The consecutive group of results of size page_size in the result set to return. Defaults to None. - page_size (int, optional): The maximum number of results to be returned. Defaults to None. - use_v2 (bool): Flag to use v2 file events and saved searches. Defaults to False. - Returns: - :class:`py42.sdk.queries.fileevents.v2.file_event_query.FileEventQuery` - """ - - response = self.get_by_id(search_id, use_v2=use_v2) - search = response["searches"][0] - if use_v2: - return FileEventQueryV2.from_dict( - search, page_number=page_number, page_size=page_size - ) - return FileEventQueryV1.from_dict( - search, page_number=page_number, page_size=page_size - ) - - def execute(self, search_id, page_number=None, page_size=None, use_v2=False): - """ - Executes a saved search for given search Id, returns up to the first 10,000 events. - - Args: - search_id (str): Unique search Id of the saved search. - page_number (int, optional): The consecutive group of results of size page_size in the result set to return. Defaults to None. - page_size (int, optional): The maximum number of results to be returned. Defaults to None. - use_v2 (bool): Flag to use v2 file events and saved searches. Defaults to False. - Returns: - :class:`py42.response.Py42Response` - """ - query = self.get_query( - search_id, page_number=page_number, page_size=page_size, use_v2=use_v2 - ) - return self._file_event_service.search(query) - - def search_file_events( - self, search_id, page_number=None, page_size=None, use_v2=False - ): - """ - Alias method for :meth:`~execute()`. Executes a saved search for given search Id, returns up to the first 10,000 events. - - To view more than the first 10,000 events: - * pass the :data:`search_id` to :meth:`~get_query()` - * pass the resulting query (:class:`~py42.sdk.queries.fileevents.v2.file_event_query.FileEventQuery`) to :meth:`~py42.clients.securitydata.SecurityDataClient.search_all_file_events()`, use that method as normal. - - Args: - search_id (str): Unique search Id of the saved search. - page_number (int, optional): The consecutive group of results of size page_size in the result set to return. Defaults to None. - page_size (int, optional): The maximum number of results to be returned. Defaults to None. - use_v2 (bool): Flag to use v2 file events and saved searches. Defaults to False. - Returns: - :class:`py42.response.Py42Response` - """ - return self.execute( - search_id, page_number=page_number, page_size=page_size, use_v2=use_v2 - ) diff --git a/src/py42/services/storage/__init__.py b/src/py42/services/storage/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/py42/services/storage/exfiltrateddata.py b/src/py42/services/storage/exfiltrateddata.py deleted file mode 100644 index 31b8f8324..000000000 --- a/src/py42/services/storage/exfiltrateddata.py +++ /dev/null @@ -1,52 +0,0 @@ -from urllib.parse import quote - -from py42.services import BaseService - - -class ExfiltratedDataService(BaseService): - - _base_uri = "api/v1/" - - def __init__(self, main_session, streaming_session): - super().__init__(main_session) - self._streaming_session = streaming_session - - def get_download_token( - self, event_id, device_id, file_path, file_sha256, timestamp - ): - """Get EDS download token for a file. - - Args: - event_id (str): Id of the file event that references the file desired for download. 
- device_id (str): Id of the device on which the file desired for download is stored. - file_path (str): Path where the file desired for download resides on the device. - timestamp (int): Last updated timestamp of the file in milliseconds. - - Returns: - :class:`py42.response.Py42Response`: A response containing download token for the file. - """ - params = "deviceUid={}&eventId={}&filePath={}&fileSHA256={}&versionTimestamp={}" - params = params.format( - device_id, event_id, quote(file_path), file_sha256, timestamp - ) - resource = "file-download-token" - headers = {"Accept": "*/*"} - uri = f"{self._base_uri}{resource}?{params}" - return self._connection.get(uri, headers=headers) - - def get_file(self, token): - """Streams a file. - - Args: - token (str):EDS Download token. - - Returns: - Returns a stream of the file indicated by the input token. - """ - resource = "get-file" - uri = f"{self._connection.host_address}/{self._base_uri}{resource}" - params = {"token": token} - headers = {"Accept": "*/*"} - return self._streaming_session.get( - uri, params=params, headers=headers, stream=True - ) diff --git a/src/py42/services/storage/preservationdata.py b/src/py42/services/storage/preservationdata.py deleted file mode 100644 index b77687584..000000000 --- a/src/py42/services/storage/preservationdata.py +++ /dev/null @@ -1,51 +0,0 @@ -from py42.services import BaseService - - -class StoragePreservationDataService(BaseService): - - _base_uri = "api/v3/" - - def __init__(self, main_session, streaming_session): - super().__init__(main_session) - self._streaming_session = streaming_session - - def get_download_token(self, archive_guid, file_id, timestamp): - """Get PDS download token for a file. - - Args: - archive_guid (str): Archive guid of the file - file_id (str): Id of the file. - timestamp (int): Last updated timestamp of the file in milliseconds. - - Returns: - :class:`py42.response.Py42Response`: A response containing download token for the file. - """ - params = { - "archiveGuid": archive_guid, - "fileId": file_id, - "versionTimestamp": timestamp, - } - resource = "FileDownloadToken" - uri = f"{self._base_uri}{resource}" - return self._connection.get(uri, params=params) - - def get_file(self, token): - """Streams a file. - - Args: - token (str): PDS Download token. - - Returns: - Returns a stream of the requested token. 
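Both storage services above follow the same two-step flow: request a download token, then call `get_file` with that token to receive a streamed response (`stream=True`). A generic sketch of consuming such a stream, written against plain `requests` (the URL, token value, and output path are placeholders; the wrapped response type returned by the deleted services may differ):

```python
import requests

# Stream the file referenced by a previously obtained download token and write it
# to disk in chunks instead of loading the whole payload into memory.
response = requests.get(
    "https://example.com/api/v1/get-file",
    params={"token": "..."},
    headers={"Accept": "*/*"},
    stream=True,
)
response.raise_for_status()

with open("downloaded_file", "wb") as out:
    for chunk in response.iter_content(chunk_size=128 * 1024):
        if chunk:  # skip keep-alive chunks
            out.write(chunk)
```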
- """ - resource = "GetFile" - uri = f"{self._connection.host_address}/{self._base_uri}{resource}" - if "PDSDownloadToken=" in token: - replaced_token = token.replace("PDSDownloadToken=", "") - else: - replaced_token = token - params = {"PDSDownloadToken": replaced_token} - headers = {"Accept": "*/*"} - return self._streaming_session.get( - uri, params=params, headers=headers, stream=True - ) diff --git a/src/py42/services/trustedactivities.py b/src/py42/services/trustedactivities.py deleted file mode 100644 index 6a3fe2379..000000000 --- a/src/py42/services/trustedactivities.py +++ /dev/null @@ -1,90 +0,0 @@ -from py42 import settings -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42ConflictError -from py42.exceptions import Py42DescriptionLimitExceededError -from py42.exceptions import Py42NotFoundError -from py42.exceptions import Py42TrustedActivityConflictError -from py42.exceptions import Py42TrustedActivityIdNotFound -from py42.exceptions import Py42TrustedActivityInvalidCharacterError -from py42.services import BaseService -from py42.services.util import get_all_pages - - -class TrustedActivitiesService(BaseService): - - _uri_prefix = "/api/v1/trusted-activities" - - def __init__(self, connection): - super().__init__(connection) - - def get_all(self, type=None, page_size=None, **kwargs): - return get_all_pages( - self.get_page, "trustResources", type=type, page_size=page_size, **kwargs - ) - - def get_page(self, page_num, page_size, type, **kwargs): - page_size = page_size or settings.items_per_page - params = { - "type": type, - "pgNum": page_num, - "pgSize": page_size, - } - params.update(**kwargs) - - return self._connection.get(self._uri_prefix, params=params) - - def create(self, type, value, description=None): - data = { - "type": type, - "value": value, - "description": description, - } - try: - return self._connection.post(self._uri_prefix, json=data) - except Py42BadRequestError as err: - _handle_common_invalid_trust_parameters_errors(err) - except Py42ConflictError as err: - raise Py42TrustedActivityConflictError(err, value) - - def get(self, id): - uri = f"{self._uri_prefix}/{id}" - try: - return self._connection.get(uri) - except Py42NotFoundError as err: - raise Py42TrustedActivityIdNotFound(err, id) - - def update(self, id, value=None, description=None): - uri = f"{self._uri_prefix}/{id}" - current_activity_data = self.get(id).data - - if description is None: - description = current_activity_data.get("description") - - data = { - "type": current_activity_data.get("type"), - "value": value or current_activity_data.get("value"), - "description": description, - } - try: - return self._connection.put(uri, json=data) - except Py42BadRequestError as err: - _handle_common_invalid_trust_parameters_errors(err) - except Py42NotFoundError as err: - raise Py42TrustedActivityIdNotFound(err, id) - except Py42ConflictError as err: - raise Py42TrustedActivityConflictError(err, value) - - def delete(self, id): - uri = f"{self._uri_prefix}/{id}" - try: - return self._connection.delete(uri) - except Py42NotFoundError as err: - raise Py42TrustedActivityIdNotFound(err, id) - - -def _handle_common_invalid_trust_parameters_errors(base_err): - if "DESCRIPTION_TOO_LONG" in base_err.response.text: - raise Py42DescriptionLimitExceededError(base_err) - elif "INVALID_CHARACTERS_IN_VALUE" in base_err.response.text: - raise Py42TrustedActivityInvalidCharacterError(base_err) - raise diff --git a/src/py42/services/userriskprofile.py 
b/src/py42/services/userriskprofile.py deleted file mode 100644 index 84cb1c0e8..000000000 --- a/src/py42/services/userriskprofile.py +++ /dev/null @@ -1,191 +0,0 @@ -import datetime - -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42CloudAliasCharacterLimitExceededError -from py42.exceptions import Py42CloudAliasLimitExceededError -from py42.exceptions import Py42Error -from py42.exceptions import Py42NotFoundError -from py42.exceptions import Py42UserRiskProfileNotFound -from py42.services import BaseService -from py42.services.util import get_all_pages - -_DATE_FORMAT = "%Y-%m-%d" - - -class UserRiskProfileService(BaseService): - - _uri_prefix = "/v1/user-risk-profiles" - - def get_by_id(self, user_id): - uri = f"{self._uri_prefix}/{user_id}" - try: - return self._connection.get(uri) - # catch not found error - except Py42NotFoundError as err: - raise Py42UserRiskProfileNotFound(err, user_id) - - def get_by_username(self, username): - user_id = None - generator = self.get_all() - - # get the first page of user profiles - for page in generator: - for user in page.data["userRiskProfiles"]: - if user["username"] == username: - user_id = user["userId"] - break - - try: - return self.get_by_id(user_id) - except Py42NotFoundError as err: - raise Py42UserRiskProfileNotFound(err, username, identifier="username") - - def update(self, user_id, start_date=None, end_date=None, notes=None): - # Build paths field - paths = [] - data = {} - if start_date is not None: - paths += ["startDate"] - if start_date == "": - data["startDate"] = None - else: - start_day, start_month, start_year = _parse_date_string(start_date) - data["startDate"] = { - "day": start_day, - "month": start_month, - "year": start_year, - } - if end_date is not None: - paths += ["endDate"] - if end_date == "": - data["endDate"] = None - else: - end_day, end_month, end_year = _parse_date_string(end_date) - data["endDate"] = {"day": end_day, "month": end_month, "year": end_year} - if notes is not None: - paths += ["notes"] - if notes == "": - data["notes"] = None - else: - data["notes"] = notes - if not paths: - raise Py42Error("No fields provided. 
No values will be updated.") - - params = {"paths": ", ".join(paths)} - uri = f"{self._uri_prefix}/{user_id}" - try: - return self._connection.patch(uri, json=data, params=params) - # catch not found error - except Py42NotFoundError as err: - raise Py42UserRiskProfileNotFound(err, user_id) - # Backend handles invalid dates - - def get_page( - self, - page_num=None, - page_size=None, - manager_id=None, - title=None, - division=None, - department=None, - employment_type=None, - country=None, - region=None, - locality=None, - active=None, - deleted=None, - support_user=None, - ): - data = { - "page": page_num, - "page_size": page_size, - "manager_id": manager_id, - "title": title, - "division": division, - "department": department, - "employment_type": employment_type, - "country": country, - "region": region, - "locality": locality, - "active": active, - "deleted": deleted, - "support_user": support_user, - } - return self._connection.get(self._uri_prefix, params=data) - - def get_all( - self, - manager_id=None, - title=None, - division=None, - department=None, - employment_type=None, - country=None, - region=None, - locality=None, - active=None, - deleted=None, - support_user=None, - ): - return get_all_pages( - self.get_page, - "userRiskProfiles", - manager_id=manager_id, - title=title, - division=division, - department=department, - employment_type=employment_type, - country=country, - region=region, - locality=locality, - active=active, - deleted=deleted, - support_user=support_user, - ) - - def add_cloud_aliases(self, user_id, cloud_aliases): - if not isinstance(cloud_aliases, (list, tuple)): - cloud_aliases = [cloud_aliases] - - # frontend limits aliases to 50 characters - for alias in cloud_aliases: - if len(alias) > 50: - raise Py42CloudAliasCharacterLimitExceededError - - data = {"cloudAliases": cloud_aliases, "userId": user_id} - uri = f"{self._uri_prefix}/{user_id}/add-cloud-aliases" - - try: - return self._connection.post(uri, json=data) - # catch not found error - except Py42NotFoundError as err: - raise Py42UserRiskProfileNotFound(err, user_id) - # catch cloud username limit exceeded - except Py42BadRequestError as err: - if "Cloud usernames must be less than or equal to" in err.response.text: - raise Py42CloudAliasLimitExceededError(err) - raise - - def delete_cloud_aliases(self, user_id, cloud_aliases): - if not isinstance(cloud_aliases, (list, tuple)): - cloud_aliases = [cloud_aliases] - data = {"cloudAliases": cloud_aliases, "userId": user_id} - uri = f"{self._uri_prefix}/{user_id}/delete-cloud-aliases" - try: - return self._connection.post(uri, json=data) - # catch not found error - except Py42NotFoundError as err: - raise Py42UserRiskProfileNotFound(err, user_id) - - -def _parse_date_string(date): - # handle date-time - if isinstance(date, (datetime.date, datetime.datetime)): - date = date.strftime(_DATE_FORMAT) - - # assumes dates are in the format "yyyy-mm-dd" - try: - year, month, day = (int(i) for i in date.split("-")) - return day, month, year - except ValueError: - raise Py42Error("Unable to parse date. 
Expected format 'yyyy-mm-dd'.") diff --git a/src/py42/services/util.py b/src/py42/services/util.py deleted file mode 100644 index 93ec5101a..000000000 --- a/src/py42/services/util.py +++ /dev/null @@ -1,35 +0,0 @@ -import re - -import py42.settings as settings - - -def get_all_pages(func, key, *args, **kwargs): - if kwargs.get("page_size") is None: - kwargs["page_size"] = settings.items_per_page - - item_count = page_size = kwargs["page_size"] - page_num = 0 - while item_count >= page_size: - page_num += 1 - response = func(*args, page_num=page_num, **kwargs) - yield response - page_items = response[key] if key else response.data - item_count = len(page_items) - - -def escape_quote_chars(token): - """ - The `nextPgToken` returned in Forensic Search requests with > 10k results is the eventId - of the last event returned in the response. Some eventIds have double-quote chars in - them, which need to be escaped when passing the token in the next search request. - """ - if not isinstance(token, (str, bytes)): - return token - - unescaped_quote_pattern = r'[^\\]"' - - return re.sub( - pattern=unescaped_quote_pattern, - repl=lambda match: match.group().replace('"', r"\""), - string=token, - ) diff --git a/src/py42/services/watchlists.py b/src/py42/services/watchlists.py deleted file mode 100644 index e757b88e5..000000000 --- a/src/py42/services/watchlists.py +++ /dev/null @@ -1,159 +0,0 @@ -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42Error -from py42.exceptions import Py42InvalidWatchlistType -from py42.exceptions import Py42NotFoundError -from py42.exceptions import Py42WatchlistNotFound -from py42.exceptions import Py42WatchlistOrUserNotFound -from py42.services import BaseService -from py42.services.util import get_all_pages - - -class WatchlistsService(BaseService): - - _uri_prefix = "/v1/watchlists" - - def __init__(self, connection): - super().__init__(connection) - self._watchlist_type_id_map = {} - - @property - def watchlist_type_id_map(self): - """Map watchlist types to IDs, if they exist.""" - if not self._watchlist_type_id_map: - self._watchlist_type_id_map = {} - watchlists = self.get_page(page_size=100).data["watchlists"] - for item in watchlists: - # We will need to custom handle CUSTOM types when they come around - self._watchlist_type_id_map[item["listType"]] = item["watchlistId"] - return self._watchlist_type_id_map - - def get(self, watchlist_id): - uri = f"{self._uri_prefix}/{watchlist_id}" - try: - return self._connection.get(uri) - except Py42NotFoundError as err: - raise Py42WatchlistNotFound(err, watchlist_id) - - def delete(self, watchlist_id): - uri = f"{self._uri_prefix}/{watchlist_id}" - try: - response = self._connection.delete(uri) - # delete dictionary entry if success - if response.status_code == 200: - for k, v in self.watchlist_type_id_map.items(): - if v == watchlist_id: - del self.watchlist_type_id_map[k] - break - return response - except Py42NotFoundError as err: - raise Py42WatchlistNotFound(err, watchlist_id) - - def get_page(self, page_num=1, page_size=None): - data = { - "page": page_num, - "page_size": page_size, - } - return self._connection.get(self._uri_prefix, params=data) - - def get_all(self): - return get_all_pages(self.get_page, "watchlists") - - def create(self, watchlist_type, title=None, description=None): - data = { - "watchlistType": watchlist_type, - "title": title, - "description": description, - } - try: - response = self._connection.post(self._uri_prefix, json=data) - 
self.watchlist_type_id_map[watchlist_type] = response.data["watchlistId"] - return response - except Py42BadRequestError as err: - if ( - f"Error converting value \\\"{watchlist_type}\\\" to type 'WatchlistSdk.Model.WatchlistType'." - in err.response.text - ): - raise Py42InvalidWatchlistType(err, watchlist_type) - # Api handles Watchlist_Type_Unspecified Case - - def get_page_included_users(self, watchlist_id, page_num=1, page_size=None): - data = { - "page": page_num, - "page_size": page_size, - } - uri = f"{self._uri_prefix}/{watchlist_id}/included-users" - return self._connection.get(uri, params=data) - - def get_all_included_users(self, watchlist_id): - return get_all_pages( - self.get_page_included_users, "includedUsers", watchlist_id=watchlist_id - ) - - def add_included_users_by_watchlist_id(self, user_ids, watchlist_id): - if not isinstance(user_ids, (list, tuple)): - user_ids = [user_ids] - data = {"userIds": user_ids, "watchlistId": watchlist_id} - uri = f"{self._uri_prefix}/{watchlist_id}/included-users/add" - try: - return self._connection.post(uri, json=data) - except Py42BadRequestError as err: - if "Watchlist not found" in err.response.text: - raise Py42WatchlistNotFound(err, watchlist_id) - if "User not found" in err.response.text: - raise Py42NotFoundError(err, message=err.response.text) - raise - - def add_included_users_by_watchlist_type(self, user_ids, watchlist_type): - try: - id = self.watchlist_type_id_map[watchlist_type] - except KeyError: - # if watchlist of specified type not found, create watchlist - id = (self.create(watchlist_type)).data["watchlistId"] - return self.add_included_users_by_watchlist_id(user_ids, id) - - def delete_included_users_by_watchlist_id(self, user_ids, watchlist_id): - if not isinstance(user_ids, (list, tuple)): - user_ids = [user_ids] - data = {"userIds": user_ids, "watchlistId": watchlist_id} - uri = f"{self._uri_prefix}/{watchlist_id}/included-users/delete" - try: - return self._connection.post(uri, json=data) - except Py42BadRequestError as err: - if "Watchlist not found" in err.response.text: - raise Py42WatchlistNotFound(err, watchlist_id) - if "User not found" in err.response.text: - raise Py42NotFoundError(err, message=err.response.text) - raise - - def delete_included_users_by_watchlist_type(self, user_ids, watchlist_type): - try: - id = self.watchlist_type_id_map[watchlist_type] - except KeyError: - # if specified watchlist type not found, raise error - raise Py42Error(f"Couldn't find watchlist of type:'{watchlist_type}'.") - return self.delete_included_users_by_watchlist_id(user_ids, id) - - def get_page_watchlist_members(self, watchlist_id, page_num=1, page_size=None): - data = { - "page": page_num, - "page_size": page_size, - } - uri = f"{self._uri_prefix}/{watchlist_id}/members" - try: - return self._connection.get(uri, params=data) - except Py42NotFoundError as err: - raise Py42WatchlistNotFound(err, watchlist_id) - - def get_all_watchlist_members(self, watchlist_id): - return get_all_pages( - self.get_page_watchlist_members, - "watchlistMembers", - watchlist_id=watchlist_id, - ) - - def get_watchlist_member(self, watchlist_id, user_id): - uri = f"{self._uri_prefix}/{watchlist_id}/members/{user_id}" - try: - return self._connection.get(uri) - except Py42NotFoundError as err: - raise Py42WatchlistOrUserNotFound(err, watchlist_id, user_id) diff --git a/src/py42/__init__.py b/src/pycpg/__init__.py similarity index 100% rename from src/py42/__init__.py rename to src/pycpg/__init__.py diff --git a/src/pycpg/__version__.py 
b/src/pycpg/__version__.py new file mode 100644 index 000000000..ed29fa2ab --- /dev/null +++ b/src/pycpg/__version__.py @@ -0,0 +1,3 @@ +# pycpg + +__version__ = "1.0.0" diff --git a/src/py42/choices.py b/src/pycpg/choices.py similarity index 86% rename from src/py42/choices.py rename to src/pycpg/choices.py index 785d2398d..52f0d8bf8 100644 --- a/src/py42/choices.py +++ b/src/pycpg/choices.py @@ -1,4 +1,4 @@ -from py42.util import get_attribute_values_from_class +from pycpg.util import get_attribute_values_from_class class Choices: diff --git a/src/py42/clients/__init__.py b/src/pycpg/clients/__init__.py similarity index 55% rename from src/py42/clients/__init__.py rename to src/pycpg/clients/__init__.py index a91fb8402..a8e209438 100644 --- a/src/py42/clients/__init__.py +++ b/src/pycpg/clients/__init__.py @@ -3,15 +3,9 @@ Clients = namedtuple( "Clients", [ - "alerts", "archive", "authority", - "securitydata", "auditlogs", - "cases", "loginconfig", - "trustedactivities", - "userriskprofile", - "watchlists", ], ) diff --git a/src/py42/clients/_archiveaccess/__init__.py b/src/pycpg/clients/_archiveaccess/__init__.py similarity index 96% rename from src/py42/clients/_archiveaccess/__init__.py rename to src/pycpg/clients/_archiveaccess/__init__.py index 51cba2f72..91eecb599 100644 --- a/src/py42/clients/_archiveaccess/__init__.py +++ b/src/pycpg/clients/_archiveaccess/__init__.py @@ -1,7 +1,7 @@ import posixpath from collections import namedtuple -from py42.exceptions import Py42ArchiveFileNotFoundError +from pycpg.exceptions import PycpgArchiveFileNotFoundError # Data for initiating a web or push restore. FileSelection = namedtuple("FileSelection", "file, num_files, num_dirs, num_bytes") @@ -72,7 +72,7 @@ def _get_file_via_walking_tree(self, backup_set_id, file_path): if root["path"].lower() == path_root.lower(): return self._walk_tree(backup_set_id, response, root, path_parts[1:]) - raise Py42ArchiveFileNotFoundError(response, self._device_guid, file_path) + raise PycpgArchiveFileNotFoundError(response, self._device_guid, file_path) def _walk_tree( self, backup_set_id, response, current_file, remaining_path_components @@ -90,7 +90,7 @@ def _walk_tree( backup_set_id, response, child, remaining_path_components[1:] ) - raise Py42ArchiveFileNotFoundError( + raise PycpgArchiveFileNotFoundError( response, self._device_guid, target_child_path ) diff --git a/src/py42/clients/_archiveaccess/accessorfactory.py b/src/pycpg/clients/_archiveaccess/accessorfactory.py similarity index 93% rename from src/py42/clients/_archiveaccess/accessorfactory.py rename to src/pycpg/clients/_archiveaccess/accessorfactory.py index 2e8fdebe4..95606c255 100644 --- a/src/py42/clients/_archiveaccess/accessorfactory.py +++ b/src/pycpg/clients/_archiveaccess/accessorfactory.py @@ -1,10 +1,10 @@ -from py42.clients._archiveaccess import ArchiveContentPusher -from py42.clients._archiveaccess.restoremanager import create_file_size_poller -from py42.clients._archiveaccess.restoremanager import create_restore_job_manager +from pycpg.clients._archiveaccess import ArchiveContentPusher +from pycpg.clients._archiveaccess.restoremanager import create_file_size_poller +from pycpg.clients._archiveaccess.restoremanager import create_restore_job_manager class ArchiveAccessorFactory: - """Creates different types of :class:`py42.clients._archiveaccess.ArchiveAccessor` + """Creates different types of :class:`pycpg.clients._archiveaccess.ArchiveAccessor` for use in a web/push restore.""" def __init__(self, archive_service, 
storage_service_factory): diff --git a/src/py42/clients/_archiveaccess/restoremanager.py b/src/pycpg/clients/_archiveaccess/restoremanager.py similarity index 95% rename from src/py42/clients/_archiveaccess/restoremanager.py rename to src/pycpg/clients/_archiveaccess/restoremanager.py index 9e7773a1e..bd787c3fa 100644 --- a/src/py42/clients/_archiveaccess/restoremanager.py +++ b/src/pycpg/clients/_archiveaccess/restoremanager.py @@ -1,9 +1,9 @@ import time -from py42.services.storage.restore import PushRestoreExistingFiles -from py42.services.storage.restore import PushRestoreLocation -from py42.settings import debug -from py42.util import format_dict +from pycpg.services.storage.restore import PushRestoreExistingFiles +from pycpg.services.storage.restore import PushRestoreLocation +from pycpg.settings import debug +from pycpg.util import format_dict def create_restore_job_manager( @@ -36,7 +36,7 @@ def __init__(self, storage_archive_service, device_guid, job_polling_interval=No class FileSizePoller(_RestorePoller): """Monitors the status of a poll-job; the bytes and number of files needed for a - restore. This affords py42 users a chance to observe the progress of a web/push + restore. This affords pycpg users a chance to observe the progress of a web/push restore.""" def __init__(self, storage_archive_service, device_guid, job_polling_interval=None): @@ -177,7 +177,7 @@ def _start_web_restore(self, backup_set_id, file_selections, show_deleted): num_dirs = sum(fs.num_dirs for fs in file_selections) num_bytes = sum(fs.num_bytes for fs in file_selections) - # For py42 backwards compat. + # For pycpg backwards compat. if show_deleted is None: show_deleted = True diff --git a/src/py42/clients/archive.py b/src/pycpg/clients/archive.py similarity index 89% rename from src/py42/clients/archive.py rename to src/pycpg/clients/archive.py index 694fecc7d..1d8e830c3 100644 --- a/src/py42/clients/archive.py +++ b/src/pycpg/clients/archive.py @@ -1,6 +1,6 @@ -from py42.clients._archiveaccess import ArchiveContentStreamer -from py42.clients._archiveaccess import ArchiveExplorer -from py42.exceptions import Py42Error +from pycpg.clients._archiveaccess import ArchiveContentStreamer +from pycpg.clients._archiveaccess import ArchiveExplorer +from pycpg.exceptions import PycpgError _FILE_SIZE_CALC_TIMEOUT = 10 @@ -23,7 +23,7 @@ def get_by_archive_guid(self, archive_guid): archive_guid (str): The GUID for the archive. Returns: - :class:`py42.response.Py42Response`: A response containing archive + :class:`pycpg.response.PycpgResponse`: A response containing archive information. """ return self._archive_service.get_single_archive(archive_guid) @@ -35,7 +35,7 @@ def get_all_by_device_guid(self, device_guid): device_guid (str): The GUID for the device. Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` + generator: An object that iterates over :class:`pycpg.response.PycpgResponse` objects that each contain a page of archives. """ return self._archive_service.get_all_archives_from_value( @@ -56,7 +56,7 @@ def stream_from_backup( """Streams a file from a backup archive to memory. This method uses the same endpoint as restoring from Console and therefore has all the same considerations. - `Support Documentation `__ + `Support Documentation `__ Args: file_paths (str or list of str): The path or list of paths to the files or directories in @@ -82,7 +82,7 @@ def stream_from_backup( the first in the list of existing backup sets will be used. 
Returns: - :class:`py42.response.Py42Response`: A response containing the streamed content. + :class:`pycpg.response.PycpgResponse`: A response containing the streamed content. Usage example:: @@ -162,7 +162,7 @@ def stream_to_device( the first in the list of existing backup sets will be used. Returns: - :class:`py42.response.Py42Response`. + :class:`pycpg.response.PycpgResponse`. """ explorer = self._archive_accessor_factory.create_archive_accessor( device_guid, @@ -198,7 +198,7 @@ def _select_backup_set_id(self, device_guid, destination_guid, backup_set_id): backup_set_ids = [bs["backupSetId"] for bs in backup_sets] if backup_set_id: if backup_set_id not in backup_set_ids: - raise Py42Error( + raise PycpgError( f"backup_set_id={backup_set_id} not found in device backup sets: {backup_sets}" ) return backup_set_id @@ -209,12 +209,12 @@ def _select_backup_set_id(self, device_guid, destination_guid, backup_set_id): elif len(backup_set_ids) > 0: return backup_set_ids[0] else: - raise Py42Error("Failed to get backup sets for device.") + raise PycpgError("Failed to get backup sets for device.") def get_backup_sets(self, device_guid, destination_guid): """Gets all backup set names/identifiers referring to a single destination for a specific device. - `Learn more about backup sets. `__ + `Learn more about backup sets. `__ Args: device_guid (str): The GUID of the device to get backup sets for. @@ -222,7 +222,7 @@ def get_backup_sets(self, device_guid, destination_guid): backup sets for. Returns: - :class:`py42.response.Py42Response`: A response containing the backup sets. + :class:`pycpg.response.PycpgResponse`: A response containing the backup sets. """ return self._archive_service.get_backup_sets(device_guid, destination_guid) @@ -234,7 +234,7 @@ def get_all_org_restore_history(self, days, org_id): org_id (int): The identification number of the organization to get restore history for. Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects + generator: An object that iterates over :class:`pycpg.response.PycpgResponse` objects that each contain a page of restore history. """ return self._archive_service.get_all_restore_history(days, "orgId", org_id) @@ -247,7 +247,7 @@ def get_all_user_restore_history(self, days, user_id): user_id (int): The identification number of the user to get restore history for. Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects + generator: An object that iterates over :class:`pycpg.response.PycpgResponse` objects that each contain a page of restore history. """ return self._archive_service.get_all_restore_history(days, "userId", user_id) @@ -260,7 +260,7 @@ def get_all_device_restore_history(self, days, device_id): device_id (int): The identification number of the device to get restore history for. Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects + generator: An object that iterates over :class:`pycpg.response.PycpgResponse` objects that each contain a page of restore history. """ return self._archive_service.get_all_restore_history( @@ -275,7 +275,7 @@ def update_cold_storage_purge_date(self, archive_guid, purge_date): purge_date (str): The date on which the archive should be purged in yyyy-MM-dd format Returns: - :class:`py42.response.Py42Response`: the response from the ColdStorage API. + :class:`pycpg.response.PycpgResponse`: the response from the ColdStorage API. 
""" return self._archive_service.update_cold_storage_purge_date( archive_guid, purge_date @@ -291,7 +291,7 @@ def get_all_org_cold_storage_archives( """Returns a detailed list of cold storage archive information for a given org ID. Args: - org_id (str): The ID of a Code42 organization. + org_id (str): The ID of a CrashPlan organization. include_child_orgs (bool, optional): Determines whether cold storage information from the Org's children is also returned. Defaults to True. sort_key (str, optional): Sets the property by which the returned results will be sorted. @@ -300,7 +300,7 @@ def get_all_org_cold_storage_archives( asc or desc. Defaults to asc. Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects + generator: An object that iterates over :class:`pycpg.response.PycpgResponse` objects that each contain a page of cold storage archive information. """ return self._archive_service.get_all_org_cold_storage_archives( diff --git a/src/py42/clients/auditlogs.py b/src/pycpg/clients/auditlogs.py similarity index 77% rename from src/py42/clients/auditlogs.py rename to src/pycpg/clients/auditlogs.py index 560b69019..7d16f9752 100644 --- a/src/py42/clients/auditlogs.py +++ b/src/pycpg/clients/auditlogs.py @@ -1,5 +1,5 @@ class AuditLogsClient: - """`Rest documentation `__""" + """`Rest documentation `__""" def __init__(self, audit_log_service): self._audit_log_service = audit_log_service @@ -21,27 +21,27 @@ def get_page( """Retrieve a page of audit logs, filtered based on given arguments. Note: `page_num` here can be used same way as other methods that have a - `page_num` parameter in py42. However, under the hood, it subtracts one from - the given `page_num` in the implementation as the Code42 Audit-Logs API expects + `page_num` parameter in pycpg. However, under the hood, it subtracts one from + the given `page_num` in the implementation as the CrashPlan Audit-Logs API expects the start page to be zero. - `Rest Documentation `__ + `Rest Documentation `__ Args: page_num (int, optional): The page number to get. Defaults to 1. - page_size (int, optional): The number of items per page. Defaults to `py42.settings.items_per_page`. + page_size (int, optional): The number of items per page. Defaults to `pycpg.settings.items_per_page`. begin_time (int or float or str or datetime, optional): Timestamp in milliseconds or str format "yyyy-MM-dd HH:MM:SS" or a datetime instance. Defaults to None. end_time (int or float or str or datetime, optional): Timestamp in milliseconds or str format "yyyy-MM-dd HH:MM:SS" or a datetime instance. Defaults to None. event_types (str or list, optional): A str or list of str of valid event types. Defaults to None. - user_ids (str or list, optional): A str or list of str of Code42 userUids. Defaults to None. - usernames (str or list, optional): A str or list of str of Code42 usernames. Defaults to None. + user_ids (str or list, optional): A str or list of str of CrashPlan userUids. Defaults to None. + usernames (str or list, optional): A str or list of str of CrashPlan usernames. Defaults to None. user_ip_addresses (str or list, optional): A str or list of str of user ip addresses. Defaults to None. - affected_user_ids (str or list, optional): A str or list of str of affected Code42 userUids. Defaults to None. - affected_usernames (str or list, optional): A str or list of str of affected Code42 usernames. Defaults to None. + affected_user_ids (str or list, optional): A str or list of str of affected CrashPlan userUids. 
Defaults to None. + affected_usernames (str or list, optional): A str or list of str of affected CrashPlan usernames. Defaults to None. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ return self._audit_log_service.get_page( page_num=page_num, @@ -70,7 +70,7 @@ def get_all( **kwargs ): """Retrieve audit logs, filtered based on given arguments. - `Rest Documentation `__ + `Rest Documentation `__ Args: begin_time (int or float or str or datetime, optional): Timestamp in milliseconds or @@ -78,14 +78,14 @@ def get_all( end_time (int or float or str or datetime, optional): Timestamp in milliseconds or str format "yyyy-MM-dd HH:MM:SS" or a datetime instance. Defaults to None. event_types (str or list, optional): A str or list of str of valid event types. Defaults to None. - user_ids (str or list, optional): A str or list of str of Code42 userUids. Defaults to None. - usernames (str or list, optional): A str or list of str of Code42 usernames. Defaults to None. + user_ids (str or list, optional): A str or list of str of CrashPlan userUids. Defaults to None. + usernames (str or list, optional): A str or list of str of CrashPlan usernames. Defaults to None. user_ip_addresses (str or list, optional): A str or list of str of user ip addresses. Defaults to None. - affected_user_ids (str or list, optional): A str or list of str of affected Code42 userUids. Defaults to None. - affected_usernames (str or list, optional): A str or list of str of affected Code42 usernames. Defaults to None. + affected_user_ids (str or list, optional): A str or list of str of affected CrashPlan userUids. Defaults to None. + affected_usernames (str or list, optional): A str or list of str of affected CrashPlan usernames. Defaults to None. Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects + generator: An object that iterates over :class:`pycpg.response.PycpgResponse` objects that each contain a page of audit logs. """ return self._audit_log_service.get_all( diff --git a/src/py42/clients/authority.py b/src/pycpg/clients/authority.py similarity index 100% rename from src/py42/clients/authority.py rename to src/pycpg/clients/authority.py diff --git a/src/py42/clients/loginconfig.py b/src/pycpg/clients/loginconfig.py similarity index 83% rename from src/py42/clients/loginconfig.py rename to src/pycpg/clients/loginconfig.py index 6d5047719..02c286d9a 100644 --- a/src/py42/clients/loginconfig.py +++ b/src/pycpg/clients/loginconfig.py @@ -1,4 +1,4 @@ -from py42.response import Py42Response +from pycpg.response import PycpgResponse class LoginConfigurationClient: @@ -14,8 +14,8 @@ def get_for_user(self, username): username (str): Username to retrieve login configuration for. 
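The audit log client above accepts `begin_time`/`end_time` as epoch milliseconds, `"yyyy-MM-dd HH:MM:SS"` strings, or `datetime` instances, and `get_all` yields one response per page. A hedged usage sketch; how the top-level `sdk` object is constructed, and the exact shape of each page, are assumptions rather than part of this patch (the `auditlogs` attribute matches the `Clients` namedtuple retained earlier in the diff):

```python
from datetime import datetime, timedelta

import pycpg.sdk  # assumed entry point, mirroring the renamed package layout in this patch

sdk = pycpg.sdk.from_local_account(
    "https://example.crashplan.com", "user@example.com", "password"  # placeholder credentials
)

begin = datetime.utcnow() - timedelta(days=7)
for page in sdk.auditlogs.get_all(begin_time=begin):
    # Each iteration is one page-sized response of audit log events.
    print(page)
```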
Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"{self._connection.host_address}/api/v3/LoginConfiguration" response = self._connection._session.get(uri, params={"username": username}) - return Py42Response(response) + return PycpgResponse(response) diff --git a/src/py42/clients/settings/__init__.py b/src/pycpg/clients/settings/__init__.py similarity index 100% rename from src/py42/clients/settings/__init__.py rename to src/pycpg/clients/settings/__init__.py diff --git a/src/py42/clients/settings/_converters.py b/src/pycpg/clients/settings/_converters.py similarity index 85% rename from src/py42/clients/settings/_converters.py rename to src/pycpg/clients/settings/_converters.py index 9c1768577..f4bf6f7e7 100644 --- a/src/py42/clients/settings/_converters.py +++ b/src/pycpg/clients/settings/_converters.py @@ -21,17 +21,6 @@ def to_list(value): return [value] -def to_comma_separated(value): - if isinstance(value, (list, tuple)): - return ",".join(value) - else: - return value - - -def comma_separated_to_list(value): - return value.split(",") - - def days_to_minutes(days): minutes = int(float(days) * 1440) return str(minutes) diff --git a/src/py42/clients/settings/device_settings.py b/src/pycpg/clients/settings/device_settings.py similarity index 88% rename from src/py42/clients/settings/device_settings.py rename to src/pycpg/clients/settings/device_settings.py index ef721b1ec..6292ee78b 100644 --- a/src/py42/clients/settings/device_settings.py +++ b/src/pycpg/clients/settings/device_settings.py @@ -1,77 +1,23 @@ from collections import UserDict from collections import UserList -from py42.clients.settings import check_lock -from py42.clients.settings import SettingProperty -from py42.clients.settings import show_change -from py42.clients.settings._converters import bool_to_str -from py42.clients.settings._converters import days_to_minutes -from py42.clients.settings._converters import minutes_to_days -from py42.clients.settings._converters import str_to_bool -from py42.exceptions import Py42Error - -invalid_destination_error = Py42Error( +from pycpg.clients.settings import check_lock +from pycpg.clients.settings import SettingProperty +from pycpg.clients.settings import show_change +from pycpg.clients.settings._converters import bool_to_str +from pycpg.clients.settings._converters import days_to_minutes +from pycpg.clients.settings._converters import minutes_to_days +from pycpg.clients.settings._converters import str_to_bool +from pycpg.exceptions import PycpgError + +invalid_destination_error = PycpgError( "Invalid destination guid or destination not offered to device's Org." ) -destination_not_added_error = Py42Error( +destination_not_added_error = PycpgError( "Destination is not added to device, unable to lock." ) -class IncydrDeviceSettings(UserDict): - """Class used to manage individual Incydr devices. These devices have no backup settings and only the **notes** and **external reference** fields are modifiable.""" - - def __init__(self, settings_dict): - self.changes = {} - self.data = settings_dict - - @property - def name(self): - """Name of this device. Read-only.""" - return self.data["name"] - - @property - def computer_id(self): - """Identifier of this device. Read-only.""" - return self.data["computerId"] - - @property - def device_id(self): - """Identifier of this device (alias of `.computer_id`). Read only.""" - return self.computer_id - - @property - def guid(self): - """Globally unique identifier of this device. 
Read-only.""" - return self.data["guid"] - - @property - def org_id(self): - """Identifier of the organization this device belongs to. Read-only.""" - return self.data["orgId"] - - @property - def user_id(self): - """Identifier of the user this device belongs to. Read-only.""" - return self.data["userId"] - - @property - def version(self): - """Latest reported Code42 client version number for this device. Read-only.""" - return self.data["version"] - - external_reference = SettingProperty( - name="external_reference", location=["computerExtRef"] - ) - """External reference field for this device.""" - - notes = SettingProperty(name="notes", location=["notes"]) - """Notes field for this device.""" - - def __repr__(self): - return f"" - - class DeviceSettingsDefaults(UserDict): """Class used for managing an Organization's Device Default settings. Also acts as a base class for `DeviceSettings` to manage individual device settings.""" @@ -99,7 +45,7 @@ def _extract_backup_sets(self, backup_sets): if not _backup_set_is_legal_hold(bs) ] else: - raise Py42Error(f"Unable to extract backup sets: {backup_sets}") + raise PycpgError(f"Unable to extract backup sets: {backup_sets}") else: return [ BackupSet(self, bs) @@ -208,7 +154,7 @@ def user_id(self): @property def version(self): - """Latest reported Code42 client version number for this device. Read-only.""" + """Latest reported CrashPlan client version number for this device. Read-only.""" return self.data["version"] @property @@ -416,7 +362,7 @@ def destinations(self): return destination_dict def add_destination(self, destination_guid): - """Adds a destination to be used by this backup set. Raises a :class:`Py42Error` if + """Adds a destination to be used by this backup set. Raises a :class:`PycpgError` if the supplied destination guid is not available to the parent device/org. Args: @@ -457,7 +403,7 @@ def remove_destination(self, destination_guid): def lock_destination(self, destination_guid): """Locks an in-use destination, disallowing the device owner from removing this - destination from their backup. Raises a :class:`Py42Error` if the supplied destination + destination from their backup. Raises a :class:`PycpgError` if the supplied destination guid is not in use on this backup set, or not available to the parent device/org. """ destination_guid = str(destination_guid) @@ -476,7 +422,7 @@ def lock_destination(self, destination_guid): def unlock_destination(self, destination_guid): """Unlocks an in-use destination, allowing the device owner to remove this - destination from their backup. Raises a :class:`~py42.exceptions.Py42Error` if the supplied destination + destination from their backup. Raises a :class:`~pycpg.exceptions.PycpgError` if the supplied destination guid is not in use on this backup set, or not available to the parent device/org. 
""" destination_guid = str(destination_guid) diff --git a/src/pycpg/clients/settings/org_settings.py b/src/pycpg/clients/settings/org_settings.py new file mode 100644 index 000000000..35ff4b0c0 --- /dev/null +++ b/src/pycpg/clients/settings/org_settings.py @@ -0,0 +1,161 @@ +from collections import UserDict + +from pycpg.clients.settings import SettingProperty +from pycpg.clients.settings import TSettingProperty +from pycpg.clients.settings._converters import bool_to_str +from pycpg.clients.settings._converters import bytes_to_gb +from pycpg.clients.settings._converters import gb_to_bytes +from pycpg.clients.settings._converters import str_to_bool +from pycpg.clients.settings._converters import to_list +from pycpg.clients.settings.device_settings import DeviceSettingsDefaults + + +class OrgSettings(UserDict): + """Class used to manage an Organization's settings.""" + + def __init__(self, org_settings, t_settings): + self.data = org_settings + self._t_settings = t_settings + self._packets = {} + self.changes = {} + try: + self.device_defaults = DeviceSettingsDefaults( + self.data["deviceDefaults"], org_settings=self + ) + except KeyError: + self.device_defaults = None + + @property + def packets(self): + """The setting packets for any modifications to be posted to the /api/v1/OrgSettings + endpoint. + """ + return list(self._packets.values()) + + @property + def org_id(self): + """The identifier for the org.""" + return self.data["orgId"] + + @property + def registration_key(self): + """The registration key for the org.""" + return self.data["registrationKey"] + + org_name = SettingProperty("org_name", ["orgName"]) + """Name for this Org.""" + + external_reference = SettingProperty("external_reference", ["orgExtRef"]) + """External reference field for this Org.""" + + notes = SettingProperty("notes", ["notes"]) + """Notes field for this Org.""" + + quota_settings_inherited = SettingProperty( + "quota_settings_inherited", + ["settings", "isUsingQuotaDefaults"], + ) + """Determines if Org Quota settings (`maximum_user_subscriptions`, `org_backup_quota`, + `user_backup_quota`, `archive_hold_days`) are inherited from parent organization. + + Modifying one of the Org Quota attributes automatically sets this attribute to `False`. + """ + + archive_hold_days = SettingProperty( + "archive_hold_days", + ["settings", "archiveHoldDays"], + inheritance_attr="quota_settings_inherited", + ) + """Number of days backup archives are held in cold storage after deactivation or + destination removal from any devices in this Org. + """ + + maximum_user_subscriptions = SettingProperty( + "maximum_user_subscriptions", + ["settings", "maxSeats"], + inheritance_attr="quota_settings_inherited", + ) + """Number of users allowed to consume a license in this Org. Set to -1 for unlimited.""" + + org_backup_quota = SettingProperty( + "org_backup_quota", + ["settings", "maxBytes"], + get_converter=bytes_to_gb, + set_converter=gb_to_bytes, + inheritance_attr="quota_settings_inherited", + ) + """Backup storage quota (in GB) for this organization. Set to -1 for unlimited.""" + + user_backup_quota = SettingProperty( + "user_backup_quota", + ["settings", "defaultUserMaxBytes"], + get_converter=bytes_to_gb, + set_converter=gb_to_bytes, + inheritance_attr="quota_settings_inherited", + ) + """Backup storage quota (in GB) for each user in this organization. 
Set to -1 for + unlimited.""" + + web_restore_admin_limit = SettingProperty( + "web_restore_admin_limit", ["settings", "webRestoreAdminLimitMb"] + ) + """Limit (in MB) to amount of data restorable by admin users via web restore.""" + + web_restore_user_limit = SettingProperty( + "web_restore_user_limit", ["settings", "webRestoreUserLimitMb"] + ) + """Limit (in MB) to amount of data restorable by non-admin users via web restore.""" + + reporting_settings_inherited = SettingProperty( + "reporting_settings_inherited", + ["settings", "isUsingReportingDefaults"], + ) + """Determines if Org Reporting settings (`backup_warning_email_days`, + `backup_critical_email_days', `backup_alert_recipient_emails`) are inherited from + parent organization. + + Modifying one of the Org Reporting attributes automatically sets this attribute to + `False`. + """ + + backup_warning_email_days = SettingProperty( + "backup_warning_email_days", + ["settings", "warnInDays"], + inheritance_attr="reporting_settings_inherited", + ) + """The number of days devices in this org can go without any backup before "warning" + alerts get sent to org admins. + """ + + backup_critical_email_days = SettingProperty( + "backup_critical_email_days", + ["settings", "alertInDays"], + inheritance_attr="reporting_settings_inherited", + ) + """The number of days devices in this org can go without any backup before "critical" + alerts get sent to org admins. + """ + + backup_alert_recipient_emails = SettingProperty( + "backup_alert_recipient_emails", + ["settings", "recipients"], + set_converter=to_list, + inheritance_attr="reporting_settings_inherited", + ) + """List of email addresses that organization backup alert emails get sent to (org + admin users get these automatically). + """ + + web_restore_enabled = TSettingProperty( + "web_restore_enabled", + "device_webRestore_enabled", + get_converter=str_to_bool, + set_converter=bool_to_str, + ) + """Determines if web restores are enabled for devices in this org.""" + + def __repr__(self): + return f"" + + def __str__(self): + return str(self.data) diff --git a/src/pycpg/constants/__init__.py b/src/pycpg/constants/__init__.py new file mode 100644 index 000000000..28c57bc6c --- /dev/null +++ b/src/pycpg/constants/__init__.py @@ -0,0 +1,12 @@ +from pycpg.choices import Choices + + +class SortDirection(Choices): + """Constants available to set CrashPlan request `sort_direction` when sorting returned lists in responses. 
+
+    * ``ASC``
+    * ``DESC``
+    """
+
+    DESC = "DESC"
+    ASC = "ASC"
diff --git a/src/pycpg/exceptions.py b/src/pycpg/exceptions.py
new file mode 100644
index 000000000..263683145
--- /dev/null
+++ b/src/pycpg/exceptions.py
@@ -0,0 +1,379 @@
+from pycpg.settings import debug
+
+
+class PycpgError(Exception):
+    """A generic, Pycpg custom base exception."""
+
+
+class PycpgResponseError(PycpgError):
+    """A base custom class to manage all errors raised because of an HTTP response."""
+
+    def __init__(self, response, message, *args):
+        super().__init__(message, *args)
+        self._response = response
+
+    @property
+    def response(self):
+        """The response prior to the error."""
+        return self._response
+
+
+class PycpgArchiveFileNotFoundError(PycpgResponseError):
+    """An exception raised when a resource file is not found or the path is invalid."""
+
+    def __init__(self, response, device_guid, file_path):
+        message = (
+            f"File not found in archive for device {device_guid} at path {file_path}"
+        )
+        super().__init__(response, message, device_guid, file_path)
+        self._device_guid = device_guid
+        self._file_path = file_path
+
+    @property
+    def device_guid(self):
+        """The device GUID provided."""
+        return self._device_guid
+
+    @property
+    def file_path(self):
+        """The file path provided."""
+        return self._file_path
+
+
+class PycpgChecksumNotFoundError(PycpgResponseError):
+    """An exception raised when a user-supplied hash could not successfully locate its corresponding resource."""
+
+    def __init__(self, response, checksum_name, checksum_value):
+        message = f"No files found with {checksum_name} checksum {checksum_value}."
+        super().__init__(response, message, checksum_name, checksum_value)
+        self._checksum_name = checksum_name
+        self._checksum_value = checksum_value
+
+    @property
+    def checksum_name(self):
+        """The checksum name."""
+        return self._checksum_name
+
+    @property
+    def checksum_value(self):
+        """The checksum value."""
+        return self._checksum_value
+
+
+class PycpgFeatureUnavailableError(PycpgResponseError):
+    """An exception raised when a requested feature is not supported in your CrashPlan environment."""
+
+    def __init__(self, response):
+        super().__init__(
+            response,
+            "You may be trying to use a feature that is unavailable in your environment.",
+        )
+
+
+class PycpgHTTPError(PycpgResponseError):
+    """A base custom class to manage all HTTP errors raised by an API endpoint."""
+
+    def __init__(self, exception, message=None, *args):
+        if not message:
+            response_content = f"Response content: {exception.response.text}"
+            message = f"Failure in HTTP call {exception}. {response_content}"
+            debug.logger.debug(message)
+
+        super().__init__(exception.response, message, *args)
+
+
+class PycpgDeviceNotConnectedError(PycpgResponseError):
+    """An exception raised when trying to push a restore to a device that is not
+    connected to an Authority server."""
+
+    def __init__(self, response, device_guid):
+        message = (
+            f"Device with GUID '{device_guid}' is not currently connected to the Authority "
+            "server."
+        )
+        super().__init__(response, message, device_guid)
+        self._device_guid = device_guid
+
+    @property
+    def device_guid(self):
+        """The device GUID."""
+        return self._device_guid
+
+
+class PycpgInvalidArchivePassword(PycpgHTTPError):
+    """An exception raised when the password for unlocking an archive is invalid."""
+
+    def __init__(self, exception):
+        message = "Invalid archive password."
+        super().__init__(exception, message)
+
+
+class PycpgInvalidArchiveEncryptionKey(PycpgHTTPError):
+    """An exception raised when the encryption key for an archive is invalid."""
+
+    def __init__(self, exception):
+        message = "Invalid archive encryption key."
+        super().__init__(exception, message)
+
+
+class PycpgStorageSessionInitializationError(PycpgHTTPError):
+    """An exception raised when the user is not authorized to initialize a storage session. This
+    may occur when trying to restore a file or trying to get events for file activity on removable
+    media, in cloud sync folders, and browser uploads."""
+
+    def __init__(self, exception, message):
+        super().__init__(exception, message)
+
+
+class PycpgSessionInitializationError(PycpgError):
+    """An exception raised when a user connection is invalid. A connection might be invalid due to
+    connection timeout, invalid token, etc.
+    """
+
+    def __init__(self, exception):
+        message = (
+            "An error occurred while requesting "
+            f"server environment information, caused by {exception}"
+        )
+        super().__init__(exception, message)
+
+
+class PycpgBadRequestError(PycpgHTTPError):
+    """A wrapper to represent an HTTP 400 error."""
+
+
+class PycpgUnauthorizedError(PycpgHTTPError):
+    """A wrapper to represent an HTTP 401 error."""
+
+
+class PycpgForbiddenError(PycpgHTTPError):
+    """A wrapper to represent an HTTP 403 error."""
+
+
+class PycpgNotFoundError(PycpgHTTPError):
+    """A wrapper to represent an HTTP 404 error."""
+
+
+class PycpgConflictError(PycpgHTTPError):
+    """A wrapper to represent an HTTP 409 error."""
+
+
+class PycpgInternalServerError(PycpgHTTPError):
+    """A wrapper to represent an HTTP 500 error."""
+
+
+class PycpgTooManyRequestsError(PycpgHTTPError):
+    """A wrapper to represent an HTTP 429 error."""
+
+
+class PycpgOrgNotFoundError(PycpgBadRequestError):
+    """An exception raised when a 400 HTTP error message indicates that an
+    organization was not found."""
+
+    def __init__(self, exception, org_uid):
+        msg = f"The organization with UID '{org_uid}' was not found."
+        super().__init__(exception, msg, org_uid)
+        self._org_uid = org_uid
+
+    @property
+    def org_uid(self):
+        """The org UID."""
+        return self._org_uid
+
+
+class PycpgActiveLegalHoldError(PycpgBadRequestError):
+    """An exception raised when attempting to deactivate a user or device that is in an
+    active legal hold."""
+
+    def __init__(self, exception, resource, resource_id):
+        msg = f"Cannot deactivate the {resource} with ID {resource_id} as the {resource} is involved in a legal hold matter."
+        super().__init__(exception, msg, resource, resource_id)
+        self._resource = resource
+        self._resource_id = resource_id
+
+    @property
+    def resource(self):
+        """The user or device resource."""
+        return self._resource
+
+    @property
+    def resource_id(self):
+        """The resource ID."""
+        return self._resource_id
+
+
+class PycpgUserAlreadyAddedError(PycpgBadRequestError):
+    """An exception raised when the user is already added to a group or list, such as the
+    Departing Employee list."""
+
+    def __init__(self, exception, user_id, list_name):
+        msg = f"User with ID {user_id} is already on the {list_name}."
+ super().__init__(exception, msg, user_id, list_name) + self._user_id = user_id + + @property + def user_id(self): + """The user ID.""" + return self._user_id + + +class PycpgLegalHoldNotFoundOrPermissionDeniedError(PycpgForbiddenError): + """An exception raised when a legal hold matter is inaccessible from your account or + the matter UID is not valid.""" + + def __init__(self, exception, resource_uid, legal_hold_resource="matter"): + message = f"{legal_hold_resource.capitalize()} with UID '{resource_uid}' can not be found. Your account may not have permission to view the {legal_hold_resource.lower()}." + super().__init__(exception, message, resource_uid) + self._resource_uid = resource_uid + + @property + def uid(self): + """The UID of the legal hold resource.""" + return self._resource_uid + + +class PycpgLegalHoldCriteriaMissingError(PycpgBadRequestError): + """An exception raised when a bad request was made to a Legal Hold endpoint.""" + + def __init__(self, exception): + super().__init__( + exception, + "At least one criteria must be specified: legal_hold_membership_uid, " + "legal_hold_matter_uid, user_uid, or user.", + ) + + +class PycpgLegalHoldAlreadyDeactivatedError(PycpgBadRequestError): + """An exception raised when trying to deactivate a Legal Hold Matter that is already inactive.""" + + def __init__(self, exception, legal_hold_matter_uid): + message = f"Legal Hold Matter with UID '{legal_hold_matter_uid}' has already been deactivated." + super().__init__(exception, message, legal_hold_matter_uid) + self._legal_hold_matter_uid = legal_hold_matter_uid + + @property + def legal_hold_matter_uid(self): + """The legal hold matter UID.""" + return self._legal_hold_matter_uid + + +class PycpgLegalHoldAlreadyActiveError(PycpgBadRequestError): + """An exception raised when trying to activate a Legal Hold Matter that is already active.""" + + def __init__(self, exception, legal_hold_matter_uid): + message = ( + f"Legal Hold Matter with UID '{legal_hold_matter_uid}' is already active." + ) + super().__init__(exception, message, legal_hold_matter_uid) + self._legal_hold_matter_uid = legal_hold_matter_uid + + @property + def legal_hold_matter_uid(self): + """The legal hold matter UID.""" + return self._legal_hold_matter_uid + + +class PycpgMFARequiredError(PycpgUnauthorizedError): + """Deprecated: An exception raised when a request requires multi-factor authentication""" + + def __init__(self, exception, message=None): + message = message or "User requires multi-factor authentication." + super().__init__(exception, message) + + +class PycpgUserAlreadyExistsError(PycpgInternalServerError): + """An exception raised when a user already exists""" + + def __init__(self, exception, message=None): + message = message or "User already exists." + super().__init__(exception, message) + + +class PycpgUsernameMustBeEmailError(PycpgInternalServerError): + """An exception raised when trying to set a non-email as a user's username + in a cloud environment.""" + + def __init__(self, exception): + message = "Username must be an email address." + super().__init__(exception, message) + + +class PycpgInvalidEmailError(PycpgInternalServerError): + """An exception raised when trying to set an invalid email as a user's email.""" + + def __init__(self, email, exception): + message = f"'{email}' is not a valid email." 
+ super().__init__(exception, message, email) + self._email = email + + @property + def email(self): + """The email being assigned to a user.""" + return self._email + + +class PycpgInvalidPasswordError(PycpgInternalServerError): + """An exception raised when trying to set an invalid password as a user's password.""" + + def __init__(self, exception): + message = "Invalid password." + super().__init__(exception, message) + + +class PycpgInvalidUsernameError(PycpgInternalServerError): + """An exception raised when trying to set an invalid username as a user's username.""" + + def __init__(self, exception): + message = "Invalid username." + super().__init__(exception, message) + + +class PycpgBadRestoreRequestError(PycpgBadRequestError): + """An error raised when the given restore arguments are not compatible and cause + a bad request.""" + + def __init__(self, exception): + message = "Unable to create restore session." + super().__init__(exception, message) + + +class PycpgInvalidPageTokenError(PycpgBadRequestError): + """An error raised when the page token given is invalid.""" + + def __init__(self, exception, page_token): + message = ( + f'Invalid page token: "{page_token}".\n' + "Page tokens match the last event ID received in a previous query. " + "Your page token may be invalid if the original query has changed " + "such that the corresponding event is being filtered out of the results, " + "or if the event has expired according to your data retention policy." + ) + super().__init__(exception, message, page_token) + self._page_token = page_token + + @property + def page_token(self): + """The page token.""" + return self._page_token + + +def raise_pycpg_error(raised_error): + """Raises the appropriate :class:`pycpg.exceptions.PycpgHttpError` based on the given + HTTPError's response status code. + """ + if raised_error.response.status_code == 400: + raise PycpgBadRequestError(raised_error) + elif raised_error.response.status_code == 401: + raise PycpgUnauthorizedError(raised_error) + elif raised_error.response.status_code == 403: + raise PycpgForbiddenError(raised_error) + elif raised_error.response.status_code == 404: + raise PycpgNotFoundError(raised_error) + elif raised_error.response.status_code == 409: + raise PycpgConflictError(raised_error) + elif raised_error.response.status_code == 429: + raise PycpgTooManyRequestsError(raised_error) + elif 500 <= raised_error.response.status_code < 600: + raise PycpgInternalServerError(raised_error) + else: + raise PycpgHTTPError(raised_error) diff --git a/src/py42/response.py b/src/pycpg/response.py similarity index 88% rename from src/py42/response.py rename to src/pycpg/response.py index a71e7159d..b3c27241a 100644 --- a/src/py42/response.py +++ b/src/pycpg/response.py @@ -1,10 +1,10 @@ import json import reprlib -from py42.exceptions import Py42Error +from pycpg.exceptions import PycpgError -class Py42Response: +class PycpgResponse: def __init__(self, requests_response): self._response = requests_response self._data = None @@ -15,10 +15,10 @@ def __getitem__(self, key): except TypeError: data_root_type = type(self._data_root) message = ( - f"The Py42Response root is of type {data_root_type}, but __getitem__ " + f"The PycpgResponse root is of type {data_root_type}, but __getitem__ " f"got a key of {key}, which is incompatible." 
) - raise Py42Error(message) + raise PycpgError(message) def __setitem__(self, key, value): try: @@ -26,13 +26,13 @@ def __setitem__(self, key, value): except TypeError: data_root_type = type(self._data_root) message = ( - f"The Py42Response root is of type {data_root_type}, but __setitem__ got a key " + f"The PycpgResponse root is of type {data_root_type}, but __setitem__ got a key " f"of {key} and value of {value}, which is incompatible." ) - raise Py42Error(message) + raise PycpgError(message) def __iter__(self): - # looping over a Py42Response will loop through list items, dict keys, or str characters + # looping over a PycpgResponse will loop through list items, dict keys, or str characters return iter(self._data_root) @property @@ -65,7 +65,7 @@ def iter_content(self, chunk_size=1, decode_unicode=False): @property def raw_text(self): """The ``response.Response.text`` property. It contains raw metadata that is not included in - the Py42Response.text property.""" + the PycpgResponse.text property.""" return self._response.text @property diff --git a/src/pycpg/sdk/__init__.py b/src/pycpg/sdk/__init__.py new file mode 100644 index 000000000..646dcfe2c --- /dev/null +++ b/src/pycpg/sdk/__init__.py @@ -0,0 +1,325 @@ +import warnings + +from requests.auth import HTTPBasicAuth + +from pycpg.exceptions import PycpgError +from pycpg.exceptions import PycpgUnauthorizedError +from pycpg.services._auth import ApiClientAuth +from pycpg.services._auth import BearerAuth +from pycpg.services._auth import CustomJWTAuth +from pycpg.services._connection import Connection +from pycpg.usercontext import UserContext + +warnings.simplefilter("always", DeprecationWarning) +warnings.simplefilter("always", UserWarning) + + +def from_api_client(host_address, client_id, secret): + """Creates a :class:`~pycpg.sdk.SDKClient` object for accessing the CrashPlan REST APIs using + an API client ID and secret. + + Args: + host_address (str): The domain name of the CrashPlan instance being authenticated to, e.g. + console.us1.crashplan.com + client_id (str): The client ID of the API client to authenticate with. + secret (str): The secret of the API client to authenticate with. + + Returns: + :class:`pycpg.sdk.SDKClient` + """ + + return SDKClient.from_api_client(host_address, client_id, secret) + + +def from_local_account(host_address, username, password, totp=None): + """Creates a :class:`~pycpg.sdk.SDKClient` object for accessing the CrashPlan REST APIs using the + supplied credentials. This method supports only accounts created within the CrashPlan console or using the + APIs (including pycpg). Username/passwords that are based on Active Directory, + Okta, or other Identity providers cannot be used with this method. + + Args: + host_address (str): The domain name of the CrashPlan instance being authenticated to, e.g. + console.us1.crashplan.com + username (str): The username of the authenticating account. + password (str): The password of the authenticating account. + totp (callable or str, optional): The time-based one-time password of the authenticating account. Include only + if the account uses CrashPlan's two-factor authentication. Defaults to None. 
+ + Returns: + :class:`pycpg.sdk.SDKClient` + """ + client = SDKClient.from_local_account(host_address, username, password, totp) + + # test credentials + try: + client.users.get_current() + except PycpgUnauthorizedError as err: + login_type = client.loginconfig.get_for_user(username)["loginType"] + if login_type == "CLOUD_SSO": + raise PycpgError("SSO users are not supported in `from_local_account()`.") + msg = f"SDK initialization failed, double-check username/password, and provide two-factor TOTP token if Multi-Factor Auth configured for your user. User LoginConfig: {login_type}" + err.args = (msg,) + raise + return client + + +def from_jwt_provider(host_address, jwt_provider): + """Creates a :class:`~pycpg.sdk.SDKClient` object for accessing the CrashPlan REST APIs using a custom + auth mechanism. User can use any authentication mechanism like that returns a JSON Web token on authentication + which would then be used for all subsequent requests. + + Args: + host_address (str): The domain name of the CrashPlan instance being authenticated to, e.g. + console.us1.crashplan.com + jwt_provider (function): A function that accepts no parameters and on execution returns a JSON web token string. + + Returns: + :class:`pycpg.sdk.SDKClient` + """ + + client = SDKClient.from_jwt_provider(host_address, jwt_provider) + client.usercontext.get_current_tenant_id() + return client + + +class SDKClient: + def __init__(self, main_connection, auth, auth_flag=None): + services, user_ctx = _init_services(main_connection, auth, auth_flag) + self._clients = _init_clients(services, main_connection) + self._user_ctx = user_ctx + self._auth_flag = auth_flag + + @classmethod + def from_api_client(cls, host_address, client_id, secret): + """Creates a :class:`~pycpg.sdk.SDKClient` object for accessing the CrashPlan REST APIs using + an API client ID and secret. + + Args: + host_address (str): The domain name of the CrashPlan instance being authenticated to, e.g. + console.us1.crashplan.com + client_id (str): The client ID of the API client to authenticate with. + secret (str): The secret of the API client to authenticate with. + + Returns: + :class:`pycpg.sdk.SDKClient` + """ + + basic_auth = HTTPBasicAuth(client_id, secret) + auth_connection = Connection.from_host_address(host_address, auth=basic_auth) + api_client_auth = ApiClientAuth(auth_connection) + main_connection = Connection.from_host_address( + host_address, auth=api_client_auth + ) + api_client_auth.get_credentials() + return cls(main_connection, api_client_auth, auth_flag=1) + + @classmethod + def from_local_account(cls, host_address, username, password, totp=None): + """Creates a :class:`~pycpg.sdk.SDKClient` object for accessing the CrashPlan REST APIs using + the supplied credentials. This method supports only accounts created within the CrashPlan console or + using the APIs (including pycpg). Username/passwords that are based on Active + Directory, Okta, or other Identity providers should use the `from_jwt_provider` method. + + Args: + host_address (str): The domain name of the CrashPlan instance being authenticated to, e.g. + console.us1.crashplan.com + username (str): The username of the authenticating account. + password (str): The password of the authenticating account. + totp (callable or str, optional): The time-based one-time password of the authenticating account. Include only + if the account uses CrashPlan's two-factor authentication. Defaults to None. 
+ Returns: + :class:`pycpg.sdk.SDKClient` + """ + basic_auth = None + if username and password: + basic_auth = HTTPBasicAuth(username, password) + auth_connection = Connection.from_host_address(host_address, auth=basic_auth) + bearer_auth = BearerAuth(auth_connection, totp) + main_connection = Connection.from_host_address(host_address, auth=bearer_auth) + + return cls(main_connection, bearer_auth) + + @classmethod + def from_jwt_provider(cls, host_address, jwt_provider): + """Creates a :class:`~pycpg.sdk.SDKClient` object for accessing the CrashPlan REST APIs using a custom + auth mechanism. User can use any authentication mechanism like that returns a JSON Web token + on authentication which would then be used for all subsequent requests. + + Args: + host_address (str): The domain name of the CrashPlan instance being authenticated to, e.g. + console.us1.crashplan.com + jwt_provider (function): A function that accepts no parameters and on execution returns a + JSON web token string. + + Returns: + :class:`pycpg.sdk.SDKClient` + """ + custom_auth = CustomJWTAuth(jwt_provider) + main_connection = Connection.from_host_address(host_address, auth=custom_auth) + custom_auth.get_credentials() + return cls(main_connection, custom_auth) + + @property + def loginconfig(self): + """A collection of methods related to getting information about the login configuration + of user accounts. + + Returns: + :class:`pycpg.clients.loginconfig.LoginConfigurationClient.` + """ + return self._clients.loginconfig + + @property + def serveradmin(self): + """A collection of methods for getting server information for on-premise environments + and tenant information for cloud environments. + + Returns: + :class:`pycpg.services.administration.AdministrationService` + """ + return self._clients.authority.administration + + @property + def archive(self): + """A collection of methods for accessing CrashPlan storage archives. Useful for doing + web-restores or finding a file on an archive. + + Returns: + :class:`pycpg.clients.archive.ArchiveClient` + """ + return self._clients.archive + + @property + def users(self): + """A collection of methods for retrieving or updating data about users in the CrashPlan + environment. + + Returns: + :class:`pycpg.services.users.UserService` + """ + return self._clients.authority.users + + @property + def devices(self): + """A collection of methods for retrieving or updating data about devices in the CrashPlan + environment. + + Returns: + :class:`pycpg.services.devices.DeviceService` + """ + return self._clients.authority.devices + + @property + def orgs(self): + """A collection of methods for retrieving or updating data about organizations in the + CrashPlan environment. + + Returns: + :class:`pycpg.services.orgs.OrgService` + """ + return self._clients.authority.orgs + + @property + def legalhold(self): + """A collection of methods for retrieving and updating legal-hold matters, policies, and + custodians. + + Returns: + :class:`pycpg.services.legalhold.LegalHoldService` + """ + return self._clients.authority.legalhold + + @property + def usercontext(self): + """A collection of methods related to getting information about the currently logged in + user, such as the tenant ID. + + Returns: + :class:`pycpg.usercontext.UserContext` + """ + return self._user_ctx + + @property + def auditlogs(self): + """A collection of methods for retrieving audit logs. 
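
# --- Illustrative usage sketch (not part of the patch above) ---
# The service collections exposed as SDKClient properties above. Assumes `sdk`
# was created with one of the factory methods; get_current() and
# get_current_tenant_id() are the same calls the factories use internally.
current_user = sdk.users.get_current()
tenant_id = sdk.usercontext.get_current_tenant_id()
login_config = sdk.loginconfig.get_for_user("admin@example.com")
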
+ + Returns: + :class:`pycpg.clients.auditlogs.AuditLogsClient` + """ + return self._clients.auditlogs + + +def _init_services(main_connection, main_auth, auth_flag=None): + # services are imported within function to prevent circular imports when a service + from pycpg.services import Services + from pycpg.services._keyvaluestore import KeyValueStoreService + from pycpg.services.administration import AdministrationService + from pycpg.services.archive import ArchiveService + from pycpg.services.auditlogs import AuditLogsService + from pycpg.services.devices import DeviceService + from pycpg.services.legalhold import LegalHoldService + from pycpg.services.legalholdapiclient import LegalHoldApiClientService + from pycpg.services.orgs import OrgService + from pycpg.services.users import UserService + + kv_prefix = "simple-key-value-store" + audit_logs_key = "AUDIT-LOG_API-URL" + + kv_connection = Connection.from_microservice_prefix(main_connection, kv_prefix) + kv_service = KeyValueStoreService(kv_connection) + + audit_logs_conn = Connection.from_microservice_key( + kv_service, audit_logs_key, auth=main_auth + ) + administration_svc = AdministrationService(main_connection) + + user_ctx = UserContext(administration_svc) + + services = Services( + administration=administration_svc, + archive=ArchiveService(main_connection), + devices=DeviceService(main_connection), + # Only use updated legal hold client if initialized with API Client authorization + legalhold=LegalHoldApiClientService(main_connection) + if auth_flag + else LegalHoldService(main_connection), + orgs=OrgService(main_connection), + users=UserService(main_connection), + auditlogs=AuditLogsService(audit_logs_conn), + ) + + return services, user_ctx + + +def _init_clients(services, connection): + # clients are imported within function to prevent circular imports when a client + from pycpg.clients import Clients + from pycpg.clients._archiveaccess.accessorfactory import ArchiveAccessorFactory + from pycpg.clients.archive import ArchiveClient + from pycpg.clients.auditlogs import AuditLogsClient + from pycpg.clients.authority import AuthorityClient + from pycpg.clients.loginconfig import LoginConfigurationClient + from pycpg.services.storage._service_factory import StorageServiceFactory + + authority = AuthorityClient( + administration=services.administration, + archive=services.archive, + devices=services.devices, + legalhold=services.legalhold, + orgs=services.orgs, + users=services.users, + ) + storage_service_factory = StorageServiceFactory(connection, services.devices) + archive_accessor_factory = ArchiveAccessorFactory( + services.archive, storage_service_factory + ) + archive = ArchiveClient(archive_accessor_factory, services.archive) + auditlogs = AuditLogsClient(services.auditlogs) + loginconfig = LoginConfigurationClient(connection) + clients = Clients( + authority=authority, + archive=archive, + auditlogs=auditlogs, + loginconfig=loginconfig, + ) + return clients diff --git a/src/py42/services/__init__.py b/src/pycpg/services/__init__.py similarity index 57% rename from src/py42/services/__init__.py rename to src/pycpg/services/__init__.py index 412aeb32a..1d36a3e62 100644 --- a/src/py42/services/__init__.py +++ b/src/pycpg/services/__init__.py @@ -1,11 +1,11 @@ from collections import namedtuple -from py42.exceptions import Py42ActiveLegalHoldError +from pycpg.exceptions import PycpgActiveLegalHoldError def handle_active_legal_hold_error(bad_request_err, resource, resource_id): if "ACTIVE_LEGAL_HOLD" in 
bad_request_err.response.text: - raise Py42ActiveLegalHoldError(bad_request_err, resource, resource_id) + raise PycpgActiveLegalHoldError(bad_request_err, resource, resource_id) class BaseService: @@ -25,16 +25,6 @@ def __init__(self, connection): "legalhold", "orgs", "users", - "alertrules", - "alerts", - "fileevents", - "savedsearch", - "preservationdata", "auditlogs", - "cases", - "casesfileevents", - "trustedactivities", - "userriskprofile", - "watchlists", ], ) diff --git a/src/py42/services/_auth.py b/src/pycpg/services/_auth.py similarity index 92% rename from src/py42/services/_auth.py rename to src/pycpg/services/_auth.py index 585edb950..cf085b1f6 100644 --- a/src/py42/services/_auth.py +++ b/src/pycpg/services/_auth.py @@ -3,7 +3,7 @@ from requests.auth import AuthBase -class C42RenewableAuth(AuthBase): +class CPGRenewableAuth(AuthBase): def __init__(self): self._auth_lock = Lock() self._credentials = None @@ -28,7 +28,7 @@ def _get_credentials(self): raise NotImplementedError() -class BearerAuth(C42RenewableAuth): +class BearerAuth(CPGRenewableAuth): def __init__(self, auth_connection, totp=None): super().__init__() self._auth_connection = auth_connection @@ -43,7 +43,7 @@ def _get_credentials(self): return f"Bearer {response['v3_user_token']}" -class ApiClientAuth(C42RenewableAuth): +class ApiClientAuth(CPGRenewableAuth): def __init__(self, auth_connnection): super().__init__() self._auth_connection = auth_connnection @@ -55,7 +55,7 @@ def _get_credentials(self): return f"Bearer {response['access_token']}" -class CustomJWTAuth(C42RenewableAuth): +class CustomJWTAuth(CPGRenewableAuth): def __init__(self, jwt_provider): super().__init__() self._jwt_provider = jwt_provider diff --git a/src/py42/services/_connection.py b/src/pycpg/services/_connection.py similarity index 92% rename from src/py42/services/_connection.py rename to src/pycpg/services/_connection.py index 11cb037e8..c5d69d01d 100644 --- a/src/py42/services/_connection.py +++ b/src/pycpg/services/_connection.py @@ -8,15 +8,15 @@ from requests.models import Request from requests.sessions import Session -import py42.settings as settings -from py42.exceptions import Py42DeviceNotConnectedError -from py42.exceptions import Py42Error -from py42.exceptions import Py42FeatureUnavailableError -from py42.exceptions import raise_py42_error -from py42.response import Py42Response -from py42.services._auth import C42RenewableAuth -from py42.settings import debug -from py42.util import format_dict +import pycpg.settings as settings +from pycpg.exceptions import PycpgDeviceNotConnectedError +from pycpg.exceptions import PycpgError +from pycpg.exceptions import PycpgFeatureUnavailableError +from pycpg.exceptions import raise_pycpg_error +from pycpg.response import PycpgResponse +from pycpg.services._auth import CPGRenewableAuth +from pycpg.settings import debug +from pycpg.util import format_dict SESSION_ADAPTER = HTTPAdapter(pool_connections=200, pool_maxsize=4, pool_block=True) @@ -59,7 +59,7 @@ def _get_sts_base_url(self): sts_base_url = response_json.get("stsBaseUrl") if not sts_base_url: - raise Py42FeatureUnavailableError(response) + raise PycpgFeatureUnavailableError(response) return sts_base_url @@ -87,7 +87,7 @@ def get_host_address(self): "api/v1/connectedServerUrl", params={"guid": self._device_guid} ) if response["serverUrl"] is None: - raise Py42DeviceNotConnectedError(response, self._device_guid) + raise PycpgDeviceNotConnectedError(response, self._device_guid) return response["serverUrl"] @@ -199,10 +199,10 @@ def 
request( debug.logger.debug("Response data: ") if 200 <= response.status_code <= 399: - return Py42Response(response) + return PycpgResponse(response) if response.status_code == 401: - if isinstance(self._auth, C42RenewableAuth): + if isinstance(self._auth, CPGRenewableAuth): self._auth.clear_credentials() else: debug.logger.debug("Error! Could not retrieve response.") @@ -281,12 +281,12 @@ def _create_user_headers(headers): def _handle_error(method, url, response): if response is None: msg = f"No response was returned for {method} request to {url}." - raise Py42Error(msg) + raise PycpgError(msg) try: response.raise_for_status() except HTTPError as ex: - raise_py42_error(ex) + raise_pycpg_error(ex) def _print_request(method, url, params=None, data=None, json=None): diff --git a/src/py42/services/_keyvaluestore.py b/src/pycpg/services/_keyvaluestore.py similarity index 89% rename from src/py42/services/_keyvaluestore.py rename to src/pycpg/services/_keyvaluestore.py index b6056b72f..370a2b273 100644 --- a/src/py42/services/_keyvaluestore.py +++ b/src/pycpg/services/_keyvaluestore.py @@ -1,4 +1,4 @@ -from py42.services import BaseService +from pycpg.services import BaseService class KeyValueStoreService(BaseService): diff --git a/src/py42/services/administration.py b/src/pycpg/services/administration.py similarity index 79% rename from src/py42/services/administration.py rename to src/pycpg/services/administration.py index c217517a9..ac779cd0a 100644 --- a/src/py42/services/administration.py +++ b/src/pycpg/services/administration.py @@ -1,4 +1,4 @@ -from py42.services import BaseService +from pycpg.services import BaseService class AdministrationService(BaseService): diff --git a/src/py42/services/archive.py b/src/pycpg/services/archive.py similarity index 90% rename from src/py42/services/archive.py rename to src/pycpg/services/archive.py index 893ee13ea..2d5e80471 100644 --- a/src/py42/services/archive.py +++ b/src/pycpg/services/archive.py @@ -1,6 +1,6 @@ -from py42 import settings -from py42.services import BaseService -from py42.services.util import get_all_pages +from pycpg import settings +from pycpg.services import BaseService +from pycpg.services.util import get_all_pages class ArchiveService(BaseService): @@ -16,7 +16,7 @@ def get_single_archive(self, archive_guid): archive_guid (str): The GUID for the archive. Returns: - :class:`py42.response.Py42Response`: A response containing archive information. + :class:`pycpg.response.PycpgResponse`: A response containing archive information. """ uri = f"/api/v1/Archive/{archive_guid}" return self._connection.get(uri) @@ -26,10 +26,10 @@ def get_page(self, page_num, page_size=None, **kwargs): Args: page_num (int): The page number to request. - page_size (int, optional): The number of archives to return per page. Defaults to `py42.settings.items_per_page`. + page_size (int, optional): The number of archives to return per page. Defaults to `pycpg.settings.items_per_page`. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = "/api/v1/Archive" page_size = page_size or settings.items_per_page @@ -45,7 +45,7 @@ def get_all_archives_from_value(self, id_value, id_type): userUid, destinationGuid) Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects + generator: An object that iterates over :class:`pycpg.response.PycpgResponse` objects that each contain a page of archives. 
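
# --- Illustrative usage sketch (not part of the patch above) ---
# HTTP failures surface as the PycpgHTTPError subclasses dispatched by
# raise_pycpg_error() above, so callers can catch them around any service call.
# Assumes `sdk` is initialized; the GUID is a placeholder.
from pycpg.exceptions import PycpgHTTPError, PycpgNotFoundError

try:
    device = sdk.devices.get_by_guid("0000000000000000000")
except PycpgNotFoundError:
    device = None
except PycpgHTTPError as err:
    print(err.response.status_code, err.response.text)
    raise
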
""" params = {id_type: id_value} diff --git a/src/py42/services/auditlogs.py b/src/pycpg/services/auditlogs.py similarity index 87% rename from src/py42/services/auditlogs.py rename to src/pycpg/services/auditlogs.py index afe84b5e0..eece1ced5 100644 --- a/src/py42/services/auditlogs.py +++ b/src/pycpg/services/auditlogs.py @@ -1,8 +1,8 @@ -from py42 import settings -from py42.services import BaseService -from py42.services.util import get_all_pages -from py42.util import parse_timestamp_to_microseconds_precision -from py42.util import to_list +from pycpg import settings +from pycpg.services import BaseService +from pycpg.services.util import get_all_pages +from pycpg.util import parse_timestamp_to_microseconds_precision +from pycpg.util import to_list _FILTER_PARAMS = ( "event_types", @@ -19,7 +19,7 @@ class AuditLogsService(BaseService): - """https://support.code42.com/Administrator/Cloud/Monitoring_and_managing/Search_Audit_Log_events_with_the_Code42_API""" + """https://support.crashplan.com/hc/en-us/articles/9057566861325--Search-Audit-Log-events-with-the-CrashPlan-API""" def get_page( self, diff --git a/src/py42/services/devices.py b/src/pycpg/services/devices.py similarity index 81% rename from src/py42/services/devices.py rename to src/pycpg/services/devices.py index 46b4e1983..aad7faae7 100644 --- a/src/py42/services/devices.py +++ b/src/pycpg/services/devices.py @@ -1,14 +1,13 @@ from collections import namedtuple from time import time -from py42 import settings -from py42.clients.settings.device_settings import DeviceSettings -from py42.clients.settings.device_settings import IncydrDeviceSettings -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42OrgNotFoundError -from py42.services import BaseService -from py42.services import handle_active_legal_hold_error -from py42.services.util import get_all_pages +from pycpg import settings +from pycpg.clients.settings.device_settings import DeviceSettings +from pycpg.exceptions import PycpgBadRequestError +from pycpg.exceptions import PycpgOrgNotFoundError +from pycpg.services import BaseService +from pycpg.services import handle_active_legal_hold_error +from pycpg.services.util import get_all_pages DeviceSettingsResponse = namedtuple( "DeviceSettingsResponse", ["error", "settings_response", "device_settings_response"] @@ -16,7 +15,7 @@ class DeviceService(BaseService): - """A class to interact with Code42 device/computer APIs.""" + """A class to interact with CrashPlan device/computer APIs.""" def get_page( self, @@ -49,12 +48,12 @@ def get_page( include_counts (bool, optional): A flag to denote whether to include total, warning, and critical counts. Defaults to True. page_size (int, optional): The number of devices to return per page. Defaults to - `py42.settings.items_per_page`. + `pycpg.settings.items_per_page`. q (str, optional): Searches results flexibly by incomplete GUID, hostname, computer name, etc. Defaults to None. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = "/api/v1/Computer" @@ -73,9 +72,9 @@ def get_page( } try: return self._connection.get(uri, params=params) - except Py42BadRequestError as err: + except PycpgBadRequestError as err: if "Unable to find org" in str(err.response.text): - raise Py42OrgNotFoundError(err, org_uid) + raise PycpgOrgNotFoundError(err, org_uid) raise def get_all( @@ -114,11 +113,11 @@ def get_all( computer name, etc. Defaults to None. 
Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects + generator: An object that iterates over :class:`pycpg.response.PycpgResponse` objects that each contain a page of devices. The devices returned by `get_all()` are based on the role and permissions of the user - authenticating the py42 SDK. + authenticating the pycpg SDK. """ return get_all_pages( @@ -144,7 +143,7 @@ def get_by_id(self, device_id, include_backup_usage=None, **kwargs): destination and its backup stats. Defaults to None. Returns: - :class:`py42.response.Py42Response`: A response containing device information. + :class:`pycpg.response.PycpgResponse`: A response containing device information. """ uri = f"/api/v1/Computer/{device_id}" params = dict(incBackupUsage=include_backup_usage, **kwargs) @@ -159,21 +158,21 @@ def get_by_guid(self, guid, include_backup_usage=None, **kwargs): destination and its backup stats. Defaults to None. Returns: - :class:`py42.response.Py42Response`: A response containing device information. + :class:`pycpg.response.PycpgResponse`: A response containing device information. """ uri = f"/api/v1/Computer/{guid}" params = dict(idType="guid", incBackupUsage=include_backup_usage, **kwargs) return self._connection.get(uri, params=params) def block(self, device_id): - """Blocks a device causing the user not to be able to log in to or restore from Code42 on + """Blocks a device causing the user not to be able to log in to or restore from CrashPlan on that device. Args: device_id (int): The identification number of the device. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"/api/v1/ComputerBlock/{device_id}" return self._connection.put(uri) @@ -185,7 +184,7 @@ def unblock(self, device_id): device_id (int): The identification number of the device. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"/api/v1/ComputerBlock/{device_id}" return self._connection.delete(uri) @@ -197,13 +196,13 @@ def deactivate(self, device_id): device_id (int): The identification number of the device. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = "/api/v4/computer-deactivation/update" data = {"id": device_id} try: return self._connection.post(uri, json=data) - except Py42BadRequestError as ex: + except PycpgBadRequestError as ex: handle_active_legal_hold_error(ex, "device", device_id) raise @@ -214,7 +213,7 @@ def reactivate(self, device_id): device_id (int): The identification number of the device. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = "/api/v4/computer-deactivation/remove" data = {"id": device_id} @@ -228,7 +227,7 @@ def deauthorize(self, device_id): device_id (int): The identification number of the device. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"/api/v1/ComputerDeauthorization/{device_id}" return self._connection.put(uri) @@ -241,7 +240,7 @@ def get_agent_state(self, guid, property_name): property_name (str): The name of the property to retrieve (e.g. `fullDiskAccess`). Returns: - :class:`py42.response.Py42Response`: A response containing settings information. + :class:`pycpg.response.PycpgResponse`: A response containing settings information. 
""" uri = "/api/v14/agent-state/view-by-device-guid" params = {"deviceGuid": guid, "propertyName": property_name} @@ -254,7 +253,7 @@ def get_agent_full_disk_access_state(self, guid): guid (str): The globally unique identifier of the device. Returns: - :class:`py42.response.Py42Response`: A response containing settings information. + :class:`pycpg.response.PycpgResponse`: A response containing settings information. """ return self.get_agent_state(guid, "fullDiskAccess") @@ -265,22 +264,19 @@ def get_settings(self, guid): guid (int,str): The globally unique identifier of the device. Returns: - :class:`py42.clients.settings.device_settings.DeviceSettings`: A class to help manage device settings. + :class:`pycpg.clients.settings.device_settings.DeviceSettings`: A class to help manage device settings. """ settings = self.get_by_guid(guid, incSettings=True) - if settings.data["service"].lower() == "crashplan": - return DeviceSettings(settings.data) - else: - return IncydrDeviceSettings(settings.data) + return DeviceSettings(settings.data) def update_settings(self, device_settings): - """Updates a device's settings based on changes to the passed in `DeviceSettings` or `IncydrDeviceSettings` instance. The appropriate instance for each device is returned by the `get_settings()` method. + """Updates a device's settings based on changes to the passed in `DeviceSettings` instance. The appropriate instance for each device is returned by the `get_settings()` method. Args: - device_settings (`DeviceSettings` OR `IncydrDeviceSettings`): An instance of `DeviceSettings` (Crashplan) or `IncydrDeviceSettings` (Incydr) with desired modifications to settings. + device_settings `DeviceSettings`: An instance of `DeviceSettings` (Crashplan) with desired modifications to settings. Returns: - :class:`py42.response.Py42Response`: A response containing the result of the settings changes. + :class:`pycpg.response.PycpgResponse`: A response containing the result of the settings changes. """ device_settings = dict(device_settings) device_id = device_settings["computerId"] @@ -297,7 +293,7 @@ def upgrade(self, guid): guid (str): The globally unique identifier of the device. Returns: - :class:`py42.response.Py42Response`: A response containing the result of the upgrade request. + :class:`pycpg.response.PycpgResponse`: A response containing the result of the upgrade request. 
""" uri = "/api/v4/device-upgrade/upgrade-device" return self._connection.post(uri, json={"deviceGuid": guid}) diff --git a/src/py42/services/legalhold.py b/src/pycpg/services/legalhold.py similarity index 86% rename from src/py42/services/legalhold.py rename to src/pycpg/services/legalhold.py index 003ceb1d4..e99ebc5bb 100644 --- a/src/py42/services/legalhold.py +++ b/src/pycpg/services/legalhold.py @@ -1,13 +1,13 @@ -from py42 import settings -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42Error -from py42.exceptions import Py42ForbiddenError -from py42.exceptions import Py42LegalHoldCriteriaMissingError -from py42.exceptions import Py42LegalHoldNotFoundOrPermissionDeniedError -from py42.exceptions import Py42UserAlreadyAddedError -from py42.services import BaseService -from py42.services.util import get_all_pages -from py42.util import parse_timestamp_to_milliseconds_precision +from pycpg import settings +from pycpg.exceptions import PycpgBadRequestError +from pycpg.exceptions import PycpgError +from pycpg.exceptions import PycpgForbiddenError +from pycpg.exceptions import PycpgLegalHoldCriteriaMissingError +from pycpg.exceptions import PycpgLegalHoldNotFoundOrPermissionDeniedError +from pycpg.exceptions import PycpgUserAlreadyAddedError +from pycpg.services import BaseService +from pycpg.services.util import get_all_pages +from pycpg.util import parse_timestamp_to_milliseconds_precision def _active_state_map(active): @@ -15,15 +15,15 @@ def _active_state_map(active): try: return _map[active] except KeyError: - raise Py42Error( + raise PycpgError( f"Invalid argument: '{active}'. active must be True, False, or None" ) class LegalHoldService(BaseService): - """A service for interacting with Code42 Legal Hold APIs. + """A service for interacting with CrashPlan Legal Hold APIs. - The LegalHoldService provides the ability to manage Code42 Legal Hold Policies and Matters. + The LegalHoldService provides the ability to manage CrashPlan Legal Hold Policies and Matters. It can: - Create, view, and list all existing Policies. - Create, view, deactivate, reactivate, and list all existing Matters. @@ -39,7 +39,7 @@ def create_policy(self, name, policy=None): None (where the server-default backup set is used). Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = "/api/v4/legal-hold-policy/create" data = {"name": name, "policy": policy} @@ -59,7 +59,7 @@ def create_matter( hold_ext_ref (str, optional): Optional external reference information. Defaults to None. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = "/api/v1/LegalHold" data = { @@ -78,7 +78,7 @@ def get_policy_by_uid(self, legal_hold_policy_uid): legal_hold_policy_uid (str): The identifier of the Preservation Policy. Returns: - :class:`py42.response.Py42Response`: A response containing the Policy. + :class:`pycpg.response.PycpgResponse`: A response containing the Policy. """ uri = "/api/v4/legal-hold-policy/view" params = {"legalHoldPolicyUid": legal_hold_policy_uid} @@ -88,7 +88,7 @@ def get_policy_list(self): """Gets a list of existing Preservation Policies. Returns: - :class:`py42.response.Py42Response`: A response containing the list of Policies. + :class:`pycpg.response.PycpgResponse`: A response containing the list of Policies. 
""" uri = "/api/v4/legal-hold-policy/list" return self._connection.get(uri) @@ -100,13 +100,13 @@ def get_matter_by_uid(self, legal_hold_uid): legal_hold_uid (str): The identifier of the Legal Hold Matter. Returns: - :class:`py42.response.Py42Response`: A response containing the Matter. + :class:`pycpg.response.PycpgResponse`: A response containing the Matter. """ uri = f"/api/v1/LegalHold/{legal_hold_uid}" try: return self._connection.get(uri) - except Py42ForbiddenError as err: - raise Py42LegalHoldNotFoundOrPermissionDeniedError(err, legal_hold_uid) + except PycpgForbiddenError as err: + raise PycpgLegalHoldNotFoundOrPermissionDeniedError(err, legal_hold_uid) def get_matters_page( self, @@ -131,10 +131,10 @@ def get_matters_page( hold_ext_ref (str, optional): Find Matters having a matching external reference field. Defaults to None. page_size (int, optional): The number of legal hold items to return per page. - Defaults to `py42.settings.items_per_page`. + Defaults to `pycpg.settings.items_per_page`. Returns: - :class:`py42.response.Py42Response`: + :class:`pycpg.response.PycpgResponse`: """ active_state = _active_state_map(active) @@ -167,7 +167,7 @@ def get_all_matters( Defaults to None. Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects + generator: An object that iterates over :class:`pycpg.response.PycpgResponse` objects that each contain a page of Legal Hold Matters. """ return get_all_pages( @@ -208,10 +208,10 @@ def get_custodians_page( active (bool or None, optional): Find LegalHoldMemberships by their active state. True returns active LegalHoldMemberships, False returns inactive LegalHoldMemberships, None returns all LegalHoldMemberships regardless of state. Defaults to True. - page_size (int, optional): The size of the page. Defaults to `py42.settings.items_per_page`. + page_size (int, optional): The size of the page. Defaults to `pycpg.settings.items_per_page`. Returns: - :class:`py42.response.Py42Response`: + :class:`pycpg.response.PycpgResponse`: """ active_state = _active_state_map(active) page_size = page_size or settings.items_per_page @@ -227,9 +227,9 @@ def get_custodians_page( uri = "/api/v1/LegalHoldMembership" try: return self._connection.get(uri, params=params) - except Py42BadRequestError as ex: + except PycpgBadRequestError as ex: if "At least one criteria must be specified" in ex.response.text: - raise Py42LegalHoldCriteriaMissingError(ex) + raise PycpgLegalHoldCriteriaMissingError(ex) raise def get_all_matter_custodians( @@ -255,7 +255,7 @@ def get_all_matter_custodians( None returns all LegalHoldMemberships regardless of state. Defaults to True. Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects + generator: An object that iterates over :class:`pycpg.response.PycpgResponse` objects that each contain a page of LegalHoldMembership objects. """ return get_all_pages( @@ -289,10 +289,10 @@ def get_events_page( E.g. yyyy-MM-dd HH:MM:SS. Defaults to None. page_num (int): The page number to request. Defaults to 1. page_size (int, optional): The size of the page. - Defaults to `py42.settings.items_per_page`. + Defaults to `pycpg.settings.items_per_page`. Returns: - :class:`py42.response.Py42Response`: + :class:`pycpg.response.PycpgResponse`: """ page_size = page_size or settings.items_per_page if min_event_date: @@ -326,7 +326,7 @@ def get_all_events( E.g. yyyy-MM-dd HH:MM:SS. Defaults to None. 
Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects + generator: An object that iterates over :class:`pycpg.response.PycpgResponse` objects that each contain a page of LegalHoldEvent objects. """ return get_all_pages( @@ -345,19 +345,19 @@ def add_to_matter(self, user_uid, legal_hold_uid): legal_hold_uid (str): The identifier of the Legal Hold Matter. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = "/api/v1/LegalHoldMembership" data = {"legalHoldUid": legal_hold_uid, "userUid": user_uid} try: return self._connection.post(uri, json=data) - except Py42BadRequestError as err: + except PycpgBadRequestError as err: if "USER_ALREADY_IN_HOLD" in err.response.text: matter = self.get_matter_by_uid(legal_hold_uid) matter_id_and_name_text = ( f"legal hold matter id={legal_hold_uid}, name={matter['name']}" ) - raise Py42UserAlreadyAddedError(err, user_uid, matter_id_and_name_text) + raise PycpgUserAlreadyAddedError(err, user_uid, matter_id_and_name_text) raise def remove_from_matter(self, legal_hold_membership_uid): @@ -368,7 +368,7 @@ def remove_from_matter(self, legal_hold_membership_uid): representing the Custodian to Matter relationship. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = "/api/v1/LegalHoldMembershipDeactivation" data = {"legalHoldMembershipUid": legal_hold_membership_uid} @@ -381,7 +381,7 @@ def deactivate_matter(self, legal_hold_uid): legal_hold_uid (str): The identifier of the Legal Hold Matter. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = "/api/v4/legal-hold-deactivation/update" data = {"legalHoldUid": legal_hold_uid} @@ -394,7 +394,7 @@ def reactivate_matter(self, legal_hold_uid): legal_hold_uid (str): The identifier of the Legal Hold Matter. 
Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"/api/v1/LegalHoldReactivation/{legal_hold_uid}" return self._connection.put(uri) diff --git a/src/py42/services/legalholdapiclient.py b/src/pycpg/services/legalholdapiclient.py similarity index 80% rename from src/py42/services/legalholdapiclient.py rename to src/pycpg/services/legalholdapiclient.py index 46ee03094..781bf0028 100644 --- a/src/py42/services/legalholdapiclient.py +++ b/src/pycpg/services/legalholdapiclient.py @@ -1,14 +1,14 @@ -from py42 import settings -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42Error -from py42.exceptions import Py42ForbiddenError -from py42.exceptions import Py42LegalHoldAlreadyActiveError -from py42.exceptions import Py42LegalHoldAlreadyDeactivatedError -from py42.exceptions import Py42LegalHoldCriteriaMissingError -from py42.exceptions import Py42LegalHoldNotFoundOrPermissionDeniedError -from py42.exceptions import Py42UserAlreadyAddedError -from py42.services import BaseService -from py42.services.util import get_all_pages +from pycpg import settings +from pycpg.exceptions import PycpgBadRequestError +from pycpg.exceptions import PycpgError +from pycpg.exceptions import PycpgForbiddenError +from pycpg.exceptions import PycpgLegalHoldAlreadyActiveError +from pycpg.exceptions import PycpgLegalHoldAlreadyDeactivatedError +from pycpg.exceptions import PycpgLegalHoldCriteriaMissingError +from pycpg.exceptions import PycpgLegalHoldNotFoundOrPermissionDeniedError +from pycpg.exceptions import PycpgUserAlreadyAddedError +from pycpg.services import BaseService +from pycpg.services.util import get_all_pages def _active_state_map(active): @@ -16,15 +16,15 @@ def _active_state_map(active): try: return _map[active] except KeyError: - raise Py42Error( + raise PycpgError( f"Invalid argument: '{active}'. active must be True, False, or None" ) class LegalHoldApiClientService(BaseService): - """A service for interacting with Code42 Legal Hold APIs. + """A service for interacting with CrashPlan Legal Hold APIs. - The LegalHoldService provides the ability to manage Code42 Legal Hold Policies and Matters. + The LegalHoldService provides the ability to manage CrashPlan Legal Hold Policies and Matters. It can: - Create, view, and list all existing Policies. - Create, view, deactivate, reactivate, and list all existing Matters. @@ -45,7 +45,7 @@ def create_policy(self, name): name (str): The name of the new Policy. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"{self._uri_prefix}/legal-hold-policy/create" data = {"name": name} @@ -70,7 +70,7 @@ def create_matter( hold_ext_ref (str, optional): Optional external reference information. Defaults to None. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"{self._uri_prefix}/legal-hold-matter/create" data = { @@ -89,14 +89,14 @@ def get_policy_by_uid(self, legal_hold_policy_uid): legal_hold_policy_uid (str): The unique identifier of the Preservation Policy. Returns: - :class:`py42.response.Py42Response`: A response containing the Policy. + :class:`pycpg.response.PycpgResponse`: A response containing the Policy. 
""" uri = f"{self._uri_prefix}/legal-hold-policy/view" params = {"legalHoldPolicyUid": legal_hold_policy_uid} try: return self._connection.get(uri, params=params) - except Py42ForbiddenError as err: - raise Py42LegalHoldNotFoundOrPermissionDeniedError( + except PycpgForbiddenError as err: + raise PycpgLegalHoldNotFoundOrPermissionDeniedError( err, legal_hold_policy_uid, self._policy_string ) @@ -104,7 +104,7 @@ def get_policy_list(self): """Gets a list of existing Preservation Policies. Returns: - :class:`py42.response.Py42Response`: A response containing the list of Policies. + :class:`pycpg.response.PycpgResponse`: A response containing the list of Policies. """ uri = f"{self._uri_prefix}/legal-hold-policy/list" return self._connection.get(uri) @@ -116,7 +116,7 @@ def get_matter_by_uid(self, legal_hold_matter_uid): legal_hold_matter_uid (str): The unique identifier of the Legal Hold Matter. Returns: - :class:`py42.response.Py42Response`: A response containing the Matter. + :class:`pycpg.response.PycpgResponse`: A response containing the Matter. """ uri = f"{self._uri_prefix}/legal-hold-matter/view" params = { @@ -124,8 +124,8 @@ def get_matter_by_uid(self, legal_hold_matter_uid): } try: return self._connection.get(uri, params=params) - except Py42ForbiddenError as err: - raise Py42LegalHoldNotFoundOrPermissionDeniedError( + except PycpgForbiddenError as err: + raise PycpgLegalHoldNotFoundOrPermissionDeniedError( err, legal_hold_matter_uid ) @@ -152,11 +152,11 @@ def get_matters_page( hold_ext_ref (str, optional): Find Matters having a matching external reference field. Defaults to None. page_size (int, optional): The number of legal hold items to return per page. - Defaults to `py42.settings.items_per_page`. + Defaults to `pycpg.settings.items_per_page`. Returns: - :class:`py42.response.Py42Response`: + :class:`pycpg.response.PycpgResponse`: """ page_size = page_size or settings.items_per_page @@ -188,7 +188,7 @@ def get_all_matters( Defaults to None. Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects + generator: An object that iterates over :class:`pycpg.response.PycpgResponse` objects that each contain a page of Legal Hold Matters. """ return get_all_pages( @@ -226,10 +226,10 @@ def get_custodians_page( active (bool or None, optional): Find LegalHoldMemberships by their active state. True returns active LegalHoldMemberships, False returns inactive LegalHoldMemberships, None returns all LegalHoldMemberships regardless of state. Defaults to True. - page_size (int, optional): The size of the page. Defaults to `py42.settings.items_per_page`. + page_size (int, optional): The size of the page. Defaults to `pycpg.settings.items_per_page`. Returns: - :class:`py42.response.Py42Response`: + :class:`pycpg.response.PycpgResponse`: """ active_state = _active_state_map(active) page_size = page_size or settings.items_per_page @@ -244,9 +244,9 @@ def get_custodians_page( uri = f"{self._uri_prefix}/legal-hold-membership/list" try: return self._connection.get(uri, params=params) - except Py42BadRequestError as ex: + except PycpgBadRequestError as ex: if "At least one criteria must be specified" in ex.response.text: - raise Py42LegalHoldCriteriaMissingError(ex) + raise PycpgLegalHoldCriteriaMissingError(ex) raise def get_all_matter_custodians( @@ -272,7 +272,7 @@ def get_all_matter_custodians( None returns all LegalHoldMemberships regardless of state. Defaults to True. 
Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects + generator: An object that iterates over :class:`pycpg.response.PycpgResponse` objects that each contain a page of LegalHoldMembership objects. """ return get_all_pages( @@ -292,20 +292,20 @@ def add_to_matter(self, user_uid, legal_hold_matter_uid): legal_hold_matter_uid (str): The identifier of the Legal Hold Matter. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"{self._uri_prefix}/legal-hold-membership/create" data = {"legalHoldUid": legal_hold_matter_uid, "userUid": user_uid} try: return self._connection.post(uri, json=data) - except Py42BadRequestError as err: + except PycpgBadRequestError as err: if "USER_ALREADY_IN_HOLD" in err.response.text: matter = self.get_matter_by_uid(legal_hold_matter_uid) matter_id_and_name_text = f"legal hold matter id={legal_hold_matter_uid}, name={matter['name']}" - raise Py42UserAlreadyAddedError(err, user_uid, matter_id_and_name_text) + raise PycpgUserAlreadyAddedError(err, user_uid, matter_id_and_name_text) raise - except Py42ForbiddenError as err: - raise Py42LegalHoldNotFoundOrPermissionDeniedError( + except PycpgForbiddenError as err: + raise PycpgLegalHoldNotFoundOrPermissionDeniedError( err, legal_hold_matter_uid ) @@ -317,14 +317,14 @@ def remove_from_matter(self, legal_hold_membership_uid): representing the Custodian to Matter relationship. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"{self._uri_prefix}/legal-hold-membership/deactivate" data = {"legalHoldMembershipUid": legal_hold_membership_uid} try: return self._connection.post(uri, json=data) - except Py42ForbiddenError as err: - raise Py42LegalHoldNotFoundOrPermissionDeniedError( + except PycpgForbiddenError as err: + raise PycpgLegalHoldNotFoundOrPermissionDeniedError( err, legal_hold_membership_uid, self._membership_string ) @@ -335,18 +335,18 @@ def deactivate_matter(self, legal_hold_matter_uid): legal_hold_matter_uid (str): The identifier of the Legal Hold Matter. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"{self._uri_prefix}/legal-hold-matter/deactivate" data = {"legalHoldUid": legal_hold_matter_uid} try: return self._connection.post(uri, json=data) - except Py42BadRequestError as err: + except PycpgBadRequestError as err: if "ALREADY_DEACTIVATED" in err.response.text: - raise Py42LegalHoldAlreadyDeactivatedError(err, legal_hold_matter_uid) + raise PycpgLegalHoldAlreadyDeactivatedError(err, legal_hold_matter_uid) raise - except Py42ForbiddenError as err: - raise Py42LegalHoldNotFoundOrPermissionDeniedError( + except PycpgForbiddenError as err: + raise PycpgLegalHoldNotFoundOrPermissionDeniedError( err, legal_hold_matter_uid ) @@ -357,17 +357,17 @@ def reactivate_matter(self, legal_hold_matter_uid): legal_hold_matter_uid (str): The identifier of the Legal Hold Matter. 
Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"{self._uri_prefix}/legal-hold-matter/activate" data = {"legalHoldUid": legal_hold_matter_uid} try: return self._connection.post(uri, json=data) - except Py42BadRequestError as err: + except PycpgBadRequestError as err: if "ALREADY_ACTIVE" in err.response.text: - raise Py42LegalHoldAlreadyActiveError(err, legal_hold_matter_uid) + raise PycpgLegalHoldAlreadyActiveError(err, legal_hold_matter_uid) raise - except Py42ForbiddenError as err: - raise Py42LegalHoldNotFoundOrPermissionDeniedError( + except PycpgForbiddenError as err: + raise PycpgLegalHoldNotFoundOrPermissionDeniedError( err, legal_hold_matter_uid ) diff --git a/src/py42/services/orgs.py b/src/pycpg/services/orgs.py similarity index 83% rename from src/py42/services/orgs.py rename to src/pycpg/services/orgs.py index 79ee0abec..f5620d8e1 100644 --- a/src/py42/services/orgs.py +++ b/src/pycpg/services/orgs.py @@ -1,11 +1,11 @@ from collections import namedtuple -from py42 import settings -from py42.clients.settings.org_settings import OrgSettings -from py42.exceptions import Py42Error -from py42.exceptions import Py42InternalServerError -from py42.services import BaseService -from py42.services.util import get_all_pages +from pycpg import settings +from pycpg.clients.settings.org_settings import OrgSettings +from pycpg.exceptions import PycpgError +from pycpg.exceptions import PycpgInternalServerError +from pycpg.services import BaseService +from pycpg.services.util import get_all_pages OrgSettingsResponse = namedtuple( "OrgSettingsResponse", ["error", "org_response", "org_settings_response"] @@ -13,7 +13,7 @@ class OrgService(BaseService): - """A service for interacting with Code42 organization APIs. + """A service for interacting with CrashPlan organization APIs. Use the OrgService to create and retrieve organizations. You can also use it to block and deactivate organizations. @@ -46,7 +46,7 @@ def create_org(self, org_name, org_ext_ref=None, notes=None, parent_org_uid=None None. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ # get parent org GUID from UID :) @@ -73,7 +73,7 @@ def get_by_id(self, org_id, **kwargs): org_id (int): An ID for an organization. Returns: - :class:`py42.response.Py42Response`: A response containing the organization. + :class:`pycpg.response.PycpgResponse`: A response containing the organization. """ uri = f"/api/v3/orgs/{self._get_guid_by_id(org_id)}" return self._connection.get(uri, params=kwargs) @@ -85,7 +85,7 @@ def get_by_uid(self, org_uid, **kwargs): org_uid (str): A UID for an organization. Returns: - :class:`py42.response.Py42Response`: A response containing the organization. + :class:`pycpg.response.PycpgResponse`: A response containing the organization. """ uri = f'/api/v3/orgs/{self._get_guid_by_id(org_uid, id_key="orgUid")}' return self._connection.get(uri, params=kwargs) @@ -96,11 +96,11 @@ def get_page(self, page_num=1, page_size=None, **kwargs): Args: page_num (int): The page number to request. page_size (int, optional): The number of organizations to return per page. - Defaults to `py42.settings.items_per_page`. + Defaults to `pycpg.settings.items_per_page`. kwargs (dict, optional): Additional advanced-user arguments. Defaults to None. 
Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ page_size = page_size or settings.items_per_page uri = "/api/v1/Org" @@ -111,7 +111,7 @@ def get_all(self, **kwargs): """Gets all organizations. Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects + generator: An object that iterates over :class:`pycpg.response.PycpgResponse` objects that each contain a page of organizations. """ return get_all_pages(self.get_page, "orgs", **kwargs) @@ -126,7 +126,7 @@ def block(self, org_id): org_id (int): An ID for an organization. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"/api/v3/orgs/{self._get_guid_by_id(org_id)}/block" return self._connection.post(uri) @@ -139,7 +139,7 @@ def unblock(self, org_id): org_id (int): An ID for an organization. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"/api/v3/orgs/{self._get_guid_by_id(org_id)}/unblock" return self._connection.post(uri) @@ -152,7 +152,7 @@ def deactivate(self, org_id): org_id (int): An ID for an organization. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"/api/v3/orgs/{self._get_guid_by_id(org_id)}/deactivate" return self._connection.post(uri) @@ -165,7 +165,7 @@ def reactivate(self, org_id): org_id (int): An ID for an organization. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"/api/v3/orgs/{self._get_guid_by_id(org_id)}/activate" return self._connection.post(uri) @@ -176,14 +176,14 @@ def get_current(self, **kwargs): WARNING: This method is incompatible with api client authentication. Returns: - :class:`py42.response.Py42Response`: A response containing the organization for the + :class:`pycpg.response.PycpgResponse`: A response containing the organization for the currently signed-in user. """ uri = "/api/v1/Org/my" try: return self._connection.get(uri, params=kwargs) - except Py42InternalServerError as err: - raise Py42InternalServerError( + except PycpgInternalServerError as err: + raise PycpgInternalServerError( err, message="Server Error. Please be aware that this method is incompatible with api client authentication.", ) @@ -196,7 +196,7 @@ def get_agent_state(self, org_id, property_name): property_name (str): The name of the property to retrieve (e.g. `fullDiskAccess`). Returns: - :class:`py42.response.Py42Response`: A response containing settings information. + :class:`pycpg.response.PycpgResponse`: A response containing settings information. """ uri = "/api/v14/agent-state/view-by-organization-id" params = {"orgId": org_id, "propertyName": property_name} @@ -209,7 +209,7 @@ def get_agent_full_disk_access_states(self, org_id): org_id (str): The org's identifier. Returns: - :class:`py42.response.Py42Response`: A response containing settings information. + :class:`pycpg.response.PycpgResponse`: A response containing settings information. """ return self.get_agent_state(org_id, "fullDiskAccess") @@ -220,7 +220,7 @@ def get_settings(self, org_id): org_id (int,str): The identifier of the org. Returns: - :class:`py42.clients._settings_managers.OrgSettings`: A class to help manage org settings. + :class:`pycpg.clients._settings_managers.OrgSettings`: A class to help manage org settings. 
""" org_settings = self.get_by_id( org_id, incSettings=True, incDeviceDefaults=True, incInheritedOrgInfo=True @@ -236,7 +236,7 @@ def update_settings(self, org_settings): org_settings (`OrgSettings`): An `OrgSettings` instance with desired modifications to settings. Returns: - :class:`py42.services.orgs.OrgSettings`: A namedtuple containing the result of the setting change api calls. + :class:`pycpg.services.orgs.OrgSettings`: A namedtuple containing the result of the setting change api calls. """ org_id = org_settings.org_id error = False @@ -247,7 +247,7 @@ def update_settings(self, org_settings): payload = {"packets": org_settings.packets} try: org_settings_response = self._connection.put(uri, json=payload) - except Py42Error as ex: + except PycpgError as ex: error = True org_settings_response = ex @@ -255,7 +255,7 @@ def update_settings(self, org_settings): uri = f"/api/v1/Org/{org_id}" try: org_response = self._connection.put(uri, json=org_settings.data) - except Py42Error as ex: + except PycpgError as ex: error = True org_response = ex return OrgSettingsResponse( @@ -275,7 +275,7 @@ def update_org(self, org_id, name=None, notes=None, ext_ref=None): ext_ref (str, optional): The updated external reference for the org. Returns: - :class:`py42.response.Py42Response`: + :class:`pycpg.response.PycpgResponse`: """ uri = f"/api/v3/orgs/{self._get_guid_by_id(org_id)}" data = {"orgName": name, "orgExtRef": ext_ref, "notes": notes} @@ -293,9 +293,9 @@ def _get_guid_by_id(self, org_id, id_key="orgId"): if org[id_key] == org_id: return org["orgGuid"] if not guid: - raise Py42Error(f"Couldn't find an Org with ID '{org_id}'.") + raise PycpgError(f"Couldn't find an Org with ID '{org_id}'.") else: try: return self.org_id_map[org_id] except KeyError: - raise Py42Error(f"Couldn't find an Org with ID '{org_id}'.") + raise PycpgError(f"Couldn't find an Org with ID '{org_id}'.") diff --git a/src/py42/sdk/queries/alerts/__init__.py b/src/pycpg/services/storage/__init__.py similarity index 100% rename from src/py42/sdk/queries/alerts/__init__.py rename to src/pycpg/services/storage/__init__.py diff --git a/src/py42/services/storage/_service_factory.py b/src/pycpg/services/storage/_service_factory.py similarity index 55% rename from src/py42/services/storage/_service_factory.py rename to src/pycpg/services/storage/_service_factory.py index 1ee178991..e5fba236d 100644 --- a/src/py42/services/storage/_service_factory.py +++ b/src/pycpg/services/storage/_service_factory.py @@ -1,11 +1,9 @@ from functools import lru_cache -from py42.exceptions import Py42Error -from py42.services._connection import Connection -from py42.services.storage.archive import StorageArchiveService -from py42.services.storage.exfiltrateddata import ExfiltratedDataService -from py42.services.storage.preservationdata import StoragePreservationDataService -from py42.services.storage.restore import PushRestoreService +from pycpg.exceptions import PycpgError +from pycpg.services._connection import Connection +from pycpg.services.storage.archive import StorageArchiveService +from pycpg.services.storage.restore import PushRestoreService class StorageServiceFactory: @@ -29,16 +27,6 @@ def get_storage_url(self, device_guid, destination_guid): response = self._connection.get(uri, params=params) return response["serverUrl"] - def create_preservation_data_service(self, host_address): - main_connection = self._connection.clone(host_address) - streaming_connection = Connection.from_host_address(host_address) - return 
StoragePreservationDataService(main_connection, streaming_connection) - - def create_exfiltrated_data_service(self, host_address): - main_connection = self._connection.clone(host_address) - streaming_connection = Connection.from_host_address(host_address) - return ExfiltratedDataService(main_connection, streaming_connection) - def auto_select_destination_guid(self, device_guid): response = self._device_service.get_by_guid( device_guid, include_backup_usage=True @@ -46,5 +34,5 @@ def auto_select_destination_guid(self, device_guid): # take the first destination guid we find destination_list = response["backupUsage"] if not destination_list: - raise Py42Error(f"No destinations found for device guid: {device_guid}") + raise PycpgError(f"No destinations found for device guid: {device_guid}") return destination_list[0]["targetComputerGuid"] diff --git a/src/py42/services/storage/archive.py b/src/pycpg/services/storage/archive.py similarity index 98% rename from src/py42/services/storage/archive.py rename to src/pycpg/services/storage/archive.py index d698ef3ff..74bd79994 100644 --- a/src/py42/services/storage/archive.py +++ b/src/pycpg/services/storage/archive.py @@ -1,4 +1,4 @@ -from py42.services.storage.restore import RestoreService +from pycpg.services.storage.restore import RestoreService class StorageArchiveService(RestoreService): diff --git a/src/py42/services/storage/restore.py b/src/pycpg/services/storage/restore.py similarity index 81% rename from src/py42/services/storage/restore.py rename to src/pycpg/services/storage/restore.py index b44908a6f..df89c80c3 100644 --- a/src/py42/services/storage/restore.py +++ b/src/pycpg/services/storage/restore.py @@ -1,9 +1,9 @@ -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42BadRestoreRequestError -from py42.exceptions import Py42InternalServerError -from py42.exceptions import Py42InvalidArchiveEncryptionKey -from py42.exceptions import Py42InvalidArchivePassword -from py42.services import BaseService +from pycpg.exceptions import PycpgBadRequestError +from pycpg.exceptions import PycpgBadRestoreRequestError +from pycpg.exceptions import PycpgInternalServerError +from pycpg.exceptions import PycpgInvalidArchiveEncryptionKey +from pycpg.exceptions import PycpgInvalidArchivePassword +from pycpg.services import BaseService class PushRestoreLocation: @@ -34,11 +34,11 @@ def create_restore_session( } try: return self._connection.post(uri, json=json_dict) - except Py42InternalServerError as err: + except PycpgInternalServerError as err: if "PRIVATE_PASSWORD_INVALID" in err.response.text: - raise Py42InvalidArchivePassword(err) + raise PycpgInvalidArchivePassword(err) elif "CUSTOM_KEY_INVALID" in err.response.text: - raise Py42InvalidArchiveEncryptionKey(err) + raise PycpgInvalidArchiveEncryptionKey(err) raise def get_restore_status(self, job_id): @@ -86,7 +86,7 @@ def start_push_restore( } try: return self._connection.post(uri, json=json_dict) - except Py42BadRequestError as err: + except PycpgBadRequestError as err: if "CREATE_FAILED" in err.response.text: - raise Py42BadRestoreRequestError(err) + raise PycpgBadRestoreRequestError(err) raise diff --git a/src/py42/services/users.py b/src/pycpg/services/users.py similarity index 82% rename from src/py42/services/users.py rename to src/pycpg/services/users.py index a3c897673..9b3f9dc38 100644 --- a/src/py42/services/users.py +++ b/src/pycpg/services/users.py @@ -1,20 +1,20 @@ -from py42 import settings -from py42.exceptions import Py42BadRequestError -from 
py42.exceptions import Py42InternalServerError -from py42.exceptions import Py42InvalidEmailError -from py42.exceptions import Py42InvalidPasswordError -from py42.exceptions import Py42InvalidUsernameError -from py42.exceptions import Py42NotFoundError -from py42.exceptions import Py42OrgNotFoundError -from py42.exceptions import Py42UserAlreadyExistsError -from py42.exceptions import Py42UsernameMustBeEmailError -from py42.services import BaseService -from py42.services import handle_active_legal_hold_error -from py42.services.util import get_all_pages +from pycpg import settings +from pycpg.exceptions import PycpgBadRequestError +from pycpg.exceptions import PycpgInternalServerError +from pycpg.exceptions import PycpgInvalidEmailError +from pycpg.exceptions import PycpgInvalidPasswordError +from pycpg.exceptions import PycpgInvalidUsernameError +from pycpg.exceptions import PycpgNotFoundError +from pycpg.exceptions import PycpgOrgNotFoundError +from pycpg.exceptions import PycpgUserAlreadyExistsError +from pycpg.exceptions import PycpgUsernameMustBeEmailError +from pycpg.services import BaseService +from pycpg.services import handle_active_legal_hold_error +from pycpg.services.util import get_all_pages class UserService(BaseService): - """A service for interacting with Code42 user APIs. Use the UserService to create and retrieve + """A service for interacting with CrashPlan user APIs. Use the UserService to create and retrieve users. You can also use it to block and deactivate users. """ @@ -42,7 +42,7 @@ def create_user( notes (str, optional): Descriptive information about the user. Defaults to None. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = "/api/v1/User" @@ -58,20 +58,20 @@ def create_user( try: return self._connection.post(uri, json=data) - except Py42InternalServerError as err: + except PycpgInternalServerError as err: if "USER_DUPLICATE" in err.response.text: - raise Py42UserAlreadyExistsError(err) + raise PycpgUserAlreadyExistsError(err) raise def get_by_id(self, user_id, **kwargs): """Gets the user with the given ID. - `Rest Documentation `__ + `Rest Documentation `__ Args: user_id (int): An ID for a user. Returns: - :class:`py42.response.Py42Response`: A response containing the user. + :class:`pycpg.response.PycpgResponse`: A response containing the user. """ uri = f"/api/v1/User/{user_id}" return self._connection.get(uri, params=kwargs) @@ -83,7 +83,7 @@ def get_by_uid(self, user_uid, **kwargs): user_uid (str): A UID for a user. Returns: - :class:`py42.response.Py42Response`: A response containing the user. + :class:`pycpg.response.PycpgResponse`: A response containing the user. """ uri = f"/api/v1/User/{user_uid}" params = dict(idType="uid", **kwargs) @@ -96,7 +96,7 @@ def get_by_username(self, username, **kwargs): username (str or unicode): A username for a user. Returns: - :class:`py42.response.Py42Response`: A response containing the user. + :class:`pycpg.response.PycpgResponse`: A response containing the user. """ uri = "/api/v1/User" @@ -109,13 +109,13 @@ def get_current(self, **kwargs): WARNING: This method is incompatible with api client authentication. Returns: - :class:`py42.response.Py42Response`: A response containing the user. + :class:`pycpg.response.PycpgResponse`: A response containing the user. 
""" uri = "/api/v1/User/my" try: return self._connection.get(uri, params=kwargs) - except Py42NotFoundError as err: - raise Py42NotFoundError( + except PycpgNotFoundError as err: + raise PycpgNotFoundError( err, message="User not found. Please be aware that this method is incompatible with api client authentication.", ) @@ -142,12 +142,12 @@ def get_page( UID. Defaults to None. role_id (int, optional): Limits users to only those with a given role ID. Defaults to None. - page_size (int, optional): The number of items on the page. Defaults to `py42.settings.items_per_page`. + page_size (int, optional): The number of items on the page. Defaults to `pycpg.settings.items_per_page`. q (str, optional): A generic query filter that searches across name, username, and email. Defaults to None. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = "/api/v1/User" @@ -164,9 +164,9 @@ def get_page( ) try: return self._connection.get(uri, params=params) - except Py42BadRequestError as err: + except PycpgBadRequestError as err: if "Organization was not found" in str(err.response.text): - raise Py42OrgNotFoundError(err, org_uid) + raise PycpgOrgNotFoundError(err, org_uid) raise def get_all( @@ -186,7 +186,7 @@ def get_all( email. Defaults to None. Returns: - generator: An object that iterates over :class:`py42.response.Py42Response` objects + generator: An object that iterates over :class:`pycpg.response.PycpgResponse` objects that each contain a page of users. """ return get_all_pages( @@ -204,10 +204,10 @@ def get_scim_data_by_uid(self, user_uid): """Returns SCIM data such as division, department, and title for a given user. Args: - user_uid (str): A Code42 user uid. + user_uid (str): A CrashPlan user uid. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = "/api/v18/scim-user-data/collated-view" params = dict(userId=user_uid) @@ -221,7 +221,7 @@ def block(self, user_id): user_id (int): An ID for a user. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"/api/v3/users/{self._get_user_uid_by_id(user_id)}/block" @@ -235,7 +235,7 @@ def unblock(self, user_id): user_id (int): An ID for a user. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"/api/v3/users/{self._get_user_uid_by_id(user_id)}/unblock" return self._connection.post(uri) @@ -249,13 +249,13 @@ def deactivate(self, user_id, block_user=None): block_user (bool, optional): Blocks the user upon deactivation. Defaults to None. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"/api/v3/users/{self._get_user_uid_by_id(user_id)}/deactivate" data = {"block": block_user} try: return self._connection.post(uri, json=data) - except Py42BadRequestError as ex: + except PycpgBadRequestError as ex: handle_active_legal_hold_error(ex, "user", user_id) raise @@ -267,7 +267,7 @@ def reactivate(self, user_id, unblock_user=None): unblock_user (bool, optional): Whether or not to unblock the user. Defaults to None. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"/api/v3/users/{self._get_user_uid_by_id(user_id)}/activate" params = {"unblock": unblock_user} @@ -281,7 +281,7 @@ def change_org_assignment(self, user_id, org_id): org_id (int): An ID for the organization to move the user to. 
Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"/api/v3/users/{self._get_user_uid_by_id(user_id)}/move" data = {"orgId": org_id} @@ -292,7 +292,7 @@ def get_available_roles(self): assign to other users. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = "/api/v1/role" return self._connection.get(uri) @@ -304,7 +304,7 @@ def get_roles(self, user_id): user_id (int): An ID for a user. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"/api/v3/users/{self._get_user_uid_by_id(user_id)}/roles" return self._connection.get(uri) @@ -317,7 +317,7 @@ def add_role(self, user_id, role_name): role_name (str): The name or ID of the role to assign to the user. e.g. "Desktop User" (name) or "desktop-user" (ID) Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ # api calls broken into helper functions to simplify testing @@ -334,7 +334,7 @@ def remove_role(self, user_id, role_name): role_name (str): The name or ID of the role to unassign from the user. e.g. "Desktop User" (name) or "desktop-user" (ID) Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ # api calls broken into helper functions to simplify testing @@ -357,7 +357,7 @@ def update_user( """Updates an existing user. Args: - user_uid (str or int): A Code42 user UID. + user_uid (str or int): A CrashPlan user UID. username (str, optional): The username to which the user's username will be changed. Defaults to None. email (str, optional): The email to which the user's email will be changed. Defaults to None. password (str, optional): The password to which the user's password will be changed. Defaults to None. @@ -367,7 +367,7 @@ def update_user( archive_size_quota_bytes (int, optional): The quota in bytes that limits the user's archive size. Defaults to None. Returns: - :class:`py42.response.Py42Response` + :class:`pycpg.response.PycpgResponse` """ uri = f"/api/v1/User/{user_uid}?idType=uid" @@ -382,21 +382,21 @@ def update_user( } try: return self._connection.put(uri, json=data) - except Py42InternalServerError as err: + except PycpgInternalServerError as err: response_text = str(err.response.text) if "USERNAME_NOT_AN_EMAIL" in response_text: - raise Py42UsernameMustBeEmailError(err) + raise PycpgUsernameMustBeEmailError(err) elif "EMAIL_INVALID" in response_text: - raise Py42InvalidEmailError(email, err) + raise PycpgInvalidEmailError(email, err) elif "NEW_PASSWORD_INVALID" in response_text: - raise Py42InvalidPasswordError(err) + raise PycpgInvalidPasswordError(err) elif "INVALID_USERNAME" in response_text: - raise Py42InvalidUsernameError(err) + raise PycpgInvalidUsernameError(err) raise def _get_user_uid_by_id(self, user_id): # Identity crisis helper method. - # Old py42 methods accepted IDs. New apis take UIDs. + # Old pycpg methods accepted IDs. New apis take UIDs. # Use additional lookup to prevent breaking changes. 
return self.get_by_id(user_id)["userUid"] diff --git a/src/pycpg/services/util.py b/src/pycpg/services/util.py new file mode 100644 index 000000000..0c387bfe4 --- /dev/null +++ b/src/pycpg/services/util.py @@ -0,0 +1,15 @@ +import pycpg.settings as settings + + +def get_all_pages(func, key, *args, **kwargs): + if kwargs.get("page_size") is None: + kwargs["page_size"] = settings.items_per_page + + item_count = page_size = kwargs["page_size"] + page_num = 0 + while item_count >= page_size: + page_num += 1 + response = func(*args, page_num=page_num, **kwargs) + yield response + page_items = response[key] if key else response.data + item_count = len(page_items) diff --git a/src/py42/settings/__init__.py b/src/pycpg/settings/__init__.py similarity index 85% rename from src/py42/settings/__init__.py rename to src/pycpg/settings/__init__.py index c53aaa231..8419caff7 100644 --- a/src/py42/settings/__init__.py +++ b/src/pycpg/settings/__init__.py @@ -1,6 +1,6 @@ import sys -from py42.__version__ import __version__ +from pycpg.__version__ import __version__ proxies = None @@ -9,7 +9,6 @@ verify_ssl_certs = True items_per_page = 500 -security_events_per_page = 500 _custom_user_prefix = "" _custom_user_suffix = "" @@ -17,7 +16,7 @@ def get_user_agent_string(): - return "{}py42/{} python/{}{}".format( + return "{}pycpg/{} python/{}{}".format( _custom_user_prefix, __version__, _python_version, _custom_user_suffix ) diff --git a/src/py42/settings/debug.py b/src/pycpg/settings/debug.py similarity index 89% rename from src/py42/settings/debug.py rename to src/pycpg/settings/debug.py index 9f885b935..df00a5bf7 100644 --- a/src/py42/settings/debug.py +++ b/src/pycpg/settings/debug.py @@ -9,7 +9,7 @@ class _DebugSettings: NONE = logging.NOTSET def __init__(self): - self.logger = logging.getLogger("py42") + self.logger = logging.getLogger("pycpg") self.logger.addHandler(logging.StreamHandler(sys.stderr)) @property diff --git a/src/py42/usercontext.py b/src/pycpg/usercontext.py similarity index 100% rename from src/py42/usercontext.py rename to src/pycpg/usercontext.py diff --git a/src/py42/util.py b/src/pycpg/util.py similarity index 94% rename from src/py42/util.py rename to src/pycpg/util.py index ac3dab167..ce406eaf7 100644 --- a/src/py42/util.py +++ b/src/pycpg/util.py @@ -19,11 +19,11 @@ def format_json(json_string): def print_response(response, label=None): - """Prints a :class:`py42.response.Py42Response` as prettified JSON. If unable to load, it + """Prints a :class:`pycpg.response.PycpgResponse` as prettified JSON. If unable to load, it prints the given response. Args: - response (:class:`py42.response.Py42Response`): The response to print. + response (:class:`pycpg.response.PycpgResponse`): The response to print. label (str, optional): A label at the beginning of the printed text. Defaults to None. 
""" if label: diff --git a/tests/clients/_archiveaccess/conftest.py b/tests/clients/_archiveaccess/conftest.py index d67d99be4..d008adcba 100644 --- a/tests/clients/_archiveaccess/conftest.py +++ b/tests/clients/_archiveaccess/conftest.py @@ -2,10 +2,10 @@ from tests.conftest import create_mock_response from tests.conftest import TEST_SESSION_ID -from py42.clients._archiveaccess.restoremanager import FileSizePoller -from py42.clients._archiveaccess.restoremanager import RestoreJobManager -from py42.services.storage.archive import StorageArchiveService -from py42.services.storage.restore import PushRestoreService +from pycpg.clients._archiveaccess.restoremanager import FileSizePoller +from pycpg.clients._archiveaccess.restoremanager import RestoreJobManager +from pycpg.services.storage.archive import StorageArchiveService +from pycpg.services.storage.restore import PushRestoreService @pytest.fixture diff --git a/tests/clients/_archiveaccess/test_accessorfactory.py b/tests/clients/_archiveaccess/test_accessorfactory.py index 3cdcb1399..833bed386 100644 --- a/tests/clients/_archiveaccess/test_accessorfactory.py +++ b/tests/clients/_archiveaccess/test_accessorfactory.py @@ -8,10 +8,10 @@ from tests.conftest import TEST_PASSWORD from tests.conftest import TEST_SESSION_ID -import py42.clients._archiveaccess.restoremanager -from py42.clients._archiveaccess import ArchiveAccessor -from py42.clients._archiveaccess.accessorfactory import ArchiveAccessorFactory -from py42.services.storage._service_factory import StorageServiceFactory +import pycpg.clients._archiveaccess.restoremanager +from pycpg.clients._archiveaccess import ArchiveAccessor +from pycpg.clients._archiveaccess.accessorfactory import ArchiveAccessorFactory +from pycpg.services.storage._service_factory import StorageServiceFactory INVALID_DEVICE_GUID = "invalid-device-guid" @@ -157,7 +157,7 @@ def test_create_archive_accessor_calls_create_restore_job_manager_with_correct_a storage_archive_service, ): spy = mocker.spy( - py42.clients._archiveaccess.accessorfactory, "create_restore_job_manager" + pycpg.clients._archiveaccess.accessorfactory, "create_restore_job_manager" ) storage_service_factory.create_archive_service.return_value = ( storage_archive_service diff --git a/tests/clients/_archiveaccess/test_init.py b/tests/clients/_archiveaccess/test_init.py index 5579f39eb..8caa9d45c 100644 --- a/tests/clients/_archiveaccess/test_init.py +++ b/tests/clients/_archiveaccess/test_init.py @@ -13,11 +13,11 @@ from tests.conftest import TEST_RESTORE_PATH from tests.conftest import TEST_SESSION_ID -from py42.clients._archiveaccess import ArchiveContentPusher -from py42.clients._archiveaccess import ArchiveContentStreamer -from py42.clients._archiveaccess import FileType -from py42.exceptions import Py42ArchiveFileNotFoundError -from py42.response import Py42Response +from pycpg.clients._archiveaccess import ArchiveContentPusher +from pycpg.clients._archiveaccess import ArchiveContentStreamer +from pycpg.clients._archiveaccess import FileType +from pycpg.exceptions import PycpgArchiveFileNotFoundError +from pycpg.response import PycpgResponse USERS_DIR = "/Users" @@ -293,7 +293,7 @@ def mock_get_file_path_metadata(*args, **kwargs): mock_response = mocker.MagicMock(spec=Response) mock_response.status_code = 200 mock_response.text = file_id_responses[file_id] - get_file_path_metadata_response = Py42Response(mock_response) + get_file_path_metadata_response = PycpgResponse(mock_response) return get_file_path_metadata_response @@ -543,7 +543,7 @@ def 
test_stream_from_backup_with_unicode_file_path_not_in_archive_raises_excepti ) invalid_path_in_downloads_folder = "/Users/qa/Downloads/吞" - with pytest.raises(Py42ArchiveFileNotFoundError) as e: + with pytest.raises(PycpgArchiveFileNotFoundError) as e: archive_accessor.stream_from_backup( TEST_BACKUP_SET_ID, invalid_path_in_downloads_folder ) @@ -570,7 +570,7 @@ def test_stream_from_backup_with_drive_not_in_archive_raises_exception( invalid_path_in_downloads_folder = ( "C:/Users/qa/Downloads/file-not-in-archive.txt" ) - with pytest.raises(Py42ArchiveFileNotFoundError) as e: + with pytest.raises(PycpgArchiveFileNotFoundError) as e: archive_accessor.stream_from_backup( TEST_BACKUP_SET_ID, invalid_path_in_downloads_folder ) @@ -598,7 +598,7 @@ def test_stream_from_backup_with_case_sensitive_drive_not_in_archive_raises_exce invalid_path_in_downloads_folder = ( "c:/Users/qa/Downloads/file-not-in-archive.txt" ) - with pytest.raises(Py42ArchiveFileNotFoundError) as e: + with pytest.raises(PycpgArchiveFileNotFoundError) as e: archive_accessor.stream_from_backup( TEST_BACKUP_SET_ID, invalid_path_in_downloads_folder ) diff --git a/tests/clients/_archiveaccess/test_restoremanager.py b/tests/clients/_archiveaccess/test_restoremanager.py index 4dc705e6e..4b2691e2e 100644 --- a/tests/clients/_archiveaccess/test_restoremanager.py +++ b/tests/clients/_archiveaccess/test_restoremanager.py @@ -11,11 +11,11 @@ from tests.conftest import TEST_RESTORE_PATH from tests.conftest import TEST_SESSION_ID -from py42.clients._archiveaccess.restoremanager import FileSizePoller -from py42.clients._archiveaccess.restoremanager import RestoreJobManager -from py42.response import Py42Response -from py42.services.storage.restore import PushRestoreExistingFiles -from py42.services.storage.restore import PushRestoreLocation +from pycpg.clients._archiveaccess.restoremanager import FileSizePoller +from pycpg.clients._archiveaccess.restoremanager import RestoreJobManager +from pycpg.response import PycpgResponse +from pycpg.services.storage.restore import PushRestoreExistingFiles +from pycpg.services.storage.restore import PushRestoreLocation class GetWebRestoreJobResponses: @@ -88,7 +88,7 @@ def mock_get_restore_status_responses(mocker, storage_archive_service, json_resp def stream_restore_result_response_mock(mocker, storage_archive_service, chunks): - stream_restore_result_response = mocker.MagicMock(spec=Py42Response) + stream_restore_result_response = mocker.MagicMock(spec=PycpgResponse) def mock_stream_restore_result(job_id, **kwargs): stream_restore_result_response.iter_content.return_value = chunks diff --git a/tests/clients/conftest.py b/tests/clients/conftest.py index 8982c53ad..600c9e03f 100644 --- a/tests/clients/conftest.py +++ b/tests/clients/conftest.py @@ -8,7 +8,7 @@ from tests.conftest import TEST_DEVICE_GUID from tests.conftest import TEST_NODE_GUID -from py42.services.archive import ArchiveService +from pycpg.services.archive import ArchiveService param = namedtuple("param", "name new_val expected_stored_val dict_location") diff --git a/tests/clients/settings/org_settings_inherited.json b/tests/clients/settings/org_settings_inherited.json index 78bd0d85e..a814a892a 100644 --- a/tests/clients/settings/org_settings_inherited.json +++ b/tests/clients/settings/org_settings_inherited.json @@ -548,7 +548,7 @@ }, { "@id": "1", - "name": "Next-Gen Platinum Support Test Environment - Backup Backup Set", + "name": "CrashPlan Platinum Support Test Environment - Backup Backup Set", "priority": "2", "backupPaths": { 
"lastModified": "1", @@ -631,10 +631,6 @@ "numCloseWorkers": "10", "idleThrottleRate": "80", "activeThrottleRate": "20", - "shouldThrottleFFS": { - "@locked": "true", - "#text": "true" - }, "checksumIncomingBackupData": "true", "checksumIncomingRestoreData": "true", "blockArchiveDataFileSize": "4242538496", diff --git a/tests/clients/settings/org_settings_not_inherited.json b/tests/clients/settings/org_settings_not_inherited.json index 103f07ab5..0eee27a31 100644 --- a/tests/clients/settings/org_settings_not_inherited.json +++ b/tests/clients/settings/org_settings_not_inherited.json @@ -567,7 +567,7 @@ }, { "@id": "1", - "name": "Next-Gen Platinum Support Test Environment - Backup Backup Set", + "name": "CrashPlan Platinum Support Test Environment - Backup Backup Set", "priority": "2", "backupPaths": { "lastModified": "1", @@ -651,10 +651,6 @@ "numCloseWorkers": "10", "idleThrottleRate": "80", "activeThrottleRate": "20", - "shouldThrottleFFS": { - "@locked": "true", - "#text": "true" - }, "checksumIncomingBackupData": "true", "checksumIncomingRestoreData": "true", "blockArchiveDataFileSize": "4242538496", diff --git a/tests/clients/settings/test_device_settings.py b/tests/clients/settings/test_device_settings.py index 2516392b6..fab36d0cd 100644 --- a/tests/clients/settings/test_device_settings.py +++ b/tests/clients/settings/test_device_settings.py @@ -3,9 +3,9 @@ import pytest from tests.clients.conftest import param -from py42.clients.settings import get_val -from py42.clients.settings.device_settings import DeviceSettings -from py42.exceptions import Py42Error +from pycpg.clients.settings import get_val +from pycpg.clients.settings.device_settings import DeviceSettings +from pycpg.exceptions import PycpgError TEST_USER_ID = 13548744 TEST_COMPUTER_ID = 4290210 @@ -185,7 +185,6 @@ "numValidateWorkers": "100", "reduceEnabled": "true", "scrapPercentAllowed": "10", - "shouldThrottleFFS": {"#text": "true", "@locked": "true"}, "smallBlockSize": "4096", "softDeleteOfFiles": "false", "strongBlockCacheLoadFactor": "0.8", @@ -674,7 +673,7 @@ def test_backup_set_add_destination_when_destination_not_available_raises( "4300": "Dest43", "4400": "Dest44", } - with pytest.raises(Py42Error): + with pytest.raises(PycpgError): self.device_settings.backup_sets[0].add_destination(404) assert ( self.device_settings.backup_sets[0].destinations @@ -711,7 +710,7 @@ def test_backup_set_remove_destination_when_destination_not_available_raises( "4200": "Dest42 ", "4300": "Dest43", } - with pytest.raises(Py42Error): + with pytest.raises(PycpgError): self.device_settings.backup_sets[0].remove_destination(404) assert ( self.device_settings.backup_sets[0].destinations diff --git a/tests/clients/settings/test_incydr_device_settings.py b/tests/clients/settings/test_incydr_device_settings.py deleted file mode 100644 index cc6fb2ed1..000000000 --- a/tests/clients/settings/test_incydr_device_settings.py +++ /dev/null @@ -1,109 +0,0 @@ -import pytest -from tests.clients.conftest import param - -from py42.clients.settings import get_val -from py42.clients.settings.device_settings import IncydrDeviceSettings - -TEST_USER_ID = 13548744 -TEST_COMPUTER_ID = 4290210 -TEST_COMPUTER_GUID = "42000000" -TEST_COMPUTER_ORG_ID = 424242 -TEST_DEVICE_VERSION = 4200000000001 -TEST_COMPUTER_NAME = "Incydr Settings Test Device" - -INCYDR_DEVICE_DICT_W_SETTINGS = { - "computerId": 4290210, - "name": "Incydr Settings Test Device", - "osHostname": "DESKTOP-I2SKEML", - "guid": "42000000", - "type": "COMPUTER", - "status": "Active", - "active": 
True, - "blocked": False, - "alertState": 0, - "alertStates": ["OK"], - "userId": 13548744, - "userUid": "1010090007721726158", - "orgId": 424242, - "orgUid": "985187827481212202", - "computerExtRef": "ext ref", - "notes": "My device setting note!", - "parentComputerId": None, - "parentComputerGuid": None, - "lastConnected": "2021-07-22T12:38:54.854Z", - "osName": "Windows", - "osVersion": "10.0.19041", - "osArch": "x86-64", - "address": "192.168.223.128", - "remoteAddress": "50.237.14.12", - "javaVersion": "", - "modelInfo": "", - "timeZone": "(UTC-06:00) Central Time (US & Cana", - "version": 4200000000001, - "productVersion": "0.0.1", - "buildVersion": 3531, - "creationDate": "2021-06-04T14:30:51.392Z", - "modificationDate": "2021-07-22T12:38:54.854Z", - "loginDate": "2021-06-04T14:30:51.362Z", - "service": "Artemis", - "orgSettings": {"securityKeyLocked": False}, -} - - -class TestIncydrDeviceSettings: - device_settings = IncydrDeviceSettings(INCYDR_DEVICE_DICT_W_SETTINGS) - - @pytest.mark.parametrize( - "param", - [ - ("name", TEST_COMPUTER_NAME), - ("computer_id", TEST_COMPUTER_ID), - ("device_id", TEST_COMPUTER_ID), - ("guid", TEST_COMPUTER_GUID), - ("user_id", TEST_USER_ID), - ("org_id", TEST_COMPUTER_ORG_ID), - ("version", TEST_DEVICE_VERSION), - ], - ) - def test_device_settings_properties_return_expected_value_and_cannot_be_changed( - self, param - ): - name, expected_value = param - assert getattr(self.device_settings, name) == expected_value - with pytest.raises(AttributeError): - setattr(self.device_settings, name, expected_value) - - @pytest.mark.parametrize( - "param", - [("notes", "My device setting note!"), ("external_reference", "ext ref")], - ) - def test_device_settings_get_mutable_properties_return_expected_values(self, param): - name, expected_value = param - assert getattr(self.device_settings, name) == expected_value - - @pytest.mark.parametrize( - "param", - [ - param( - name="notes", - new_val="an Incydr device note.", - expected_stored_val="an Incydr device note.", - dict_location=["notes"], - ), - param( - name="external_reference", - new_val="reference#id", - expected_stored_val="reference#id", - dict_location=["computerExtRef"], - ), - ], - ) - def test_device_settings_setting_mutable_property_updates_dict_correctly_and_registers_changes( - self, param - ): - setattr(self.device_settings, param.name, param.new_val) - assert ( - get_val(self.device_settings.data, param.dict_location) - == param.expected_stored_val - ) - assert param.name in self.device_settings.changes diff --git a/tests/clients/settings/test_org_settings.py b/tests/clients/settings/test_org_settings.py index 1bb537983..79ca09871 100644 --- a/tests/clients/settings/test_org_settings.py +++ b/tests/clients/settings/test_org_settings.py @@ -11,9 +11,9 @@ from tests.clients.conftest import TEST_HOME_DIR from tests.clients.conftest import TEST_PHOTOS_DIR -from py42.clients.settings import get_val -from py42.clients.settings.org_settings import OrgSettings -from py42.exceptions import Py42Error +from pycpg.clients.settings import get_val +from pycpg.clients.settings.org_settings import OrgSettings +from pycpg.exceptions import PycpgError ONEGB = 1000000000 @@ -24,72 +24,18 @@ "locked": False, "id": 510808, }, - "org-securityTools-device-detection-enable": { - "scope": "ORG", - "value": "true", - "locked": False, - "id": 537575, - }, - "device_engine_pause_allowedTypes": { - "scope": "ORG", - "value": '["legalHold","backup"]', - "locked": False, - "id": 537575, - }, - 
"device_advancedExfiltrationDetection_enabled": { - "scope": "ORG", - "value": "true", - "locked": False, - "id": 537575, - }, - "org_securityTools_printer_detection_enable": { - "scope": "ORG", - "value": "true", - "locked": False, - "id": 537575, - }, "device_network_dscp_preferIP4": { "scope": "ORG", "value": "false", "locked": False, "id": 537575, }, - "org-securityTools-cloud-detection-enable": { - "scope": "ORG", - "value": "true", - "locked": False, - "id": 537575, - }, - "org-securityTools-enable": { - "scope": "ORG", - "value": "true", - "locked": False, - "id": 537575, - }, "c42.msa.acceptance": { "scope": "ORG", - "value": "917633711460206173;tim.putnam+legacyadmin@code42.com;2019-09-05T17:05:09:046", + "value": "917633711460206173;tim.putnam+legacyadmin@crashplan.com;2019-09-05T17:05:09:046", "locked": True, "id": 510682, }, - "org-securityTools-yara-scanner-enable": { - "scope": "ORG", - "value": "true", - "locked": False, - "id": 510853, - }, - "org-securityTools-restore-detection-enable": { - "scope": "ORG", - "value": "true", - "locked": False, - "id": 510853, - }, - "device_fileForensics_enabled": { - "scope": "ORG", - "value": "true", - "locked": False, - "id": 537575, - }, "device_webRestore_enabled": { "scope": "ORG", "value": "false", @@ -108,12 +54,6 @@ "locked": True, "id": 537575, }, - "org-securityTools-open-file-detection-enable": { - "scope": "ORG", - "value": "true", - "locked": False, - "id": 537575, - }, "device_network_utilization_schedule": { "scope": "ORG", "value": '{"sun":{"included":"true","startTimeOfDay":"09:00","endTimeOfDay":"17:00"},"mon":{"included":"true","startTimeOfDay":"09:00","endTimeOfDay":"17:00"},"tue":{"included":"true","startTimeOfDay":"09:00","endTimeOfDay":"17:00"},"wed":{"included":true,"startTimeOfDay":"12:00","endTimeOfDay":"19:00"},"thu":{"included":"true","startTimeOfDay":"09:00","endTimeOfDay":"17:00"},"fri":{"included":"true","startTimeOfDay":"09:00","endTimeOfDay":"17:00"},"sat":{"included":"true","startTimeOfDay":"09:00","endTimeOfDay":"17:00"}}', @@ -303,136 +243,9 @@ def test_org_settings_device_defaults_retrieve_expected_results( attr, expected = param assert getattr(org_settings.device_defaults, attr) == expected - def test_org_settings_endpoint_monitoring_enabled_returns_expected_results( - self, org_settings_dict - ): - t_setting = deepcopy(TEST_T_SETTINGS_DICT) - t_setting["org-securityTools-enable"]["value"] = "true" - org_settings = OrgSettings(org_settings_dict, t_setting) - assert org_settings.endpoint_monitoring_enabled is True - - t_setting["org-securityTools-enable"]["value"] = "false" - org_settings = OrgSettings(org_settings_dict, t_setting) - assert org_settings.endpoint_monitoring_enabled is False - - def test_org_settings_set_endpoint_monitoring_enabled_to_true_from_false_creates_expected_packets( - self, org_settings_dict - ): - t_setting = deepcopy(TEST_T_SETTINGS_DICT) - t_setting["org-securityTools-enable"]["value"] = "true" - org_settings = OrgSettings(org_settings_dict, t_setting) - org_settings.endpoint_monitoring_enabled = False - assert { - "key": "org-securityTools-enable", - "value": "false", - "locked": False, - } in org_settings.packets - assert { - "key": "device_advancedExfiltrationDetection_enabled", - "value": "false", - "locked": False, - } in org_settings.packets - assert { - "key": "org-securityTools-cloud-detection-enable", - "value": "false", - "locked": False, - } in org_settings.packets - assert { - "key": "org-securityTools-open-file-detection-enable", - "value": "false", - "locked": 
False, - } in org_settings.packets - assert { - "key": "org-securityTools-device-detection-enable", - "value": "false", - "locked": False, - } in org_settings.packets - assert { - "key": "org_securityTools_printer_detection_enable", - "value": "false", - "locked": False, - } in org_settings.packets - assert len(org_settings.packets) == 6 - - def test_org_settings_set_endpoint_monitoring_enabled_to_false_from_true_creates_expected_packets( - self, org_settings_dict - ): - t_setting = deepcopy(TEST_T_SETTINGS_DICT) - t_setting["org-securityTools-enable"]["value"] = "false" - org_settings = OrgSettings(org_settings_dict, t_setting) - org_settings.endpoint_monitoring_enabled = True - assert { - "key": "org-securityTools-enable", - "value": "true", - "locked": False, - } in org_settings.packets - assert { - "key": "device_advancedExfiltrationDetection_enabled", - "value": "true", - "locked": False, - } in org_settings.packets - assert len(org_settings.packets) == 2 - @pytest.mark.parametrize( "param", [ - ( - "endpoint_monitoring_removable_media_enabled", - "org-securityTools-device-detection-enable", - ), - ( - "endpoint_monitoring_cloud_sync_enabled", - "org-securityTools-cloud-detection-enable", - ), - ( - "endpoint_monitoring_browser_and_applications_enabled", - "org-securityTools-open-file-detection-enable", - ), - ( - "endpoint_monitoring_file_metadata_collection_enabled", - "device_fileForensics_enabled", - ), - ], - ) - def test_org_settings_set_endpoint_monitoring_sub_categories_when_endpoint_monitoring_disabled_sets_endpoint_monitoring_enabled( - self, param, org_settings_dict - ): - attr, key = param - t_setting = deepcopy(TEST_T_SETTINGS_DICT) - settings = deepcopy(org_settings_dict) - t_setting["org-securityTools-enable"]["value"] = "false" - org_settings = OrgSettings(settings, t_setting) - setattr(org_settings, attr, True) - packet_keys = [packet["key"] for packet in org_settings.packets] - assert key in packet_keys - assert "org-securityTools-enable" in packet_keys - for packet in org_settings.packets: - if packet["key"] == "org-securityTools-enable": - assert packet["value"] == "true" - if packet["key"] == key: - assert packet["value"] == "true" - - @pytest.mark.parametrize( - "param", - [ - param( - name="endpoint_monitoring_file_metadata_scan_enabled", - new_val=True, - expected_stored_val="true", - dict_location="device_fileForensics_scan_enabled", - ), - param( - name="endpoint_monitoring_file_metadata_ingest_scan_enabled", - new_val=True, - expected_stored_val="true", - dict_location="device_fileForensics_enqueue_scan_events_during_ingest", - ), - param( - name="endpoint_monitoring_background_priority_enabled", - new_val=True, - expected_stored_val="true", - dict_location="device_background_priority_enabled", - ), param( name="web_restore_enabled", new_val=True, @@ -462,21 +275,6 @@ def test_org_settings_set_independent_t_setting_properties( if packet["key"] == param.dict_location: assert packet["value"] == "false" - def test_missing_t_settings_return_none_when_accessed_by_property( - self, org_settings_dict - ): - org_settings = OrgSettings(org_settings_dict, TEST_T_SETTINGS_DICT) - assert org_settings.endpoint_monitoring_file_metadata_scan_enabled is None - assert ( - org_settings.endpoint_monitoring_file_metadata_ingest_scan_enabled is None - ) - assert org_settings.endpoint_monitoring_background_priority_enabled is None - assert org_settings.endpoint_monitoring_custom_applications_win is None - assert org_settings.endpoint_monitoring_custom_applications_mac is None - 
assert ( - org_settings.endpoint_monitoring_file_metadata_collection_exclusions is None - ) - @pytest.mark.parametrize( "param", [ @@ -616,7 +414,7 @@ def test_backup_set_add_destination_when_destination_not_available_raises( "43": "PROe Cloud, US ", "673679195225718785": "PROe Cloud, AMS", } - with pytest.raises(Py42Error): + with pytest.raises(PycpgError): org_settings.device_defaults.backup_sets[1].add_destination(404) assert ( org_settings.device_defaults.backup_sets[1].destinations @@ -649,7 +447,7 @@ def test_backup_set_remove_destination_when_destination_not_available_raises( "43": "PROe Cloud, US ", "673679195225718785": "PROe Cloud, AMS", } - with pytest.raises(Py42Error): + with pytest.raises(PycpgError): org_settings.device_defaults.backup_sets[1].remove_destination(404) assert ( org_settings.device_defaults.backup_sets[1].destinations diff --git a/tests/clients/test_alertrules.py b/tests/clients/test_alertrules.py deleted file mode 100644 index 02cfd69e0..000000000 --- a/tests/clients/test_alertrules.py +++ /dev/null @@ -1,190 +0,0 @@ -import json - -import pytest -from tests.conftest import create_mock_error -from tests.conftest import create_mock_response - -from py42.clients.alertrules import AlertRulesClient -from py42.exceptions import Py42InternalServerError -from py42.exceptions import Py42InvalidRuleOperationError -from py42.services.alertrules import AlertRulesService -from py42.services.alerts import AlertService - -TEST_RULE_ID = "rule-id" -TEST_USER_ID = "user-id" - - -TEST_SYSTEM_RULE_RESPONSE = { - "ruleMetadata": [ - { - "observerRuleId": TEST_RULE_ID, - "type": "FED_FILE_TYPE_MISMATCH", - "isSystem": True, - "ruleSource": "NOTVALID", - } - ] -} - - -@pytest.fixture -def mock_alerts_service_system_rule(mocker, mock_alerts_service): - response = create_mock_response(mocker, json.dumps(TEST_SYSTEM_RULE_RESPONSE)) - mock_alerts_service.get_rule_by_observer_id.return_value = response - return mock_alerts_service - - -@pytest.fixture -def mock_alert_rules_service(mocker): - return mocker.MagicMock(spec=AlertRulesService) - - -@pytest.fixture -def mock_alerts_service(mocker): - return mocker.MagicMock(spec=AlertService) - - -@pytest.fixture -def internal_server_error(mocker): - return create_mock_error(Py42InternalServerError, mocker, "") - - -class TestAlertRulesClient: - def test_add_user_calls_alert_rules_service_add_user_with_expected_value( - self, mock_alerts_service, mock_alert_rules_service - ): - alert_rules_client = AlertRulesClient( - mock_alerts_service, mock_alert_rules_service - ) - alert_rules_client.add_user(TEST_RULE_ID, TEST_USER_ID) - mock_alert_rules_service.add_user.assert_called_once_with( - TEST_RULE_ID, TEST_USER_ID - ) - - def test_add_user_raises_invalid_rule_type_error_when_adding_to_system_rule( - self, - mock_alerts_service_system_rule, - mock_alert_rules_service, - internal_server_error, - ): - def add(*args, **kwargs): - raise internal_server_error - - mock_alert_rules_service.add_user.side_effect = add - alert_rules_module = AlertRulesClient( - mock_alerts_service_system_rule, mock_alert_rules_service - ) - with pytest.raises(Py42InvalidRuleOperationError) as err: - alert_rules_module.add_user(TEST_RULE_ID, TEST_USER_ID) - - actual = str(err.value) - assert ( - "Only alert rules with a source of 'Alerting' can be targeted by this command." - in actual - ) - assert "Rule rule-id has a source of 'NOTVALID'." 
in actual - - def test_remove_user_calls_alert_rules_service_remove_user_with_expected_value( - self, mock_alerts_service, mock_alert_rules_service - ): - alert_rules_client = AlertRulesClient( - mock_alerts_service, mock_alert_rules_service - ) - alert_rules_client.remove_user(TEST_RULE_ID, TEST_USER_ID) - mock_alert_rules_service.remove_user.assert_called_once_with( - TEST_RULE_ID, TEST_USER_ID - ) - - def test_remove_user_raises_invalid_rule_type_error_when_adding_to_system_rule( - self, - mock_alerts_service_system_rule, - mock_alert_rules_service, - internal_server_error, - ): - def add(*args, **kwargs): - raise internal_server_error - - mock_alert_rules_service.remove_user.side_effect = add - alert_rules_module = AlertRulesClient( - mock_alerts_service_system_rule, mock_alert_rules_service - ) - with pytest.raises(Py42InvalidRuleOperationError) as err: - alert_rules_module.remove_user(TEST_RULE_ID, TEST_USER_ID) - - actual = str(err.value) - assert ( - "Only alert rules with a source of 'Alerting' can be targeted by this command." - in actual - ) - assert "Rule rule-id has a source of 'NOTVALID'." in actual - - def test_remove_all_users_calls_alert_service_remove_all_users_with_expected_value( - self, mock_alerts_service, mock_alert_rules_service - ): - alert_rules_client = AlertRulesClient( - mock_alerts_service, mock_alert_rules_service - ) - alert_rules_client.remove_all_users(TEST_RULE_ID) - mock_alert_rules_service.remove_all_users.assert_called_once_with(TEST_RULE_ID) - - def test_remove_all_users_raises_invalid_rule_type_error_when_adding_to_system_rule( - self, - mock_alerts_service_system_rule, - mock_alert_rules_service, - internal_server_error, - ): - def add(*args, **kwargs): - raise internal_server_error - - mock_alert_rules_service.remove_all_users.side_effect = add - alert_rules_module = AlertRulesClient( - mock_alerts_service_system_rule, mock_alert_rules_service - ) - with pytest.raises(Py42InvalidRuleOperationError) as err: - alert_rules_module.remove_all_users(TEST_RULE_ID) - - actual = str(err.value) - assert ( - "Only alert rules with a source of 'Alerting' can be targeted by this command." - in actual - ) - assert "Rule rule-id has a source of 'NOTVALID'." 
in actual - - def test_alert_rules_service_calls_get_all_with_expected_value( - self, mock_alerts_service, mock_alert_rules_service - ): - alert_rules_client = AlertRulesClient( - mock_alerts_service, mock_alert_rules_service - ) - alert_rules_client.get_all() - assert mock_alerts_service.get_all_rules.call_count == 1 - - def test_alert_rules_service_calls_get_all_by_name_with_expected_value( - self, mock_alerts_service, mock_alert_rules_service - ): - rule_name = "test rule" - alert_rules_client = AlertRulesClient( - mock_alerts_service, mock_alert_rules_service - ) - alert_rules_client.get_all_by_name(rule_name) - mock_alerts_service.get_all_rules_by_name.assert_called_once_with(rule_name) - - def test_alert_rules_service_calls_get_rules_by_observer_id_with_expected_value( - self, mock_alerts_service, mock_alert_rules_service - ): - rule_id = "test-rule-id" - alert_rules_client = AlertRulesClient( - mock_alerts_service, mock_alert_rules_service - ) - alert_rules_client.get_by_observer_id(rule_id) - mock_alerts_service.get_rule_by_observer_id.assert_called_once_with(rule_id) - - def test_alert_rules_service_calls_get_rules_page_with_expected_params( - self, mock_alerts_service, mock_alert_rules_service - ): - alert_rules_client = AlertRulesClient( - mock_alerts_service, mock_alert_rules_service - ) - alert_rules_client.get_page("key", "dir", 70, 700) - mock_alerts_service.get_rules_page.assert_called_once_with( - sort_key="key", sort_direction="dir", page_num=70, page_size=700 - ) diff --git a/tests/clients/test_alerts.py b/tests/clients/test_alerts.py deleted file mode 100644 index c7019bf35..000000000 --- a/tests/clients/test_alerts.py +++ /dev/null @@ -1,404 +0,0 @@ -import json - -import pytest - -from .conftest import create_mock_response -from py42.clients.alerts import AlertsClient -from py42.sdk.queries.alerts.alert_query import AlertQuery -from py42.services.alertrules import AlertRulesService -from py42.services.alerts import AlertService - -ALERT_A = { - "id": "A", - "createdAt": "2021-01-01T09:40:05.2837100Z", - "actor": "A", - "type": "A", - "name": "A", - "description": "A", - "actorId": "A", - "target": "A", - "severity": "A", - "ruleSource": "A", - "observations": "A", - "notes": "A", - "state": "A", - "stateLastModifiedAt": "2021-01-01T09:40:05.2837100Z", - "stateLastModifiedBy": "A", - "lastModifiedTime": "2021-01-01T09:40:05.2837100Z", - "lastModifiedBy": "A", - "ruleId": "A", - "tenantId": "A", -} -ALERT_B = { - "id": "B", - "createdAt": "2021-02-02T18:24:15.5284760Z", - "actor": "B", - "type": "B", - "name": "B", - "description": "B", - "actorId": "B", - "target": "B", - "severity": "B", - "ruleSource": "B", - "observations": "B", - "notes": "B", - "state": "B", - "stateLastModifiedAt": "2021-02-02T18:24:15.5284760Z", - "stateLastModifiedBy": "B", - "lastModifiedTime": "2021-02-02T18:24:15.5284760Z", - "lastModifiedBy": "B", - "ruleId": "B", - "tenantId": "B", -} -ALERT_C = { - "id": "C", - "createdAt": "2021-03-03T09:40:26.6477830Z", - "actor": "C", - "type": "C", - "name": "C", - "description": "C", - "actorId": "C", - "target": "C", - "severity": "C", - "ruleSource": "C", - "observations": "C", - "notes": "C", - "state": "C", - "stateLastModifiedAt": "2021-03-03T09:40:26.6477830Z", - "stateLastModifiedBy": "C", - "lastModifiedTime": "2021-03-03T09:40:26.6477830Z", - "lastModifiedBy": "C", - "ruleId": "C", - "tenantId": "C", -} -ALERT_D = { - "id": "D", - "createdAt": "2021-04-04T09:40:50.7749710Z", - "actor": "D", - "type": "D", - "name": "D", - "description": "D", 
- "actorId": "D", - "target": "D", - "severity": "D", - "ruleSource": "D", - "observations": "D", - "notes": "D", - "state": "D", - "stateLastModifiedAt": "2021-04-04T09:40:50.7749710Z", - "stateLastModifiedBy": "D", - "lastModifiedTime": "2021-04-04T09:40:50.7749710Z", - "lastModifiedBy": "D", - "ruleId": "D", - "tenantId": "D", -} -ALERT_E = { - "id": "E", - "createdAt": "2021-05-05T21:29:34.5510380Z", - "actor": "E", - "type": "E", - "name": "E", - "description": "E", - "actorId": "E", - "target": "E", - "severity": "E", - "ruleSource": "E", - "observations": "E", - "notes": "E", - "state": "E", - "stateLastModifiedAt": "2021-05-05T21:29:34.5510380Z", - "stateLastModifiedBy": "E", - "lastModifiedTime": "2021-05-05T21:29:34.5510380Z", - "lastModifiedBy": "E", - "ruleId": "E", - "tenantId": "E", -} -ALERT_F = { - "id": "F", - "createdAt": "2021-06-06T21:24:50.7541390Z", - "actor": "F", - "type": "F", - "name": "F", - "description": "F", - "actorId": "F", - "target": "F", - "severity": "F", - "ruleSource": "F", - "observations": "F", - "notes": "F", - "state": "F", - "stateLastModifiedAt": "2021-06-06T21:24:50.7541390Z", - "stateLastModifiedBy": "F", - "lastModifiedTime": "2021-06-06T21:24:50.7541390Z", - "lastModifiedBy": "F", - "ruleId": "F", - "tenantId": "F", -} -TEST_ALERTS = [ALERT_A, ALERT_B, ALERT_C, ALERT_D, ALERT_E, ALERT_F] - - -@pytest.fixture -def mock_alerts_service(mocker): - return mocker.MagicMock(spec=AlertService) - - -@pytest.fixture -def mock_alert_rules_service(mocker): - return mocker.MagicMock(spec=AlertRulesService) - - -@pytest.fixture -def mock_alert_query(mocker): - return mocker.MagicMock(spec=AlertQuery) - - -@pytest.fixture -def mock_alerts_service_with_pages(mocker, mock_alerts_service): - def _func(ascending=True): - alerts = TEST_ALERTS if ascending else TEST_ALERTS[::-1] - alert_page_1 = create_mock_response(mocker, json.dumps({"alerts": alerts[:3]})) - alert_page_2 = create_mock_response(mocker, json.dumps({"alerts": alerts[3:]})) - - def page_gen(): - yield alert_page_1 - yield alert_page_2 - - mock_alerts_service.search_all_pages.return_value = page_gen() - return mock_alerts_service - - return _func - - -@pytest.fixture -def mock_alerts_service_with_no_alerts(mocker, mock_alerts_service): - no_alerts_page = create_mock_response(mocker, json.dumps({"alerts": []})) - - def page_gen(): - yield no_alerts_page - - mock_alerts_service.search_all_pages.return_value = page_gen() - return mock_alerts_service - - -@pytest.fixture -def mock_details(mocker): - detail_page_1 = create_mock_response( - mocker, json.dumps({"alerts": [ALERT_B, ALERT_C, ALERT_A]}) - ) - detail_page_2 = create_mock_response( - mocker, json.dumps({"alerts": [ALERT_F, ALERT_E, ALERT_D]}) - ) - - def mock_get_details(alert_ids): - if set(alert_ids) == {"A", "B", "C"}: - return detail_page_1 - if set(alert_ids) == {"D", "E", "F"}: - return detail_page_2 - - return mock_get_details - - -class TestAlertsClient: - _alert_ids = ["test-id1", "test-id2"] - - def test_rules_returns_rules_client( - self, mock_alerts_service, mock_alert_rules_service - ): - alert_client = AlertsClient(mock_alerts_service, mock_alert_rules_service) - assert alert_client.rules - - def test_alerts_client_calls_search_with_expected_value( - self, - mock_alerts_service, - mock_alert_rules_service, - mock_alert_query, - ): - alert_client = AlertsClient(mock_alerts_service, mock_alert_rules_service) - alert_client.search(mock_alert_query) - mock_alerts_service.search.assert_called_once_with(mock_alert_query, 1, None) - - 
def test_alerts_client_calls_get_details_with_expected_value( - self, mock_alerts_service, mock_alert_rules_service - ): - alert_client = AlertsClient(mock_alerts_service, mock_alert_rules_service) - alert_client.get_details(self._alert_ids) - mock_alerts_service.get_details.assert_called_once_with(self._alert_ids) - - def test_alerts_client_calls_update_state_with_resolve_state_and_expected_value( - self, - mock_alerts_service, - mock_alert_rules_service, - ): - alert_client = AlertsClient(mock_alerts_service, mock_alert_rules_service) - alert_client.resolve(self._alert_ids) - mock_alerts_service.update_state.assert_called_once_with( - "RESOLVED", self._alert_ids, note=None - ) - - def test_alerts_client_calls_update_state_with_reopen_state_and_expected_value( - self, - mock_alerts_service, - mock_alert_rules_service, - ): - alert_client = AlertsClient(mock_alerts_service, mock_alert_rules_service) - alert_client.reopen(self._alert_ids) - mock_alerts_service.update_state.assert_called_once_with( - "OPEN", self._alert_ids, note=None - ) - - def test_alerts_client_calls_update_state_with_state_and_expected_value( - self, - mock_alerts_service, - mock_alert_rules_service, - ): - alert_client = AlertsClient(mock_alerts_service, mock_alert_rules_service) - alert_client.update_state("RESOLVED", self._alert_ids) - mock_alerts_service.update_state.assert_called_once_with( - "RESOLVED", self._alert_ids, note=None - ) - - def test_alerts_client_calls_update_note_with_expected_value_and_param( - self, - mock_alerts_service, - mock_alert_rules_service, - ): - alert_client = AlertsClient(mock_alerts_service, mock_alert_rules_service) - alert_client.update_note("alert-id", "a note") - mock_alerts_service.update_note.assert_called_once_with("alert-id", "a note") - - def test_alerts_client_calls_search_all_pages_with_expected_value_and_param( - self, - mock_alerts_service, - mock_alert_rules_service, - ): - alert_client = AlertsClient(mock_alerts_service, mock_alert_rules_service) - query = AlertQuery() - alert_client.search_all_pages(query) - mock_alerts_service.search_all_pages.assert_called_once_with(query) - - def test_alerts_client_calls_get_aggregate_data_with_expected_value_and_param( - self, - mock_alerts_service, - mock_alert_rules_service, - ): - alert_client = AlertsClient(mock_alerts_service, mock_alert_rules_service) - alert_client.get_aggregate_data("alert-id") - mock_alerts_service.get_aggregate_data.assert_called_once_with("alert-id") - - def test_alerts_client_get_all_alert_details_calls_get_details_for_each_page( - self, mock_alerts_service_with_pages, mock_alert_rules_service - ): - mock_alerts_service = mock_alerts_service_with_pages(ascending=True) - alert_client = AlertsClient(mock_alerts_service, mock_alert_rules_service) - query = AlertQuery() - list(alert_client.get_all_alert_details(query)) - assert mock_alerts_service.get_details.call_args_list[0][0][0] == [ - "A", - "B", - "C", - ] - assert mock_alerts_service.get_details.call_args_list[1][0][0] == [ - "D", - "E", - "F", - ] - - @pytest.mark.parametrize( - "sort_key", - [ - "AlertId", - "TenantId", - "Type", - "Name", - "Description", - "Actor", - "ActorId", - "Target", - "Severity", - "RuleSource", - "CreatedAt", - "Observations", - "Notes", - "State", - "StateLastModifiedAt", - "StateLastModifiedBy", - "LastModifiedTime", - "LastModifiedBy", - "RuleId", - ], - ) - def test_alerts_client_get_all_alert_details_sorts_results_according_to_query_sort_key( - self, - mock_alerts_service_with_pages, - mock_alert_rules_service, - 
mock_details, - sort_key, - ): - mock_alerts_service = mock_alerts_service_with_pages(ascending=True) - mock_alerts_service.get_details = mock_details - alert_client = AlertsClient(mock_alerts_service, mock_alert_rules_service) - query = AlertQuery() - query.sort_direction = "asc" - query.sort_key = sort_key - results = list(alert_client.get_all_alert_details(query)) - assert results == [ALERT_A, ALERT_B, ALERT_C, ALERT_D, ALERT_E, ALERT_F] - - @pytest.mark.parametrize( - "sort_key", - [ - "AlertId", - "TenantId", - "Type", - "Name", - "Description", - "Actor", - "ActorId", - "Target", - "Severity", - "RuleSource", - "CreatedAt", - "Observations", - "Notes", - "State", - "StateLastModifiedAt", - "StateLastModifiedBy", - "LastModifiedTime", - "LastModifiedBy", - "RuleId", - ], - ) - def test_alerts_client_get_all_alert_details_sorts_results_descending_when_specified( - self, - mock_alerts_service_with_pages, - mock_alert_rules_service, - mock_details, - sort_key, - ): - mock_alerts_service = mock_alerts_service_with_pages(ascending=False) - mock_alerts_service.get_details = mock_details - alert_client = AlertsClient(mock_alerts_service, mock_alert_rules_service) - query = AlertQuery() - query.sort_direction = "desc" - query.sort_key = sort_key - results = list(alert_client.get_all_alert_details(query)) - assert results == [ALERT_F, ALERT_E, ALERT_D, ALERT_C, ALERT_B, ALERT_A] - - def test_alerts_client_get_all_alert_details_returns_empty_generator_when_no_alerts_found( - self, mock_alerts_service_with_no_alerts, mock_alert_rules_service - ): - alert_client = AlertsClient( - mock_alerts_service_with_no_alerts, mock_alert_rules_service - ) - query = AlertQuery() - results = list(alert_client.get_all_alert_details(query)) - assert results == [] - - def test_alerts_client_get_all_alert_details_does_not_call_get_details_when_no_alerts_found( - self, mock_alerts_service_with_no_alerts, mock_alert_rules_service - ): - alert_client = AlertsClient( - mock_alerts_service_with_no_alerts, mock_alert_rules_service - ) - query = AlertQuery() - list(alert_client.get_all_alert_details(query)) - assert mock_alerts_service_with_no_alerts.get_details.call_count == 0 diff --git a/tests/clients/test_archive.py b/tests/clients/test_archive.py deleted file mode 100644 index 931df2eb5..000000000 --- a/tests/clients/test_archive.py +++ /dev/null @@ -1,377 +0,0 @@ -import pytest -from tests.conftest import create_mock_response -from tests.conftest import get_file_selection -from tests.conftest import TEST_ACCEPTING_GUID -from tests.conftest import TEST_BACKUP_SET_ID -from tests.conftest import TEST_DESTINATION_GUID_1 -from tests.conftest import TEST_DEVICE_GUID -from tests.conftest import TEST_ENCRYPTION_KEY -from tests.conftest import TEST_PASSWORD -from tests.conftest import TEST_RESTORE_PATH - -from py42.clients._archiveaccess import ArchiveContentPusher -from py42.clients._archiveaccess import ArchiveContentStreamer -from py42.clients._archiveaccess import ArchiveExplorer -from py42.clients._archiveaccess import FileType -from py42.clients._archiveaccess.accessorfactory import ArchiveAccessorFactory -from py42.clients.archive import ArchiveClient -from py42.exceptions import Py42Error - - -TEST_ARCHIVE_GUID = "4224" -TEST_DAYS = 42 -TEST_ORG_ID = 424242 -TEST_PATHS = ["path/to/first/file", "path/to/second/file"] -TEST_FILE_SELECTIONS = [ - get_file_selection(TEST_PATHS[0], FileType.FILE), - get_file_selection(TEST_PATHS[1], FileType.FILE), -] - - -@pytest.fixture -def archive_accessor_factory(mocker): - return 
mocker.MagicMock(spec=ArchiveAccessorFactory) - - -@pytest.fixture -def archive_content_streamer(mocker): - mock = mocker.MagicMock(spec=ArchiveContentStreamer) - mock.destination_guid = TEST_DESTINATION_GUID_1 - return mock - - -@pytest.fixture -def archive_content_pusher(mocker): - return mocker.MagicMock(spec=ArchiveContentPusher) - - -@pytest.fixture -def archive_explorer(mocker): - mock = mocker.MagicMock(spec=ArchiveExplorer) - mock.destination_guid = TEST_DESTINATION_GUID_1 - mock.create_file_selections.return_value = [ - get_file_selection(TEST_PATHS[0], FileType.FILE), - get_file_selection(TEST_PATHS[1], FileType.FILE), - ] - return mock - - -class TestArchiveClient: - def test_get_by_archive_guid_calls_get_single_archive_with_expected_params( - self, archive_service, archive_accessor_factory - ): - archive = ArchiveClient(archive_accessor_factory, archive_service) - archive.get_by_archive_guid(TEST_ARCHIVE_GUID) - archive_service.get_single_archive.assert_called_once_with(TEST_ARCHIVE_GUID) - - def test_get_all_by_device_guid_calls_get_all_archives_from_value_with_expected_params( - self, archive_service, archive_accessor_factory - ): - archive = ArchiveClient(archive_accessor_factory, archive_service) - for _ in archive.get_all_by_device_guid(TEST_DEVICE_GUID): - pass - archive_service.get_all_archives_from_value.assert_called_once_with( - TEST_DEVICE_GUID, "backupSourceGuid" - ) - - def test_stream_from_backup_calls_get_archive_accessor_with_expected_params( - self, archive_accessor_factory, archive_service - ): - archive = ArchiveClient(archive_accessor_factory, archive_service) - archive.stream_from_backup( - TEST_PATHS[0], - TEST_DEVICE_GUID, - TEST_DESTINATION_GUID_1, - TEST_PASSWORD, - TEST_ENCRYPTION_KEY, - ) - archive_accessor_factory.create_archive_accessor.assert_called_once_with( - TEST_DEVICE_GUID, - ArchiveContentStreamer, - destination_guid=TEST_DESTINATION_GUID_1, - private_password=TEST_PASSWORD, - encryption_key=TEST_ENCRYPTION_KEY, - ) - - def test_stream_from_backup_when_given_multiple_paths_calls_archive_accessor_stream_from_backup_with_expected_params( - self, archive_accessor_factory, archive_service, archive_content_streamer - ): - archive_accessor_factory.create_archive_accessor.return_value = ( - archive_content_streamer - ) - archive = ArchiveClient(archive_accessor_factory, archive_service) - archive.stream_from_backup( - TEST_PATHS, - TEST_DEVICE_GUID, - TEST_DESTINATION_GUID_1, - TEST_PASSWORD, - TEST_ENCRYPTION_KEY, - file_size_calc_timeout=10000, - show_deleted=True, - ) - archive_content_streamer.stream_from_backup.assert_called_once_with( - TEST_BACKUP_SET_ID, - TEST_PATHS, - file_size_calc_timeout=10000, - show_deleted=True, - ) - - def test_stream_from_backup_when_given_backup_set_id_calls_archive_accessor_stream_from_backup_with_expected_params( - self, - mocker, - archive_accessor_factory, - archive_service, - archive_content_streamer, - ): - backup_set_text = f'{{"backupSets": [{{"backupSetId": "{TEST_BACKUP_SET_ID}"}}, {{"backupSetId": "1"}}]}}' - backup_set_response = create_mock_response(mocker, backup_set_text) - archive_service.get_backup_sets.return_value = backup_set_response - archive_accessor_factory.create_archive_accessor.return_value = ( - archive_content_streamer - ) - archive = ArchiveClient(archive_accessor_factory, archive_service) - archive.stream_from_backup( - TEST_PATHS, - TEST_DEVICE_GUID, - TEST_DESTINATION_GUID_1, - TEST_PASSWORD, - TEST_ENCRYPTION_KEY, - file_size_calc_timeout=10000, - 
backup_set_id=TEST_BACKUP_SET_ID, - show_deleted=True, - ) - archive_content_streamer.stream_from_backup.assert_called_once_with( - TEST_BACKUP_SET_ID, - TEST_PATHS, - file_size_calc_timeout=10000, - show_deleted=True, - ) - - def test_stream_from_backup_raises_error_when_given_invalid_backup_set_id( - self, - mocker, - archive_accessor_factory, - archive_service, - archive_content_streamer, - ): - backup_set_text = f'{{"backupSets": [{{"backupSetId": "{TEST_BACKUP_SET_ID}"}}, {{"backupSetId": "1"}}]}}' - backup_set_response = create_mock_response(mocker, backup_set_text) - archive_service.get_backup_sets.return_value = backup_set_response - archive_accessor_factory.create_archive_accessor.return_value = ( - archive_content_streamer - ) - archive = ArchiveClient(archive_accessor_factory, archive_service) - with pytest.raises(Py42Error) as err: - archive.stream_from_backup( - TEST_PATHS, - TEST_DEVICE_GUID, - TEST_DESTINATION_GUID_1, - TEST_PASSWORD, - TEST_ENCRYPTION_KEY, - file_size_calc_timeout=10000, - backup_set_id="100", - show_deleted=True, - ) - assert ( - "backup_set_id=100 not found in device backup sets: [{'backupSetId': 'backup-set-id'}, {'backupSetId': '1'}]" - in str(err) - ) - - def test_stream_to_device_calls_accessor_stream_to_device( - self, - archive_accessor_factory, - archive_service, - archive_explorer, - archive_content_pusher, - ): - archive_accessor_factory.create_archive_accessor.return_value = archive_explorer - archive_accessor_factory.create_archive_content_pusher.return_value = ( - archive_content_pusher - ) - archive = ArchiveClient(archive_accessor_factory, archive_service) - archive.stream_to_device( - TEST_PATHS, - TEST_DEVICE_GUID, - TEST_ACCEPTING_GUID, - TEST_RESTORE_PATH, - destination_guid=TEST_DESTINATION_GUID_1, - archive_password=TEST_PASSWORD, - encryption_key=TEST_ENCRYPTION_KEY, - file_size_calc_timeout=100, - show_deleted=True, - overwrite_existing_files=True, - ) - archive_content_pusher.stream_to_device.assert_called_once_with( - TEST_RESTORE_PATH, - TEST_ACCEPTING_GUID, - TEST_FILE_SELECTIONS, - TEST_BACKUP_SET_ID, - True, - True, - ) - - def test_stream_to_device_prefers_backup_set_id_of_1_if_no_backup_set_provided( - self, - mocker, - archive_accessor_factory, - archive_service, - archive_explorer, - archive_content_pusher, - ): - backup_set_text = f'{{"backupSets": [{{"backupSetId": "{TEST_BACKUP_SET_ID}"}}, {{"backupSetId": "1"}}]}}' - backup_set_response = create_mock_response(mocker, backup_set_text) - archive_service.get_backup_sets.return_value = backup_set_response - archive_accessor_factory.create_archive_accessor.return_value = archive_explorer - archive_accessor_factory.create_archive_content_pusher.return_value = ( - archive_content_pusher - ) - archive = ArchiveClient(archive_accessor_factory, archive_service) - archive.stream_to_device( - TEST_PATHS, - TEST_DEVICE_GUID, - TEST_ACCEPTING_GUID, - TEST_RESTORE_PATH, - destination_guid=TEST_DESTINATION_GUID_1, - archive_password=TEST_PASSWORD, - encryption_key=TEST_ENCRYPTION_KEY, - file_size_calc_timeout=100, - show_deleted=True, - ) - archive_content_pusher.stream_to_device.assert_called_once_with( - TEST_RESTORE_PATH, - TEST_ACCEPTING_GUID, - TEST_FILE_SELECTIONS, - "1", - True, - False, - ) - - def test_stream_to_device_uses_provided_backup_set_id( - self, - mocker, - archive_accessor_factory, - archive_service, - archive_explorer, - archive_content_pusher, - ): - backup_set_text = f'{{"backupSets": [{{"backupSetId": "{TEST_BACKUP_SET_ID}"}}, {{"backupSetId": "1"}}]}}' - 
backup_set_response = create_mock_response(mocker, backup_set_text) - archive_service.get_backup_sets.return_value = backup_set_response - archive_accessor_factory.create_archive_accessor.return_value = archive_explorer - archive_accessor_factory.create_archive_content_pusher.return_value = ( - archive_content_pusher - ) - archive = ArchiveClient(archive_accessor_factory, archive_service) - archive.stream_to_device( - TEST_PATHS, - TEST_DEVICE_GUID, - TEST_ACCEPTING_GUID, - TEST_RESTORE_PATH, - destination_guid=TEST_DESTINATION_GUID_1, - archive_password=TEST_PASSWORD, - encryption_key=TEST_ENCRYPTION_KEY, - file_size_calc_timeout=100, - backup_set_id=TEST_BACKUP_SET_ID, - show_deleted=True, - ) - archive_content_pusher.stream_to_device.assert_called_once_with( - TEST_RESTORE_PATH, - TEST_ACCEPTING_GUID, - TEST_FILE_SELECTIONS, - TEST_BACKUP_SET_ID, - True, - False, - ) - - def test_stream_to_device_raises_error_if_provided_backup_set_id_invalid( - self, - mocker, - archive_accessor_factory, - archive_service, - archive_explorer, - archive_content_pusher, - ): - backup_set_text = f'{{"backupSets": [{{"backupSetId": "{TEST_BACKUP_SET_ID}"}}, {{"backupSetId": "1"}}]}}' - backup_set_response = create_mock_response(mocker, backup_set_text) - archive_service.get_backup_sets.return_value = backup_set_response - archive_accessor_factory.create_archive_accessor.return_value = archive_explorer - archive_accessor_factory.create_archive_content_pusher.return_value = ( - archive_content_pusher - ) - archive = ArchiveClient(archive_accessor_factory, archive_service) - with pytest.raises(Py42Error) as err: - archive.stream_to_device( - TEST_PATHS, - TEST_DEVICE_GUID, - TEST_ACCEPTING_GUID, - TEST_RESTORE_PATH, - destination_guid=TEST_DESTINATION_GUID_1, - archive_password=TEST_PASSWORD, - encryption_key=TEST_ENCRYPTION_KEY, - file_size_calc_timeout=100, - backup_set_id="100", - show_deleted=True, - ) - assert ( - "backup_set_id=100 not found in device backup sets: [{'backupSetId': 'backup-set-id'}, {'backupSetId': '1'}]" - in str(err) - ) - - def test_get_backup_sets_calls_archive_service_get_backup_sets_with_expected_params( - self, archive_accessor_factory, archive_service - ): - archive = ArchiveClient(archive_accessor_factory, archive_service) - archive.get_backup_sets(TEST_DEVICE_GUID, TEST_DESTINATION_GUID_1) - archive_service.get_backup_sets.assert_called_once_with( - TEST_DEVICE_GUID, TEST_DESTINATION_GUID_1 - ) - - def test_get_all_org_restore_history_calls_get_all_restore_history_with_expected_id( - self, archive_accessor_factory, archive_service - ): - archive = ArchiveClient(archive_accessor_factory, archive_service) - archive.get_all_org_restore_history(TEST_DAYS, TEST_ORG_ID) - archive_service.get_all_restore_history.assert_called_once_with( - TEST_DAYS, "orgId", TEST_ORG_ID - ) - - def test_get_all_user_restore_history_calls_get_all_restore_history_with_expected_id( - self, archive_accessor_factory, archive_service - ): - archive = ArchiveClient(archive_accessor_factory, archive_service) - archive.get_all_user_restore_history(TEST_DAYS, TEST_ORG_ID) - archive_service.get_all_restore_history.assert_called_once_with( - TEST_DAYS, "userId", TEST_ORG_ID - ) - - def test_get_all_device_restore_history_calls_get_all_restore_history_with_expected_id( - self, archive_accessor_factory, archive_service - ): - archive = ArchiveClient(archive_accessor_factory, archive_service) - archive.get_all_device_restore_history(TEST_DAYS, TEST_ORG_ID) - 
archive_service.get_all_restore_history.assert_called_once_with( - TEST_DAYS, "computerId", TEST_ORG_ID - ) - - def test_update_cold_storage_purge_date_calls_update_cold_storage_with_expected_data( - self, archive_accessor_factory, archive_service - ): - archive = ArchiveClient(archive_accessor_factory, archive_service) - archive.update_cold_storage_purge_date("123", "2020-04-24") - archive_service.update_cold_storage_purge_date.assert_called_once_with( - "123", "2020-04-24" - ) - - def test_get_all_org_cold_storage_archives_calls_client_with_expected_data( - self, archive_accessor_factory, archive_service - ): - archive = ArchiveClient(archive_accessor_factory, archive_service) - archive.get_all_org_cold_storage_archives( - "TEST ORG ID", True, "sort_key", "sort_dir" - ) - archive_service.get_all_org_cold_storage_archives.assert_called_once_with( - org_id="TEST ORG ID", - include_child_orgs=True, - sort_key="sort_key", - sort_dir="sort_dir", - ) diff --git a/tests/clients/test_auditlogs.py b/tests/clients/test_auditlogs.py deleted file mode 100644 index 0b0d603b5..000000000 --- a/tests/clients/test_auditlogs.py +++ /dev/null @@ -1,77 +0,0 @@ -import pytest - -from py42.clients.auditlogs import AuditLogsClient -from py42.services.auditlogs import AuditLogsService - - -@pytest.fixture -def auditlog_service(mocker): - return mocker.MagicMock(spec=AuditLogsService) - - -class TestAuditLogsClient: - def test_get_all_calls_expected_auditlogs_service(self, auditlog_service): - client = AuditLogsClient(auditlog_service) - for _ in client.get_all(): - pass - auditlog_service.get_all.assert_called_once_with( - begin_time=None, - end_time=None, - event_types=None, - user_ids=None, - usernames=None, - user_ip_addresses=None, - affected_user_ids=None, - affected_usernames=None, - ) - - def test_get_page_calls_expected_auditlogs_service(self, auditlog_service): - client = AuditLogsClient(auditlog_service) - client.get_page() - auditlog_service.get_page.assert_called_once_with( - page_num=1, - page_size=None, - begin_time=None, - end_time=None, - event_types=None, - user_ids=None, - usernames=None, - user_ip_addresses=None, - affected_user_ids=None, - affected_usernames=None, - ) - - def test_get_all_passes_undefined_param_to_service(self, auditlog_service): - client = AuditLogsClient(auditlog_service) - for _ in client.get_all(customParam="abc"): - pass - auditlog_service.get_all.assert_called_once_with( - begin_time=None, - end_time=None, - event_types=None, - user_ids=None, - usernames=None, - user_ip_addresses=None, - affected_user_ids=None, - affected_usernames=None, - customParam="abc", - ) - - def test_get_page_passes_undefined_param_to_auditlogs_service( - self, auditlog_service - ): - client = AuditLogsClient(auditlog_service) - client.get_page(customParam="abc") - auditlog_service.get_page.assert_called_once_with( - page_num=1, - page_size=None, - begin_time=None, - end_time=None, - event_types=None, - user_ids=None, - usernames=None, - user_ip_addresses=None, - affected_user_ids=None, - affected_usernames=None, - customParam="abc", - ) diff --git a/tests/clients/test_cases.py b/tests/clients/test_cases.py deleted file mode 100644 index 8b5e32916..000000000 --- a/tests/clients/test_cases.py +++ /dev/null @@ -1,171 +0,0 @@ -from datetime import datetime - -import pytest - -from py42.clients.cases import CasesClient -from py42.services.cases import CasesService -from py42.services.casesfileevents import CasesFileEventsService - - -_TEST_CASE_NUMBER = 123456 - - -@pytest.fixture -def 
mock_cases_service(mocker): - return mocker.MagicMock(spec=CasesService) - - -@pytest.fixture -def mock_cases_file_event_service(mocker): - return mocker.MagicMock(spec=CasesFileEventsService) - - -class TestCasesClient: - def test_file_events_returns_cases_file_events_service( - self, mock_cases_service, mock_cases_file_event_service - ): - cases_client = CasesClient(mock_cases_service, mock_cases_file_event_service) - assert cases_client.file_events is mock_cases_file_event_service - - def test_create_calls_cases_service_with_expected_params( - self, mock_cases_service, mock_cases_file_event_service - ): - cases_client = CasesClient(mock_cases_service, mock_cases_file_event_service) - cases_client.create( - "name", - subject="subject", - assignee="assignee", - description="description", - findings="observation", - ) - mock_cases_service.create.assert_called_once_with( - "name", - subject="subject", - assignee="assignee", - description="description", - findings="observation", - ) - - def test_get_all_calls_cases_service_with_expected_params( - self, mock_cases_service, mock_cases_file_event_service - ): - cases_client = CasesClient(mock_cases_service, mock_cases_file_event_service) - cases_client.get_all() - assert mock_cases_service.get_all.call_count == 1 - - def test_get_calls_cases_service_with_expected_params( - self, mock_cases_service, mock_cases_file_event_service - ): - cases_client = CasesClient(mock_cases_service, mock_cases_file_event_service) - cases_client.get(_TEST_CASE_NUMBER) - mock_cases_service.get.assert_called_once_with(_TEST_CASE_NUMBER) - - def test_export_calls_cases_service_with_expected_params( - self, mock_cases_service, mock_cases_file_event_service - ): - cases_client = CasesClient(mock_cases_service, mock_cases_file_event_service) - cases_client.export_summary(_TEST_CASE_NUMBER) - mock_cases_service.export_summary.assert_called_once_with(_TEST_CASE_NUMBER) - - def test_update_calls_cases_service_with_expected_params( - self, mock_cases_service, mock_cases_file_event_service - ): - cases_client = CasesClient(mock_cases_service, mock_cases_file_event_service) - cases_client.update(_TEST_CASE_NUMBER, name="new name") - mock_cases_service.update.assert_called_once_with( - _TEST_CASE_NUMBER, - name="new name", - assignee=None, - subject=None, - status=None, - description=None, - findings=None, - ) - - def test_get_all_converts_datetime_to_ranges_and_calls_service_with_expected_params( - self, mock_cases_service, mock_cases_file_event_service - ): - cases_client = CasesClient(mock_cases_service, mock_cases_file_event_service) - cases_client.get_all( - min_create_time="2021-01-01 00:00:00", - max_create_time="2021-02-01 00:00:00", - min_update_time="2021-01-31 00:00:00", - max_update_time="2021-02-20 00:00:00", - ) - created_at_range = "2021-01-01T00:00:00.000Z/2021-02-01T00:00:00.000Z" - updated_at_range = "2021-01-31T00:00:00.000Z/2021-02-20T00:00:00.000Z" - - mock_cases_service.get_all.assert_called_once_with( - name=None, - status=None, - created_at=created_at_range, - updated_at=updated_at_range, - subject=None, - assignee=None, - page_size=100, - sort_direction="asc", - sort_key="number", - ) - - def test_get_all_converts_diff_types_to_ranges_and_calls_service_with_expected_params( - self, mock_cases_service, mock_cases_file_event_service - ): - cases_client = CasesClient(mock_cases_service, mock_cases_file_event_service) - cases_client.get_all( - min_create_time=1609439400, - min_update_time="2021-02-01 00:00:00", - max_create_time=1612117800.0, - 
max_update_time=datetime.strptime( - "2021-02-20 00:00:00", "%Y-%m-%d %H:%M:%S" - ), - subject="subject", - assignee="a", - name="test", - status="closed", - ) - created_at_range = "2020-12-31T18:30:00.000Z/2021-01-31T18:30:00.000Z" - updated_at_range = "2021-02-01T00:00:00.000Z/2021-02-20T00:00:00.000Z" - - mock_cases_service.get_all.assert_called_once_with( - name="test", - status="closed", - created_at=created_at_range, - updated_at=updated_at_range, - subject="subject", - assignee="a", - page_size=100, - sort_direction="asc", - sort_key="number", - ) - - def test_get_all_sets_default_min_when_max_specified( - self, mock_cases_service, mock_cases_file_event_service - ): - cases_client = CasesClient(mock_cases_service, mock_cases_file_event_service) - cases_client.get_all( - max_update_time=datetime.strptime( - "2021-02-20 00:00:00", "%Y-%m-%d %H:%M:%S" - ) - ) - expected_range = "1970-01-01T00:00:00.000Z/2021-02-20T00:00:00.000Z" - service_args = mock_cases_service.get_all.call_args[1] - assert service_args["updated_at"] == expected_range - - def test_get_all_sets_default_max_when_min_specified( - self, mock_cases_service, mock_cases_file_event_service - ): - cases_client = CasesClient(mock_cases_service, mock_cases_file_event_service) - cases_client.get_all(min_create_time=1609439400) - actual_range = mock_cases_service.get_all.call_args[1]["created_at"] - actual_max_time_string = actual_range.split("/")[1] - actual_max_time_obj = datetime.strptime( - actual_max_time_string, "%Y-%m-%dT%H:%M:%S.%fZ" - ) - assert (datetime.utcnow() - actual_max_time_obj).total_seconds() < 0.03 - - def test_get_page_calls_service_with_expected_params( - self, mock_cases_service, mock_cases_file_event_service - ): - cases_client = CasesClient(mock_cases_service, mock_cases_file_event_service) - cases_client.get(1) - mock_cases_service.get.assert_called_once_with(1) diff --git a/tests/clients/test_loginconfig.py b/tests/clients/test_loginconfig.py index 3ddbbcdc5..ca845a133 100644 --- a/tests/clients/test_loginconfig.py +++ b/tests/clients/test_loginconfig.py @@ -1,8 +1,8 @@ import pytest from requests.sessions import Session -from py42.clients.loginconfig import LoginConfigurationClient -from py42.services._connection import Connection +from pycpg.clients.loginconfig import LoginConfigurationClient +from pycpg.services._connection import Connection HOST_ADDRESS = "example.com" @@ -24,14 +24,14 @@ def test_get_for_user_calls_session_get_with_expected_uri_and_params( expected_params = {"username": "test@example.com"} mock_session.get.assert_called_once_with(expected_uri, params=expected_params) - def test_get_for_user_does_not_use_py42_connection_get_method( + def test_get_for_user_does_not_use_pycpg_connection_get_method( self, mocker, mock_session ): """Because the loginConfiguration endpoint is unauthenticated, we want to make - sure we don't force the Connection's C42RenewableAuth object to make any + sure we don't force the Connection's CPGRenewableAuth object to make any authentication requests before making the loginConfig request. 
""" - mock_get = mocker.patch("py42.services._connection.Connection.get") + mock_get = mocker.patch("pycpg.services._connection.Connection.get") connection = Connection.from_host_address(HOST_ADDRESS, session=mock_session) loginconfig = LoginConfigurationClient(connection) loginconfig.get_for_user("test@example.com") diff --git a/tests/clients/test_securitydata.py b/tests/clients/test_securitydata.py deleted file mode 100644 index 6dc503f93..000000000 --- a/tests/clients/test_securitydata.py +++ /dev/null @@ -1,912 +0,0 @@ -import pytest -import requests -from tests.conftest import create_mock_response - -from py42.clients.securitydata import SecurityDataClient -from py42.exceptions import Py42ChecksumNotFoundError -from py42.exceptions import Py42Error -from py42.sdk.queries.fileevents.file_event_query import FileEventQuery -from py42.services._connection import Connection -from py42.services.fileevent import FileEventService -from py42.services.preservationdata import PreservationDataService -from py42.services.savedsearch import SavedSearchService -from py42.services.storage._service_factory import StorageServiceFactory -from py42.services.storage.exfiltrateddata import ExfiltratedDataService -from py42.services.storage.preservationdata import StoragePreservationDataService - -FILE_EVENT_URI = "/forensic-search/queryservice/api/v1/fileevent" -RAW_QUERY = "RAW JSON QUERY" -USER_UID = "user-uid" -PDS_EXCEPTION_MESSAGE = "No file with hash {0} available for download." -FILE_EVENTS_RESPONSE_V2 = """{ - "fileEvents":[ - { - "@timestamp": "1", - "user": { "deviceUid": "testdeviceUid" }, - "file": { "name": "testfileName", "directory": "/test/file/path/", - "hash": { "md5": "testmd5-2", "sha256": "testsha256-2" } - } - } - ] -}""" -FILE_LOCATION_RESPONSE = """{ - "locations": [ - { - "fileName": "file1", - "deviceUid": "device1", - "filePath": "path1" - }, - { - "fileName": "file2", - "deviceUid": "device2", - "filePath": "path2" - } - ] -}""" - -PDS_FILE_VERSIONS = """{ - "preservationVersions": [ - { - "storageNodeURL": "https://host-1.example.com", - "archiveGuid": "archiveid-1", - "fileId": "fileid-1", - "fileMD5": "testmd5-1", - "fileSHA256": "testsha256-1", - "versionTimestamp": 12345 - }, - { - "storageNodeURL": "https://host-2.example.com", - "archiveGuid": "archiveid-2", - "fileId": "fileid-2", - "fileMD5": "testmd5-2", - "fileSHA256": "testsha256-2", - "versionTimestamp": 12344 - }, - { - "storageNodeURL": "https://host-3.example.com", - "archiveGuid": "archiveid-3", - "fileId": "fileid-3", - "fileMD5": "testmd5-3", - "fileSHA256": "testsha256-3", - "versionTimestamp": 12346 - } - ], - "securityEventVersionsMatchingChecksum": [], - "securityEventVersionsAtPath": [] -}""" - -XFC_EXACT_FILE_VERSIONS = """{ - "preservationVersions": [], - "securityEventVersionsMatchingChecksum": [ - { - "edsUrl": "https://host-1.example.com", - "deviceUid": "deviceuid-1", - "eventId": "eventid-1", - "fileMD5": "testmd5-1", - "fileSHA256": "testsha256-1", - "filePath": "/test/file/path-1/", - "versionTimestamp": 12345 - }, - { - "edsUrl": "https://host-2.example.com", - "deviceUid": "deviceuid-2", - "eventId": "eventid-2", - "fileMD5": "testmd5-2", - "fileSHA256": "testsha256-2", - "filePath": "/test/file/path-2/", - "versionTimestamp": 12344 - }, - { - "edsUrl": "https://host-3.example.com", - "deviceUid": "deviceuid-3", - "eventId": "eventid-3", - "fileMD5": "testmd5-3", - "fileSHA256": "testsha256-3", - "filePath": "/test/file/path-3/", - "versionTimestamp": 12346 - } - ], - 
"securityEventVersionsAtPath": [] -}""" - -XFC_MATCHED_FILE_VERSIONS = """{ - "preservationVersions": [], - "securityEventVersionsMatchingChecksum": [], - "securityEventVersionsAtPath": [ - { - "edsUrl": "https://host-1.example.com", - "deviceUid": "deviceuid-1", - "eventId": "eventid-1", - "fileMD5": "testmd5-1", - "fileSHA256": "testsha256-1", - "filePath": "/test/file/path-1/", - "versionTimestamp": 12345 - }, - { - "edsUrl": "https://host-2.example.com", - "deviceUid": "deviceuid-2", - "eventId": "eventid-2", - "fileMD5": "testmd5-2", - "fileSHA256": "testsha256-2", - "filePath": "/test/file/path-2/", - "versionTimestamp": 12344 - }, - { - "edsUrl": "https://host-3.example.com", - "deviceUid": "deviceuid-3", - "eventId": "eventid-3", - "fileMD5": "testmd5-3", - "fileSHA256": "testsha256-3", - "filePath": "/test/file/path-3/", - "versionTimestamp": 12346 - } - ] -}""" - -AVAILABLE_VERSION_RESPONSE = """{ - "storageNodeURL": "https://host.com", - "archiveGuid": "archiveid-3", - "fileId": "fileid-3", - "fileMD5": "testmd5-3", - "fileSHA256": "testsha256-3", - "versionTimestamp": 12346 -}""" - - -class TestSecurityClient: - @pytest.fixture - def connection(self, mocker): - connection = mocker.MagicMock(spec=Connection) - connection._session = mocker.MagicMock(spec=requests.Session) - return connection - - @pytest.fixture - def storage_service_factory(self, mocker): - return mocker.MagicMock(spec=StorageServiceFactory) - - @pytest.fixture - def file_event_service(self, mocker): - return mocker.MagicMock(spec=FileEventService) - - @pytest.fixture - def preservation_data_service(self, mocker): - return mocker.MagicMock(spec=PreservationDataService) - - @pytest.fixture - def saved_search_service(self, mocker): - return mocker.MagicMock(spec=SavedSearchService) - - def test_search_with_only_query_calls_through_to_client( - self, - file_event_service, - preservation_data_service, - saved_search_service, - storage_service_factory, - ): - security_client = SecurityDataClient( - file_event_service, - preservation_data_service, - saved_search_service, - storage_service_factory, - ) - security_client.search_file_events(RAW_QUERY) - file_event_service.search.assert_called_once_with(RAW_QUERY) - - def test_saved_searches_returns_saved_search_client( - self, - file_event_service, - preservation_data_service, - saved_search_service, - storage_service_factory, - ): - security_client = SecurityDataClient( - file_event_service, - preservation_data_service, - saved_search_service, - storage_service_factory, - ) - assert security_client.savedsearches - - @pytest.fixture - def file_event_search(self, mocker): - return create_mock_response(mocker, FILE_EVENTS_RESPONSE_V2) - - @pytest.fixture - def file_event_search_v2(self, mocker): - return create_mock_response(mocker, FILE_EVENTS_RESPONSE_V2) - - @pytest.fixture - def file_location(self, mocker): - return create_mock_response(mocker, FILE_LOCATION_RESPONSE) - - @pytest.fixture - def file_version_list(self, mocker): - return create_mock_response(mocker, PDS_FILE_VERSIONS) - - @pytest.fixture - def pds_config( - self, - mocker, - storage_service_factory, - file_event_service, - preservation_data_service, - saved_search_service, - ): - mock = mocker.MagicMock() - file_download = create_mock_response(mocker, "PDSDownloadToken=token") - file_event_service.search.return_value = create_mock_response( - mocker, FILE_EVENTS_RESPONSE_V2 - ) - preservation_data_service.get_file_version_list.return_value = ( - create_mock_response(mocker, PDS_FILE_VERSIONS) - ) - 
file_event_service.get_file_location_detail_by_sha256.return_value = ( - create_mock_response(mocker, FILE_LOCATION_RESPONSE) - ) - storage_node_client = mocker.MagicMock(spec=StoragePreservationDataService) - storage_node_client.get_download_token.return_value = file_download - storage_node_client.get_file.return_value = b"stream" - storage_service_factory.create_preservation_data_service.return_value = ( - storage_node_client - ) - exfiltration_client = mocker.MagicMock(spec=ExfiltratedDataService) - exfiltration_client.get_download_token.return_value = file_download - exfiltration_client.get_file.return_value = b"stream" - storage_service_factory.create_exfiltrated_data_service.return_value = ( - exfiltration_client - ) - - mock.storage_service_factory = storage_service_factory - mock.file_event_service = file_event_service - mock.preservation_data_service = preservation_data_service - mock.saved_search_service = saved_search_service - mock.storage_node_client = storage_node_client - mock.exfiltration_client = exfiltration_client - return mock - - @pytest.fixture - def pds_config_v2( - self, - mocker, - storage_service_factory, - file_event_service, - preservation_data_service, - saved_search_service, - ): - mock = mocker.MagicMock() - file_download = create_mock_response(mocker, "PDSDownloadToken=token") - file_event_service.search.return_value = create_mock_response( - mocker, FILE_EVENTS_RESPONSE_V2 - ) - preservation_data_service.get_file_version_list.return_value = ( - create_mock_response(mocker, PDS_FILE_VERSIONS) - ) - file_event_service.get_file_location_detail_by_sha256.return_value = ( - create_mock_response(mocker, FILE_LOCATION_RESPONSE) - ) - storage_node_client = mocker.MagicMock(spec=StoragePreservationDataService) - storage_node_client.get_download_token.return_value = file_download - storage_node_client.get_file.return_value = b"stream" - storage_service_factory.create_preservation_data_service.return_value = ( - storage_node_client - ) - exfiltration_client = mocker.MagicMock(spec=ExfiltratedDataService) - exfiltration_client.get_download_token.return_value = file_download - exfiltration_client.get_file.return_value = b"stream" - storage_service_factory.create_exfiltrated_data_service.return_value = ( - exfiltration_client - ) - - mock.storage_service_factory = storage_service_factory - mock.file_event_service = file_event_service - mock.preservation_data_service = preservation_data_service - mock.saved_search_service = saved_search_service - mock.storage_node_client = storage_node_client - mock.exfiltration_client = exfiltration_client - return mock - - def test_stream_file_by_sha256_with_exact_match_response_calls_get_version_list_with_expected_params( - self, - pds_config, - ): - - security_client = SecurityDataClient( - pds_config.file_event_service, - pds_config.preservation_data_service, - pds_config.saved_search_service, - pds_config.storage_service_factory, - ) - - response = security_client.stream_file_by_sha256("testsha256-2") - version_list_params = [ - "testdeviceUid", - "testmd5-2", - "testsha256-2", - "/test/file/path/testfileName", - ] - pds_config.preservation_data_service.get_file_version_list.assert_called_once_with( - *version_list_params - ) - pds_config.storage_service_factory.create_preservation_data_service.assert_called_once_with( - "https://host-2.example.com" - ) - assert ( - pds_config.file_event_service.get_file_location_detail_by_sha256.call_count - == 0 - ) - assert pds_config.preservation_data_service.find_file_version.call_count == 0 - 
expected_download_token_params = ["archiveid-2", "fileid-2", 12344] - pds_config.storage_node_client.get_download_token.assert_called_once_with( - *expected_download_token_params - ) - assert response == b"stream" - - def test_stream_file_by_sha256_without_exact_match_response_calls_get_version_list_with_expected_params( - self, - mocker, - pds_config, - ): - pds_config.file_event_service.search.return_value = create_mock_response( - mocker, FILE_EVENTS_RESPONSE_V2.replace("-2", "-6") - ) - security_client = SecurityDataClient( - pds_config.file_event_service, - pds_config.preservation_data_service, - pds_config.saved_search_service, - pds_config.storage_service_factory, - ) - - response = security_client.stream_file_by_sha256("testsha256-6") - expected = [ - "testdeviceUid", - "testmd5-6", - "testsha256-6", - "/test/file/path/testfileName", - ] - pds_config.preservation_data_service.get_file_version_list.assert_called_once_with( - *expected - ) - pds_config.storage_service_factory.create_preservation_data_service.assert_called_once_with( - "https://host-3.example.com" - ) - assert ( - pds_config.file_event_service.get_file_location_detail_by_sha256.call_count - == 0 - ) - assert pds_config.preservation_data_service.find_file_version.call_count == 0 - # should get version with most recent versionTimestamp - expected_download_token_params = ["archiveid-3", "fileid-3", 12346] - pds_config.storage_node_client.get_download_token.assert_called_once_with( - *expected_download_token_params - ) - assert response == b"stream" - - def test_stream_file_by_sha256_when_search_returns_empty_response_raises_py42_checksum_not_found_error_( - self, mocker, pds_config - ): - pds_config.file_event_service.search.return_value = create_mock_response( - mocker, '{"fileEvents": []}' - ) - security_client = SecurityDataClient( - pds_config.file_event_service, - pds_config.preservation_data_service, - pds_config.saved_search_service, - pds_config.storage_service_factory, - ) - - with pytest.raises(Py42ChecksumNotFoundError) as e: - security_client.stream_file_by_sha256("shahash") - - assert "No files found with SHA256 checksum" in e.value.args[0] - - def test_stream_file_by_sha256_when_file_versions_returns_empty_response_gets_version_from_other_location( - self, - mocker, - pds_config, - ): - available_version = create_mock_response(mocker, AVAILABLE_VERSION_RESPONSE) - file_version_list = create_mock_response(mocker, '{"preservationVersions": []}') - pds_config.preservation_data_service.get_file_version_list.return_value = ( - file_version_list - ) - pds_config.preservation_data_service.find_file_version.return_value = ( - available_version - ) - - security_client = SecurityDataClient( - pds_config.file_event_service, - pds_config.preservation_data_service, - pds_config.saved_search_service, - pds_config.storage_service_factory, - ) - response = security_client.stream_file_by_sha256("shahash") - assert response == b"stream" - pds_config.file_event_service.get_file_location_detail_by_sha256.assert_called_once_with( - "testsha256-2" - ) - expected = ["testmd5-2", "testsha256-2", mocker.ANY] - pds_config.preservation_data_service.find_file_version.assert_called_once_with( - *expected - ) - # should return version returned by find_file_version - expected_expected_download_token_params = ["archiveid-3", "fileid-3", 12346] - pds_config.storage_node_client.get_download_token.assert_called_once_with( - *expected_expected_download_token_params - ) - - def 
test_stream_file_by_sha256_when_get_locations_returns_empty_list_raises_py42_error( - self, - mocker, - pds_config, - ): - file_version_list = create_mock_response(mocker, '{"preservationVersions": []}') - file_location = create_mock_response(mocker, '{"locations": []}') - pds_config.preservation_data_service.get_file_version_list.return_value = ( - file_version_list - ) - pds_config.file_event_service.get_file_location_detail_by_sha256.return_value = ( - file_location - ) - security_client = SecurityDataClient( - pds_config.file_event_service, - pds_config.preservation_data_service, - pds_config.saved_search_service, - pds_config.storage_service_factory, - ) - - with pytest.raises(Py42Error) as e: - security_client.stream_file_by_sha256("shahash") - - assert e.value.args[0] == PDS_EXCEPTION_MESSAGE.format("shahash") - - def test_stream_file_by_sha256_when_find_file_version_returns_204_status_code_raises_py42_error( - self, - mocker, - pds_config, - ): - file_version_list = create_mock_response(mocker, '{"preservationVersions": []}') - pds_config.preservation_data_service.get_file_version_list.return_value = ( - file_version_list - ) - available_version = create_mock_response( - mocker, AVAILABLE_VERSION_RESPONSE, 204 - ) - pds_config.preservation_data_service.find_file_version.return_value = ( - available_version - ) - - security_client = SecurityDataClient( - pds_config.file_event_service, - pds_config.preservation_data_service, - pds_config.saved_search_service, - pds_config.storage_service_factory, - ) - - with pytest.raises(Py42Error) as e: - security_client.stream_file_by_sha256("shahash") - - assert e.value.args[0] == PDS_EXCEPTION_MESSAGE.format("shahash") - - def test_stream_file_by_md5_with_exact_match_response_calls_get_version_list_with_expected_params( - self, - pds_config, - ): - security_client = SecurityDataClient( - pds_config.file_event_service, - pds_config.preservation_data_service, - pds_config.saved_search_service, - pds_config.storage_service_factory, - ) - - response = security_client.stream_file_by_md5("testmd5-2") - version_list_params = [ - "testdeviceUid", - "testmd5-2", - "testsha256-2", - "/test/file/path/testfileName", - ] - pds_config.preservation_data_service.get_file_version_list.assert_called_once_with( - *version_list_params - ) - pds_config.storage_service_factory.create_preservation_data_service.assert_called_once_with( - "https://host-2.example.com" - ) - assert ( - pds_config.file_event_service.get_file_location_detail_by_sha256.call_count - == 0 - ) - assert pds_config.preservation_data_service.find_file_version.call_count == 0 - expected_download_token_params = ["archiveid-2", "fileid-2", 12344] - pds_config.storage_node_client.get_download_token.assert_called_once_with( - *expected_download_token_params - ) - assert response == b"stream" - - def test_stream_file_by_md5_without_exact_match_response_calls_get_version_list_with_expected_params( - self, - mocker, - pds_config, - ): - pds_config.file_event_service.search.return_value = create_mock_response( - mocker, FILE_EVENTS_RESPONSE_V2.replace("-2", "-6") - ) - - security_client = SecurityDataClient( - pds_config.file_event_service, - pds_config.preservation_data_service, - pds_config.saved_search_service, - pds_config.storage_service_factory, - ) - - response = security_client.stream_file_by_md5("testmd5-6") - expected = [ - "testdeviceUid", - "testmd5-6", - "testsha256-6", - "/test/file/path/testfileName", - ] - pds_config.preservation_data_service.get_file_version_list.assert_called_once_with( 
- *expected - ) - pds_config.storage_service_factory.create_preservation_data_service.assert_called_once_with( - "https://host-3.example.com" - ) - assert ( - pds_config.file_event_service.get_file_location_detail_by_sha256.call_count - == 0 - ) - assert pds_config.preservation_data_service.find_file_version.call_count == 0 - # should get version returned with most recent versionTimestamp - expected_download_token_params = ["archiveid-3", "fileid-3", 12346] - pds_config.storage_node_client.get_download_token.assert_called_once_with( - *expected_download_token_params - ) - assert response == b"stream" - - def test_stream_file_by_md5_when_search_returns_empty_response_raises_py42_checksum_not_found_error_( - self, mocker, pds_config - ): - pds_config.file_event_service.search.return_value = create_mock_response( - mocker, '{"fileEvents": []}' - ) - security_client = SecurityDataClient( - pds_config.file_event_service, - pds_config.preservation_data_service, - pds_config.saved_search_service, - pds_config.storage_service_factory, - ) - - with pytest.raises(Py42ChecksumNotFoundError) as e: - security_client.stream_file_by_md5("mdhash") - - assert "No files found with MD5 checksum" in e.value.args[0] - - def test_stream_file_by_md5_when_file_versions_returns_empty_response_gets_version_from_other_location( - self, - mocker, - pds_config, - ): - file_version_list = create_mock_response(mocker, '{"preservationVersions": []}') - pds_config.preservation_data_service.get_file_version_list.return_value = ( - file_version_list - ) - available_version = create_mock_response(mocker, AVAILABLE_VERSION_RESPONSE) - pds_config.preservation_data_service.find_file_version.return_value = ( - available_version - ) - - security_client = SecurityDataClient( - pds_config.file_event_service, - pds_config.preservation_data_service, - pds_config.saved_search_service, - pds_config.storage_service_factory, - ) - response = security_client.stream_file_by_md5("mdhash") - assert response == b"stream" - pds_config.file_event_service.get_file_location_detail_by_sha256.assert_called_once_with( - "testsha256-2" - ) - expected = ["testmd5-2", "testsha256-2", mocker.ANY] - pds_config.preservation_data_service.find_file_version.assert_called_once_with( - *expected - ) - # should return version returned by find_file_version - expected_download_token_params = ["archiveid-3", "fileid-3", 12346] - pds_config.storage_node_client.get_download_token.assert_called_once_with( - *expected_download_token_params - ) - - def test_stream_file_by_md5_when_get_locations_returns_empty_list_raises_py42_error( - self, mocker, pds_config - ): - file_version_list = create_mock_response(mocker, '{"preservationVersions": []}') - file_location = create_mock_response(mocker, '{"locations": []}') - pds_config.preservation_data_service.get_file_version_list.return_value = ( - file_version_list - ) - pds_config.file_event_service.get_file_location_detail_by_sha256.return_value = ( - file_location - ) - security_client = SecurityDataClient( - pds_config.file_event_service, - pds_config.preservation_data_service, - pds_config.saved_search_service, - pds_config.storage_service_factory, - ) - - with pytest.raises(Py42Error) as e: - security_client.stream_file_by_md5("mdhash") - - assert e.value.args[0] == PDS_EXCEPTION_MESSAGE.format("mdhash") - - def test_stream_file_by_md5_when_find_file_version_returns_204_status_code_raises_py42_error( - self, mocker, pds_config - ): - file_version_list = create_mock_response(mocker, '{"preservationVersions": []}') - 
pds_config.preservation_data_service.get_file_version_list.return_value = ( - file_version_list - ) - available_version = create_mock_response( - mocker, AVAILABLE_VERSION_RESPONSE, 204 - ) - pds_config.preservation_data_service.find_file_version.return_value = ( - available_version - ) - - security_client = SecurityDataClient( - pds_config.file_event_service, - pds_config.preservation_data_service, - pds_config.saved_search_service, - pds_config.storage_service_factory, - ) - - with pytest.raises(Py42Error) as e: - security_client.stream_file_by_md5("mdhash") - - assert e.value.args[0] == PDS_EXCEPTION_MESSAGE.format("mdhash") - - def test_stream_file_by_md5_when_has_exact_match_calls_get_token_with_expected_params_and_streams_successfully( - self, mocker, pds_config - ): - file_version_list = create_mock_response(mocker, XFC_EXACT_FILE_VERSIONS) - pds_config.preservation_data_service.get_file_version_list.return_value = ( - file_version_list - ) - - security_client = SecurityDataClient( - pds_config.file_event_service, - pds_config.preservation_data_service, - pds_config.saved_search_service, - pds_config.storage_service_factory, - ) - response = security_client.stream_file_by_md5("testmd5-2") - assert response == b"stream" - expected_download_token_params = [ - "eventid-2", - "deviceuid-2", - "/test/file/path-2/", - "testsha256-2", - 12344, - ] - pds_config.exfiltration_client.get_download_token.assert_called_once_with( - *expected_download_token_params - ) - - def test_stream_file_by_sha256_when_has_exact_match_calls_get_token_with_expected_params_and_streams_successfully( - self, mocker, pds_config - ): - file_version_list = create_mock_response(mocker, XFC_EXACT_FILE_VERSIONS) - pds_config.preservation_data_service.get_file_version_list.return_value = ( - file_version_list - ) - - security_client = SecurityDataClient( - pds_config.file_event_service, - pds_config.preservation_data_service, - pds_config.saved_search_service, - pds_config.storage_service_factory, - ) - response = security_client.stream_file_by_sha256("testsha256-2") - assert response == b"stream" - expected_download_token_params = [ - "eventid-2", - "deviceuid-2", - "/test/file/path-2/", - "testsha256-2", - 12344, - ] - pds_config.exfiltration_client.get_download_token.assert_called_once_with( - *expected_download_token_params - ) - - def test_stream_file_by_md5_when_has_path_match_calls_get_token_with_expected_params_and_streams_successfully( - self, mocker, pds_config - ): - file_version_list = create_mock_response(mocker, XFC_MATCHED_FILE_VERSIONS) - pds_config.preservation_data_service.get_file_version_list.return_value = ( - file_version_list - ) - - security_client = SecurityDataClient( - pds_config.file_event_service, - pds_config.preservation_data_service, - pds_config.saved_search_service, - pds_config.storage_service_factory, - ) - response = security_client.stream_file_by_md5("testmd5-2") - assert response == b"stream" - expected_download_token_params = [ - "eventid-3", - "deviceuid-3", - "/test/file/path-3/", - "testsha256-3", - 12346, - ] - pds_config.exfiltration_client.get_download_token.assert_called_once_with( - *expected_download_token_params - ) - - def test_stream_file_by_sha256_when_has_path_match_calls_get_token_with_expected_params_and_streams_successfully( - self, mocker, pds_config - ): - file_version_list = create_mock_response(mocker, XFC_MATCHED_FILE_VERSIONS) - pds_config.preservation_data_service.get_file_version_list.return_value = ( - file_version_list - ) - - security_client = 
SecurityDataClient( - pds_config.file_event_service, - pds_config.preservation_data_service, - pds_config.saved_search_service, - pds_config.storage_service_factory, - ) - response = security_client.stream_file_by_sha256("testsha256-2") - assert response == b"stream" - expected_download_token_params = [ - "eventid-3", - "deviceuid-3", - "/test/file/path-3/", - "testsha256-3", - 12346, - ] - pds_config.exfiltration_client.get_download_token.assert_called_once_with( - *expected_download_token_params - ) - - def test_search_all_file_events_calls_search_with_expected_params_when_pg_token_is_not_passed( - self, - connection, - preservation_data_service, - saved_search_service, - storage_service_factory, - ): - file_event_service = FileEventService(connection) - successful_response = { - "totalCount": None, - "fileEvents": None, - "nextPgToken": None, - "problems": None, - } - connection.post.return_value = successful_response - - security_client = SecurityDataClient( - file_event_service, - preservation_data_service, - saved_search_service, - storage_service_factory, - ) - query = FileEventQuery.all() - response = security_client.search_all_file_events(query) - expected = { - "groupClause": "AND", - "groups": [], - "srtDir": "asc", - "srtKey": "eventId", - "pgToken": "", - "pgSize": 500, - } - connection.post.assert_called_once_with(FILE_EVENT_URI, json=expected) - assert response is successful_response - - def test_search_all_file_events_calls_search_with_expected_params_when_pg_token_is_passed( - self, - connection, - preservation_data_service, - saved_search_service, - storage_service_factory, - ): - file_event_service = FileEventService(connection) - successful_response = { - "totalCount": None, - "fileEvents": None, - "nextPgToken": "pqr", - "problems": None, - } - connection.post.return_value = successful_response - security_client = SecurityDataClient( - file_event_service, - preservation_data_service, - saved_search_service, - storage_service_factory, - ) - query = FileEventQuery.all() - response = security_client.search_all_file_events(query, "abc") - expected = { - "groupClause": "AND", - "groups": [], - "srtDir": "asc", - "srtKey": "eventId", - "pgToken": "abc", - "pgSize": 500, - } - connection.post.assert_called_once_with(FILE_EVENT_URI, json=expected) - assert response is successful_response - - def test_search_all_file_events_handles_unescaped_quote_chars_in_token( - self, - connection, - preservation_data_service, - saved_search_service, - storage_service_factory, - ): - file_event_service = FileEventService(connection) - security_client = SecurityDataClient( - file_event_service, - preservation_data_service, - saved_search_service, - storage_service_factory, - ) - unescaped_token = '1234_"abcde"' - escaped_token = r"1234_\"abcde\"" - security_client.search_all_file_events(FileEventQuery.all(), unescaped_token) - expected = { - "groupClause": "AND", - "groups": [], - "srtDir": "asc", - "srtKey": "eventId", - "pgToken": escaped_token, - "pgSize": 500, - } - connection.post.assert_called_once_with(FILE_EVENT_URI, json=expected) - - def test_search_all_file_events_handles_escaped_quote_chars_in_token( - self, - connection, - preservation_data_service, - saved_search_service, - storage_service_factory, - ): - file_event_service = FileEventService(connection) - security_client = SecurityDataClient( - file_event_service, - preservation_data_service, - saved_search_service, - storage_service_factory, - ) - escaped_token = r"1234_\"abcde\"" - 
security_client.search_all_file_events(FileEventQuery.all(), escaped_token) - expected = { - "groupClause": "AND", - "groups": [], - "srtDir": "asc", - "srtKey": "eventId", - "pgToken": escaped_token, - "pgSize": 500, - } - connection.post.assert_called_once_with(FILE_EVENT_URI, json=expected) - - def test_search_all_file_events_when_token_is_none_succeeds( - self, - connection, - preservation_data_service, - saved_search_service, - storage_service_factory, - ): - file_event_service = FileEventService(connection) - security_client = SecurityDataClient( - file_event_service, - preservation_data_service, - saved_search_service, - storage_service_factory, - ) - security_client.search_all_file_events(FileEventQuery.all(), page_token=None) diff --git a/tests/clients/test_trustedactivities.py b/tests/clients/test_trustedactivities.py deleted file mode 100644 index ef143384e..000000000 --- a/tests/clients/test_trustedactivities.py +++ /dev/null @@ -1,82 +0,0 @@ -import pytest - -from py42.clients.trustedactivities import TrustedActivitiesClient -from py42.services.trustedactivities import TrustedActivitiesService - - -_TEST_TRUSTED_ACTIVITY_RESOURCE_ID = 123 - - -@pytest.fixture -def mock_trusted_activities_service(mocker): - return mocker.MagicMock(spec=TrustedActivitiesService) - - -class TestTrustedActivitiesClient: - def test_get_all_calls_service_with_expected_params( - self, mock_trusted_activities_service - ): - trusted_activities_client = TrustedActivitiesClient( - mock_trusted_activities_service - ) - trusted_activities_client.get_all() - assert mock_trusted_activities_service.get_all.call_count == 1 - - def test_get_all_calls_service_with_expected_optional_params( - self, mock_trusted_activities_service - ): - trusted_activities_client = TrustedActivitiesClient( - mock_trusted_activities_service - ) - trusted_activities_client.get_all("DOMAIN", 1) - mock_trusted_activities_service.get_all.assert_called_once_with("DOMAIN", 1) - - def test_create_calls_service_with_expected_params( - self, mock_trusted_activities_service - ): - trusted_activities_client = TrustedActivitiesClient( - mock_trusted_activities_service - ) - trusted_activities_client.create( - "DOMAIN", "test.com", description="description" - ) - mock_trusted_activities_service.create.assert_called_once_with( - "DOMAIN", "test.com", "description" - ) - - def test_get_calls_service_with_expected_params( - self, mock_trusted_activities_service - ): - trusted_activities_client = TrustedActivitiesClient( - mock_trusted_activities_service - ) - trusted_activities_client.get(_TEST_TRUSTED_ACTIVITY_RESOURCE_ID) - mock_trusted_activities_service.get.assert_called_once_with( - _TEST_TRUSTED_ACTIVITY_RESOURCE_ID - ) - - def test_update_calls_service_with_expected_params( - self, mock_trusted_activities_service - ): - trusted_activities_client = TrustedActivitiesClient( - mock_trusted_activities_service - ) - trusted_activities_client.update( - _TEST_TRUSTED_ACTIVITY_RESOURCE_ID, value="new-domain.com" - ) - mock_trusted_activities_service.update.assert_called_once_with( - id=_TEST_TRUSTED_ACTIVITY_RESOURCE_ID, - value="new-domain.com", - description=None, - ) - - def test_delete_calls_service_with_expected_params( - self, mock_trusted_activities_service - ): - trusted_activities_client = TrustedActivitiesClient( - mock_trusted_activities_service - ) - trusted_activities_client.delete(_TEST_TRUSTED_ACTIVITY_RESOURCE_ID) - mock_trusted_activities_service.delete.assert_called_once_with( - _TEST_TRUSTED_ACTIVITY_RESOURCE_ID - ) diff --git 
a/tests/clients/test_userriskprofile.py b/tests/clients/test_userriskprofile.py deleted file mode 100644 index 567eeef9b..000000000 --- a/tests/clients/test_userriskprofile.py +++ /dev/null @@ -1,164 +0,0 @@ -import pytest -from tests.conftest import create_mock_response - -from py42.clients.userriskprofile import UserRiskProfileClient -from py42.exceptions import Py42NotFoundError -from py42.response import Py42Response -from py42.services.userriskprofile import UserRiskProfileService -from py42.services.users import UserService - -USER_ID = "123" -USERNAME = "risk-user@code42.com" - - -@pytest.fixture -def mock_user_risk_profile_service(mocker): - return mocker.MagicMock(spec=UserRiskProfileService) - - -@pytest.fixture -def mock_user_service(mocker): - return mocker.MagicMock(spec=UserService) - - -class TestUserRiskProfileClient: - def test_get_by_id_calls_service_with_expected_params( - self, mock_user_risk_profile_service, mock_user_service - ): - user_risk_profile_client = UserRiskProfileClient( - mock_user_risk_profile_service, mock_user_service - ) - user_risk_profile_client.get_by_id(USER_ID) - mock_user_risk_profile_service.get_by_id.assert_called_once_with(USER_ID) - - def test_get_by_username_calls_user_service_with_expected_params( - self, mocker, mock_user_risk_profile_service, mock_user_service - ): - mock_user_service.get_by_username.return_value = create_mock_response( - mocker, text='{"data": {"users": [{"userUid": "1234"}]}}' - ) - user_risk_profile_client = UserRiskProfileClient( - mock_user_risk_profile_service, mock_user_service - ) - user_risk_profile_client.get_by_username(USERNAME) - mock_user_service.get_by_username.assert_called_once_with(USERNAME) - mock_user_risk_profile_service.get_by_id.assert_called_once_with("1234") - - def test_get_by_username_raises_Py42NotFoundError_with_response_when_username_not_found( - self, mocker, mock_user_risk_profile_service, mock_user_service - ): - mock_user_service.get_by_username.return_value = create_mock_response( - mocker, text='{"data": {"users": []}}' - ) - user_risk_profile_client = UserRiskProfileClient( - mock_user_risk_profile_service, mock_user_service - ) - with pytest.raises(Py42NotFoundError) as err: - user_risk_profile_client.get_by_username(USERNAME) - - assert isinstance(err.value.response, Py42Response) - - def test_update_calls_service_with_expected_params( - self, mock_user_risk_profile_service - ): - user_risk_profile_client = UserRiskProfileClient( - mock_user_risk_profile_service, mock_user_service - ) - user_risk_profile_client.update(USER_ID) - mock_user_risk_profile_service.update.assert_called_once_with( - USER_ID, None, None, None - ) - - def test_update_calls_service_with_optional_params( - self, mock_user_risk_profile_service - ): - user_risk_profile_client = UserRiskProfileClient( - mock_user_risk_profile_service, mock_user_service - ) - user_risk_profile_client.update(USER_ID, "2022-1-1", "2022-4-1", "notes") - mock_user_risk_profile_service.update.assert_called_once_with( - USER_ID, "2022-1-1", "2022-4-1", "notes" - ) - - def test_get_page_calls_service_with_expected_params( - self, mock_user_risk_profile_service - ): - user_risk_profile_client = UserRiskProfileClient( - mock_user_risk_profile_service, mock_user_service - ) - user_risk_profile_client.get_page() - assert mock_user_risk_profile_service.get_page.call_count == 1 - - def test_get_page_calls_service_with_optional_params( - self, mock_user_risk_profile_service, mock_user_service - ): - user_risk_profile_client = 
UserRiskProfileClient( - mock_user_risk_profile_service, mock_user_service - ) - user_risk_profile_client.get_page(page_num=1, page_size=10) - mock_user_risk_profile_service.get_page.assert_called_once_with( - 1, 10, None, None, None, None, None, None, None, None, None, None, None - ) - - def test_get_all_calls_service_with_expected_params( - self, mock_user_risk_profile_service, mock_user_service - ): - user_risk_profile_client = UserRiskProfileClient( - mock_user_risk_profile_service, mock_user_service - ) - user_risk_profile_client.get_all() - assert mock_user_risk_profile_service.get_all.call_count == 1 - - def test_get_all_calls_service_with_optional_params( - self, mock_user_risk_profile_service, mock_user_service - ): - user_risk_profile_client = UserRiskProfileClient( - mock_user_risk_profile_service, mock_user_service - ) - user_risk_profile_client.get_all( - manager_id="manager-id", - title="engineer", - division="division", - department="prod", - employment_type="full-time", - country="usa", - region="midwest", - locality="local", - active=True, - deleted=False, - support_user=False, - ) - mock_user_risk_profile_service.get_all.assert_called_once_with( - "manager-id", - "engineer", - "division", - "prod", - "full-time", - "usa", - "midwest", - "local", - True, - False, - False, - ) - - def test_add_cloud_aliases_calls_service_with_expected_params( - self, mock_user_risk_profile_service, mock_user_service - ): - user_risk_profile_client = UserRiskProfileClient( - mock_user_risk_profile_service, mock_user_service - ) - user_risk_profile_client.add_cloud_aliases(USER_ID, "cloud-alias@email.com") - mock_user_risk_profile_service.add_cloud_aliases.assert_called_once_with( - USER_ID, "cloud-alias@email.com" - ) - - def test_delete_cloud_aliases_calls_service_with_expected_params( - self, mock_user_risk_profile_service, mock_user_service - ): - aliases = ["cloud-alias@email.com", "default@code42.com"] - user_risk_profile_client = UserRiskProfileClient( - mock_user_risk_profile_service, mock_user_service - ) - user_risk_profile_client.delete_cloud_aliases(USER_ID, aliases) - mock_user_risk_profile_service.delete_cloud_aliases(USER_ID, aliases) diff --git a/tests/clients/test_watchlists.py b/tests/clients/test_watchlists.py deleted file mode 100644 index a4b87b7c5..000000000 --- a/tests/clients/test_watchlists.py +++ /dev/null @@ -1,124 +0,0 @@ -import pytest - -from py42.clients.watchlists import WatchlistsClient -from py42.services.watchlists import WatchlistsService - -WATCHLIST_ID = "123" -WATCHLIST_TYPE = "DEPARTING_EMPLOYEE" - - -@pytest.fixture -def mock_watchlists_service(mocker): - return mocker.MagicMock(spec=WatchlistsService) - - -class TestWatchlistsClient: - def test_get_calls_service_with_expected_params( - self, - mock_watchlists_service, - ): - watchlists_client = WatchlistsClient(mock_watchlists_service) - watchlists_client.get(WATCHLIST_ID) - mock_watchlists_service.get.assert_called_once_with(WATCHLIST_ID) - - def test_delete_calls_service_with_expected_params( - self, - mock_watchlists_service, - ): - watchlists_client = WatchlistsClient(mock_watchlists_service) - watchlists_client.delete(WATCHLIST_ID) - mock_watchlists_service.delete.assert_called_once_with(WATCHLIST_ID) - - def test_get_all_calls_service_with_expected_params( - self, - mock_watchlists_service, - ): - watchlists_client = WatchlistsClient(mock_watchlists_service) - watchlists_client.get_all() - assert mock_watchlists_service.get_all.call_count == 1 - - def 
test_create_calls_service_with_expected_params( - self, - mock_watchlists_service, - ): - watchlists_client = WatchlistsClient(mock_watchlists_service) - watchlists_client.create(WATCHLIST_TYPE) - mock_watchlists_service.create.assert_called_once_with( - WATCHLIST_TYPE, title=None, description=None - ) - - def test_create_custom_without_title_raises_exception( - self, mock_watchlists_service - ): - watchlists_client = WatchlistsClient(mock_watchlists_service) - with pytest.raises(ValueError): - watchlists_client.create("CUSTOM") - - def test_get_all_included_users_calls_service_with_expected_params( - self, mock_watchlists_service - ): - watchlists_client = WatchlistsClient(mock_watchlists_service) - watchlists_client.get_all_included_users(WATCHLIST_ID) - mock_watchlists_service.get_all_included_users.assert_called_once_with( - WATCHLIST_ID - ) - - def test_add_included_users_by_watchlist_id_calls_service_with_expected_params( - self, mock_watchlists_service - ): - watchlists_client = WatchlistsClient(mock_watchlists_service) - user_ids = ["1a", "2b", "3c"] - watchlists_client.add_included_users_by_watchlist_id(user_ids, WATCHLIST_ID) - mock_watchlists_service.add_included_users_by_watchlist_id.assert_called_once_with( - user_ids, WATCHLIST_ID - ) - - def test_add_included_users_by_watchlist_tyoe_calls_service_with_expected_params( - self, mock_watchlists_service - ): - watchlists_client = WatchlistsClient(mock_watchlists_service) - user_ids = ["1a", "2b", "3c"] - watchlists_client.add_included_users_by_watchlist_type(user_ids, WATCHLIST_TYPE) - mock_watchlists_service.add_included_users_by_watchlist_type.assert_called_once_with( - user_ids, WATCHLIST_TYPE - ) - - def test_remove_included_users_by_watchlist_id_calls_service_with_expected_params( - self, mock_watchlists_service - ): - watchlists_client = WatchlistsClient(mock_watchlists_service) - user_ids = ["1a", "2b", "3c"] - watchlists_client.remove_included_users_by_watchlist_id(user_ids, WATCHLIST_ID) - mock_watchlists_service.delete_included_users_by_watchlist_id.assert_called_once_with( - user_ids, WATCHLIST_ID - ) - - def test_remove_included_users_by_watchlist_tyoe_calls_service_with_expected_params( - self, mock_watchlists_service - ): - watchlists_client = WatchlistsClient(mock_watchlists_service) - user_ids = ["1a", "2b", "3c"] - watchlists_client.remove_included_users_by_watchlist_type( - user_ids, WATCHLIST_TYPE - ) - mock_watchlists_service.delete_included_users_by_watchlist_type.assert_called_once_with( - user_ids, WATCHLIST_TYPE - ) - - def test_get_watchlist_member_calls_service_with_expected_params( - self, mock_watchlists_service - ): - watchlists_client = WatchlistsClient(mock_watchlists_service) - watchlists_client.get_watchlist_member(WATCHLIST_ID, "123") - mock_watchlists_service.get_watchlist_member.assert_called_once_with( - WATCHLIST_ID, "123" - ) - - def test_get_all_watchlist_members_calls_service_with_expected_params( - self, mock_watchlists_service - ): - watchlists_client = WatchlistsClient(mock_watchlists_service) - watchlists_client.get_all_watchlist_members(WATCHLIST_ID) - mock_watchlists_service.get_all_watchlist_members.assert_called_once_with( - WATCHLIST_ID - ) diff --git a/tests/conftest.py b/tests/conftest.py index 090bfdb6a..82606ffec 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,14 +3,13 @@ from requests import Response from requests import Session -from py42.clients._archiveaccess import FileSelection -from py42.clients._archiveaccess import FileType -from py42.exceptions import 
Py42NotFoundError -from py42.exceptions import Py42UnauthorizedError -from py42.response import Py42Response -from py42.sdk.queries.query_filter import QueryFilter -from py42.services._connection import Connection -from py42.usercontext import UserContext +from pycpg.clients._archiveaccess import FileSelection +from pycpg.clients._archiveaccess import FileType +from pycpg.exceptions import PycpgNotFoundError +from pycpg.exceptions import PycpgUnauthorizedError +from pycpg.response import PycpgResponse +from pycpg.services._connection import Connection +from pycpg.usercontext import UserContext TENANT_ID_FROM_RESPONSE = "00000000-0000-0000-0000-000000000000" @@ -38,11 +37,6 @@ def user_context(mocker): TRACEBACK = "Traceback..." -EVENT_FILTER_FIELD_NAME = "filter_field_name" -OPERATOR_STRING = "IS_IN" -VALUE_STRING = "value_example" -VALUE_UNICODE = "您已经发现了秘密信息" - TEST_ACCEPTING_GUID = "accepting-device-guid" TEST_ADDED_PATH = "E:/" TEST_ADDED_EXCLUDED_PATH = "C:/Users/TestUser/Downloads/" @@ -108,7 +102,7 @@ def unauthorized_response(mocker, http_error): response.status_code = 401 response.encoding = None error.response = response - response.raise_for_status.side_effect = [Py42UnauthorizedError(error)] + response.raise_for_status.side_effect = [PycpgUnauthorizedError(error)] return response @@ -157,28 +151,6 @@ def exception(): return Exception() -@pytest.fixture -def query_filter_list(): - return [ - QueryFilter( - EVENT_FILTER_FIELD_NAME + str(suffix), - OPERATOR_STRING + str(suffix), - VALUE_STRING + str(suffix), - ) - for suffix in range(3) - ] - - -@pytest.fixture -def query_filter(): - return QueryFilter(EVENT_FILTER_FIELD_NAME, OPERATOR_STRING, VALUE_STRING) - - -@pytest.fixture -def unicode_query_filter(): - return QueryFilter(EVENT_FILTER_FIELD_NAME, OPERATOR_STRING, VALUE_UNICODE) - - @pytest.fixture def mock_connection(mocker): connection = mocker.MagicMock(spec=Connection) @@ -199,7 +171,7 @@ def create_mock_response(mocker, text, status_code=200): response.text = text response.status_code = status_code response.encoding = None - return Py42Response(response) + return PycpgResponse(response) def create_mock_error(err_class, mocker, text): @@ -214,7 +186,7 @@ def mock_post_not_found_session(mocker, mock_connection): response.status_code = 404 exception = mocker.MagicMock(spec=HTTPError) exception.response = response - mock_connection.post.side_effect = Py42NotFoundError(exception) + mock_connection.post.side_effect = PycpgNotFoundError(exception) return mock_connection diff --git a/tests/constants/test_init.py b/tests/constants/test_init.py index dbdf8f1be..cf311704e 100644 --- a/tests/constants/test_init.py +++ b/tests/constants/test_init.py @@ -1,4 +1,4 @@ -from py42.constants import SortDirection +from pycpg.constants import SortDirection class TestSortDirection: diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 07e77e508..fe82a3f57 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -3,16 +3,12 @@ import pytest -import py42.sdk as _sdk -from py42.util import convert_datetime_to_epoch +import pycpg.sdk as _sdk +from pycpg.util import convert_datetime_to_epoch def pytest_addoption(parser): - parser.addini("alert_id", "Alert id that exists in the system.") parser.addini("device_id", "Device id that exists in the system.") - parser.addini("observer_rule_id", "Observer rule id.") - parser.addini("md5_hash", "MD5 hash of a file that exists on the device.") - parser.addini("sha256_hash", "SHA256 hash of a file that exists 
on the device.") parser.addini( "user_uid", "The UID of the user to get plan storage information for." ) @@ -20,25 +16,13 @@ def pytest_addoption(parser): parser.addini("destination_device_guid", "Device guid to which archival was done.") parser.addini("archive_guid", "Guid of the archival.") parser.addini("path", "Complete path of the file with filename which was archived.") - parser.addini("file_data", "Content of the file during archival.") - parser.addini("case_event_id", "Event id to associate to a case.") - - -@pytest.fixture(scope="session") -def alert_id(request): - return request.config.getini("alert_id") - - -@pytest.fixture(scope="session") -def observer_id(request): - return request.config.getini("observer_rule_id") @pytest.fixture(scope="session") def connection(): - host = os.environ.get("C42_HOST") or "http://127.0.0.1:4200" - user = os.environ.get("C42_USER") or "test.user@example.com" - pw = os.environ.get("C42_PW") or "password" + host = os.environ.get("CPG_HOST") or "http://127.0.0.1:4200" + user = os.environ.get("CPG_USER") or "test.user@example.com" + pw = os.environ.get("CPG_PW") or "password" return _get_sdk(host, user, pw) @@ -54,9 +38,9 @@ def _get_sdk(host, user, pw): @pytest.fixture(scope="session") def api_client_connection(): - host = os.environ.get("C42_HOST") or "http://127.0.0.1:4200" - client = os.environ.get("C42_API_CLIENT_ID") or "client_id" - secret = os.environ.get("C42_API_CLIENT_SECRET") or "secret" + host = os.environ.get("CPG_HOST") or "http://127.0.0.1:4200" + client = os.environ.get("CPG_API_CLIENT_ID") or "client_id" + secret = os.environ.get("CPG_API_CLIENT_SECRET") or "secret" return _get_api_client_sdk(host, client, secret) @@ -75,11 +59,6 @@ def timestamp(): return convert_datetime_to_epoch(datetime.utcnow()) -@pytest.fixture(scope="session") -def event_id(request): - return request.config.getini("case_event_id") - - @pytest.fixture(scope="session") def org(connection, timestamp): orgs_gen = connection.orgs.get_all() diff --git a/tests/integration/test_alertrules.py b/tests/integration/test_alertrules.py deleted file mode 100644 index f1976488d..000000000 --- a/tests/integration/test_alertrules.py +++ /dev/null @@ -1,51 +0,0 @@ -import pytest -from tests.integration.conftest import assert_successful_response - - -@pytest.fixture(scope="module") -def rule_id(connection, observer_id): - response = connection.alerts.rules.get_by_observer_id(observer_id) - return response["ruleMetadata"][0]["id"] - - -@pytest.mark.integration -class TestAlertRules: - def test_rules_get_all(self, connection): - response_gen = connection.alerts.rules.get_all() - for response in response_gen: - assert_successful_response(response) - break - - def test_rules_get_by_observer_id(self, connection, observer_id): - response = connection.alerts.rules.get_by_observer_id(observer_id) - assert_successful_response(response) - - def test_rules_get_all_by_name(self, connection): - response_gen = connection.alerts.rules.get_all_by_name("Test Alerts using CLI") - for response in response_gen: - assert_successful_response(response) - break - - def test_rules_get_page(self, connection): - response = connection.alerts.rules.get_page() - assert_successful_response(response) - - def test_rules_remove_user(self, connection, new_user, observer_id): - response = connection.alerts.rules.remove_user(observer_id, new_user["userUid"]) - assert_successful_response(response) - - def test_rules_remove_all_users(self, connection, observer_id): - response = 
connection.alerts.rules.remove_all_users(observer_id) - assert_successful_response(response) - - def test_rules_exfiltration_get(self, connection, rule_id): - response = connection.alerts.rules.exfiltration.get(rule_id) - assert_successful_response(response) - - def test_rules_cloudshare_get(self, connection, rule_id): - response = connection.alerts.rules.cloudshare.get(rule_id) - assert_successful_response(response) - - def test_file_type_mismatch_get(self, connection, rule_id): - response = connection.alerts.rules.filetypemismatch.get(rule_id) - assert_successful_response(response) diff --git a/tests/integration/test_alerts.py b/tests/integration/test_alerts.py deleted file mode 100644 index 0493f00f7..000000000 --- a/tests/integration/test_alerts.py +++ /dev/null @@ -1,35 +0,0 @@ -import pytest -from tests.integration.conftest import assert_successful_response - -from py42.sdk.queries.alerts.alert_query import AlertQuery -from py42.sdk.queries.alerts.filters import AlertState -from py42.sdk.queries.alerts.filters import Severity - - -@pytest.mark.integration -def test_search(connection): - filters = [ - AlertState.eq(AlertState.OPEN), - Severity.is_in([Severity.HIGH, Severity.MEDIUM]), - ] - alert_query = AlertQuery(*filters) - response = connection.alerts.search(alert_query) - assert_successful_response(response) - - -@pytest.mark.integration -def test_get_details(connection, alert_id): - response = connection.alerts.get_details(alert_id) - assert_successful_response(response) - - -@pytest.mark.integration -def test_resolve(connection, alert_id): - response = connection.alerts.resolve(alert_id) - assert_successful_response(response) - - -@pytest.mark.integration -def test_reopen(connection, alert_id): - response = connection.alerts.reopen(alert_id) - assert_successful_response(response) diff --git a/tests/integration/test_cases.py b/tests/integration/test_cases.py deleted file mode 100644 index e495de88a..000000000 --- a/tests/integration/test_cases.py +++ /dev/null @@ -1,48 +0,0 @@ -import pytest -from tests.integration.conftest import assert_successful_response - - -@pytest.mark.integration -class TestCases: - @pytest.fixture(scope="module") - def case(self, connection, timestamp): - return connection.cases.create(f"integration_test_{timestamp}") - - def test_get_all_cases( - self, - connection, - ): - page_gen = connection.cases.get_all() - for response in page_gen: - assert_successful_response(response) - break - - def test_get_case_by_case_number(self, connection, case): - response = connection.cases.get(case["number"]) - assert_successful_response(response) - - def test_update_case(self, connection, case): - response = connection.cases.update( - case["number"], findings="integration test case" - ) - assert_successful_response(response) - - def test_export_summary(self, connection, case): - response = connection.cases.export_summary(case["number"]) - assert_successful_response(response) - - def test_add_file_event(self, connection, case, event_id): - response = connection.cases.file_events.add(case["number"], event_id) - assert_successful_response(response) - - def test_get_file_event(self, connection, case, event_id): - response = connection.cases.file_events.get(case["number"], event_id) - assert_successful_response(response) - - def test_delete_file_event(self, connection, case, event_id): - response = connection.cases.file_events.delete(case["number"], event_id) - assert_successful_response(response) - - def test_get_all_file_events(self, connection, case): - response = 
connection.cases.file_events.get_all(case["number"]) - assert_successful_response(response) diff --git a/tests/integration/test_securitydata.py b/tests/integration/test_securitydata.py deleted file mode 100644 index cad4cc54c..000000000 --- a/tests/integration/test_securitydata.py +++ /dev/null @@ -1,50 +0,0 @@ -from datetime import datetime -from datetime import timedelta - -import pytest -from tests.integration.conftest import assert_successful_response - -from py42.sdk.queries.fileevents.file_event_query import FileEventQuery -from py42.sdk.queries.fileevents.filters import EventTimestamp -from py42.util import convert_datetime_to_epoch - - -@pytest.fixture(scope="module") -def md5_hash(request): - return request.config.getini("md5_hash") - - -@pytest.fixture(scope="module") -def sha256_hash(request): - return request.config.getini("sha256_hash") - - -@pytest.fixture(scope="module") -def user_uid(request): - return request.config.getini("user_uid") - - -@pytest.fixture -def file_data(request): - return request.config.getini("file_data") - - -@pytest.mark.integration -class TestSecurityData: - def test_search_file_events(self, connection): - start_date = datetime.utcnow() - timedelta(1) - end_date = datetime.utcnow() - start_timestamp = convert_datetime_to_epoch(start_date) - end_timestamp = convert_datetime_to_epoch(end_date) - date_query = EventTimestamp.in_range(start_timestamp, end_timestamp) - query = FileEventQuery.all(date_query) - response = connection.securitydata.search_file_events(query) - assert_successful_response(response) - - def test_stream_file_by_md5(self, connection, md5_hash, file_data): - response = connection.securitydata.stream_file_by_md5(md5_hash) - assert str(response) == file_data - - def test_stream_file_by_sha256(self, connection, sha256_hash, file_data): - response = connection.securitydata.stream_file_by_sha256(sha256_hash) - assert str(response) == file_data diff --git a/tests/integration/test_trustedactivites.py b/tests/integration/test_trustedactivites.py deleted file mode 100644 index 65cb52a3d..000000000 --- a/tests/integration/test_trustedactivites.py +++ /dev/null @@ -1,41 +0,0 @@ -import pytest -from tests.integration.conftest import assert_successful_response - - -@pytest.mark.integration -class TestTrustedActivities: - @pytest.fixture(scope="module") - def trusted_activity(self, connection): - return connection.trustedactivities.create("DOMAIN", "test.com") - - def test_get_all_trusted_activities( - self, - connection, - ): - page_gen = connection.trustedactivities.get_all() - for response in page_gen: - assert_successful_response(response) - break - - def test_get_all_trusted_activities_with_optional_params( - self, - connection, - ): - page_gen = connection.trustedactivities.get_all(type="DOMAIN", page_size=1) - for response in page_gen: - assert_successful_response(response) - break - - def test_get_trusted_activity(self, connection, trusted_activity): - response = connection.trustedactivities.get(trusted_activity["resourceId"]) - assert_successful_response(response) - - def test_update_trusted_activity(self, connection, trusted_activity): - response = connection.trustedactivities.update( - trusted_activity["resourceId"], description="integration test case" - ) - assert_successful_response(response) - - def test_delete_trusted_activity(self, connection, trusted_activity): - response = connection.trustedactivities.delete(trusted_activity["resourceId"]) - assert_successful_response(response) diff --git a/tests/sdk/queries/__init__.py 
b/tests/sdk/queries/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/sdk/queries/alerts/__init__.py b/tests/sdk/queries/alerts/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/sdk/queries/alerts/filters/__init__.py b/tests/sdk/queries/alerts/filters/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/sdk/queries/alerts/filters/test_alert_filter.py b/tests/sdk/queries/alerts/filters/test_alert_filter.py deleted file mode 100644 index 3edefaaef..000000000 --- a/tests/sdk/queries/alerts/filters/test_alert_filter.py +++ /dev/null @@ -1,371 +0,0 @@ -from datetime import datetime -from time import time - -from tests.sdk.queries.conftest import CONTAINS -from tests.sdk.queries.conftest import IN_RANGE -from tests.sdk.queries.conftest import IS -from tests.sdk.queries.conftest import IS_IN -from tests.sdk.queries.conftest import IS_NOT -from tests.sdk.queries.conftest import NOT_CONTAINS -from tests.sdk.queries.conftest import NOT_IN -from tests.sdk.queries.conftest import ON_OR_AFTER -from tests.sdk.queries.conftest import ON_OR_BEFORE - -from py42.sdk.queries.alerts.filters import Actor -from py42.sdk.queries.alerts.filters import AlertState -from py42.sdk.queries.alerts.filters import DateObserved -from py42.sdk.queries.alerts.filters import Description -from py42.sdk.queries.alerts.filters import RuleId -from py42.sdk.queries.alerts.filters import RuleName -from py42.sdk.queries.alerts.filters import RuleSource -from py42.sdk.queries.alerts.filters import RuleType -from py42.sdk.queries.alerts.filters import Severity -from py42.sdk.queries.alerts.util import create_contains_filter_group -from py42.sdk.queries.alerts.util import create_not_contains_filter_group -from py42.util import MICROSECOND_FORMAT - - -def format_timestamp_with_microseconds(test_time): - test_date = datetime.utcfromtimestamp(test_time) - return format_datetime_with_microseconds(test_date) - - -def format_datetime_with_microseconds(test_date): - prefix = test_date.strftime(MICROSECOND_FORMAT) - timestamp_str = prefix - return timestamp_str - - -def test_create_contains_filter_group_returns_filter_group_with_correct_json_representation(): - term = "test_eq_term" - value_list = "string_to_contain" - _group = create_contains_filter_group(term, value_list) - assert ( - str(_group) == '{"filterClause":"AND", "filters":[{"operator":"CONTAINS", ' - '"term":"test_eq_term", "value":"string_to_contain"}]}' - ) - - -def test_create_not_contains_filter_group_returns_filter_group_with_correct_json_representation(): - term = "test_eq_term" - value_list = "string_to_not_contain" - _group = create_not_contains_filter_group(term, value_list) - assert ( - str(_group) - == '{"filterClause":"AND", "filters":[{"operator":"DOES_NOT_CONTAIN", ' - '"term":"test_eq_term", "value":"string_to_not_contain"}]}' - ) - - -def test_date_observed_on_or_after_str_gives_correct_json_representation(): - test_time = time() - formatted = format_timestamp_with_microseconds(test_time) - _filter = DateObserved.on_or_after(test_time) - expected = ON_OR_AFTER.format("createdAt", formatted) - assert str(_filter) == expected - - -def test_date_observed_on_or_before_str_gives_correct_json_representation(): - test_time = time() - formatted = format_timestamp_with_microseconds(test_time) - _filter = DateObserved.on_or_before(test_time) - expected = ON_OR_BEFORE.format("createdAt", formatted) - assert str(_filter) == expected - - -def 
test_date_observed_does_not_have_within_the_last_option(): - assert not hasattr(DateObserved(), "within_the_last") - - -def test_date_observed_in_range_str_gives_correct_json_representation(): - test_before_time = time() - test_after_time = time() + 30 # make sure timestamps are actually different - formatted_before = format_timestamp_with_microseconds(test_before_time) - formatted_after = format_timestamp_with_microseconds(test_after_time) - _filter = DateObserved.in_range(test_before_time, test_after_time) - expected = IN_RANGE.format("createdAt", formatted_before, formatted_after) - assert str(_filter) == expected - - -def test_date_observed_on_same_day_str_gives_correct_json_representation(): - test_time = time() - test_date = datetime.utcfromtimestamp(test_time) - start_time = datetime(test_date.year, test_date.month, test_date.day, 0, 0, 0) - end_time = datetime(test_date.year, test_date.month, test_date.day, 23, 59, 59) - formatted_before = format_datetime_with_microseconds(start_time) - formatted_after = format_datetime_with_microseconds(end_time) - _filter = DateObserved.on_same_day(test_time) - expected = IN_RANGE.format("createdAt", formatted_before, formatted_after) - assert str(_filter) == expected - - -def test_actor_eq_str_gives_correct_json_representation(): - _filter = Actor.eq("test.testerson") - expected = IS.format("actor", "test.testerson") - assert str(_filter) == expected - - -def test_actor_not_eq_str_gives_correct_json_representation(): - _filter = Actor.not_eq("test.testerson") - expected = IS_NOT.format("actor", "test.testerson") - assert str(_filter) == expected - - -def test_actor_is_in_str_gives_correct_json_representation(): - items = ["test.testerson", "flag.flagerson", "mock.mockerson"] - _filter = Actor.is_in(items) - expected = IS_IN.format("actor", *sorted(items)) - assert str(_filter) == expected - - -def test_actor_not_in_str_gives_correct_json_representation(): - items = ["test.testerson", "flag.flagerson", "mock.mockerson"] - _filter = Actor.not_in(items) - expected = NOT_IN.format("actor", *sorted(items)) - assert str(_filter) == expected - - -def test_actor_contains_str_gives_correct_json_representation(): - _filter = Actor.contains("test") - expected = CONTAINS.format("actor", "test") - assert str(_filter) == expected - - -def test_actor_not_contains_str_gives_correct_json_representation(): - _filter = Actor.not_contains("test") - expected = NOT_CONTAINS.format("actor", "test") - assert str(_filter) == expected - - -def test_severity_eq_str_gives_correct_json_representation(): - _filter = Severity.eq("HIGH") - expected = IS.format("riskSeverity", "HIGH") - assert str(_filter) == expected - - -def test_severity_not_eq_str_gives_correct_json_representation(): - _filter = Severity.not_eq("HIGH") - expected = IS_NOT.format("riskSeverity", "HIGH") - assert str(_filter) == expected - - -def test_severity_is_in_str_gives_correct_json_representation(): - items = ["HIGH", "MODERATE", "LOW"] - _filter = Severity.is_in(items) - expected = IS_IN.format("riskSeverity", *sorted(items)) - assert str(_filter) == expected - - -def test_severity_not_in_str_gives_correct_json_representation(): - items = ["HIGH", "MODERATE", "LOW"] - _filter = Severity.not_in(items) - expected = NOT_IN.format("riskSeverity", *sorted(items)) - assert str(_filter) == expected - - -def test_rule_name_eq_str_gives_correct_json_representation(): - _filter = RuleName.eq("Departing Employee") - expected = IS.format("name", "Departing Employee") - assert str(_filter) == expected - - -def 
test_rule_name_not_eq_str_gives_correct_json_representation(): - _filter = RuleName.not_eq("Departing Employee") - expected = IS_NOT.format("name", "Departing Employee") - assert str(_filter) == expected - - -def test_rule_name_is_in_str_gives_correct_json_representation(): - items = ["rule 1", "rule 2", "rule 3"] - _filter = RuleName.is_in(items) - expected = IS_IN.format("name", *sorted(items)) - assert str(_filter) == expected - - -def test_rule_name_not_in_str_gives_correct_json_representation(): - items = ["rule 1", "rule 2", "rule 3"] - _filter = RuleName.not_in(items) - expected = NOT_IN.format("name", *sorted(items)) - assert str(_filter) == expected - - -def test_rule_name_contains_str_gives_correct_json_representation(): - _filter = RuleName.contains("test") - expected = CONTAINS.format("name", "test") - assert str(_filter) == expected - - -def test_rule_name_not_contains_str_gives_correct_json_representation(): - _filter = RuleName.not_contains("test") - expected = NOT_CONTAINS.format("name", "test") - assert str(_filter) == expected - - -def test_rule_id_eq_str_gives_correct_json_representation(): - _filter = RuleId.eq("rule123") - expected = IS.format("ruleId", "rule123") - assert str(_filter) == expected - - -def test_rule_id_not_eq_str_gives_correct_json_representation(): - _filter = RuleId.not_eq("rule123") - expected = IS_NOT.format("ruleId", "rule123") - assert str(_filter) == expected - - -def test_rule_id_is_in_str_gives_correct_json_representation(): - items = ["rule1", "rule2", "rule3"] - _filter = RuleId.is_in(items) - expected = IS_IN.format("ruleId", *sorted(items)) - assert str(_filter) == expected - - -def test_rule_id_not_in_str_gives_correct_json_representation(): - items = ["rule 1", "rule 2", "rule 3"] - _filter = RuleId.not_in(items) - expected = NOT_IN.format("ruleId", *sorted(items)) - assert str(_filter) == expected - - -def test_rule_type_eq_str_gives_correct_json_representation(): - _filter = RuleType.eq("rule123") - expected = IS.format("type", "rule123") - assert str(_filter) == expected - - -def test_rule_type_not_eq_str_gives_correct_json_representation(): - _filter = RuleType.not_eq("rule123") - expected = IS_NOT.format("type", "rule123") - assert str(_filter) == expected - - -def test_rule_type_is_in_str_gives_correct_json_representation(): - items = ["rule1", "rule2", "rule3"] - _filter = RuleType.is_in(items) - expected = IS_IN.format("type", *sorted(items)) - assert str(_filter) == expected - - -def test_rule_type_not_in_str_gives_correct_json_representation(): - items = ["rule 1", "rule 2", "rule 3"] - _filter = RuleType.not_in(items) - expected = NOT_IN.format("type", *sorted(items)) - assert str(_filter) == expected - - -def test_rule_source_eq_str_gives_correct_json_representation(): - _filter = RuleSource.eq("rule123") - expected = IS.format("ruleSource", "rule123") - assert str(_filter) == expected - - -def test_rule_source_not_eq_str_gives_correct_json_representation(): - _filter = RuleSource.not_eq("rule123") - expected = IS_NOT.format("ruleSource", "rule123") - assert str(_filter) == expected - - -def test_rule_source_is_in_str_gives_correct_json_representation(): - items = ["rule1", "rule2", "rule3"] - _filter = RuleSource.is_in(items) - expected = IS_IN.format("ruleSource", *sorted(items)) - assert str(_filter) == expected - - -def test_rule_source_not_in_str_gives_correct_json_representation(): - items = ["rule 1", "rule 2", "rule 3"] - _filter = RuleSource.not_in(items) - expected = NOT_IN.format("ruleSource", *sorted(items)) - 
assert str(_filter) == expected - - -def test_description_eq_str_gives_correct_json_representation(): - _filter = Description.eq("Departing Employee") - expected = IS.format("description", "Departing Employee") - assert str(_filter) == expected - - -def test_description_not_eq_str_gives_correct_json_representation(): - _filter = Description.not_eq("Departing Employee") - expected = IS_NOT.format("description", "Departing Employee") - assert str(_filter) == expected - - -def test_description_is_in_str_gives_correct_json_representation(): - items = ["desc1", "desc2", "desc3"] - _filter = Description.is_in(items) - expected = IS_IN.format("description", *sorted(items)) - assert str(_filter) == expected - - -def test_description_not_in_str_gives_correct_json_representation(): - items = ["desc1", "desc2", "desc3"] - _filter = Description.not_in(items) - expected = NOT_IN.format("description", *sorted(items)) - assert str(_filter) == expected - - -def test_description_contains_str_gives_correct_json_representation(): - _filter = Description.contains("test") - expected = CONTAINS.format("description", "test") - assert str(_filter) == expected - - -def test_description_not_contains_str_gives_correct_json_representation(): - _filter = Description.not_contains("test") - expected = NOT_CONTAINS.format("description", "test") - assert str(_filter) == expected - - -def test_alert_state_eq_str_gives_correct_json_representation(): - _filter = AlertState.eq("OPEN") - expected = IS.format("state", "OPEN") - assert str(_filter) == expected - - -def test_alert_state_not_eq_str_gives_correct_json_representation(): - _filter = AlertState.not_eq("OPEN") - expected = IS_NOT.format("state", "OPEN") - assert str(_filter) == expected - - -def test_alert_state_is_in_str_gives_correct_json_representation(): - items = ["OPEN", "DISMISSED", "OTHER"] - _filter = AlertState.is_in(items) - expected = IS_IN.format("state", *sorted(items)) - assert str(_filter) == expected - - -def test_alert_state_not_in_str_gives_correct_json_representation(): - items = ["OPEN", "DISMISSED", "other"] - _filter = AlertState.not_in(items) - expected = NOT_IN.format("state", *sorted(items)) - assert str(_filter) == expected - - -def test_rule_source_choices_returns_set(): - choices = RuleSource.choices() - valid_set = {"Alerting", "Departing Employee", "High Risk Employee"} - assert set(choices) == valid_set - - -def test_rule_type_choices_returns_set(): - choices = RuleType.choices() - valid_set = { - "FedEndpointExfiltration", - "FedCloudSharePermissions", - "FedFileTypeMismatch", - } - assert set(choices) == valid_set - - -def test_severity_choices_returns_set(): - choices = Severity.choices() - valid_set = {"CRITICAL", "HIGH", "MODERATE", "LOW"} - assert set(choices) == valid_set - - -def test_alert_state_choices_returns_set(): - choices = AlertState.choices() - valid_set = {"OPEN", "RESOLVED", "PENDING", "IN_PROGRESS"} - assert set(choices) == valid_set diff --git a/tests/sdk/queries/alerts/test_alert_query.py b/tests/sdk/queries/alerts/test_alert_query.py deleted file mode 100644 index 33acc80dd..000000000 --- a/tests/sdk/queries/alerts/test_alert_query.py +++ /dev/null @@ -1,149 +0,0 @@ -from py42.sdk.queries.alerts.alert_query import AlertQuery - - -_TENANT_ID = "null" -JSON_QUERY_BASE = '{{"tenantId": {0}, "groupClause":"{1}", "groups":[{2}], "pgNum":{3}, "pgSize":{4}, "srtDirection":"{5}", "srtKey":"{6}"}}' - - -def build_query_json(group_clause, group_list): - return JSON_QUERY_BASE.format( - _TENANT_ID, group_clause, group_list, 
0, 500, "desc", "CreatedAt" - ) - - -def test_alert_query_repr_does_not_throw_type_error(): - # On python 2, `repr` doesn't throw. - # On python 3, if `repr` doesn't return type `str`, then an exception is thrown. - try: - _ = repr(AlertQuery()) - except TypeError: - raise AssertionError() - - -def test_alert_query_constructs_successfully(event_filter_group): - assert AlertQuery(event_filter_group) - - -def test_alert_query_str_with_single_filter_gives_correct_json_representation( - event_filter_group, -): - alert_query = AlertQuery(event_filter_group) - json_query_str = build_query_json("AND", event_filter_group) - assert str(alert_query) == json_query_str - - -def test_alert_query_unicode_with_single_filter_gives_correct_json_representation( - unicode_event_filter_group, -): - alert_query = AlertQuery(unicode_event_filter_group) - json_query_str = build_query_json("AND", unicode_event_filter_group) - assert str(alert_query) == json_query_str - - -def test_alert_query_str_with_single_filter_and_specified_gives_correct_json_representation( - event_filter_group, -): - alert_query = AlertQuery(event_filter_group, group_clause="AND") - json_query_str = build_query_json("AND", event_filter_group) - assert str(alert_query) == json_query_str - - -def test_alert_query_str_with_single_filter_or_specified_gives_correct_json_representation( - event_filter_group, -): - alert_query = AlertQuery(event_filter_group, group_clause="OR") - json_query_str = build_query_json("OR", event_filter_group) - assert str(alert_query) == json_query_str - - -def test_alert_query_str_with_many_filters_gives_correct_json_representation( - event_filter_group_list, -): - alert_query = AlertQuery(event_filter_group_list) - json_query_str = build_query_json("AND", event_filter_group_list) - assert str(alert_query) == json_query_str - - -def test_alert_query_str_with_many_filters_and_specified_gives_correct_json_representation( - event_filter_group_list, -): - alert_query = AlertQuery(event_filter_group_list, group_clause="AND") - json_query_str = build_query_json("AND", event_filter_group_list) - assert str(alert_query) == json_query_str - - -def test_alert_query_str_with_many_filters_or_specified_gives_correct_json_representation( - event_filter_group_list, -): - alert_query = AlertQuery(event_filter_group_list, group_clause="OR") - json_query_str = build_query_json("OR", event_filter_group_list) - assert str(alert_query) == json_query_str - - -def test_alert_query_str_with_page_num_gives_correct_json_representation( - event_filter_group, -): - alert_query = AlertQuery(event_filter_group) - alert_query.page_number = 5 - json_query_str = JSON_QUERY_BASE.format( - _TENANT_ID, "AND", event_filter_group, 5, 500, "desc", "CreatedAt" - ) - assert str(alert_query) == json_query_str - - -def test_alert_query_str_with_page_size_gives_correct_json_representation( - event_filter_group, -): - alert_query = AlertQuery(event_filter_group) - alert_query.page_size = 250 - json_query_str = JSON_QUERY_BASE.format( - _TENANT_ID, "AND", event_filter_group, 0, 250, "desc", "CreatedAt" - ) - assert str(alert_query) == json_query_str - - -def test_alert_query_str_with_sort_direction_gives_correct_json_representation( - event_filter_group, -): - alert_query = AlertQuery(event_filter_group) - alert_query.sort_direction = "asc" - json_query_str = JSON_QUERY_BASE.format( - _TENANT_ID, "AND", event_filter_group, 0, 500, "asc", "CreatedAt" - ) - assert str(alert_query) == json_query_str - - -def 
test_alert_query_str_with_sort_key_gives_correct_json_representation( - event_filter_group, -): - alert_query = AlertQuery(event_filter_group) - alert_query.sort_key = "some_field_to_sort_by" - json_query_str = JSON_QUERY_BASE.format( - _TENANT_ID, "AND", event_filter_group, 0, 500, "desc", "some_field_to_sort_by" - ) - assert str(alert_query) == json_query_str - - -def test_alert_query_from_dict_gives_correct_json_representation(): - group = { - "filterClause": "AND", - "filters": [{"operator": "IS", "term": "testterm", "value": "testval"}], - } - group_str = '{"filterClause":"AND", "filters":[{"operator":"IS", "term":"testterm", "value":"testval"}]}' - alert_query_dict = {"groupClause": "AND", "groups": [group]} - alert_query = AlertQuery.from_dict(alert_query_dict) - json_query_str = JSON_QUERY_BASE.format( - _TENANT_ID, "AND", group_str, 0, 500, "desc", "CreatedAt" - ) - assert str(alert_query) == json_query_str - - -def test_alert_query_dict_gives_expected_dict_representation(event_filter_group): - alert_query = AlertQuery(event_filter_group) - alert_query_query_dict = dict(alert_query) - assert alert_query_query_dict["groupClause"] == "AND" - assert alert_query_query_dict["pgNum"] == 0 - assert alert_query_query_dict["pgSize"] == 500 - assert alert_query_query_dict["srtDirection"] == "desc" - assert alert_query_query_dict["srtKey"] == "CreatedAt" - assert type(alert_query_query_dict["groups"]) == list diff --git a/tests/sdk/queries/conftest.py b/tests/sdk/queries/conftest.py deleted file mode 100644 index 253d786f0..000000000 --- a/tests/sdk/queries/conftest.py +++ /dev/null @@ -1,66 +0,0 @@ -from datetime import datetime - -import pytest - -from py42.sdk.queries.query_filter import FilterGroup -from py42.sdk.queries.query_filter import QueryFilter -from py42.util import MICROSECOND_FORMAT - -EVENT_FILTER_FIELD_NAME = "filter_field_name" -OPERATOR_STRING = "IS_IN" -VALUE_STRING = "value_example" -VALUE_UNICODE = "您已经发现了秘密信息" - -EXISTS = '{{"filterClause":"AND", "filters":[{{"operator":"EXISTS", "term":"{0}", "value":null}}]}}' -NOT_EXISTS = '{{"filterClause":"AND", "filters":[{{"operator":"DOES_NOT_EXIST", "term":"{0}", "value":null}}]}}' -IS = '{{"filterClause":"AND", "filters":[{{"operator":"IS", "term":"{0}", "value":"{1}"}}]}}' -IS_NOT = '{{"filterClause":"AND", "filters":[{{"operator":"IS_NOT", "term":"{0}", "value":"{1}"}}]}}' -IS_IN = '{{"filterClause":"OR", "filters":[{{"operator":"IS", "term":"{0}", "value":"{1}"}},{{"operator":"IS", "term":"{0}", "value":"{2}"}},{{"operator":"IS", "term":"{0}", "value":"{3}"}}]}}' -NOT_IN = '{{"filterClause":"AND", "filters":[{{"operator":"IS_NOT", "term":"{0}", "value":"{1}"}},{{"operator":"IS_NOT", "term":"{0}", "value":"{2}"}},{{"operator":"IS_NOT", "term":"{0}", "value":"{3}"}}]}}' -IN_RANGE = '{{"filterClause":"AND", "filters":[{{"operator":"ON_OR_AFTER", "term":"{0}", "value":"{1}"}},{{"operator":"ON_OR_BEFORE", "term":"{0}", "value":"{2}"}}]}}' - -ON_OR_AFTER = '{{"filterClause":"AND", "filters":[{{"operator":"ON_OR_AFTER", "term":"{0}", "value":"{1}"}}]}}' -ON_OR_BEFORE = '{{"filterClause":"AND", "filters":[{{"operator":"ON_OR_BEFORE", "term":"{0}", "value":"{1}"}}]}}' - -CONTAINS = '{{"filterClause":"AND", "filters":[{{"operator":"CONTAINS", "term":"{0}", "value":"{1}"}}]}}' -NOT_CONTAINS = '{{"filterClause":"AND", "filters":[{{"operator":"DOES_NOT_CONTAIN", "term":"{0}", "value":"{1}"}}]}}' - -GREATER_THAN = '{{"filterClause":"AND", "filters":[{{"operator":"GREATER_THAN", "term":"{0}", "value":"{1}"}}]}}' -LESS_THAN = 
'{{"filterClause":"AND", "filters":[{{"operator":"LESS_THAN", "term":"{0}", "value":"{1}"}}]}}' -WITHIN_THE_LAST = '{{"filterClause":"AND", "filters":[{{"operator":"WITHIN_THE_LAST", "term":"{0}", "value":"{1}"}}]}}' - - -@pytest.fixture -def event_filter_group(query_filter): - return FilterGroup([query_filter]) - - -@pytest.fixture -def unicode_event_filter_group(unicode_query_filter): - return FilterGroup([unicode_query_filter]) - - -@pytest.fixture -def event_filter_group_list(event_filter_group): - return [event_filter_group for _ in range(3)] - - -@pytest.fixture -def query_filter(): - return QueryFilter(EVENT_FILTER_FIELD_NAME, OPERATOR_STRING, VALUE_STRING) - - -@pytest.fixture -def unicode_query_filter(): - return QueryFilter(EVENT_FILTER_FIELD_NAME, OPERATOR_STRING, VALUE_UNICODE) - - -def format_timestamp(test_time): - test_date = datetime.utcfromtimestamp(test_time) - return format_datetime(test_date) - - -def format_datetime(test_date): - prefix = test_date.strftime(MICROSECOND_FORMAT)[:-4] - timestamp_str = f"{prefix}Z" - return timestamp_str diff --git a/tests/sdk/queries/fileevents/__init__.py b/tests/sdk/queries/fileevents/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/sdk/queries/fileevents/filters/__init__.py b/tests/sdk/queries/fileevents/filters/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/sdk/queries/fileevents/filters/test_activity_filter.py b/tests/sdk/queries/fileevents/filters/test_activity_filter.py deleted file mode 100644 index 38a70ab0f..000000000 --- a/tests/sdk/queries/fileevents/filters/test_activity_filter.py +++ /dev/null @@ -1,28 +0,0 @@ -from tests.sdk.queries.conftest import IS - -from py42.sdk.queries.fileevents.filters.activity_filter import RemoteActivity -from py42.sdk.queries.fileevents.filters.activity_filter import TrustedActivity - - -def test_risk_indicator_mime_type_is_true_str_gives_correct_json_representation(): - _filter = RemoteActivity.is_true() - expected = IS.format("remoteActivity", "TRUE") - assert str(_filter) == expected - - -def test_risk_indicator_mime_type_is_false_str_gives_correct_json_representation(): - _filter = RemoteActivity.is_false() - expected = IS.format("remoteActivity", "FALSE") - assert str(_filter) == expected - - -def test_risk_indicator_active_hours_is_true_str_gives_correct_json_representation(): - _filter = TrustedActivity.is_true() - expected = IS.format("trusted", "TRUE") - assert str(_filter) == expected - - -def test_risk_indicator_active_hours_is_false_str_gives_correct_json_representation(): - _filter = TrustedActivity.is_false() - expected = IS.format("trusted", "FALSE") - assert str(_filter) == expected diff --git a/tests/sdk/queries/fileevents/filters/test_cloud_filter.py b/tests/sdk/queries/fileevents/filters/test_cloud_filter.py deleted file mode 100644 index 6bef84bd9..000000000 --- a/tests/sdk/queries/fileevents/filters/test_cloud_filter.py +++ /dev/null @@ -1,169 +0,0 @@ -from tests.sdk.queries.conftest import EXISTS -from tests.sdk.queries.conftest import IS -from tests.sdk.queries.conftest import IS_IN -from tests.sdk.queries.conftest import IS_NOT -from tests.sdk.queries.conftest import NOT_EXISTS -from tests.sdk.queries.conftest import NOT_IN - -from py42.sdk.queries.fileevents.filters.cloud_filter import Actor -from py42.sdk.queries.fileevents.filters.cloud_filter import DirectoryID -from py42.sdk.queries.fileevents.filters.cloud_filter import Shared -from py42.sdk.queries.fileevents.filters.cloud_filter import 
SharedWith -from py42.sdk.queries.fileevents.filters.cloud_filter import SharingTypeAdded - - -def test_actor_exists_str_gives_correct_json_representation(): - _filter = Actor.exists() - expected = EXISTS.format("actor") - assert str(_filter) == expected - - -def test_actor_not_exists_str_gives_correct_json_representation(): - _filter = Actor.not_exists() - expected = NOT_EXISTS.format("actor") - assert str(_filter) == expected - - -def test_actor_eq_str_gives_correct_json_representation(): - _filter = Actor.eq("test_actor") - expected = IS.format("actor", "test_actor") - assert str(_filter) == expected - - -def test_actor_not_eq_str_gives_correct_json_representation(): - _filter = Actor.not_eq("test_actor") - expected = IS_NOT.format("actor", "test_actor") - assert str(_filter) == expected - - -def test_actor_is_in_str_gives_correct_json_representation(): - items = ["actor1", "actor2", "actor3"] - _filter = Actor.is_in(items) - expected = IS_IN.format("actor", *items) - assert str(_filter) == expected - - -def test_actor_not_in_str_gives_correct_json_representation(): - items = ["actor1", "actor2", "actor3"] - _filter = Actor.not_in(items) - expected = NOT_IN.format("actor", *items) - assert str(_filter) == expected - - -def test_directory_id_eq_str_gives_correct_json_representation(): - _filter = DirectoryID.eq("test_id") - expected = IS.format("directoryId", "test_id") - assert str(_filter) == expected - - -def test_directory_id_not_eq_str_gives_correct_json_representation(): - _filter = DirectoryID.not_eq("test_id") - expected = IS_NOT.format("directoryId", "test_id") - assert str(_filter) == expected - - -def test_directory_id_is_in_str_gives_correct_json_representation(): - items = ["directoryId1", "directoryId2", "directoryId3"] - _filter = DirectoryID.is_in(items) - expected = IS_IN.format("directoryId", *items) - assert str(_filter) == expected - - -def test_directory_id_not_in_str_gives_correct_json_representation(): - items = ["directoryId1", "directoryId2", "directoryId3"] - _filter = DirectoryID.not_in(items) - expected = NOT_IN.format("directoryId", *items) - assert str(_filter) == expected - - -def test_shared_is_true_str_gives_correct_json_representation(): - _filter = Shared.is_true() - expected = IS.format("shared", "TRUE") - assert str(_filter) == expected - - -def test_shared_is_false_str_gives_correct_json_representation(): - _filter = Shared.is_false() - expected = IS.format("shared", "FALSE") - assert str(_filter) == expected - - -def test_shared_with_exists_str_gives_correct_json_representation(): - _filter = SharedWith.exists() - expected = EXISTS.format("sharedWith") - assert str(_filter) == expected - - -def test_shared_with_not_exists_str_gives_correct_json_representation(): - _filter = SharedWith.not_exists() - expected = NOT_EXISTS.format("sharedWith") - assert str(_filter) == expected - - -def test_shared_with_eq_str_gives_correct_json_representation(): - _filter = SharedWith.eq("test_user") - expected = IS.format("sharedWith", "test_user") - assert str(_filter) == expected - - -def test_shared_with_not_eq_str_gives_correct_json_representation(): - _filter = SharedWith.not_eq("test_user") - expected = IS_NOT.format("sharedWith", "test_user") - assert str(_filter) == expected - - -def test_shared_with_is_in_str_gives_correct_json_representation(): - items = ["user1", "user2", "user3"] - _filter = SharedWith.is_in(items) - expected = IS_IN.format("sharedWith", *items) - assert str(_filter) == expected - - -def 
test_shared_with_not_in_str_gives_correct_json_representation(): - items = ["user1", "user2", "user3"] - _filter = SharedWith.not_in(items) - expected = NOT_IN.format("sharedWith", *items) - assert str(_filter) == expected - - -def test_sharing_type_added_exists_str_gives_correct_json_representation(): - _filter = SharingTypeAdded.exists() - expected = EXISTS.format("sharingTypeAdded") - assert str(_filter) == expected - - -def test_sharing_type_added_not_exists_str_gives_correct_json_representation(): - _filter = SharingTypeAdded.not_exists() - expected = NOT_EXISTS.format("sharingTypeAdded") - assert str(_filter) == expected - - -def test_sharing_type_added_eq_str_gives_correct_json_representation(): - _filter = SharingTypeAdded.eq("test_cloud") - expected = IS.format("sharingTypeAdded", "test_cloud") - assert str(_filter) == expected - - -def test_sharing_type_added_not_eq_str_gives_correct_json_representation(): - _filter = SharingTypeAdded.not_eq("test_cloud") - expected = IS_NOT.format("sharingTypeAdded", "test_cloud") - assert str(_filter) == expected - - -def test_sharing_type_added_is_in_str_gives_correct_json_representation(): - items = ["cloud1", "cloud2", "cloud3"] - _filter = SharingTypeAdded.is_in(items) - expected = IS_IN.format("sharingTypeAdded", *items) - assert str(_filter) == expected - - -def test_sharing_type_added_not_in_str_gives_correct_json_representation(): - items = ["cloud1", "cloud2", "cloud3"] - _filter = SharingTypeAdded.not_in(items) - expected = NOT_IN.format("sharingTypeAdded", *items) - assert str(_filter) == expected - - -def test_sharing_type_choices_returns_valid_attributes(): - choices = SharingTypeAdded.choices() - assert set(choices) == {"SharedViaLink", "IsPublic", "OutsideTrustedDomains"} diff --git a/tests/sdk/queries/fileevents/filters/test_device_filter.py b/tests/sdk/queries/fileevents/filters/test_device_filter.py deleted file mode 100644 index cbd5575ce..000000000 --- a/tests/sdk/queries/fileevents/filters/test_device_filter.py +++ /dev/null @@ -1,200 +0,0 @@ -import pytest -from tests.sdk.queries.conftest import EXISTS -from tests.sdk.queries.conftest import IS -from tests.sdk.queries.conftest import IS_IN -from tests.sdk.queries.conftest import IS_NOT -from tests.sdk.queries.conftest import NOT_EXISTS -from tests.sdk.queries.conftest import NOT_IN - -from py42.sdk.queries.fileevents.filters.device_filter import DeviceSignedInUserName -from py42.sdk.queries.fileevents.filters.device_filter import DeviceUsername -from py42.sdk.queries.fileevents.filters.device_filter import OSHostname -from py42.sdk.queries.fileevents.filters.device_filter import PrivateIPAddress -from py42.sdk.queries.fileevents.filters.device_filter import PublicIPAddress - - -def test_device_username_exists_str_gives_correct_json_representation(): - _filter = DeviceUsername.exists() - expected = EXISTS.format("deviceUserName") - assert str(_filter) == expected - - -def test_device_username_not_exists_str_gives_correct_json_representation(): - _filter = DeviceUsername.not_exists() - expected = NOT_EXISTS.format("deviceUserName") - assert str(_filter) == expected - - -def test_device_username_eq_str_gives_correct_json_representation(): - _filter = DeviceUsername.eq("test_deviceUserName") - expected = IS.format("deviceUserName", "test_deviceUserName") - assert str(_filter) == expected - - -def test_device_username_not_eq_str_gives_correct_json_representation(): - _filter = DeviceUsername.not_eq("test_deviceUserName") - expected = IS_NOT.format("deviceUserName", 
"test_deviceUserName") - assert str(_filter) == expected - - -def test_device_username_is_in_str_gives_correct_json_representation(): - items = ["deviceUserName1", "deviceUserName2", "deviceUserName3"] - _filter = DeviceUsername.is_in(items) - expected = IS_IN.format("deviceUserName", *items) - assert str(_filter) == expected - - -def test_device_username_not_in_str_gives_correct_json_representation(): - items = ["deviceUserName1", "deviceUserName2", "deviceUserName3"] - _filter = DeviceUsername.not_in(items) - expected = NOT_IN.format("deviceUserName", *items) - assert str(_filter) == expected - - -def test_device_username_eq_unicode_str_gives_correct_json_representation(): - unicode_username = "您已经发现了秘密信息" - _filter = DeviceUsername.eq(unicode_username) - expected = IS.format( - "deviceUserName", - "\u60a8\u5df2\u7ecf\u53d1\u73b0\u4e86\u79d8\u5bc6\u4fe1\u606f", - ) - assert str(_filter) == expected - - -def test_os_hostname_exists_str_gives_correct_json_representation(): - _filter = OSHostname.exists() - expected = EXISTS.format("osHostName") - assert str(_filter) == expected - - -def test_os_hostname_not_exists_str_gives_correct_json_representation(): - _filter = OSHostname.not_exists() - expected = NOT_EXISTS.format("osHostName") - assert str(_filter) == expected - - -def test_os_hostname_eq_str_gives_correct_json_representation(): - _filter = OSHostname.eq("test_osHostName") - expected = IS.format("osHostName", "test_osHostName") - assert str(_filter) == expected - - -def test_os_hostname_not_eq_str_gives_correct_json_representation(): - _filter = OSHostname.not_eq("test_osHostName") - expected = IS_NOT.format("osHostName", "test_osHostName") - assert str(_filter) == expected - - -def test_os_hostname_is_in_str_gives_correct_json_representation(): - items = ["osHostName1", "osHostName2", "osHostName3"] - _filter = OSHostname.is_in(items) - expected = IS_IN.format("osHostName", *items) - assert str(_filter) == expected - - -def test_os_hostname_not_in_str_gives_correct_json_representation(): - items = ["osHostName1", "osHostName2", "osHostName3"] - _filter = OSHostname.not_in(items) - expected = NOT_IN.format("osHostName", *items) - assert str(_filter) == expected - - -def test_private_ip_exists_str_gives_correct_json_representation(): - _filter = PrivateIPAddress.exists() - expected = EXISTS.format("privateIpAddresses") - assert str(_filter) == expected - - -def test_private_ip_not_exists_str_gives_correct_json_representation(): - _filter = PrivateIPAddress.not_exists() - expected = NOT_EXISTS.format("privateIpAddresses") - assert str(_filter) == expected - - -def test_private_ip_address_eq_str_gives_correct_json_representation(): - _filter = PrivateIPAddress.eq("test_privateIp") - expected = IS.format("privateIpAddresses", "test_privateIp") - assert str(_filter) == expected - - -def test_private_ip_address_not_eq_str_gives_correct_json_representation(): - _filter = PrivateIPAddress.not_eq("test_privateIp") - expected = IS_NOT.format("privateIpAddresses", "test_privateIp") - assert str(_filter) == expected - - -def test_private_ip_address_is_in_str_gives_correct_json_representation(): - items = ["privateIp1", "privateIp2", "privateIp3"] - _filter = PrivateIPAddress.is_in(items) - expected = IS_IN.format("privateIpAddresses", *items) - assert str(_filter) == expected - - -def test_private_ip_address_not_in_str_gives_correct_json_representation(): - items = ["privateIp1", "privateIp2", "privateIp3"] - _filter = PrivateIPAddress.not_in(items) - expected = 
NOT_IN.format("privateIpAddresses", *items) - assert str(_filter) == expected - - -def test_public_ip_address_exists_str_gives_correct_json_representation(): - _filter = PublicIPAddress.exists() - expected = EXISTS.format("publicIpAddress") - assert str(_filter) == expected - - -def test_public_ip_address_not_exists_str_gives_correct_json_representation(): - _filter = PublicIPAddress.not_exists() - expected = NOT_EXISTS.format("publicIpAddress") - assert str(_filter) == expected - - -def test_public_ip_address_eq_str_gives_correct_json_representation(): - _filter = PublicIPAddress.eq("test_publicIp") - expected = IS.format("publicIpAddress", "test_publicIp") - assert str(_filter) == expected - - -def test_public_ip_address_not_eq_str_gives_correct_json_representation(): - _filter = PublicIPAddress.not_eq("test_publicIp") - expected = IS_NOT.format("publicIpAddress", "test_publicIp") - assert str(_filter) == expected - - -def test_public_ip_address_is_in_str_gives_correct_json_representation(): - items = ["publicIpAddress1", "publicIpAddress2", "publicIpAddress3"] - _filter = PublicIPAddress.is_in(items) - expected = IS_IN.format("publicIpAddress", *items) - assert str(_filter) == expected - - -def test_public_ip_address_not_in_str_gives_correct_json_representation(): - items = ["publicIpAddress1", "publicIpAddress2", "publicIpAddress3"] - _filter = PublicIPAddress.not_in(items) - expected = NOT_IN.format("publicIpAddress", *items) - assert str(_filter) == expected - - -@pytest.mark.parametrize( - "filter_criteria, test_filter", - [(DeviceSignedInUserName.eq, IS), (DeviceSignedInUserName.not_eq, IS_NOT)], -) -def test_equality_device_signed_in_username_gives_correct_json_representation( - filter_criteria, test_filter -): - _filter = filter_criteria("username") - expected = test_filter.format("operatingSystemUser", "username") - assert str(_filter) == expected - - -@pytest.mark.parametrize( - "filter_criteria, test_filter", - [(DeviceSignedInUserName.is_in, IS_IN), (DeviceSignedInUserName.not_in, NOT_IN)], -) -def test_multi_vlaue_device_signed_in_username_gives_correct_json_representation( - filter_criteria, test_filter -): - usernames = ["username1", "username2", "username3"] - _filter = filter_criteria(usernames) - expected = test_filter.format("operatingSystemUser", *usernames) - assert str(_filter) == expected diff --git a/tests/sdk/queries/fileevents/filters/test_email_filter.py b/tests/sdk/queries/fileevents/filters/test_email_filter.py deleted file mode 100644 index 11c4552be..000000000 --- a/tests/sdk/queries/fileevents/filters/test_email_filter.py +++ /dev/null @@ -1,140 +0,0 @@ -from tests.sdk.queries.conftest import IS -from tests.sdk.queries.conftest import IS_IN -from tests.sdk.queries.conftest import IS_NOT -from tests.sdk.queries.conftest import NOT_IN - -from py42.sdk.queries.fileevents.filters.email_filter import EmailFrom -from py42.sdk.queries.fileevents.filters.email_filter import EmailPolicyName -from py42.sdk.queries.fileevents.filters.email_filter import EmailRecipients -from py42.sdk.queries.fileevents.filters.email_filter import EmailSender -from py42.sdk.queries.fileevents.filters.email_filter import EmailSubject - - -def test_email_recipients_eq_str_gives_correct_json_representation(): - _filter = EmailRecipients.eq("test_recipient") - expected = IS.format("emailRecipients", "test_recipient") - assert str(_filter) == expected - - -def test_email_recipients_not_eq_str_gives_correct_json_representation(): - _filter = EmailRecipients.not_eq("test_recipient") - 
expected = IS_NOT.format("emailRecipients", "test_recipient") - assert str(_filter) == expected - - -def test_email_recipients_is_in_str_gives_correct_json_representation(): - items = ["EmailRecipients1", "EmailRecipients2", "EmailRecipients3"] - _filter = EmailRecipients.is_in(items) - expected = IS_IN.format("emailRecipients", *items) - assert str(_filter) == expected - - -def test_email_recipients_not_in_str_gives_correct_json_representation(): - items = ["EmailRecipients1", "EmailRecipients2", "EmailRecipients3"] - _filter = EmailRecipients.not_in(items) - expected = NOT_IN.format("emailRecipients", *items) - assert str(_filter) == expected - - -def test_email_sender_eq_str_gives_correct_json_representation(): - _filter = EmailSender.eq("test_category") - expected = IS.format("emailSender", "test_category") - assert str(_filter) == expected - - -def test_email_sender_not_eq_str_gives_correct_json_representation(): - _filter = EmailSender.not_eq("test_category") - expected = IS_NOT.format("emailSender", "test_category") - assert str(_filter) == expected - - -def test_email_sender_is_in_str_gives_correct_json_representation(): - items = ["email_sender1", "email_sender2", "email_sender3"] - _filter = EmailSender.is_in(items) - expected = IS_IN.format("emailSender", *items) - assert str(_filter) == expected - - -def test_email_sender_not_in_str_gives_correct_json_representation(): - items = ["email_sender1", "email_sender2", "email_sender3"] - _filter = EmailSender.not_in(items) - expected = NOT_IN.format("emailSender", *items) - assert str(_filter) == expected - - -def test_email_subject_eq_str_gives_correct_json_representation(): - _filter = EmailSubject.eq("test_subject") - expected = IS.format("emailSubject", "test_subject") - assert str(_filter) == expected - - -def test_email_subject_not_eq_str_gives_correct_json_representation(): - _filter = EmailSubject.not_eq("test_subject") - expected = IS_NOT.format("emailSubject", "test_subject") - assert str(_filter) == expected - - -def test_email_subject_is_in_str_gives_correct_json_representation(): - items = ["test_subject1", "test_subject2", "test_subject3"] - _filter = EmailSubject.is_in(items) - expected = IS_IN.format("emailSubject", *items) - assert str(_filter) == expected - - -def test_email_subject_not_in_str_gives_correct_json_representation(): - items = ["test_subject1", "test_subject2", "test_subject3"] - _filter = EmailSubject.not_in(items) - expected = NOT_IN.format("emailSubject", *items) - assert str(_filter) == expected - - -def test_email_policy_name_eq_str_gives_correct_json_representation(): - _filter = EmailPolicyName.eq("test_policy") - expected = IS.format("emailDlpPolicyNames", "test_policy") - assert str(_filter) == expected - - -def test_email_policy_name_not_eq_str_gives_correct_json_representation(): - _filter = EmailPolicyName.not_eq("test_policy") - expected = IS_NOT.format("emailDlpPolicyNames", "test_policy") - assert str(_filter) == expected - - -def test_email_policy_name_is_in_str_gives_correct_json_representation(): - items = ["test_policy1", "test_policy2", "test_policy3"] - _filter = EmailPolicyName.is_in(items) - expected = IS_IN.format("emailDlpPolicyNames", *items) - assert str(_filter) == expected - - -def test_email_policy_name_not_in_str_gives_correct_json_representation(): - items = ["test_policy1", "test_policy2", "test_policy3"] - _filter = EmailPolicyName.not_in(items) - expected = NOT_IN.format("emailDlpPolicyNames", *items) - assert str(_filter) == expected - - -def 
test_email_from_eq_str_gives_correct_json_representation(): - _filter = EmailFrom.eq("email_from") - expected = IS.format("emailFrom", "email_from") - assert str(_filter) == expected - - -def test_email_from_not_eq_str_gives_correct_json_representation(): - _filter = EmailFrom.not_eq("email_from") - expected = IS_NOT.format("emailFrom", "email_from") - assert str(_filter) == expected - - -def test_email_from_is_in_str_gives_correct_json_representation(): - items = ["email_from1", "email_from2", "email_from3"] - _filter = EmailFrom.is_in(items) - expected = IS_IN.format("emailFrom", *items) - assert str(_filter) == expected - - -def test_email_from_not_in_str_gives_correct_json_representation(): - items = ["email_from1", "email_from2", "email_from3"] - _filter = EmailFrom.not_in(items) - expected = NOT_IN.format("emailFrom", *items) - assert str(_filter) == expected diff --git a/tests/sdk/queries/fileevents/filters/test_event_filter.py b/tests/sdk/queries/fileevents/filters/test_event_filter.py deleted file mode 100644 index 726dc0a8c..000000000 --- a/tests/sdk/queries/fileevents/filters/test_event_filter.py +++ /dev/null @@ -1,278 +0,0 @@ -from datetime import datetime -from time import time - -import pytest -from tests.sdk.queries.conftest import EXISTS -from tests.sdk.queries.conftest import format_datetime -from tests.sdk.queries.conftest import format_timestamp -from tests.sdk.queries.conftest import IN_RANGE -from tests.sdk.queries.conftest import IS -from tests.sdk.queries.conftest import IS_IN -from tests.sdk.queries.conftest import IS_NOT -from tests.sdk.queries.conftest import NOT_EXISTS -from tests.sdk.queries.conftest import NOT_IN -from tests.sdk.queries.conftest import ON_OR_AFTER -from tests.sdk.queries.conftest import ON_OR_BEFORE -from tests.sdk.queries.conftest import WITHIN_THE_LAST - -from py42.sdk.queries.fileevents.filters.event_filter import EventTimestamp -from py42.sdk.queries.fileevents.filters.event_filter import EventType -from py42.sdk.queries.fileevents.filters.event_filter import InsertionTimestamp -from py42.sdk.queries.fileevents.filters.event_filter import MimeTypeMismatch -from py42.sdk.queries.fileevents.filters.event_filter import OutsideActiveHours -from py42.sdk.queries.fileevents.filters.event_filter import Source - - -def test_event_timestamp_filter_has_within_the_last(): - assert hasattr(EventTimestamp(), "within_the_last") - - -def test_insertion_timestamp_filter_has_within_the_last(): - assert hasattr(InsertionTimestamp(), "within_the_last") - - -def test_event_timestamp_on_or_after_str_gives_correct_json_representation(): - test_time = time() - formatted = format_timestamp(test_time) - _filter = EventTimestamp.on_or_after(test_time) - expected = ON_OR_AFTER.format("eventTimestamp", formatted) - assert str(_filter) == expected - - -def test_event_timestamp_on_or_before_str_gives_correct_json_representation(): - test_time = time() - formatted = format_timestamp(test_time) - _filter = EventTimestamp.on_or_before(test_time) - expected = ON_OR_BEFORE.format("eventTimestamp", formatted) - assert str(_filter) == expected - - -def test_event_timestamp_in_range_str_gives_correct_json_representation(): - test_before_time = time() - test_after_time = time() + 30 # make sure timestamps are actually different - formatted_before = format_timestamp(test_before_time) - formatted_after = format_timestamp(test_after_time) - _filter = EventTimestamp.in_range(test_before_time, test_after_time) - expected = IN_RANGE.format("eventTimestamp", formatted_before, 
formatted_after) - assert str(_filter) == expected - - -def test_event_timestamp_on_same_day_str_gives_correct_json_representation(): - test_time = time() - test_date = datetime.utcfromtimestamp(test_time) - start_time = datetime(test_date.year, test_date.month, test_date.day, 0, 0, 0) - end_time = datetime(test_date.year, test_date.month, test_date.day, 23, 59, 59) - formatted_before = format_datetime(start_time) - formatted_after = format_datetime(end_time) - - _filter = EventTimestamp.on_same_day(test_time) - expected = IN_RANGE.format("eventTimestamp", formatted_before, formatted_after) - assert str(_filter) == expected - - -def test_event_type_exists_str_gives_correct_json_representation(): - _filter = EventType.exists() - expected = EXISTS.format("eventType") - assert str(_filter) == expected - - -def test_event_type_not_exists_str_gives_correct_json_representation(): - _filter = EventType.not_exists() - expected = NOT_EXISTS.format("eventType") - assert str(_filter) == expected - - -def test_event_type_eq_str_gives_correct_json_representation(): - _filter = EventType.eq(EventType.MODIFIED) - expected = IS.format("eventType", "MODIFIED") - assert str(_filter) == expected - - -def test_event_type_not_eq_str_gives_correct_json_representation(): - _filter = EventType.not_eq(EventType.CREATED) - expected = IS_NOT.format("eventType", "CREATED") - assert str(_filter) == expected - - -def test_event_type_is_in_str_gives_correct_json_representation(): - items = [EventType.DELETED, EventType.EMAILED, EventType.PRINTED] - _filter = EventType.is_in(items) - expected = IS_IN.format("eventType", *items) - assert str(_filter) == expected - - -def test_event_type_not_in_str_gives_correct_json_representation(): - items = [EventType.CREATED, EventType.DELETED, EventType.MODIFIED] - _filter = EventType.not_in(items) - expected = NOT_IN.format("eventType", *items) - assert str(_filter) == expected - - -def test_insertion_timestamp_on_or_after_str_gives_correct_json_representation(): - test_time = time() - formatted = format_timestamp(test_time) - _filter = InsertionTimestamp.on_or_after(test_time) - expected = ON_OR_AFTER.format("insertionTimestamp", formatted) - - assert str(_filter) == expected - - -def test_insertion_timestamp_on_or_before_str_gives_correct_json_representation(): - test_time = time() - formatted = format_timestamp(test_time) - _filter = InsertionTimestamp.on_or_before(test_time) - expected = ON_OR_BEFORE.format("insertionTimestamp", formatted) - assert str(_filter) == expected - - -def test_insertion_timestamp_in_range_str_gives_correct_json_representation(): - test_before_time = time() - test_after_time = time() + 30 # make sure timestamps are actually different - formatted_before = format_timestamp(test_before_time) - formatted_after = format_timestamp(test_after_time) - _filter = InsertionTimestamp.in_range(test_before_time, test_after_time) - expected = IN_RANGE.format("insertionTimestamp", formatted_before, formatted_after) - assert str(_filter) == expected - - -def test_insertion_timestamp_on_same_day_str_gives_correct_json_representation(): - test_time = time() - test_date = datetime.utcfromtimestamp(test_time) - start_time = datetime(test_date.year, test_date.month, test_date.day, 0, 0, 0) - end_time = datetime(test_date.year, test_date.month, test_date.day, 23, 59, 59) - formatted_before = format_datetime(start_time) - formatted_after = format_datetime(end_time) - _filter = InsertionTimestamp.on_same_day(test_time) - expected = IN_RANGE.format("insertionTimestamp", 
formatted_before, formatted_after) - assert str(_filter) == expected - - -def test_source_exists_str_gives_correct_json_representation(): - _filter = Source.exists() - expected = EXISTS.format("source") - assert str(_filter) == expected - - -def test_source_not_exists_str_gives_correct_json_representation(): - _filter = Source.not_exists() - expected = NOT_EXISTS.format("source") - assert str(_filter) == expected - - -def test_source_eq_str_gives_correct_json_representation(): - _filter = Source.eq(Source.ENDPOINT) - expected = IS.format("source", "Endpoint") - assert str(_filter) == expected - - -def test_source_not_eq_str_gives_correct_json_representation(): - _filter = Source.not_eq(Source.BOX) - expected = IS_NOT.format("source", "Box") - assert str(_filter) == expected - - -def test_source_is_in_str_gives_correct_json_representation(): - items = [Source.GMAIL, Source.GOOGLE_DRIVE, Source.OFFICE_365] - _filter = Source.is_in(items) - expected = IS_IN.format("source", *items) - assert str(_filter) == expected - - -def test_source_not_in_str_gives_correct_json_representation(): - items = [Source.GMAIL, Source.GOOGLE_DRIVE, Source.OFFICE_365] - _filter = Source.not_in(items) - expected = NOT_IN.format("source", *items) - assert str(_filter) == expected - - -def test_event_timestamp_choices_returns_valid_set(): - choices = EventTimestamp.choices() - valid_set = { - "PT15M", - "PT1H", - "PT3H", - "PT12H", - "P1D", - "P3D", - "P7D", - "P14D", - "P30D", - } - assert set(choices) == valid_set - - -def test_event_type_choices_returns_valid_set(): - choices = EventType.choices() - valid_set = { - "CREATED", - "MODIFIED", - "DELETED", - "READ_BY_APP", - "EMAILED", - "PRINTED", - } - assert set(choices) == valid_set - - -def test_source_choices_returns_valid_set(): - choices = Source.choices() - valid_set = { - "Endpoint", - "GoogleDrive", - "OneDrive", - "Box", - "Gmail", - "Office365", - } - assert set(choices) == valid_set - - -def test_event_timestamp_gives_correct_json_representation(): - _filter = EventTimestamp.within_the_last(EventTimestamp.ONE_HOUR) - expected = WITHIN_THE_LAST.format("eventTimestamp", "PT1H") - assert str(_filter) == expected - - -@pytest.mark.parametrize( - "key, value", - [ - (EventTimestamp.FIFTEEN_MINUTES, "PT15M"), - (EventTimestamp.ONE_HOUR, "PT1H"), - (EventTimestamp.THREE_HOURS, "PT3H"), - (EventTimestamp.TWELVE_HOURS, "PT12H"), - (EventTimestamp.ONE_DAY, "P1D"), - (EventTimestamp.THREE_DAYS, "P3D"), - (EventTimestamp.SEVEN_DAYS, "P7D"), - (EventTimestamp.FOURTEEN_DAYS, "P14D"), - (EventTimestamp.THIRTY_DAYS, "P30D"), - ], -) -def test_all_event_timestamp_gives_correct_json_representation(key, value): - - _filter = EventTimestamp.within_the_last(key) - expected = WITHIN_THE_LAST.format("eventTimestamp", value) - assert str(_filter) == expected - - -def test_risk_indicator_mime_type_is_true_str_gives_correct_json_representation(): - _filter = MimeTypeMismatch.is_true() - expected = IS.format("mimeTypeMismatch", "TRUE") - assert str(_filter) == expected - - -def test_risk_indicator_mime_type_is_false_str_gives_correct_json_representation(): - _filter = MimeTypeMismatch.is_false() - expected = IS.format("mimeTypeMismatch", "FALSE") - assert str(_filter) == expected - - -def test_risk_indicator_active_hours_is_true_str_gives_correct_json_representation(): - _filter = OutsideActiveHours.is_true() - expected = IS.format("outsideActiveHours", "TRUE") - assert str(_filter) == expected - - -def 
test_risk_indicator_active_hours_is_false_str_gives_correct_json_representation(): - _filter = OutsideActiveHours.is_false() - expected = IS.format("outsideActiveHours", "FALSE") - assert str(_filter) == expected diff --git a/tests/sdk/queries/fileevents/filters/test_exposure_filter.py b/tests/sdk/queries/fileevents/filters/test_exposure_filter.py deleted file mode 100644 index 1ee6b9975..000000000 --- a/tests/sdk/queries/fileevents/filters/test_exposure_filter.py +++ /dev/null @@ -1,664 +0,0 @@ -from tests.sdk.queries.conftest import EXISTS -from tests.sdk.queries.conftest import IS -from tests.sdk.queries.conftest import IS_IN -from tests.sdk.queries.conftest import IS_NOT -from tests.sdk.queries.conftest import NOT_EXISTS -from tests.sdk.queries.conftest import NOT_IN - -from py42.sdk.queries.fileevents.filters.exposure_filter import DestinationCategory -from py42.sdk.queries.fileevents.filters.exposure_filter import DestinationName -from py42.sdk.queries.fileevents.filters.exposure_filter import ExposureType -from py42.sdk.queries.fileevents.filters.exposure_filter import ProcessName -from py42.sdk.queries.fileevents.filters.exposure_filter import ProcessOwner -from py42.sdk.queries.fileevents.filters.exposure_filter import RemovableMediaMediaName -from py42.sdk.queries.fileevents.filters.exposure_filter import RemovableMediaName -from py42.sdk.queries.fileevents.filters.exposure_filter import ( - RemovableMediaPartitionID, -) -from py42.sdk.queries.fileevents.filters.exposure_filter import ( - RemovableMediaSerialNumber, -) -from py42.sdk.queries.fileevents.filters.exposure_filter import RemovableMediaVendor -from py42.sdk.queries.fileevents.filters.exposure_filter import RemovableMediaVolumeName -from py42.sdk.queries.fileevents.filters.exposure_filter import SyncDestination -from py42.sdk.queries.fileevents.filters.exposure_filter import SyncDestinationUsername -from py42.sdk.queries.fileevents.filters.exposure_filter import TabURL -from py42.sdk.queries.fileevents.filters.exposure_filter import WindowTitle - - -def test_exposure_type_exists_str_gives_correct_json_representation(): - _filter = ExposureType.exists() - expected = EXISTS.format("exposure") - assert str(_filter) == expected - - -def test_exposure_type_not_exists_str_gives_correct_json_representation(): - _filter = ExposureType.not_exists() - expected = NOT_EXISTS.format("exposure") - assert str(_filter) == expected - - -def test_exposure_type_eq_str_gives_correct_json_representation(): - _filter = ExposureType.eq(ExposureType.APPLICATION_READ) - expected = IS.format("exposure", "ApplicationRead") - assert str(_filter) == expected - - -def test_exposure_type_not_eq_str_gives_correct_json_representation(): - _filter = ExposureType.not_eq(ExposureType.IS_PUBLIC) - expected = IS_NOT.format("exposure", "IsPublic") - assert str(_filter) == expected - - -def test_exposure_type_is_in_str_gives_correct_json_representation(): - items = [ - ExposureType.CLOUD_STORAGE, - ExposureType.OUTSIDE_TRUSTED_DOMAINS, - ExposureType.SHARED_TO_DOMAIN, - ] - _filter = ExposureType.is_in(items) - expected = IS_IN.format("exposure", *items) - assert str(_filter) == expected - - -def test_exposure_type_not_in_str_gives_correct_json_representation(): - items = [ - ExposureType.CLOUD_STORAGE, - ExposureType.OUTSIDE_TRUSTED_DOMAINS, - ExposureType.SHARED_TO_DOMAIN, - ] - _filter = ExposureType.not_in(items) - expected = NOT_IN.format("exposure", *items) - assert str(_filter) == expected - - -def 
test_process_name_exists_str_gives_correct_json_representation(): - _filter = ProcessName.exists() - expected = EXISTS.format("processName") - assert str(_filter) == expected - - -def test_process_name_not_exists_str_gives_correct_json_representation(): - _filter = ProcessName.not_exists() - expected = NOT_EXISTS.format("processName") - assert str(_filter) == expected - - -def test_process_name_eq_str_gives_correct_json_representation(): - _filter = ProcessName.eq("test_name") - expected = IS.format("processName", "test_name") - assert str(_filter) == expected - - -def test_process_name_not_eq_str_gives_correct_json_representation(): - _filter = ProcessName.not_eq("test_name") - expected = IS_NOT.format("processName", "test_name") - assert str(_filter) == expected - - -def test_process_name_is_in_str_gives_correct_json_representation(): - items = ["n1", "n2", "n3"] - _filter = ProcessName.is_in(items) - expected = IS_IN.format("processName", *items) - assert str(_filter) == expected - - -def test_process_name_not_in_str_gives_correct_json_representation(): - items = ["n1", "n2", "n3"] - _filter = ProcessName.not_in(items) - expected = NOT_IN.format("processName", *items) - assert str(_filter) == expected - - -def test_process_owner_exists_str_gives_correct_json_representation(): - _filter = ProcessOwner.exists() - expected = EXISTS.format("processOwner") - assert str(_filter) == expected - - -def test_process_owner_not_exists_str_gives_correct_json_representation(): - _filter = ProcessOwner.not_exists() - expected = NOT_EXISTS.format("processOwner") - assert str(_filter) == expected - - -def test_process_owner_eq_str_gives_correct_json_representation(): - _filter = ProcessOwner.eq("test_owner") - expected = IS.format("processOwner", "test_owner") - assert str(_filter) == expected - - -def test_process_owner_not_eq_str_gives_correct_json_representation(): - _filter = ProcessOwner.not_eq("test_owner") - expected = IS_NOT.format("processOwner", "test_owner") - assert str(_filter) == expected - - -def test_process_owner_is_in_str_gives_correct_json_representation(): - items = ["owner1", "owner2", "owner3"] - _filter = ProcessOwner.is_in(items) - expected = IS_IN.format("processOwner", *items) - assert str(_filter) == expected - - -def test_process_owner_not_in_str_gives_correct_json_representation(): - items = ["owner1", "owner2", "owner3"] - _filter = ProcessOwner.not_in(items) - expected = NOT_IN.format("processOwner", *items) - assert str(_filter) == expected - - -def test_removable_media_name_exists_str_gives_correct_json_representation(): - _filter = RemovableMediaName.exists() - expected = EXISTS.format("removableMediaName") - assert str(_filter) == expected - - -def test_removable_media_name_not_exists_str_gives_correct_json_representation(): - _filter = RemovableMediaName.not_exists() - expected = NOT_EXISTS.format("removableMediaName") - assert str(_filter) == expected - - -def test_removable_media_name_eq_str_gives_correct_json_representation(): - _filter = RemovableMediaName.eq("test_name") - expected = IS.format("removableMediaName", "test_name") - assert str(_filter) == expected - - -def test_removable_media_name_not_eq_str_gives_correct_json_representation(): - _filter = RemovableMediaName.not_eq("test_name") - expected = IS_NOT.format("removableMediaName", "test_name") - assert str(_filter) == expected - - -def test_removable_media_name_is_in_str_gives_correct_json_representation(): - items = ["name1", "name2", "name3"] - _filter = RemovableMediaName.is_in(items) - expected = 
IS_IN.format("removableMediaName", *items) - assert str(_filter) == expected - - -def test_removable_media_name_not_in_str_gives_correct_json_representation(): - items = ["name1", "name2", "name3"] - _filter = RemovableMediaName.not_in(items) - expected = NOT_IN.format("removableMediaName", *items) - assert str(_filter) == expected - - -def test_removable_media_vendor_exists_str_gives_correct_json_representation(): - _filter = RemovableMediaVendor.exists() - expected = EXISTS.format("removableMediaVendor") - assert str(_filter) == expected - - -def test_removable_media_vendor_not_exists_str_gives_correct_json_representation(): - _filter = RemovableMediaVendor.not_exists() - expected = NOT_EXISTS.format("removableMediaVendor") - assert str(_filter) == expected - - -def test_removable_media_vendor_eq_str_gives_correct_json_representation(): - _filter = RemovableMediaVendor.eq("test_name") - expected = IS.format("removableMediaVendor", "test_name") - assert str(_filter) == expected - - -def test_removable_media_vendor_not_eq_str_gives_correct_json_representation(): - _filter = RemovableMediaVendor.not_eq("test_name") - expected = IS_NOT.format("removableMediaVendor", "test_name") - assert str(_filter) == expected - - -def test_removable_media_vendor_is_in_str_gives_correct_json_representation(): - items = ["name1", "name2", "name3"] - _filter = RemovableMediaVendor.is_in(items) - expected = IS_IN.format("removableMediaVendor", *items) - assert str(_filter) == expected - - -def test_removable_media_vendor_not_in_str_gives_correct_json_representation(): - items = ["name1", "name2", "name3"] - _filter = RemovableMediaVendor.not_in(items) - expected = NOT_IN.format("removableMediaVendor", *items) - assert str(_filter) == expected - - -def test_removable_media_medianame_exists_str_gives_correct_json_representation(): - _filter = RemovableMediaMediaName.exists() - expected = EXISTS.format("removableMediaMediaName") - assert str(_filter) == expected - - -def test_removable_media_medianame_not_exists_str_gives_correct_json_representation(): - _filter = RemovableMediaMediaName.not_exists() - expected = NOT_EXISTS.format("removableMediaMediaName") - assert str(_filter) == expected - - -def test_removable_media_medianame_eq_str_gives_correct_json_representation(): - _filter = RemovableMediaMediaName.eq("test_name") - expected = IS.format("removableMediaMediaName", "test_name") - assert str(_filter) == expected - - -def test_removable_media_medianame_not_eq_str_gives_correct_json_representation(): - _filter = RemovableMediaMediaName.not_eq("test_name") - expected = IS_NOT.format("removableMediaMediaName", "test_name") - assert str(_filter) == expected - - -def test_removable_media_medianame_is_in_str_gives_correct_json_representation(): - items = ["name1", "name2", "name3"] - _filter = RemovableMediaMediaName.is_in(items) - expected = IS_IN.format("removableMediaMediaName", *items) - assert str(_filter) == expected - - -def test_removable_media_medianame_not_in_str_gives_correct_json_representation(): - items = ["name1", "name2", "name3"] - _filter = RemovableMediaMediaName.not_in(items) - expected = NOT_IN.format("removableMediaMediaName", *items) - assert str(_filter) == expected - - -def test_removable_media_volume_name_exists_str_gives_correct_json_representation(): - _filter = RemovableMediaVolumeName.exists() - expected = EXISTS.format("removableMediaVolumeName") - assert str(_filter) == expected - - -def test_removable_media_volume_name_not_exists_str_gives_correct_json_representation(): - _filter 
= RemovableMediaVolumeName.not_exists() - expected = NOT_EXISTS.format("removableMediaVolumeName") - assert str(_filter) == expected - - -def test_removable_media_volume_name_eq_str_gives_correct_json_representation(): - _filter = RemovableMediaVolumeName.eq("test_name") - expected = IS.format("removableMediaVolumeName", "test_name") - assert str(_filter) == expected - - -def test_removable_media_volume_name_not_eq_str_gives_correct_json_representation(): - _filter = RemovableMediaVolumeName.not_eq("test_name") - expected = IS_NOT.format("removableMediaVolumeName", "test_name") - assert str(_filter) == expected - - -def test_removable_media_volume_name_is_in_str_gives_correct_json_representation(): - items = ["name1", "name2", "name3"] - _filter = RemovableMediaVolumeName.is_in(items) - expected = IS_IN.format("removableMediaVolumeName", *items) - assert str(_filter) == expected - - -def test_removable_media_volume_name_not_in_str_gives_correct_json_representation(): - items = ["name1", "name2", "name3"] - _filter = RemovableMediaVolumeName.not_in(items) - expected = NOT_IN.format("removableMediaVolumeName", *items) - assert str(_filter) == expected - - -def test_removable_media_partition_id_exists_str_gives_correct_json_representation(): - _filter = RemovableMediaPartitionID.exists() - expected = EXISTS.format("removableMediaPartitionId") - assert str(_filter) == expected - - -def test_removable_media_partition_id_not_exists_str_gives_correct_json_representation(): - _filter = RemovableMediaPartitionID.not_exists() - expected = NOT_EXISTS.format("removableMediaPartitionId") - assert str(_filter) == expected - - -def test_removable_media_partition_id_eq_str_gives_correct_json_representation(): - _filter = RemovableMediaPartitionID.eq("test_name") - expected = IS.format("removableMediaPartitionId", "test_name") - assert str(_filter) == expected - - -def test_removable_media_partition_id_not_eq_str_gives_correct_json_representation(): - _filter = RemovableMediaPartitionID.not_eq("test_name") - expected = IS_NOT.format("removableMediaPartitionId", "test_name") - assert str(_filter) == expected - - -def test_removable_media_partition_id_is_in_str_gives_correct_json_representation(): - items = ["name1", "name2", "name3"] - _filter = RemovableMediaPartitionID.is_in(items) - expected = IS_IN.format("removableMediaPartitionId", *items) - assert str(_filter) == expected - - -def test_removable_media_partition_id_not_in_str_gives_correct_json_representation(): - items = ["name1", "name2", "name3"] - _filter = RemovableMediaPartitionID.not_in(items) - expected = NOT_IN.format("removableMediaPartitionId", *items) - assert str(_filter) == expected - - -def test_removable_media_serial_number_exists_str_gives_correct_json_representation(): - _filter = RemovableMediaSerialNumber.exists() - expected = EXISTS.format("removableMediaSerialNumber") - assert str(_filter) == expected - - -def test_removable_media_serial_number_not_exists_str_gives_correct_json_representation(): - _filter = RemovableMediaSerialNumber.not_exists() - expected = NOT_EXISTS.format("removableMediaSerialNumber") - assert str(_filter) == expected - - -def test_removable_media_serial_number_eq_str_gives_correct_json_representation(): - _filter = RemovableMediaSerialNumber.eq("test_name") - expected = IS.format("removableMediaSerialNumber", "test_name") - assert str(_filter) == expected - - -def test_removable_media_serial_number_not_eq_str_gives_correct_json_representation(): - _filter = RemovableMediaSerialNumber.not_eq("test_name") - 
expected = IS_NOT.format("removableMediaSerialNumber", "test_name") - assert str(_filter) == expected - - -def test_removable_media_serial_number_is_in_str_gives_correct_json_representation(): - items = ["name1", "name2", "name3"] - _filter = RemovableMediaSerialNumber.is_in(items) - expected = IS_IN.format("removableMediaSerialNumber", *items) - assert str(_filter) == expected - - -def test_removable_media_serial_number_not_in_str_gives_correct_json_representation(): - items = ["name1", "name2", "name3"] - _filter = RemovableMediaSerialNumber.not_in(items) - expected = NOT_IN.format("removableMediaSerialNumber", *items) - assert str(_filter) == expected - - -def test_sync_destination_name_exists_str_gives_correct_json_representation(): - _filter = SyncDestination.exists() - expected = EXISTS.format("syncDestination") - assert str(_filter) == expected - - -def test_sync_destination_name_not_exists_str_gives_correct_json_representation(): - _filter = SyncDestination.not_exists() - expected = NOT_EXISTS.format("syncDestination") - assert str(_filter) == expected - - -def test_sync_destination_name_eq_str_gives_correct_json_representation(): - _filter = SyncDestination.eq(SyncDestination.GOOGLE_DRIVE) - expected = IS.format("syncDestination", SyncDestination.GOOGLE_DRIVE) - assert str(_filter) == expected - - -def test_sync_destination_name_not_eq_str_gives_correct_json_representation(): - _filter = SyncDestination.not_eq(SyncDestination.GOOGLE_DRIVE) - expected = IS_NOT.format("syncDestination", SyncDestination.GOOGLE_DRIVE) - assert str(_filter) == expected - - -def test_sync_destination_name_is_in_str_gives_correct_json_representation(): - items = [SyncDestination.GOOGLE_DRIVE, SyncDestination.BOX, SyncDestination.ICLOUD] - _filter = SyncDestination.is_in(items) - expected = IS_IN.format("syncDestination", *sorted(items)) - assert str(_filter) == expected - - -def test_sync_destination_name_not_in_str_gives_correct_json_representation(): - items = [SyncDestination.GOOGLE_DRIVE, SyncDestination.BOX, SyncDestination.ICLOUD] - _filter = SyncDestination.not_in(items) - expected = NOT_IN.format("syncDestination", *sorted(items)) - assert str(_filter) == expected - - -def test_sync_destination_username_exists_str_gives_correct_json_representation(): - _filter = SyncDestinationUsername.exists() - expected = EXISTS.format("syncDestinationUsername") - assert str(_filter) == expected - - -def test_sync_destination_username_not_exists_str_gives_correct_json_representation(): - _filter = SyncDestinationUsername.not_exists() - expected = NOT_EXISTS.format("syncDestinationUsername") - assert str(_filter) == expected - - -def test_sync_destination_username_eq_str_gives_correct_json_representation(): - _filter = SyncDestinationUsername.eq("test_user@example.com") - expected = IS.format("syncDestinationUsername", "test_user@example.com") - assert str(_filter) == expected - - -def test_sync_destination_username_not_eq_str_gives_correct_json_representation(): - _filter = SyncDestinationUsername.not_eq("test_user@example.com") - expected = IS_NOT.format("syncDestinationUsername", "test_user@example.com") - assert str(_filter) == expected - - -def test_sync_destination_username_is_in_str_gives_correct_json_representation(): - items = ["*@example2.com", "user1@example.com", "user2@example.com"] - _filter = SyncDestinationUsername.is_in(items) - expected = IS_IN.format("syncDestinationUsername", *items) - assert str(_filter) == expected - - -def 
test_sync_destination_username_not_in_str_gives_correct_json_representation(): - items = ["*@example2.com", "user1@example.com", "user2@example.com"] - _filter = SyncDestinationUsername.not_in(items) - expected = NOT_IN.format("syncDestinationUsername", *items) - assert str(_filter) == expected - - -def test_tab_url_exists_str_gives_correct_json_representation(): - _filter = TabURL.exists() - expected = EXISTS.format("tabUrls") - assert str(_filter) == expected - - -def test_tab_url_not_exists_str_gives_correct_json_representation(): - _filter = TabURL.not_exists() - expected = NOT_EXISTS.format("tabUrls") - assert str(_filter) == expected - - -def test_tab_url_eq_str_gives_correct_json_representation(): - _filter = TabURL.eq("test_tab_url") - expected = IS.format("tabUrls", "test_tab_url") - assert str(_filter) == expected - - -def test_tab_url_not_eq_str_gives_correct_json_representation(): - _filter = TabURL.not_eq("test_tab_url") - expected = IS_NOT.format("tabUrls", "test_tab_url") - assert str(_filter) == expected - - -def test_tab_url_is_in_str_gives_correct_json_representation(): - items = ["tab1", "tab2", "tab3"] - _filter = TabURL.is_in(items) - expected = IS_IN.format("tabUrls", *items) - assert str(_filter) == expected - - -def test_tab_url_not_in_str_gives_correct_json_representation(): - items = ["tab1", "tab2", "tab3"] - _filter = TabURL.not_in(items) - expected = NOT_IN.format("tabUrls", *items) - assert str(_filter) == expected - - -def test_window_title_exists_str_gives_correct_json_representation(): - _filter = WindowTitle.exists() - expected = EXISTS.format("tabTitles") - assert str(_filter) == expected - - -def test_window_title_not_exists_str_gives_correct_json_representation(): - _filter = WindowTitle.not_exists() - expected = NOT_EXISTS.format("tabTitles") - assert str(_filter) == expected - - -def test_window_title_eq_str_gives_correct_json_representation(): - _filter = WindowTitle.eq("test_window") - expected = IS.format("tabTitles", "test_window") - assert str(_filter) == expected - - -def test_window_title_not_eq_str_gives_correct_json_representation(): - _filter = WindowTitle.not_eq("test_window") - expected = IS_NOT.format("tabTitles", "test_window") - assert str(_filter) == expected - - -def test_window_title_is_in_str_gives_correct_json_representation(): - items = ["window1", "window2", "window3"] - _filter = WindowTitle.is_in(items) - expected = IS_IN.format("tabTitles", *items) - assert str(_filter) == expected - - -def test_window_title_not_in_str_gives_correct_json_representation(): - items = ["window1", "window2", "window3"] - _filter = WindowTitle.not_in(items) - expected = NOT_IN.format("tabTitles", *items) - assert str(_filter) == expected - - -def test_exposure_type_choices_returns_valid_attributes(): - choices = ExposureType.choices() - valid_set = { - "SharedViaLink", - "SharedToDomain", - "ApplicationRead", - "CloudStorage", - "RemovableMedia", - "IsPublic", - "OutsideTrustedDomains", - } - assert set(choices) == valid_set - - -def test_sync_destination_choices_returns_valid_attributes(): - choices = SyncDestination.choices() - valid_set = { - "ICloud", - "Box", - "BoxDrive", - "GoogleDrive", - "GoogleBackupAndSync", - "Dropbox", - "OneDrive", - } - assert set(choices) == valid_set - - -def test_destination_category_exists_str_gives_correct_json_representation(): - _filter = DestinationCategory.exists() - expected = EXISTS.format("destinationCategory") - assert str(_filter) == expected - - -def 
test_destination_category_not_exists_str_gives_correct_json_representation(): - _filter = DestinationCategory.not_exists() - expected = NOT_EXISTS.format("destinationCategory") - assert str(_filter) == expected - - -def test_destination_category_eq_str_gives_correct_json_representation(): - _filter = DestinationCategory.eq(DestinationCategory.CLOUD_STORAGE) - expected = IS.format("destinationCategory", "Cloud Storage") - assert str(_filter) == expected - - -def test_destination_category_not_eq_str_gives_correct_json_representation(): - _filter = DestinationCategory.not_eq(DestinationCategory.MESSAGING) - expected = IS_NOT.format("destinationCategory", "Messaging") - assert str(_filter) == expected - - -def test_destination_category_is_in_str_gives_correct_json_representation(): - items = [ - DestinationCategory.CLOUD_STORAGE, - DestinationCategory.MESSAGING, - DestinationCategory.UNCATEGORIZED, - ] - _filter = DestinationCategory.is_in(items) - expected = IS_IN.format("destinationCategory", *items) - assert str(_filter) == expected - - -def test_destination_category_not_in_str_gives_correct_json_representation(): - items = [ - DestinationCategory.CLOUD_STORAGE, - DestinationCategory.MESSAGING, - DestinationCategory.UNCATEGORIZED, - ] - _filter = DestinationCategory.not_in(items) - expected = NOT_IN.format("destinationCategory", *items) - assert str(_filter) == expected - - -def test_destination_category_choices_returns_valid_attributes(): - choices = DestinationCategory.choices() - valid_set = { - "Cloud Storage", - "Device", - "Email", - "Messaging", - "Multiple Possibilities", - "Social Media", - "Source Code Repository", - "Uncategorized", - "Unknown", - } - assert set(choices) == valid_set - - -def test_destination_name_exists_str_gives_correct_json_representation(): - _filter = DestinationName.exists() - expected = EXISTS.format("destinationName") - assert str(_filter) == expected - - -def test_destination_name_not_exists_str_gives_correct_json_representation(): - _filter = DestinationName.not_exists() - expected = NOT_EXISTS.format("destinationName") - assert str(_filter) == expected - - -def test_destination_name_eq_str_gives_correct_json_representation(): - _filter = DestinationName.eq("Dropbox") - expected = IS.format("destinationName", "Dropbox") - assert str(_filter) == expected - - -def test_destination_name_not_eq_str_gives_correct_json_representation(): - _filter = DestinationName.not_eq("Dropbox") - expected = IS_NOT.format("destinationName", "Dropbox") - assert str(_filter) == expected - - -def test_destination_name_is_in_str_gives_correct_json_representation(): - items = [ - "Dropbox", - "Reddit", - "Windows 10", - ] - _filter = DestinationName.is_in(items) - expected = IS_IN.format("destinationName", *items) - assert str(_filter) == expected - - -def test_destination_name_not_in_str_gives_correct_json_representation(): - items = [ - "Dropbox", - "Reddit", - "Windows 10", - ] - _filter = DestinationName.not_in(items) - expected = NOT_IN.format("destinationName", *items) - assert str(_filter) == expected diff --git a/tests/sdk/queries/fileevents/filters/test_file_event_filter.py b/tests/sdk/queries/fileevents/filters/test_file_event_filter.py deleted file mode 100644 index 3549aeb64..000000000 --- a/tests/sdk/queries/fileevents/filters/test_file_event_filter.py +++ /dev/null @@ -1,21 +0,0 @@ -from py42.sdk.queries.fileevents.util import create_exists_filter_group -from py42.sdk.queries.fileevents.util import create_not_exists_filter_group - - -def 
test_create_exists_filter_returns_filter_group_with_correct_json_representation(): - term = "test_eq_term" - _group = create_exists_filter_group(term) - assert ( - str(_group) == '{"filterClause":"AND", "filters":[{"operator":"EXISTS", ' - '"term":"test_eq_term", "value":null}]}' - ) - - -def test_create_not_exists_filter_returns_filter_group_with_correct_json_representation(): - term = "test_is_in_term" - _group = create_not_exists_filter_group(term) - assert ( - str(_group) - == '{"filterClause":"AND", "filters":[{"operator":"DOES_NOT_EXIST", ' - '"term":"test_is_in_term", "value":null}]}' - ) diff --git a/tests/sdk/queries/fileevents/filters/test_file_filter.py b/tests/sdk/queries/fileevents/filters/test_file_filter.py deleted file mode 100644 index 9a3532967..000000000 --- a/tests/sdk/queries/fileevents/filters/test_file_filter.py +++ /dev/null @@ -1,287 +0,0 @@ -from tests.sdk.queries.conftest import EXISTS -from tests.sdk.queries.conftest import GREATER_THAN -from tests.sdk.queries.conftest import IS -from tests.sdk.queries.conftest import IS_IN -from tests.sdk.queries.conftest import IS_NOT -from tests.sdk.queries.conftest import LESS_THAN -from tests.sdk.queries.conftest import NOT_EXISTS -from tests.sdk.queries.conftest import NOT_IN - -from py42.sdk.queries.fileevents.filters.file_filter import FileCategory -from py42.sdk.queries.fileevents.filters.file_filter import FileName -from py42.sdk.queries.fileevents.filters.file_filter import FileOwner -from py42.sdk.queries.fileevents.filters.file_filter import FilePath -from py42.sdk.queries.fileevents.filters.file_filter import FileSize -from py42.sdk.queries.fileevents.filters.file_filter import MD5 -from py42.sdk.queries.fileevents.filters.file_filter import SHA256 - - -def test_file_category_eq_str_gives_correct_json_representation(): - _filter = FileCategory.eq(FileCategory.AUDIO) - expected = IS.format("fileCategory", "Audio") - assert str(_filter) == expected - - -def test_file_category_not_eq_str_gives_correct_json_representation(): - _filter = FileCategory.not_eq(FileCategory.DOCUMENT) - expected = IS_NOT.format("fileCategory", "Document") - assert str(_filter) == expected - - -def test_file_category_is_in_str_gives_correct_json_representation(): - items = [FileCategory.EXECUTABLE, FileCategory.IMAGE, FileCategory.PDF] - _filter = FileCategory.is_in(items) - expected = IS_IN.format("fileCategory", *items) - assert str(_filter) == expected - - -def test_file_category_not_in_str_gives_correct_json_representation(): - items = [FileCategory.EXECUTABLE, FileCategory.IMAGE, FileCategory.PDF] - _filter = FileCategory.not_in(items) - expected = NOT_IN.format("fileCategory", *items) - assert str(_filter) == expected - - -def test_file_name_exists_str_gives_correct_json_representation(): - _filter = FileName.exists() - expected = EXISTS.format("fileName") - assert str(_filter) == expected - - -def test_file_name_not_exists_str_gives_correct_json_representation(): - _filter = FileName.not_exists() - expected = NOT_EXISTS.format("fileName") - assert str(_filter) == expected - - -def test_file_name_eq_str_gives_correct_json_representation(): - _filter = FileName.eq("test_fileName") - expected = IS.format("fileName", "test_fileName") - assert str(_filter) == expected - - -def test_file_name_not_eq_str_gives_correct_json_representation(): - _filter = FileName.not_eq("test_fileName") - expected = IS_NOT.format("fileName", "test_fileName") - assert str(_filter) == expected - - -def test_file_name_is_in_str_gives_correct_json_representation(): 
- items = ["fileName1", "fileName2", "fileName3"] - _filter = FileName.is_in(items) - expected = IS_IN.format("fileName", *sorted(items)) - assert str(_filter) == expected - - -def test_file_name_not_in_str_gives_correct_json_representation(): - items = ["fileName1", "fileName2", "fileName3"] - _filter = FileName.not_in(items) - expected = NOT_IN.format("fileName", *sorted(items)) - assert str(_filter) == expected - - -def test_file_owner_exists_str_gives_correct_json_representation(): - _filter = FileOwner.exists() - expected = EXISTS.format("fileOwner") - assert str(_filter) == expected - - -def test_file_owner_not_exists_str_gives_correct_json_representation(): - _filter = FileOwner.not_exists() - expected = NOT_EXISTS.format("fileOwner") - assert str(_filter) == expected - - -def test_file_owner_eq_str_gives_correct_json_representation(): - _filter = FileOwner.eq("test_fileName") - expected = IS.format("fileOwner", "test_fileName") - assert str(_filter) == expected - - -def test_file_owner_not_eq_str_gives_correct_json_representation(): - _filter = FileOwner.not_eq("test_fileName") - expected = IS_NOT.format("fileOwner", "test_fileName") - assert str(_filter) == expected - - -def test_file_owner_is_in_str_gives_correct_json_representation(): - items = ["fileOwner1", "fileOwner2", "fileOwner3"] - _filter = FileOwner.is_in(items) - expected = IS_IN.format("fileOwner", *sorted(items)) - assert str(_filter) == expected - - -def test_file_owner_not_in_str_gives_correct_json_representation(): - items = ["fileOwner1", "fileOwner2", "fileOwner3"] - _filter = FileOwner.not_in(items) - expected = NOT_IN.format("fileOwner", *sorted(items)) - assert str(_filter) == expected - - -def test_file_path_exists_str_gives_correct_json_representation(): - _filter = FilePath.exists() - expected = EXISTS.format("filePath") - assert str(_filter) == expected - - -def test_file_path_not_exists_str_gives_correct_json_representation(): - _filter = FilePath.not_exists() - expected = NOT_EXISTS.format("filePath") - assert str(_filter) == expected - - -def test_file_path_eq_str_gives_correct_json_representation(): - _filter = FilePath.eq("test_filePath") - expected = IS.format("filePath", "test_filePath") - assert str(_filter) == expected - - -def test_file_path_not_eq_str_gives_correct_json_representation(): - _filter = FilePath.not_eq("test_filePath") - expected = IS_NOT.format("filePath", "test_filePath") - assert str(_filter) == expected - - -def test_file_path_is_in_str_gives_correct_json_representation(): - items = ["filePath1", "filePath2", "filePath3"] - _filter = FilePath.is_in(items) - expected = IS_IN.format("filePath", *sorted(items)) - assert str(_filter) == expected - - -def test_file_path_not_in_str_gives_correct_json_representation(): - items = ["filePath1", "filePath2", "filePath3"] - _filter = FilePath.not_in(items) - expected = NOT_IN.format("filePath", *sorted(items)) - assert str(_filter) == expected - - -def test_file_size_greater_than_str_gives_correct_json_representation(): - _filter = FileSize.greater_than("2048") - expected = GREATER_THAN.format("fileSize", "2048") - assert str(_filter) == expected - - -def test_file_size_greater_than_int_gives_correct_json_representation(): - _filter = FileSize.greater_than(2048) - expected = GREATER_THAN.format("fileSize", "2048") - assert str(_filter) == expected - - -def test_file_size_greater_than_float_gives_correct_json_representation(): - _filter = FileSize.greater_than(2048.10) - expected = GREATER_THAN.format("fileSize", "2048") - assert 
str(_filter) == expected - - -def test_file_size_less_than_str_gives_correct_json_representation(): - _filter = FileSize.less_than("2048") - expected = LESS_THAN.format("fileSize", "2048") - assert str(_filter) == expected - - -def test_file_size_less_than_int_gives_correct_json_representation(): - _filter = FileSize.less_than(2048) - expected = LESS_THAN.format("fileSize", "2048") - assert str(_filter) == expected - - -def test_file_size_less_than_float_gives_correct_json_representation(): - _filter = FileSize.less_than(2048.10) - expected = LESS_THAN.format("fileSize", "2048") - assert str(_filter) == expected - - -def test_md5_exists_str_gives_correct_json_representation(): - _filter = MD5.exists() - expected = EXISTS.format("md5Checksum") - assert str(_filter) == expected - - -def test_md5_not_exists_str_gives_correct_json_representation(): - _filter = MD5.not_exists() - expected = NOT_EXISTS.format("md5Checksum") - assert str(_filter) == expected - - -def test_md5_eq_str_gives_correct_json_representation(): - _filter = MD5.eq("test_md5") - expected = IS.format("md5Checksum", "test_md5") - assert str(_filter) == expected - - -def test_md5_not_eq_str_gives_correct_json_representation(): - _filter = MD5.not_eq("test_md5") - expected = IS_NOT.format("md5Checksum", "test_md5") - assert str(_filter) == expected - - -def test_md5_is_in_str_gives_correct_json_representation(): - items = ["md51", "md52", "md53"] - _filter = MD5.is_in(items) - expected = IS_IN.format("md5Checksum", *sorted(items)) - assert str(_filter) == expected - - -def test_md5_not_in_str_gives_correct_json_representation(): - items = ["md51", "md52", "md53"] - _filter = MD5.not_in(items) - expected = NOT_IN.format("md5Checksum", *sorted(items)) - assert str(_filter) == expected - - -def test_sha256_exists_str_gives_correct_json_representation(): - _filter = SHA256.exists() - expected = EXISTS.format("sha256Checksum") - assert str(_filter) == expected - - -def test_sha256_not_exists_str_gives_correct_json_representation(): - _filter = SHA256.not_exists() - expected = NOT_EXISTS.format("sha256Checksum") - assert str(_filter) == expected - - -def test_sha256_eq_str_gives_correct_json_representation(): - _filter = SHA256.eq("test_sha256") - expected = IS.format("sha256Checksum", "test_sha256") - assert str(_filter) == expected - - -def test_sha256_not_eq_str_gives_correct_json_representation(): - _filter = SHA256.not_eq("test_sha256") - expected = IS_NOT.format("sha256Checksum", "test_sha256") - assert str(_filter) == expected - - -def test_sha256_is_in_str_gives_correct_json_representation(): - items = ["sha2561", "sha2562", "sha2563"] - _filter = SHA256.is_in(items) - expected = IS_IN.format("sha256Checksum", *sorted(items)) - assert str(_filter) == expected - - -def test_sha256_not_in_str_gives_correct_json_representation(): - items = ["sha2561", "sha2562", "sha2563"] - _filter = SHA256.not_in(items) - expected = NOT_IN.format("sha256Checksum", *sorted(items)) - assert str(_filter) == expected - - -def test_file_category_choices_returns_valid_attributes(): - choices = FileCategory.choices() - valid_set = { - "Audio", - "Document", - "Executable", - "Image", - "Pdf", - "Presentation", - "Script", - "SourceCode", - "Spreadsheet", - "Video", - "VirtualDiskImage", - "Archive", - } - assert set(choices) == valid_set diff --git a/tests/sdk/queries/fileevents/filters/test_print_filter.py b/tests/sdk/queries/fileevents/filters/test_print_filter.py deleted file mode 100644 index 14587982d..000000000 --- 
a/tests/sdk/queries/fileevents/filters/test_print_filter.py +++ /dev/null @@ -1,58 +0,0 @@ -import pytest -from tests.sdk.queries.conftest import IS -from tests.sdk.queries.conftest import IS_IN -from tests.sdk.queries.conftest import IS_NOT -from tests.sdk.queries.conftest import NOT_IN - -from py42.sdk.queries.fileevents.filters.print_filter import Printer -from py42.sdk.queries.fileevents.filters.print_filter import PrintJobName - - -@pytest.mark.parametrize( - "filter_criteria, test_filter", - [(Printer.eq, IS), (Printer.not_eq, IS_NOT)], -) -def test_equality_printer_name_filter_gives_correct_json_representation( - filter_criteria, test_filter -): - _filter = filter_criteria("printer") - expected = test_filter.format("printerName", "printer") - assert str(_filter) == expected - - -@pytest.mark.parametrize( - "filter_criteria, test_filter", - [(Printer.is_in, IS_IN), (Printer.not_in, NOT_IN)], -) -def test_multi_value_printer_name_gives_correct_json_representation( - filter_criteria, test_filter -): - usernames = ["printer1", "printer2", "printer3"] - _filter = filter_criteria(usernames) - expected = test_filter.format("printerName", *usernames) - assert str(_filter) == expected - - -@pytest.mark.parametrize( - "filter_criteria, test_filter", - [(PrintJobName.eq, IS), (PrintJobName.not_eq, IS_NOT)], -) -def test_equality_printer_job_name_gives_correct_json_representation( - filter_criteria, test_filter -): - _filter = filter_criteria("job") - expected = test_filter.format("printJobName", "job") - assert str(_filter) == expected - - -@pytest.mark.parametrize( - "filter_criteria, test_filter", - [(PrintJobName.is_in, IS_IN), (PrintJobName.not_in, NOT_IN)], -) -def test_multi_valueprinter_job_name_gives_correct_json_representation( - filter_criteria, test_filter -): - usernames = ["job1", "job2", "job3"] - _filter = filter_criteria(usernames) - expected = test_filter.format("printJobName", *usernames) - assert str(_filter) == expected diff --git a/tests/sdk/queries/fileevents/filters/test_risk_filter.py b/tests/sdk/queries/fileevents/filters/test_risk_filter.py deleted file mode 100644 index 80c7fc9a8..000000000 --- a/tests/sdk/queries/fileevents/filters/test_risk_filter.py +++ /dev/null @@ -1,108 +0,0 @@ -from tests.sdk.queries.conftest import GREATER_THAN -from tests.sdk.queries.conftest import IS -from tests.sdk.queries.conftest import IS_IN -from tests.sdk.queries.conftest import IS_NOT -from tests.sdk.queries.conftest import LESS_THAN -from tests.sdk.queries.conftest import NOT_IN - -from py42.sdk.queries.fileevents.filters.risk_filter import RiskIndicator -from py42.sdk.queries.fileevents.filters.risk_filter import RiskScore -from py42.sdk.queries.fileevents.filters.risk_filter import RiskSeverity - - -def test_risk_indicator_eq_str_gives_correct_json_representation(): - _filter = RiskIndicator.eq(RiskIndicator.CloudDataExposures.PUBLIC_CORPORATE_BOX) - expected = IS.format("riskIndicatorNames", "Public link from corporate Box") - assert str(_filter) == expected - - -def test_risk_indicator_not_eq_str_gives_correct_json_representation(): - _filter = RiskIndicator.not_eq(RiskIndicator.CloudStorageUploads.AMAZON_DRIVE) - expected = IS_NOT.format("riskIndicatorNames", "Amazon Drive upload") - assert str(_filter) == expected - - -def test_risk_indicator_is_in_str_gives_correct_json_representation(): - items = [ - RiskIndicator.FileCategories.EXECUTABLE, - RiskIndicator.FileCategories.IMAGE, - RiskIndicator.FileCategories.PDF, - ] - _filter = RiskIndicator.is_in(items) - expected = 
IS_IN.format("riskIndicatorNames", *items) - assert str(_filter) == expected - - -def test_risk_indicator_not_in_str_gives_correct_json_representation(): - items = [ - RiskIndicator.FileCategories.EXECUTABLE, - RiskIndicator.FileCategories.IMAGE, - RiskIndicator.FileCategories.PDF, - ] - _filter = RiskIndicator.not_in(items) - expected = NOT_IN.format("riskIndicatorNames", *items) - assert str(_filter) == expected - - -def test_risk_severity_eq_str_gives_correct_json_representation(): - _filter = RiskSeverity.eq(RiskSeverity.HIGH) - expected = IS.format("riskSeverity", "HIGH") - assert str(_filter) == expected - - -def test_risk_severity_not_eq_str_gives_correct_json_representation(): - _filter = RiskSeverity.not_eq(RiskSeverity.CRITICAL) - expected = IS_NOT.format("riskSeverity", "CRITICAL") - assert str(_filter) == expected - - -def test_risk_severity_is_in_str_gives_correct_json_representation(): - items = [RiskSeverity.HIGH, RiskSeverity.LOW, RiskSeverity.MODERATE] - _filter = RiskSeverity.is_in(items) - expected = IS_IN.format("riskSeverity", *items) - assert str(_filter) == expected - - -def test_risk_severity_not_in_str_gives_correct_json_representation(): - items = [RiskSeverity.HIGH, RiskSeverity.LOW, RiskSeverity.MODERATE] - _filter = RiskSeverity.not_in(items) - expected = NOT_IN.format("riskSeverity", *items) - assert str(_filter) == expected - - -def test_risk_score_eq_str_gives_correct_json_representation(): - _filter = RiskScore.eq(5) - expected = IS.format("riskScore", "5") - assert str(_filter) == expected - - -def test_risk_score_not_eq_str_gives_correct_json_representation(): - _filter = RiskScore.not_eq(5) - expected = IS_NOT.format("riskScore", "5") - assert str(_filter) == expected - - -def test_risk_score_is_in_str_gives_correct_json_representation(): - items = [3, 4, 5] - _filter = RiskScore.is_in(items) - expected = IS_IN.format("riskScore", *items) - assert str(_filter) == expected - - -def test_risk_score_not_in_str_gives_correct_json_representation(): - items = [3, 4, 5] - _filter = RiskScore.not_in(items) - expected = NOT_IN.format("riskScore", *items) - assert str(_filter) == expected - - -def test_risk_score_greater_than_str_gives_correct_json_representation(): - _filter = RiskScore.greater_than(5) - expected = GREATER_THAN.format("riskScore", "5") - assert str(_filter) == expected - - -def test_risk_score_less_than_str_gives_correct_json_representation(): - _filter = RiskScore.less_than(5) - expected = LESS_THAN.format("riskScore", "5") - assert str(_filter) == expected diff --git a/tests/sdk/queries/fileevents/filters/test_source_filter.py b/tests/sdk/queries/fileevents/filters/test_source_filter.py deleted file mode 100644 index 9942d6bc4..000000000 --- a/tests/sdk/queries/fileevents/filters/test_source_filter.py +++ /dev/null @@ -1,171 +0,0 @@ -from tests.sdk.queries.conftest import EXISTS -from tests.sdk.queries.conftest import IS -from tests.sdk.queries.conftest import IS_IN -from tests.sdk.queries.conftest import IS_NOT -from tests.sdk.queries.conftest import NOT_EXISTS -from tests.sdk.queries.conftest import NOT_IN - -from py42.sdk.queries.fileevents.filters.source_filter import SourceCategory -from py42.sdk.queries.fileevents.filters.source_filter import SourceName -from py42.sdk.queries.fileevents.filters.source_filter import SourceTabTitles -from py42.sdk.queries.fileevents.filters.source_filter import SourceTabUrls - - -def test_source_category_exists_str_gives_correct_json_representation(): - _filter = SourceCategory.exists() - expected = 
EXISTS.format("sourceCategory") - assert str(_filter) == expected - - -def test_source_category_not_exists_str_gives_correct_json_representation(): - _filter = SourceCategory.not_exists() - expected = NOT_EXISTS.format("sourceCategory") - assert str(_filter) == expected - - -def test_source_category_eq_str_gives_correct_json_representation(): - _filter = SourceCategory.eq("test_sourceCategory") - expected = IS.format("sourceCategory", "test_sourceCategory") - assert str(_filter) == expected - - -def test_source_category_not_eq_str_gives_correct_json_representation(): - _filter = SourceCategory.not_eq("test_sourceCategory") - expected = IS_NOT.format("sourceCategory", "test_sourceCategory") - assert str(_filter) == expected - - -def test_source_category_is_in_str_gives_correct_json_representation(): - items = ["test_sourceCategory_1", "test_sourceCategory_2", "test_sourceCategory_3"] - _filter = SourceCategory.is_in(items) - expected = IS_IN.format("sourceCategory", *sorted(items)) - assert str(_filter) == expected - - -def test_source_category_not_in_str_gives_correct_json_representation(): - items = ["test_sourceCategory_1", "test_sourceCategory_2", "test_sourceCategory_3"] - _filter = SourceCategory.not_in(items) - expected = NOT_IN.format("sourceCategory", *sorted(items)) - assert str(_filter) == expected - - -def test_source_name_exists_str_gives_correct_json_representation(): - _filter = SourceName.exists() - expected = EXISTS.format("sourceName") - assert str(_filter) == expected - - -def test_source_name_not_exists_str_gives_correct_json_representation(): - _filter = SourceName.not_exists() - expected = NOT_EXISTS.format("sourceName") - assert str(_filter) == expected - - -def test_source_name_eq_str_gives_correct_json_representation(): - _filter = SourceName.eq("test_sourceName") - expected = IS.format("sourceName", "test_sourceName") - assert str(_filter) == expected - - -def test_source_name_not_eq_str_gives_correct_json_representation(): - _filter = SourceName.not_eq("test_sourceName") - expected = IS_NOT.format("sourceName", "test_sourceName") - assert str(_filter) == expected - - -def test_source_name_is_in_str_gives_correct_json_representation(): - items = ["test_sourceName_1", "test_sourceName_2", "test_sourceName_3"] - _filter = SourceName.is_in(items) - expected = IS_IN.format("sourceName", *sorted(items)) - assert str(_filter) == expected - - -def test_source_name_not_in_str_gives_correct_json_representation(): - items = ["test_sourceName_1", "test_sourceName_2", "test_sourceName_3"] - _filter = SourceName.not_in(items) - expected = NOT_IN.format("sourceName", *sorted(items)) - assert str(_filter) == expected - - -def test_source_tab_titles_exists_str_gives_correct_json_representation(): - _filter = SourceTabTitles.exists() - expected = EXISTS.format("sourceTabTitles") - assert str(_filter) == expected - - -def test_source_tab_titles_not_exists_str_gives_correct_json_representation(): - _filter = SourceTabTitles.not_exists() - expected = NOT_EXISTS.format("sourceTabTitles") - assert str(_filter) == expected - - -def test_source_tab_titles_eq_str_gives_correct_json_representation(): - _filter = SourceTabTitles.eq("test_sourceTabTitles") - expected = IS.format("sourceTabTitles", "test_sourceTabTitles") - assert str(_filter) == expected - - -def test_source_tab_titles_not_eq_str_gives_correct_json_representation(): - _filter = SourceTabTitles.not_eq("test_sourceTabTitles") - expected = IS_NOT.format("sourceTabTitles", "test_sourceTabTitles") - assert str(_filter) == expected 
- - -def test_source_tab_titles_is_in_str_gives_correct_json_representation(): - items = [ - "test_sourceTabTitles_1", - "test_sourceTabTitles_2", - "test_sourceTabTitles_3", - ] - _filter = SourceTabTitles.is_in(items) - expected = IS_IN.format("sourceTabTitles", *sorted(items)) - assert str(_filter) == expected - - -def test_source_tab_titles_not_in_str_gives_correct_json_representation(): - items = [ - "test_sourceTabTitles_1", - "test_sourceTabTitles_2", - "test_sourceTabTitles_3", - ] - _filter = SourceTabTitles.not_in(items) - expected = NOT_IN.format("sourceTabTitles", *sorted(items)) - assert str(_filter) == expected - - -def test_source_tab_urls_exists_str_gives_correct_json_representation(): - _filter = SourceTabUrls.exists() - expected = EXISTS.format("sourceTabUrls") - assert str(_filter) == expected - - -def test_source_tab_urls_not_exists_str_gives_correct_json_representation(): - _filter = SourceTabUrls.not_exists() - expected = NOT_EXISTS.format("sourceTabUrls") - assert str(_filter) == expected - - -def test_source_tab_urls_eq_str_gives_correct_json_representation(): - _filter = SourceTabUrls.eq("test_sourceTabUrls") - expected = IS.format("sourceTabUrls", "test_sourceTabUrls") - assert str(_filter) == expected - - -def test_source_tab_urls_not_eq_str_gives_correct_json_representation(): - _filter = SourceTabUrls.not_eq("test_sourceTabUrls") - expected = IS_NOT.format("sourceTabUrls", "test_sourceTabUrls") - assert str(_filter) == expected - - -def test_source_tab_urls_is_in_str_gives_correct_json_representation(): - items = ["test_sourceTabUrls_1", "test_sourceTabUrls_2", "test_sourceTabUrls_3"] - _filter = SourceTabUrls.is_in(items) - expected = IS_IN.format("sourceTabUrls", *sorted(items)) - assert str(_filter) == expected - - -def test_source_tab_urls_not_in_str_gives_correct_json_representation(): - items = ["test_sourceTabUrls_1", "test_sourceTabUrls_2", "test_sourceTabUrls_3"] - _filter = SourceTabUrls.not_in(items) - expected = NOT_IN.format("sourceTabUrls", *sorted(items)) - assert str(_filter) == expected diff --git a/tests/sdk/queries/fileevents/test_file_event_query.py b/tests/sdk/queries/fileevents/test_file_event_query.py deleted file mode 100644 index 0d6dfd1f5..000000000 --- a/tests/sdk/queries/fileevents/test_file_event_query.py +++ /dev/null @@ -1,172 +0,0 @@ -from py42.sdk.queries.fileevents.file_event_query import FileEventQuery -from py42.sdk.queries.fileevents.v2.file_event_query import ( - FileEventQuery as FileEventQueryV2, -) - -JSON_QUERY_BASE = '{{"groupClause":"{0}", "groups":[{1}], "srtDir":"{4}", "srtKey":"{5}", "pgNum":{2}, "pgSize":{3}}}' - - -def build_query_json(group_clause, group_list): - return JSON_QUERY_BASE.format(group_clause, group_list, 1, 500, "asc", "eventId") - - -def test_file_event_query_repr_does_not_throw_type_error(): - # On python 2, `repr` doesn't throw. - # On python 3, if `repr` doesn't return type `str`, then an exception is thrown. 
- try: - _ = repr(FileEventQuery()) - except TypeError: - raise AssertionError() - - -def test_file_event_query_constructs_successfully(event_filter_group): - assert FileEventQuery(event_filter_group) - - -def test_file_event_query_str_with_single_filter_gives_correct_json_representation( - event_filter_group, -): - file_event_query = FileEventQuery(event_filter_group) - json_query_str = build_query_json("AND", event_filter_group) - assert str(file_event_query) == json_query_str - - -def test_file_event_query_unicode_with_single_filter_gives_correct_json_representation( - unicode_event_filter_group, -): - file_event_query = FileEventQuery(unicode_event_filter_group) - json_query_str = build_query_json("AND", unicode_event_filter_group) - assert str(file_event_query) == json_query_str - - -def test_file_event_query_str_with_single_filter_and_specified_gives_correct_json_representation( - event_filter_group, -): - file_event_query = FileEventQuery(event_filter_group, group_clause="AND") - json_query_str = build_query_json("AND", event_filter_group) - assert str(file_event_query) == json_query_str - - -def test_file_event_query_str_with_single_filter_or_specified_gives_correct_json_representation( - event_filter_group, -): - file_event_query = FileEventQuery(event_filter_group, group_clause="OR") - json_query_str = build_query_json("OR", event_filter_group) - assert str(file_event_query) == json_query_str - - -def test_file_event_query_str_with_many_filters_gives_correct_json_representation( - event_filter_group_list, -): - file_event_query = FileEventQuery(event_filter_group_list) - json_query_str = build_query_json("AND", event_filter_group_list) - assert str(file_event_query) == json_query_str - - -def test_file_event_query_str_with_many_filters_and_specified_gives_correct_json_representation( - event_filter_group_list, -): - file_event_query = FileEventQuery(event_filter_group_list, group_clause="AND") - json_query_str = build_query_json("AND", event_filter_group_list) - assert str(file_event_query) == json_query_str - - -def test_file_event_query_str_with_many_filters_or_specified_gives_correct_json_representation( - event_filter_group_list, -): - file_event_query = FileEventQuery(event_filter_group_list, group_clause="OR") - json_query_str = build_query_json("OR", event_filter_group_list) - assert str(file_event_query) == json_query_str - - -def test_file_event_query_str_with_page_num_gives_correct_json_representation( - event_filter_group, -): - file_event_query = FileEventQuery(event_filter_group) - file_event_query.page_number = 5 - json_query_str = JSON_QUERY_BASE.format( - "AND", event_filter_group, 5, 500, "asc", "eventId" - ) - assert str(file_event_query) == json_query_str - - -def test_file_event_query_str_with_page_size_gives_correct_json_representation( - event_filter_group, -): - file_event_query = FileEventQuery(event_filter_group) - file_event_query.page_size = 500 - json_query_str = JSON_QUERY_BASE.format( - "AND", event_filter_group, 1, 500, "asc", "eventId" - ) - assert str(file_event_query) == json_query_str - - -def test_file_event_query_str_with_sort_direction_gives_correct_json_representation( - event_filter_group, -): - file_event_query = FileEventQuery(event_filter_group) - file_event_query.sort_direction = "desc" - json_query_str = JSON_QUERY_BASE.format( - "AND", event_filter_group, 1, 500, "desc", "eventId" - ) - assert str(file_event_query) == json_query_str - - -def test_file_event_query_str_with_sort_key_gives_correct_json_representation( - 
event_filter_group, -): - file_event_query = FileEventQuery(event_filter_group) - file_event_query.sort_key = "some_field_to_sort_by" - json_query_str = JSON_QUERY_BASE.format( - "AND", event_filter_group, 1, 500, "asc", "some_field_to_sort_by" - ) - assert str(file_event_query) == json_query_str - - -def test_file_event_query_from_dict_gives_correct_json_representation(): - group = { - "filterClause": "AND", - "filters": [{"operator": "IS", "term": "testterm", "value": "testval"}], - } - group_str = '{"filterClause":"AND", "filters":[{"operator":"IS", "term":"testterm", "value":"testval"}]}' - file_event_query_dict = {"groupClause": "AND", "groups": [group]} - file_event_query = FileEventQuery.from_dict(file_event_query_dict) - json_query_str = JSON_QUERY_BASE.format("AND", group_str, 1, 500, "asc", "eventId") - assert str(file_event_query) == json_query_str - - -def test_file_event_query_dict_gives_expected_dict_representation(event_filter_group): - file_event_query = FileEventQuery(event_filter_group) - file_event_query_dict = dict(file_event_query) - assert file_event_query_dict["groupClause"] == "AND" - assert file_event_query_dict["pgNum"] == 1 - assert file_event_query_dict["pgSize"] == 500 - assert file_event_query_dict["srtDir"] == "asc" - assert file_event_query_dict["srtKey"] == "eventId" - assert type(file_event_query_dict["groups"]) == list - - -def test_file_event_str_gives_correct_json_representation_when_pg_token_is_set( - event_filter_group, -): - query = FileEventQuery() - assert query.page_token is None - assert ( - str(query) - == '{"groupClause":"AND", "groups":[], "srtDir":"asc", "srtKey":"eventId", "pgNum":1, "pgSize":500}' - ) - query.page_token = "abc" - assert ( - str(query) - == '{"groupClause":"AND", "groups":[], "srtDir":"asc", "srtKey":"eventId", "pgToken":"abc", "pgSize":500}' - ) - - -def test_file_event_str_gives_correct_json_representation_when_using_v2_data( - event_filter_group, -): - query = FileEventQueryV2() - assert ( - str(query) - == '{"groupClause":"AND", "groups":[], "srtDir":"asc", "srtKey":"event.id", "pgNum":1, "pgSize":500}' - ) diff --git a/tests/sdk/queries/test_query_filter.py b/tests/sdk/queries/test_query_filter.py deleted file mode 100644 index 348fa7b6b..000000000 --- a/tests/sdk/queries/test_query_filter.py +++ /dev/null @@ -1,563 +0,0 @@ -from datetime import datetime - -import pytest - -from py42.sdk.queries.query_filter import create_eq_filter_group -from py42.sdk.queries.query_filter import create_filter_group -from py42.sdk.queries.query_filter import create_in_range_filter_group -from py42.sdk.queries.query_filter import create_is_in_filter_group -from py42.sdk.queries.query_filter import create_not_eq_filter_group -from py42.sdk.queries.query_filter import create_not_in_filter_group -from py42.sdk.queries.query_filter import create_on_or_after_filter_group -from py42.sdk.queries.query_filter import create_on_or_before_filter_group -from py42.sdk.queries.query_filter import create_query_filter -from py42.sdk.queries.query_filter import FilterGroup -from py42.sdk.queries.query_filter import QueryFilter -from py42.sdk.queries.query_filter import QueryFilterTimestampField - - -EVENT_FILTER_FIELD_NAME = "filter_field_name" -OPERATOR_STRING = "IS_IN" -VALUE_STRING = "value_example" -VALUE_UNICODE = "您已经发现了秘密信息" -TEST_TIMESTAMP = "2020-09-10 11:12:13" - -JSON_QUERY_FILTER = f'{{"operator":"{OPERATOR_STRING}", "term":"{EVENT_FILTER_FIELD_NAME}", "value":"{VALUE_STRING}"}}' - -JSON_FILTER_GROUP_BASE = '{{"filterClause":"{0}", 
"filters":[{1}]}}' -JSON_FILTER_GROUP_AND = JSON_FILTER_GROUP_BASE.format("AND", JSON_QUERY_FILTER) -JSON_FILTER_GROUP_OR = JSON_FILTER_GROUP_BASE.format("OR", JSON_QUERY_FILTER) - - -def json_query_filter_with_suffix(suffix): - return f'{{"operator":"{OPERATOR_STRING}{suffix}", "term":"{EVENT_FILTER_FIELD_NAME}{suffix}", "value":"{VALUE_STRING}{suffix}"}}' - - -def test_query_filter_constructs_successfully(): - assert QueryFilter(EVENT_FILTER_FIELD_NAME, OPERATOR_STRING, VALUE_STRING) - - -def test_query_filter_str_outputs_correct_json_representation(query_filter): - assert str(query_filter) == JSON_QUERY_FILTER - - -def test_query_filter_unicode_outputs_correct_json_representation(unicode_query_filter): - expected = f'{{"operator":"{OPERATOR_STRING}", "term":"{EVENT_FILTER_FIELD_NAME}", "value":"{VALUE_UNICODE}"}}' - assert str(unicode_query_filter) == expected - - -def test_query_filter_from_dict_gives_correct_json_representation(): - filter_dict = {"operator": "IS", "term": "testterm", "value": "testval"} - filter_json = '{"operator":"IS", "term":"testterm", "value":"testval"}' - alert_query = QueryFilter.from_dict(filter_dict) - assert str(alert_query) == filter_json - - -def test_query_filter_dict_gives_expected_dict_representation(event_filter_group): - query_filter = QueryFilter("testterm", "IS", value="testval") - alert_query_query_dict = dict(query_filter) - assert alert_query_query_dict["term"] == "testterm" - assert alert_query_query_dict["operator"] == "IS" - assert alert_query_query_dict["value"] == "testval" - - -def test_query_filter_term_returns_expected_value(): - query_filter = QueryFilter("testterm", "IS", value="testval") - assert query_filter.term == "testterm" - - -def test_query_filter_operator_returns_expected_value(): - query_filter = QueryFilter("testterm", "IS", value="testval") - assert query_filter.operator == "IS" - - -def test_query_filter_value_returns_expected_value(): - query_filter = QueryFilter("testterm", "IS", value="testval") - assert query_filter.value == "testval" - - -def test_filter_group_constructs_successfully(query_filter): - assert create_filter_group(query_filter, "AND") - - -def test_filter_group_str_gives_correct_json_representation(query_filter): - assert str(create_filter_group([query_filter], "AND")) == JSON_FILTER_GROUP_AND - - -def test_filter_group_with_and_specified_str_gives_correct_json_representation( - query_filter, -): - assert str(create_filter_group([query_filter], "AND")) == JSON_FILTER_GROUP_AND - - -def test_filter_group_with_or_specified_str_gives_correct_json_representation( - query_filter, -): - assert str(create_filter_group([query_filter], "OR")) == JSON_FILTER_GROUP_OR - - -def test_filter_group_from_dict_gives_correct_json_representation(query_filter): - filter_group_dict = { - "filterClause": "AND", - "filters": [{"operator": "IS", "term": "testterm", "value": "testval"}], - } - expected = '{"filterClause":"AND", "filters":[{"operator":"IS", "term":"testterm", "value":"testval"}]}' - filter_group = FilterGroup.from_dict(filter_group_dict) - assert str(filter_group) == expected - - -def test_filter_group_dict_gives_expected_dict_representation(query_filter): - filter_group = create_filter_group([query_filter], "AND") - filter_group_dict = dict(filter_group) - assert filter_group_dict["filterClause"] == "AND" - assert type(filter_group_dict["filters"]) == list - - -def test_filter_group_filter_list_returns_expected_value(query_filter): - filter_list = [query_filter] - filter_group = create_filter_group(filter_list, 
"AND") - assert filter_group.filter_list == filter_list - - -def test_filter_group_filter_clause_returns_excepted_value(query_filter): - filter_group = create_filter_group([query_filter], "AND") - assert filter_group.filter_clause == "AND" - - -def test_filter_group_with_multiple_filters_str_gives_correct_json_representation( - query_filter_list, -): - filters_string = ",".join( - [json_query_filter_with_suffix(suffix) for suffix in range(3)] - ) - json_multi_filter_group = JSON_FILTER_GROUP_BASE.format("AND", filters_string) - assert str(create_filter_group(query_filter_list, "AND")) == json_multi_filter_group - - -def test_filter_group_with_multiple_filters_and_specified_str_gives_correct_json_representation( - query_filter_list, -): - filters_string = ",".join( - [json_query_filter_with_suffix(suffix) for suffix in range(3)] - ) - json_multi_filter_group = JSON_FILTER_GROUP_BASE.format("AND", filters_string) - assert str(create_filter_group(query_filter_list, "AND")) == json_multi_filter_group - - -def test_filter_group_with_duplicate_filters_and_specified_str_gives_correct_json_representation( - query_filter, -): - expected = JSON_FILTER_GROUP_BASE.format("AND", JSON_QUERY_FILTER) - assert ( - str(create_filter_group([query_filter, query_filter, query_filter], "AND")) - == expected - ) - - -def test_filter_group_with_multiple_filters_or_specified_str_gives_correct_json_representation( - query_filter_list, -): - filters_string = ",".join( - [json_query_filter_with_suffix(suffix) for suffix in range(3)] - ) - json_multi_filter_group = JSON_FILTER_GROUP_BASE.format("OR", filters_string) - assert str(create_filter_group(query_filter_list, "OR")) == json_multi_filter_group - - -def test_filter_group_with_duplicate_filters_or_specified_str_gives_correct_json_representation( - query_filter, -): - expected = JSON_FILTER_GROUP_BASE.format("OR", JSON_QUERY_FILTER) - assert ( - str(create_filter_group([query_filter, query_filter, query_filter], "OR")) - == expected - ) - - -def test_create_eq_filter_group_returns_obj_with_correct_json_representation(): - filter_group = create_eq_filter_group("eqterm", "eqvalue") - assert ( - str(filter_group) == '{"filterClause":"AND",' - ' "filters":[{"operator":"IS", "term":"eqterm", "value":"eqvalue"}]}' - ) - - -def test_create_is_in_filter_group_returns_obj_with_correct_json_representation(): - filter_group = create_is_in_filter_group("isinterm", ["isinvalue1", "isinvalue2"]) - assert ( - str(filter_group) == '{"filterClause":"OR",' - ' "filters":[{"operator":"IS", "term":"isinterm", "value":"isinvalue1"},' - '{"operator":"IS", "term":"isinterm", "value":"isinvalue2"}]}' - ) - - -def test_create_not_eq_filter_group_returns_obj_with_correct_json_representation(): - filter_group = create_not_eq_filter_group("noteqterm", "noteqvalue") - assert ( - str(filter_group) == '{"filterClause":"AND",' - ' "filters":[{"operator":"IS_NOT", "term":"noteqterm", "value":"noteqvalue"}]}' - ) - - -def test_create_not_in_filter_group_returns_obj_with_correct_json_representation(): - filter_group = create_not_in_filter_group("isinterm", ["isinvalue1", "isinvalue2"]) - assert ( - str(filter_group) == '{"filterClause":"AND",' - ' "filters":[{"operator":"IS_NOT", "term":"isinterm", "value":"isinvalue1"},' - '{"operator":"IS_NOT", "term":"isinterm", "value":"isinvalue2"}]}' - ) - - -def test_create_on_or_after_filter_group_returns_obj_with_correct_json_representation(): - filter_group = create_on_or_after_filter_group("onorafterterm", "onoraftervalue") - assert ( - str(filter_group) 
== '{"filterClause":"AND",' - ' "filters":[{"operator":"ON_OR_AFTER", "term":"onorafterterm", "value":"onoraftervalue"}]}' - ) - - -def test_create_on_or_before_filter_group_returns_obj_with_correct_json_representation(): - filter_group = create_on_or_before_filter_group("onorbeforeterm", "onorbeforevalue") - assert ( - str(filter_group) == '{"filterClause":"AND",' - ' "filters":[{"operator":"ON_OR_BEFORE", "term":"onorbeforeterm", "value":"onorbeforevalue"}]}' - ) - - -def test_create_in_range_filter_group_returns_obj_with_correct_json_representation(): - filter_group = create_in_range_filter_group( - "rangeterm", "beforevalue", "aftervalue" - ) - assert ( - str(filter_group) == '{"filterClause":"AND",' - ' "filters":[{"operator":"ON_OR_AFTER", "term":"rangeterm", "value":"beforevalue"},' - '{"operator":"ON_OR_BEFORE", "term":"rangeterm", "value":"aftervalue"}]}' - ) - - -def test_create_query_filter_returns_obj_with_correct_json_representation(): - query_filter = create_query_filter( - EVENT_FILTER_FIELD_NAME, OPERATOR_STRING, VALUE_STRING - ) - assert ( - str(query_filter) - == f'{{"operator":"{OPERATOR_STRING}", "term":"{EVENT_FILTER_FIELD_NAME}", "value":"{VALUE_STRING}"}}' - ) - - -def test_compare_query_filters_with_equivalent_args_returns_true(): - query_filter1 = QueryFilter(EVENT_FILTER_FIELD_NAME, OPERATOR_STRING, VALUE_STRING) - query_filter2 = QueryFilter(EVENT_FILTER_FIELD_NAME, OPERATOR_STRING, VALUE_STRING) - assert query_filter1 == query_filter2 - - -def test_compare_query_filters_with_different_values_returns_false(): - query_filter1 = QueryFilter(EVENT_FILTER_FIELD_NAME, OPERATOR_STRING, "TEST") - query_filter2 = QueryFilter(EVENT_FILTER_FIELD_NAME, OPERATOR_STRING, "NOT_TEST") - assert query_filter1 != query_filter2 - - -def test_compare_query_filters_with_different_operators_returns_false(): - query_filter1 = QueryFilter(EVENT_FILTER_FIELD_NAME, "IS", VALUE_STRING) - query_filter2 = QueryFilter(EVENT_FILTER_FIELD_NAME, "IS_NOT", VALUE_STRING) - assert query_filter1 != query_filter2 - - -def test_compare_query_filters_with_different_terms_returns_false(): - query_filter1 = QueryFilter("TEST", OPERATOR_STRING, VALUE_STRING) - query_filter2 = QueryFilter("NOT_TEST", OPERATOR_STRING, VALUE_STRING) - assert query_filter1 != query_filter2 - - -@pytest.mark.parametrize( - "equivalent", - [ - f'{{"operator":"{OPERATOR_STRING}", "term":"{EVENT_FILTER_FIELD_NAME}", "value":"{VALUE_STRING}"}}', - ( - ("operator", OPERATOR_STRING), - ("term", EVENT_FILTER_FIELD_NAME), - ("value", VALUE_STRING), - ), - [ - ("operator", OPERATOR_STRING), - ("term", EVENT_FILTER_FIELD_NAME), - ("value", VALUE_STRING), - ], - ], -) -def test_compare_query_filter_with_expected_equivalent_returns_true(equivalent): - query_filter = QueryFilter(EVENT_FILTER_FIELD_NAME, OPERATOR_STRING, VALUE_STRING) - assert query_filter == equivalent - - -@pytest.mark.parametrize( - "different", - [ - f'{{"operator":"DIFFERENT_OPERATOR", "term":"{EVENT_FILTER_FIELD_NAME}", "value":"{VALUE_STRING}"}}', - f'{{"operator":"{OPERATOR_STRING}", "term":"DIFFERENT_FIELD_NAME", "value":"{VALUE_STRING}"}}', - f'{{"operator":"{OPERATOR_STRING}", "term":"{EVENT_FILTER_FIELD_NAME}", "value":"DIFFERENT_VALUE"}}', - ( - ("operator", "DIFFERENT_OPERATOR"), - ("term", EVENT_FILTER_FIELD_NAME), - ("value", VALUE_STRING), - ), - ( - ("operator", OPERATOR_STRING), - ("term", "DIFFERENT_FIELD_NAME"), - ("value", VALUE_STRING), - ), - ( - ("operator", OPERATOR_STRING), - ("term", EVENT_FILTER_FIELD_NAME), - ("value", "DIFFERENT_VALUE"), - ), 
- [ - ("operator", "DIFFERENT_OPERATOR"), - ("term", EVENT_FILTER_FIELD_NAME), - ("value", VALUE_STRING), - ], - [ - ("operator", OPERATOR_STRING), - ("term", "DIFFERENT_FIELD_NAME"), - ("value", VALUE_STRING), - ], - [ - ("operator", OPERATOR_STRING), - ("term", EVENT_FILTER_FIELD_NAME), - ("value", "DIFFERENT_VALUE"), - ], - ], -) -def test_compare_query_filter_with_expected_different_returns_false(different): - query_filter = QueryFilter(EVENT_FILTER_FIELD_NAME, OPERATOR_STRING, VALUE_STRING) - assert query_filter != different - - -def test_compare_filter_group_with_equivalent_single_args_return_true(): - group1 = create_eq_filter_group("eqterm", "eqvalue") - group2 = create_eq_filter_group("eqterm", "eqvalue") - assert group1 == group2 - - -def test_compare_filter_group_with_equivalent_multiple_args_in_different_order_returns_true(): - group1 = create_is_in_filter_group("term", ["value1", "value2", "value3"]) - group2 = create_is_in_filter_group("term", ["value3", "value1", "value2"]) - assert group1 == group2 - assert str(group1) == group2 - assert tuple(group1) == group2 - assert list(group1) == group2 - assert group1 == str(group2) - assert group1 == tuple(group2) - assert group1 == list(group2) - - -@pytest.mark.parametrize( - "filter_class", - [ - QueryFilter("term", "IS", "value1"), - QueryFilter("term", "IS", "value2"), - QueryFilter("term", "IS", "value3"), - ], -) -def test_filter_group_contains_expected_query_filter_returns_true(filter_class): - group = create_is_in_filter_group("term", ["value1", "value2", "value3"]) - assert filter_class in group - assert str(filter_class) in group - assert tuple(filter_class) in group - assert list(filter_class) in group - - -@pytest.mark.parametrize( - "filter_class", - [ - QueryFilter("term", "IS", "value4"), - QueryFilter("different_term", "IS", "value2"), - QueryFilter("term", "DIFFERENT_OPERATOR", "value3"), - ], -) -def test_filter_group_when_does_not_contain_expected_query_filter_returns_false( - filter_class, -): - group = create_is_in_filter_group("term", ["value1", "value2", "value3"]) - assert filter_class not in group - assert str(filter_class) not in group - assert tuple(filter_class) not in group - assert list(filter_class) not in group - - -def test_filter_group_when_changed_filter_clause_has_correct_json_representation(): - group = create_is_in_filter_group("term", ["value1", "value2", "value3"]) - assert ( - str(group) == '{"filterClause":"OR", "filters"' - ':[{"operator":"IS", "term":"term", "value":"value1"},' - '{"operator":"IS", "term":"term", "value":"value2"},' - '{"operator":"IS", "term":"term", "value":"value3"}]}' - ) - group.filter_clause = "AND" - assert ( - str(group) == '{"filterClause":"AND", "filters"' - ':[{"operator":"IS", "term":"term", "value":"value1"},' - '{"operator":"IS", "term":"term", "value":"value2"},' - '{"operator":"IS", "term":"term", "value":"value3"}]}' - ) - - -class Test_QueryFilterTimestampField: - @pytest.mark.parametrize( - "timestamp", - [ - (TEST_TIMESTAMP), - (1599736333.0), - (1599736333), - datetime.strptime(TEST_TIMESTAMP, "%Y-%m-%d %H:%M:%S"), - ], - ) - def test_on_or_after(self, timestamp): - expected = { - "filterClause": "AND", - "filters": [ - { - "operator": "ON_OR_AFTER", - "term": "override_timestamp_field_name", - "value": "2020-09-10T11:12:13.000Z", - } - ], - } - - qf = QueryFilterTimestampField.on_or_after(timestamp) - assert dict(qf) == expected - - @pytest.mark.parametrize( - "timestamp", - [ - (TEST_TIMESTAMP), - (1599736333.0), - (1599736333), - 
datetime.strptime(TEST_TIMESTAMP, "%Y-%m-%d %H:%M:%S"), - ], - ) - def test_on_or_before(self, timestamp): - expected = { - "filterClause": "AND", - "filters": [ - { - "operator": "ON_OR_BEFORE", - "term": "override_timestamp_field_name", - "value": "2020-09-10T11:12:13.000Z", - } - ], - } - assert dict(QueryFilterTimestampField.on_or_before(timestamp)) == expected - - @pytest.mark.parametrize( - "start_timestamp, end_timestamp", - [ - (TEST_TIMESTAMP, "2020-09-10 12:13:14"), - (1599736333.0, 1599739994.0), - (1599736333, 1599739994), - ( - datetime.strptime(TEST_TIMESTAMP, "%Y-%m-%d %H:%M:%S"), - datetime.strptime("2020-09-10 12:13:14", "%Y-%m-%d %H:%M:%S"), - ), - ], - ) - def test_in_range(self, start_timestamp, end_timestamp): - expected = { - "filterClause": "AND", - "filters": [ - { - "operator": "ON_OR_AFTER", - "term": "override_timestamp_field_name", - "value": "2020-09-10T11:12:13.000Z", - }, - { - "operator": "ON_OR_BEFORE", - "term": "override_timestamp_field_name", - "value": "2020-09-10T12:13:14.000Z", - }, - ], - } - - assert ( - dict(QueryFilterTimestampField.in_range(start_timestamp, end_timestamp)) - == expected - ) - - @pytest.mark.parametrize( - "timestamp", - [ - (TEST_TIMESTAMP), - (1599736333.0), - (1599736333), - datetime.strptime(TEST_TIMESTAMP, "%Y-%m-%d %H:%M:%S"), - ], - ) - def test_on_same_day(self, timestamp): - expected = { - "filterClause": "AND", - "filters": [ - { - "operator": "ON_OR_AFTER", - "term": "override_timestamp_field_name", - "value": "2020-09-10T00:00:00.000Z", - }, - { - "operator": "ON_OR_BEFORE", - "term": "override_timestamp_field_name", - "value": "2020-09-10T23:59:59.000Z", - }, - ], - } - assert dict(QueryFilterTimestampField.on_same_day(timestamp)) == expected - - def test_on_or_after_with_decimals(self): - expected = { - "filterClause": "AND", - "filters": [ - { - "operator": "ON_OR_AFTER", - "term": "override_timestamp_field_name", - "value": "2020-09-10T11:12:13.123Z", - } - ], - } - - qf = QueryFilterTimestampField.on_or_after(1599736333.123456) - assert dict(qf) == expected - - def test_on_or_before_with_decimals(self): - expected = { - "filterClause": "AND", - "filters": [ - { - "operator": "ON_OR_BEFORE", - "term": "override_timestamp_field_name", - "value": "2020-09-10T11:12:13.123Z", - } - ], - } - assert ( - dict(QueryFilterTimestampField.on_or_before(1599736333.123456)) == expected - ) - - def test_in_range_with_decimals(self): - expected = { - "filterClause": "AND", - "filters": [ - { - "operator": "ON_OR_AFTER", - "term": "override_timestamp_field_name", - "value": "2020-09-10T11:12:13.123Z", - }, - { - "operator": "ON_OR_BEFORE", - "term": "override_timestamp_field_name", - "value": "2020-09-10T12:13:14.678Z", - }, - ], - } - - assert ( - dict(QueryFilterTimestampField.in_range(1599736333.123456, 1599739994.6789)) - == expected - ) diff --git a/tests/sdk/test_sdk.py b/tests/sdk/test_sdk.py index 51c24b829..fd6256c0a 100644 --- a/tests/sdk/test_sdk.py +++ b/tests/sdk/test_sdk.py @@ -2,22 +2,20 @@ from requests import Session from tests.conftest import create_mock_response -from py42.clients.alerts import AlertsClient -from py42.clients.archive import ArchiveClient -from py42.clients.auditlogs import AuditLogsClient -from py42.clients.cases import CasesClient -from py42.exceptions import Py42UnauthorizedError -from py42.sdk import from_local_account -from py42.sdk import SDKClient -from py42.services import administration -from py42.services import devices -from py42.services import legalhold -from py42.services import 
legalholdapiclient -from py42.services import orgs -from py42.services import users -from py42.services._auth import C42RenewableAuth -from py42.services._connection import Connection -from py42.usercontext import UserContext +from pycpg.clients.archive import ArchiveClient +from pycpg.clients.auditlogs import AuditLogsClient +from pycpg.exceptions import PycpgUnauthorizedError +from pycpg.sdk import from_local_account +from pycpg.sdk import SDKClient +from pycpg.services import administration +from pycpg.services import devices +from pycpg.services import legalhold +from pycpg.services import legalholdapiclient +from pycpg.services import orgs +from pycpg.services import users +from pycpg.services._auth import CPGRenewableAuth +from pycpg.services._connection import Connection +from pycpg.usercontext import UserContext HOST_ADDRESS = "https://example.com" @@ -27,14 +25,14 @@ class TestSDK: @pytest.fixture - def py42_connection(self, mocker, successful_response): + def pycpg_connection(self, mocker, successful_response): mock_connection = mocker.MagicMock(spec=Connection) mock_connection.get.return_value = successful_response return mock_connection @pytest.fixture def mock_auth(self, mocker): - return mocker.MagicMock(spec=C42RenewableAuth) + return mocker.MagicMock(spec=CPGRenewableAuth) @pytest.fixture def mock_session(self, mocker): @@ -42,52 +40,44 @@ def mock_session(self, mocker): mock_session.headers = {} return mock_session - def test_has_administration_service_set(self, py42_connection, mock_auth): - client = SDKClient(py42_connection, mock_auth) + def test_has_administration_service_set(self, pycpg_connection, mock_auth): + client = SDKClient(pycpg_connection, mock_auth) assert type(client.serveradmin) == administration.AdministrationService - def test_has_archive_service_set(self, py42_connection, mock_auth): - client = SDKClient(py42_connection, mock_auth) + def test_has_archive_service_set(self, pycpg_connection, mock_auth): + client = SDKClient(pycpg_connection, mock_auth) assert type(client.archive) == ArchiveClient - def test_has_device_service_set(self, py42_connection, mock_auth): - client = SDKClient(py42_connection, mock_auth) + def test_has_device_service_set(self, pycpg_connection, mock_auth): + client = SDKClient(pycpg_connection, mock_auth) assert type(client.devices) == devices.DeviceService - def test_has_alert_service_set(self, py42_connection, mock_auth): - client = SDKClient(py42_connection, mock_auth) - assert type(client.alerts) == AlertsClient - - def test_has_legal_hold_service_set(self, py42_connection, mock_auth): - client = SDKClient(py42_connection, mock_auth) + def test_has_legal_hold_service_set(self, pycpg_connection, mock_auth): + client = SDKClient(pycpg_connection, mock_auth) assert type(client.legalhold) == legalhold.LegalHoldService def test_has_api_client_legal_hold_service_set_if_initialized_with_api_client_flag( - self, py42_connection, mock_auth + self, pycpg_connection, mock_auth ): - client = SDKClient(py42_connection, mock_auth, auth_flag=1) + client = SDKClient(pycpg_connection, mock_auth, auth_flag=1) assert type(client.legalhold) == legalholdapiclient.LegalHoldApiClientService - def test_has_org_service_set(self, py42_connection, mock_auth): - client = SDKClient(py42_connection, mock_auth) + def test_has_org_service_set(self, pycpg_connection, mock_auth): + client = SDKClient(pycpg_connection, mock_auth) assert type(client.orgs) == orgs.OrgService - def test_has_user_service_set(self, py42_connection, mock_auth): - client = 
SDKClient(py42_connection, mock_auth) + def test_has_user_service_set(self, pycpg_connection, mock_auth): + client = SDKClient(pycpg_connection, mock_auth) assert type(client.users) == users.UserService - def test_has_user_context_set(self, py42_connection, mock_auth): - client = SDKClient(py42_connection, mock_auth) + def test_has_user_context_set(self, pycpg_connection, mock_auth): + client = SDKClient(pycpg_connection, mock_auth) assert type(client.usercontext) == UserContext - def test_has_auditlog_service_set(self, py42_connection, mock_auth): - client = SDKClient(py42_connection, mock_auth) + def test_has_auditlog_service_set(self, pycpg_connection, mock_auth): + client = SDKClient(pycpg_connection, mock_auth) assert type(client.auditlogs) == AuditLogsClient - def test_has_cases_service_set(self, py42_connection, mock_auth): - client = SDKClient(py42_connection, mock_auth) - assert type(client.cases) == CasesClient - def test_from_local_account_when_unauthorized_calls_loginConfig_and_returns_config_value_on_raised_exception_text( self, mocker, mock_session, mock_auth, unauthorized_response ): @@ -98,9 +88,9 @@ def test_from_local_account_when_unauthorized_calls_loginConfig_and_returns_conf ) connection = Connection.from_host_address(HOST_ADDRESS, session=mock_session) client = SDKClient(connection, mock_auth) - mocker.patch("py42.sdk.SDKClient.from_local_account", return_value=client) + mocker.patch("pycpg.sdk.SDKClient.from_local_account", return_value=client) - with pytest.raises(Py42UnauthorizedError) as err: + with pytest.raises(PycpgUnauthorizedError) as err: from_local_account(HOST_ADDRESS, TEST_USERNAME, TEST_PASSWORD) assert f"User LoginConfig: {login_type}" in str(err) diff --git a/tests/services/alertrules/__init__.py b/tests/services/alertrules/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/services/alertrules/test_cloudshare.py b/tests/services/alertrules/test_cloudshare.py deleted file mode 100644 index b27289983..000000000 --- a/tests/services/alertrules/test_cloudshare.py +++ /dev/null @@ -1,15 +0,0 @@ -from py42.services.alertrules import CloudShareService - - -class TestCloudShareClient: - def test_get_by_id_posts_to_correct_endpoint_for_cloudshare_type( - self, mock_connection - ): - alert_rule_client = CloudShareService(mock_connection, "tenant-id") - alert_rule_client.get("rule-id") - url = mock_connection.post.call_args[0][0] - assert url == "/svc/api/v1/Rules/query-cloud-share-permissions-rule" - posted_data = mock_connection.post.call_args[1]["json"] - assert posted_data["tenantId"] == "tenant-id" and posted_data["ruleIds"] == [ - "rule-id" - ] diff --git a/tests/services/alertrules/test_exfiltration.py b/tests/services/alertrules/test_exfiltration.py deleted file mode 100644 index ac8ba3fb5..000000000 --- a/tests/services/alertrules/test_exfiltration.py +++ /dev/null @@ -1,15 +0,0 @@ -from py42.services.alertrules import ExfiltrationService - - -class TestExfiltrationClient: - def test_get_by_id_posts_expected_data_for_exfiltration_type(self, mock_connection): - alert_rule_client = ExfiltrationService(mock_connection, "tenant-id") - alert_rule_client.get("rule-id") - - assert mock_connection.post.call_count == 1 - url = mock_connection.post.call_args[0][0] - assert url == "/svc/api/v1/Rules/query-endpoint-exfiltration-rule" - posted_data = mock_connection.post.call_args[1]["json"] - assert posted_data["tenantId"] == "tenant-id" and posted_data["ruleIds"] == [ - "rule-id" - ] diff --git 
a/tests/services/alertrules/test_file_type_mismatch.py b/tests/services/alertrules/test_file_type_mismatch.py deleted file mode 100644 index bd89e2e2e..000000000 --- a/tests/services/alertrules/test_file_type_mismatch.py +++ /dev/null @@ -1,15 +0,0 @@ -from py42.services.alertrules import FileTypeMismatchService - - -class TestFileTypeMisMatchClient: - def test_get_by_id_posts_to_correct_endpoint_for_type_mismatch_rule_type( - self, mock_connection - ): - alert_rule_client = FileTypeMismatchService(mock_connection, "tenant-id") - alert_rule_client.get("rule-id") - url = mock_connection.post.call_args[0][0] - assert url == "/svc/api/v1/Rules/query-file-type-mismatch-rule" - posted_data = mock_connection.post.call_args[1]["json"] - assert posted_data["tenantId"] == "tenant-id" and posted_data["ruleIds"] == [ - "rule-id" - ] diff --git a/tests/services/storage/test_archive.py b/tests/services/storage/test_archive.py index 686f42fe1..b986089c4 100644 --- a/tests/services/storage/test_archive.py +++ b/tests/services/storage/test_archive.py @@ -10,12 +10,12 @@ from tests.conftest import TEST_PASSWORD from tests.conftest import TEST_SESSION_ID -from py42.exceptions import Py42InternalServerError -from py42.exceptions import Py42InvalidArchiveEncryptionKey -from py42.exceptions import Py42InvalidArchivePassword -from py42.response import Py42Response -from py42.services._connection import Connection -from py42.services.storage.archive import StorageArchiveService +from pycpg.exceptions import PycpgInternalServerError +from pycpg.exceptions import PycpgInvalidArchiveEncryptionKey +from pycpg.exceptions import PycpgInvalidArchivePassword +from pycpg.response import PycpgResponse +from pycpg.services._connection import Connection +from pycpg.services.storage.archive import StorageArchiveService JSON_KEYWORD = "json" @@ -192,12 +192,12 @@ def side_effect(*args, **kwargs): [{"name":"PRIVATE_PASSWORD_INVALID","description":"An error has occurred. See server logs for more information.","objects":[]}] """ - raise Py42InternalServerError(base_err) + raise PycpgInternalServerError(base_err) connection.post.side_effect = side_effect storage_archive_service = StorageArchiveService(connection) - with pytest.raises(Py42InvalidArchivePassword) as err: + with pytest.raises(PycpgInvalidArchivePassword) as err: storage_archive_service.create_restore_session( TEST_DEVICE_GUID, private_password=TEST_PASSWORD ) @@ -225,12 +225,12 @@ def side_effect(*args, **kwargs): [{"name":"CUSTOM_KEY_INVALID","description":"An error has occurred. 
See server logs for more information.","objects":[]}] """ - raise Py42InternalServerError(base_err) + raise PycpgInternalServerError(base_err) connection.post.side_effect = side_effect storage_archive_service = StorageArchiveService(connection) - with pytest.raises(Py42InvalidArchiveEncryptionKey) as err: + with pytest.raises(PycpgInvalidArchiveEncryptionKey) as err: storage_archive_service.create_restore_session( TEST_DEVICE_GUID, encryption_key=TEST_ENCRYPTION_KEY ) @@ -285,7 +285,7 @@ def test_start_restore_posts_expected_data_to_expected_url(self, connection): def test_get_restore_status_calls_get_with_correct_url(self, mocker, connection): storage_archive_service = StorageArchiveService(connection) - api_response = mocker.MagicMock(spec=Py42Response) + api_response = mocker.MagicMock(spec=PycpgResponse) connection.get.return_value = api_response storage_archive_service.get_restore_status(TEST_JOB_ID) expected_url = WEB_RESTORE_JOB_URL + "/" + TEST_JOB_ID @@ -295,7 +295,7 @@ def test_cancel_restore_calls_delete_with_correct_url_and_data( self, mocker, connection ): storage_archive_service = StorageArchiveService(connection) - api_response = mocker.MagicMock(spec=Py42Response) + api_response = mocker.MagicMock(spec=PycpgResponse) connection.delete.return_value = api_response storage_archive_service.cancel_restore(TEST_JOB_ID) connection.delete.assert_called_once_with( @@ -306,7 +306,7 @@ def test_stream_restore_result_status_calls_get_with_correct_url( self, mocker, connection ): storage_archive_service = StorageArchiveService(connection) - api_response = mocker.MagicMock(spec=Py42Response) + api_response = mocker.MagicMock(spec=PycpgResponse) connection.get.return_value = api_response storage_archive_service.stream_restore_result(TEST_JOB_ID) expected_url = WEB_RESTORE_JOB_RESULT_URL + "/" + TEST_JOB_ID diff --git a/tests/services/storage/test_auth.py b/tests/services/storage/test_auth.py index 073bdddcc..5e6dfdb35 100644 --- a/tests/services/storage/test_auth.py +++ b/tests/services/storage/test_auth.py @@ -2,7 +2,7 @@ from requests import Request from tests.conftest import create_mock_response -from py42.services._connection import Connection +from pycpg.services._connection import Connection TEST_USER_ID = "0123456789" TEST_DEVICE_GUID = "testdeviceguid" @@ -34,7 +34,7 @@ def mock_storage_auth_token_conn(mocker): mocker, '["TEST_V1", "TOKEN_VALUE"]' ) mocker.patch( - "py42.services.storage._auth._get_new_storage_connection", + "pycpg.services.storage._auth._get_new_storage_connection", return_value=mock_connection, ) return mock_connection diff --git a/tests/services/storage/test_exfiltrateddata.py b/tests/services/storage/test_exfiltrateddata.py deleted file mode 100644 index 1f58c268f..000000000 --- a/tests/services/storage/test_exfiltrateddata.py +++ /dev/null @@ -1,42 +0,0 @@ -import pytest -import requests - -from py42.services.storage.exfiltrateddata import ExfiltratedDataService - - -class TestExfiltratedDataService: - @pytest.fixture - def mock_request(self, mocker): - request = mocker.patch.object(requests, "get") - request.return_value = b"stream" - return request - - def test_get_download_token_calls_get_with_valid_params( - self, mock_successful_connection - ): - service = ExfiltratedDataService( - mock_successful_connection, mock_successful_connection - ) - service.get_download_token( - "testeventid", "testdeviceid", "testfilepath", "testSHA256", 1223 - ) - qry = 
"deviceUid=testdeviceid&eventId=testeventid&filePath=testfilepath&fileSHA256=testSHA256&versionTimestamp=1223" - expected = f"api/v1/file-download-token?{qry}" - mock_successful_connection.get.assert_called_once_with( - expected, headers={"Accept": "*/*"} - ) - - def test_get_file_calls_get_with_valid_params( - self, mock_successful_connection, mock_request - ): - mock_successful_connection.host_address = "https://example.com" - service = ExfiltratedDataService( - mock_successful_connection, mock_successful_connection - ) - service.get_file("testtoken") - mock_successful_connection.get.assert_called_once_with( - "https://example.com/api/v1/get-file", - headers={"Accept": "*/*"}, - params={"token": "testtoken"}, - stream=True, - ) diff --git a/tests/services/storage/test_preservationdata.py b/tests/services/storage/test_preservationdata.py deleted file mode 100644 index de034c777..000000000 --- a/tests/services/storage/test_preservationdata.py +++ /dev/null @@ -1,56 +0,0 @@ -import pytest -import requests - -from py42.services.storage.preservationdata import StoragePreservationDataService - - -class TestStoragePreservationDataService: - @pytest.fixture - def mock_request(self, mocker): - request = mocker.patch.object(requests, "get") - request.return_value = b"stream" - return request - - def test_get_download_token_calls_get_with_valid_params( - self, mock_successful_connection - ): - service = StoragePreservationDataService( - mock_successful_connection, mock_successful_connection - ) - service.get_download_token("abc", "fabc", 1223) - - mock_successful_connection.get.assert_called_once_with( - "api/v3/FileDownloadToken", - params={"archiveGuid": "abc", "fileId": "fabc", "versionTimestamp": 1223}, - ) - - def test_get_file_calls_get_with_valid_params_with_substitution( - self, mock_successful_connection, mock_request - ): - mock_successful_connection.host_address = "https://host.com" - - service = StoragePreservationDataService( - mock_successful_connection, mock_successful_connection - ) - service.get_file("token") - mock_successful_connection.get.assert_called_once_with( - "https://host.com/api/v3/GetFile", - headers={"Accept": "*/*"}, - params={"PDSDownloadToken": "token"}, - stream=True, - ) - - def test_get_file_calls_get_with_valid_params( - self, mock_successful_connection, mock_request - ): - mock_successful_connection.host_address = "https://host.com" - service = StoragePreservationDataService( - mock_successful_connection, mock_successful_connection - ) - service.get_file("PDSDownloadToken=token") - mock_successful_connection.get.assert_called_once_with( - "https://host.com/api/v3/GetFile", - headers={"Accept": "*/*"}, - params={"PDSDownloadToken": "token"}, - stream=True, - ) diff --git a/tests/services/storage/test_restore.py b/tests/services/storage/test_restore.py index 4a5d4d145..b77204fc8 100644 --- a/tests/services/storage/test_restore.py +++ b/tests/services/storage/test_restore.py @@ -9,11 +9,11 @@ from tests.services.storage.test_archive import TEST_NUM_BYTES from tests.services.storage.test_archive import TEST_NUM_FILES -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42BadRestoreRequestError -from py42.services.storage.restore import PushRestoreExistingFiles -from py42.services.storage.restore import PushRestoreLocation -from py42.services.storage.restore import PushRestoreService +from pycpg.exceptions import PycpgBadRequestError +from pycpg.exceptions import PycpgBadRestoreRequestError +from pycpg.services.storage.restore import 
PushRestoreExistingFiles +from pycpg.services.storage.restore import PushRestoreLocation +from pycpg.services.storage.restore import PushRestoreService def _create_expected_restore_groups(file): @@ -23,7 +23,7 @@ def _create_expected_restore_groups(file): @pytest.fixture def mock_restore_connection_with_bad_request(mocker, mock_connection): mock_connection.post.side_effect = create_mock_error( - Py42BadRequestError, mocker, "CREATE_FAILED" + PycpgBadRequestError, mocker, "CREATE_FAILED" ) return mock_connection @@ -78,7 +78,7 @@ def test_start_push_restore_when_bad_request_raised_with_create_failed_text_rais ): service = PushRestoreService(mock_restore_connection_with_bad_request) restore_groups = _create_expected_restore_groups(single_file_selection[0].file) - with pytest.raises(Py42BadRestoreRequestError) as err: + with pytest.raises(PycpgBadRestoreRequestError) as err: service.start_push_restore( TEST_DEVICE_GUID, TEST_ACCEPTING_GUID, diff --git a/tests/services/storage/test_service_factory.py b/tests/services/storage/test_service_factory.py index c50c76878..bbbc3856a 100644 --- a/tests/services/storage/test_service_factory.py +++ b/tests/services/storage/test_service_factory.py @@ -3,13 +3,11 @@ from tests.conftest import create_mock_response from tests.conftest import TEST_DEVICE_GUID -from py42.exceptions import Py42Error -from py42.services._connection import Connection -from py42.services.devices import DeviceService -from py42.services.storage._service_factory import StorageServiceFactory -from py42.services.storage.archive import StorageArchiveService -from py42.services.storage.exfiltrateddata import ExfiltratedDataService -from py42.services.storage.preservationdata import StoragePreservationDataService +from pycpg.exceptions import PycpgError +from pycpg.services._connection import Connection +from pycpg.services.devices import DeviceService +from pycpg.services.storage._service_factory import StorageServiceFactory +from pycpg.services.storage.archive import StorageArchiveService @pytest.fixture @@ -85,23 +83,5 @@ def test_auto_select_destination_guid_when_device_has_no_destination_raises_exce ) response = create_mock_response(mocker, '{"backupUsage": []}') mock_device_service.get_by_guid.return_value = response - with pytest.raises(Py42Error): + with pytest.raises(PycpgError): factory.auto_select_destination_guid(TEST_DEVICE_GUID) - - def test_preservation_data_service( - self, mock_connection_with_storage_lookup, mock_device_service - ): - factory = StorageServiceFactory( - mock_connection_with_storage_lookup, mock_device_service - ) - service = factory.create_preservation_data_service("testhost.com") - assert type(service) == StoragePreservationDataService - - def test_exfiltrated_data_service( - self, mock_connection_with_storage_lookup, mock_device_service - ): - factory = StorageServiceFactory( - mock_connection_with_storage_lookup, mock_device_service - ) - service = factory.create_exfiltrated_data_service("testhost.com") - assert type(service) == ExfiltratedDataService diff --git a/tests/services/test_alert_rules.py b/tests/services/test_alert_rules.py deleted file mode 100644 index 327588cef..000000000 --- a/tests/services/test_alert_rules.py +++ /dev/null @@ -1,130 +0,0 @@ -import pytest -from requests import Response -from tests.conftest import create_mock_response - -from py42.exceptions import Py42InvalidRuleError -from py42.exceptions import Py42NotFoundError -from py42.services.alertrules import AlertRulesService -from py42.services.userriskprofile import 
UserRiskProfileService - - -MOCK_USER_GET_RESPONSE = """ -{ - "active": true, - "cloudAliases": [ - "user.aliases@code42.com" - ], - "country": "usa", - "deleted": false, - "department": "engineering", - "displayName": "User Name", - "division": "test", - "employmentType": "contract", - "endDate": { - "day": 10, - "month": 20, - "year": 2030 - }, - "locality": "midwest", - "managerDisplayName": "My Manager", - "managerId": "123-manager", - "managerUsername": "manager@email.com", - "notes": "my notes", - "region": "us", - "startDate": { - "day": 1, - "month": 20, - "year": 2020 - }, - "supportUser": true, - "tenantId": "123-456", - "title": "title", - "userId": "user-id", - "username": "username@code42.com" -} -""" - - -@pytest.fixture -def mock_user_profile_service(mocker): - response = create_mock_response(mocker, MOCK_USER_GET_RESPONSE) - service = mocker.MagicMock(spec=UserRiskProfileService) - service.get_by_id.return_value = response - return service - - -class TestAlertRulesService: - def test_add_user_posts_expected_data( - self, mock_connection, user_context, mock_user_profile_service - ): - alert_rule_service = AlertRulesService( - mock_connection, user_context, mock_user_profile_service - ) - alert_rule_service.add_user("rule-id", "user-id") - - assert mock_connection.post.call_count == 1 - posted_data = mock_connection.post.call_args[1]["json"] - assert mock_connection.post.call_args[0][0] == "/svc/api/v1/Rules/add-users" - assert ( - posted_data["tenantId"] == user_context.get_current_tenant_id() - and posted_data["ruleId"] == "rule-id" - and posted_data["userList"][0]["userIdFromAuthority"] == "user-id" - and posted_data["userList"][0]["userAliasList"] - == ["user.aliases@code42.com"] - ) - - def test_remove_user_posts_expected_data( - self, mock_connection, user_context, mock_user_profile_service - ): - alert_rule_service = AlertRulesService( - mock_connection, user_context, mock_user_profile_service - ) - alert_rule_service.remove_user("rule-id", "user-id") - - assert mock_connection.post.call_count == 1 - posted_data = mock_connection.post.call_args[1]["json"] - assert mock_connection.post.call_args[0][0] == "/svc/api/v1/Rules/remove-users" - assert ( - posted_data["tenantId"] == user_context.get_current_tenant_id() - and posted_data["ruleId"] == "rule-id" - and posted_data["userIdList"] == ["user-id"] - ) - - def test_remove_all_users_posts_expected_data( - self, mock_connection, user_context, mock_user_profile_service - ): - alert_rule_service = AlertRulesService( - mock_connection, user_context, mock_user_profile_service - ) - alert_rule_service.remove_all_users("rule-id") - - assert mock_connection.post.call_count == 1 - posted_data = mock_connection.post.call_args[1]["json"] - assert ( - mock_connection.post.call_args[0][0] == "/svc/api/v1/Rules/remove-all-users" - ) - assert ( - posted_data["tenantId"] == user_context.get_current_tenant_id() - and posted_data["ruleId"] == "rule-id" - ) - - def test_add_user_raises_valid_exception_when_rule_id_is_invalid( - self, - mocker, - mock_connection, - user_context, - mock_user_profile_service, - ): - response = mocker.MagicMock(spec=Response) - response.status_code = 400 - exception = mocker.MagicMock(spec=Py42NotFoundError) - exception.response = response - mock_connection.post.side_effect = Py42NotFoundError(exception, "") - alert_rule_service = AlertRulesService( - mock_connection, - user_context, - mock_user_profile_service, - ) - with pytest.raises(Py42InvalidRuleError) as e: - 
alert_rule_service.add_user("invalid-rule-id", "user-id") - assert "Invalid Observer Rule ID 'invalid-rule-id'." in e.value.args[0] diff --git a/tests/services/test_alerts.py b/tests/services/test_alerts.py deleted file mode 100644 index fc9e89c3f..000000000 --- a/tests/services/test_alerts.py +++ /dev/null @@ -1,514 +0,0 @@ -import pytest -from requests import Response -from tests.conftest import create_mock_response -from tests.conftest import TENANT_ID_FROM_RESPONSE - -from py42.response import Py42Response -from py42.sdk.queries.alerts.alert_query import AlertQuery -from py42.sdk.queries.alerts.filters import AlertState -from py42.services._connection import Connection -from py42.services.alerts import AlertService - - -TEST_RESPONSE = """ -{"type$": "RULE_METADATA_SEARCH_RESPONSE", - "ruleMetadata": [{ "name": "TESTNAME"}, { "name": "TSTNAME"}, { "name": "TesTNAME"}] -, "totalCount": 1, "problems": []} -""" -TEST_PARSEABLE_ALERT_DETAIL_RESPONSE = """ -{ - "type$": "ALERT_DETAILS_RESPONSE", - "alerts": [ - { - "type$": "ALERT_DETAILS", - "observations": [ - { - "type$": "OBSERVATION", - "id": "example_obsv_id", - "observedAt": "2020-01-01T00:00:00.0000000Z", - "type": "FedEndpointExfiltration", - "data": "{\\"example_key\\":\\"example_string_value\\",\\"example_list\\":[\\"example_list_item_1\\",\\"example_list_item_2\\"]}" - } - ] - } - ] -} -""" -TEST_NON_PARSEABLE_ALERT_DETAIL_RESPONSE = """ -{ - "type$": "ALERT_DETAILS_RESPONSE", - "alerts": [ - { - "type$": "ALERT_DETAILS", - "observations": [ - { - "type$": "OBSERVATION", - "id": "example_obsv_id", - "observedAt": "2020-01-01T00:00:00.0000000Z", - "type": "FedEndpointExfiltration", - "data": "{\\"invalid_json\\": ][ }" - } - ] - } - ] -} -""" -TEST_ALERT_AGGREGATE_RESPONSE = """{ - "type$": "ALERT_DETAILS_IN_AGGREGATE_V2_RESPONSE", - "alert": { - "type$": "ALERT_DETAILS_AGGREGATE_V2", - "tenantId": "093845-3333-bbbb-a6d9-11213cfbb33", - "type": "FED_COMPOSITE", - "name": "Zip file exfiltration", - "description": "Alerts you about the movement of archive files that may conceal file contents.", - "actor": "test.testerson@example.com", - "actorId": "111154444252344565", - "target": "N/A", - "severity": "HIGH", - "ruleId": "4455661d-1111-2222-3333-33d83f500000", - "ruleSource": "Alerting", - "id": "d8e67016-1969-4cc9-9590-d52ab20d349a", - "createdAt": "2021-08-09T15:21:56.9761420Z", - "state": "OPEN", - "observations": [ - { - "type$": "OBSERVATION_AGGREGATE", - "observedAt": "2021-08-09T15:00:00.0000000Z", - "type": "FedEndpointExfiltration", - "data": "{}" - } - ], - "firstObservationAt": "2021-08-09T15:00:00.0000000Z", - "lastObservationAt": "2021-08-09T15:05:00.0000000Z", - "fileCount": 1, - "totalFileSize": 112303, - "fileCategories": [ - { - "type$": "FILE_CATEGORY", - "category": "Archive", - "fileCount": 1, - "totalFileSize": 112303 - } - ], - "riskSeveritySummary": [ - { - "type$": "RISK_SEVERITY_SUMMARY", - "severity": "CRITICAL", - "numEvents": 1, - "summarizedRiskIndicators": [ - { - "type$": "SUMMARIZED_RISK_INDICATOR", - "name": "Zip", - "numEvents": 1 - }, - { - "type$": "SUMMARIZED_RISK_INDICATOR", - "name": "Remote", - "numEvents": 1 - }, - { - "type$": "SUMMARIZED_RISK_INDICATOR", - "name": "Yahoo upload", - "numEvents": 1 - }, - { - "type$": "SUMMARIZED_RISK_INDICATOR", - "name": "Departing", - "numEvents": 1 - } - ] - } - ], - "ffsUrlEndpoint": "https://ffs-url-test.example.com", - "alertUrl": "https://alerts.example.com/alert-id" - } -} -""" - - -@pytest.fixture -def mock_get_all_session(mocker): - response = 
create_mock_response(mocker, TEST_RESPONSE) - connection = mocker.MagicMock(spec=Connection) - connection.post.return_value = response - return connection - - -class TestAlertService: - @pytest.fixture - def successful_post(self, mock_connection, successful_response): - mock_connection.post.return_value = successful_response - - def test_search_posts_expected_data( - self, mock_connection, user_context, successful_post - ): - alert_service = AlertService(mock_connection, user_context) - _filter = AlertState.eq("OPEN") - query = AlertQuery(_filter) - alert_service.search(query) - post_data = mock_connection.post.call_args[1]["json"] - assert ( - post_data["tenantId"] == TENANT_ID_FROM_RESPONSE - and post_data["groupClause"] == "AND" - and post_data["srtKey"] == "CreatedAt" - and post_data["srtDirection"] == "desc" - and post_data["pgSize"] == 500 - and post_data["pgNum"] == 0 - and post_data["groups"][0]["filterClause"] == "AND" - and post_data["groups"][0]["filters"][0]["operator"] == "IS" - and post_data["groups"][0]["filters"][0]["term"] == "state" - and post_data["groups"][0]["filters"][0]["value"] == "OPEN" - ) - - def test_search_posts_to_expected_url( - self, mock_connection, user_context, successful_post - ): - alert_service = AlertService(mock_connection, user_context) - _filter = AlertState.eq("OPEN") - query = AlertQuery(_filter) - alert_service.search(query) - assert mock_connection.post.call_args[0][0] == "/svc/api/v1/query-alerts" - - def test_get_details_when_not_given_tenant_id_posts_expected_data( - self, mock_connection, user_context, mocker - ): - response = create_mock_response(mocker, TEST_PARSEABLE_ALERT_DETAIL_RESPONSE) - mock_connection.post.return_value = response - alert_service = AlertService(mock_connection, user_context) - alert_ids = ["ALERT_ID_1", "ALERT_ID_2"] - alert_service.get_details(alert_ids) - post_data = mock_connection.post.call_args[1]["json"] - assert ( - post_data["tenantId"] == TENANT_ID_FROM_RESPONSE - and post_data["alertIds"][0] == "ALERT_ID_1" - and post_data["alertIds"][1] == "ALERT_ID_2" - ) - - @pytest.mark.parametrize( - "alert_id", ["ALERT_ID_1", ("ALERT_ID_1",), ["ALERT_ID_1"]] - ) - def test_get_details_when_given_single_alert_id_posts_expected_data( - self, mock_connection, user_context, successful_post, mocker, alert_id - ): - response = create_mock_response(mocker, TEST_PARSEABLE_ALERT_DETAIL_RESPONSE) - mock_connection.post.return_value = response - alert_service = AlertService(mock_connection, user_context) - alert_service.get_details(alert_id) - post_data = mock_connection.post.call_args[1]["json"] - assert ( - post_data["tenantId"] == TENANT_ID_FROM_RESPONSE - and post_data["alertIds"][0] == "ALERT_ID_1" - ) - - def test_get_details_when_given_tenant_id_posts_expected_data( - self, mock_connection, user_context, successful_post, mocker - ): - response = create_mock_response(mocker, TEST_PARSEABLE_ALERT_DETAIL_RESPONSE) - mock_connection.post.return_value = response - alert_service = AlertService(mock_connection, user_context) - alert_ids = ["ALERT_ID_1", "ALERT_ID_2"] - alert_service.get_details(alert_ids) - post_data = mock_connection.post.call_args[1]["json"] - assert ( - post_data["tenantId"] == TENANT_ID_FROM_RESPONSE - and post_data["alertIds"][0] == "ALERT_ID_1" - and post_data["alertIds"][1] == "ALERT_ID_2" - ) - - def test_get_details_posts_to_expected_url( - self, mock_connection, user_context, successful_post, mocker - ): - response = create_mock_response(mocker, TEST_PARSEABLE_ALERT_DETAIL_RESPONSE) - 
mock_connection.post.return_value = response - alert_service = AlertService(mock_connection, user_context) - alert_ids = ["ALERT_ID_1", "ALERT_ID_2"] - alert_service.get_details(alert_ids) - assert mock_connection.post.call_args[0][0] == "/svc/api/v1/query-details" - - def test_get_details_converts_json_observation_strings_to_objects( - self, mocker, mock_connection, user_context - ): - requests_response = mocker.MagicMock(spec=Response) - requests_response.text = TEST_PARSEABLE_ALERT_DETAIL_RESPONSE - py42_response = Py42Response(requests_response) - mock_connection.post.return_value = py42_response - alert_service = AlertService(mock_connection, user_context) - response = alert_service.get_details("alert_id") - observation_data = response["alerts"][0]["observations"][0]["data"] - assert observation_data["example_key"] == "example_string_value" - assert type(observation_data["example_list"]) is list - - def test_get_details_when_observation_data_not_parseable_remains_unchanged( - self, mocker, mock_connection, user_context - ): - requests_response = mocker.MagicMock(spec=Response) - requests_response.text = TEST_NON_PARSEABLE_ALERT_DETAIL_RESPONSE - py42_response = Py42Response(requests_response) - mock_connection.post.return_value = py42_response - alert_service = AlertService(mock_connection, user_context) - response = alert_service.get_details("alert_id") - observation_data = response["alerts"][0]["observations"][0]["data"] - expected_observation_data = '{"invalid_json": ][ }' - assert observation_data == expected_observation_data - - def test_update_state_when_not_given_tenant_id_posts_expected_data( - self, mock_connection, user_context, successful_post - ): - alert_service = AlertService(mock_connection, user_context) - alert_ids = ["ALERT_ID_1", "ALERT_ID_2"] - alert_service.update_state("RESOLVED", alert_ids, "") - post_data = mock_connection.post.call_args[1]["json"] - assert ( - post_data["tenantId"] == TENANT_ID_FROM_RESPONSE - and post_data["alertIds"][0] == "ALERT_ID_1" - and post_data["alertIds"][1] == "ALERT_ID_2" - and post_data["state"] == "RESOLVED" - and post_data["note"] == "" - ) - - @pytest.mark.parametrize( - "alert_id", ["ALERT_ID_1", ("ALERT_ID_1",), ["ALERT_ID_1"]] - ) - def test_update_state_when_given_single_alert_id_posts_expected_data( - self, mock_connection, user_context, successful_post, alert_id - ): - alert_service = AlertService(mock_connection, user_context) - alert_service.update_state("PENDING", alert_id) - post_data = mock_connection.post.call_args[1]["json"] - assert ( - post_data["tenantId"] == TENANT_ID_FROM_RESPONSE - and post_data["alertIds"][0] == "ALERT_ID_1" - and post_data["state"] == "PENDING" - and post_data["note"] is None - ) - - def test_update_state_posts_expected_data( - self, mock_connection, user_context, successful_post - ): - alert_service = AlertService(mock_connection, user_context) - alert_ids = ["ALERT_ID_1", "ALERT_ID_2"] - alert_service.update_state("OPEN", alert_ids, "some-tenant-id") - post_data = mock_connection.post.call_args[1]["json"] - assert ( - post_data["tenantId"] == TENANT_ID_FROM_RESPONSE - and post_data["alertIds"][0] == "ALERT_ID_1" - and post_data["alertIds"][1] == "ALERT_ID_2" - and post_data["state"] == "OPEN" - and post_data["note"] == "some-tenant-id" - ) - - def test_update_state_posts_to_expected_url( - self, mock_connection, user_context, successful_post - ): - alert_service = AlertService(mock_connection, user_context) - alert_ids = ["ALERT_ID_1", "ALERT_ID_2"] - 
alert_service.update_state("RESOLVED", alert_ids, "some-tenant-id") - assert mock_connection.post.call_args[0][0] == "/svc/api/v1/update-state" - - def test_update_state_when_note_passed_none_posts_expected_data( - self, mock_connection, user_context, successful_post - ): - alert_service = AlertService(mock_connection, user_context) - alert_ids = ["ALERT_ID_1", "ALERT_ID_2"] - alert_service.update_state("RESOLVED", alert_ids, note=None) - assert mock_connection.post.call_args[0][0] == "/svc/api/v1/update-state" - post_data = mock_connection.post.call_args[1]["json"] - assert ( - post_data["tenantId"] == TENANT_ID_FROM_RESPONSE - and post_data["alertIds"][0] == "ALERT_ID_1" - and post_data["alertIds"][1] == "ALERT_ID_2" - and post_data["state"] == "RESOLVED" - and post_data["note"] is None - ) - - def test_get_all_rules_posts_expected_data(self, mock_connection, user_context): - alert_service = AlertService(mock_connection, user_context) - - for _ in alert_service.get_all_rules(sort_key="key", sort_direction="ASC"): - break - - assert mock_connection.post.call_count == 1 - posted_data = mock_connection.post.call_args[1]["json"] - assert ( - mock_connection.post.call_args[0][0] - == "/svc/api/v1/rules/query-rule-metadata" - ) - assert ( - posted_data["tenantId"] == user_context.get_current_tenant_id() - and posted_data["groups"] == [] - and posted_data["groupClause"] == "AND" - and posted_data["pgNum"] == 0 - and posted_data["pgSize"] == 500 - and posted_data["srtKey"] == "key" - and posted_data["srtDirection"] == "ASC" - ) - - def test_get_all_rules_by_name_posts_expected_data( - self, mock_connection, user_context, successful_post - ): - alert_service = AlertService(mock_connection, user_context) - for _ in alert_service.get_all_rules_by_name( - "testname", sort_key="key", sort_direction="ASC" - ): - break - - assert mock_connection.post.call_count == 1 - posted_data = mock_connection.post.call_args[1]["json"] - filter_group = posted_data["groups"][0]["filters"][0] - - assert filter_group["term"] == "Name" - assert filter_group["operator"] == "IS" - assert filter_group["value"] == "testname" - assert ( - mock_connection.post.call_args[0][0] - == "/svc/api/v1/rules/query-rule-metadata" - ) - assert ( - posted_data["tenantId"] == user_context.get_current_tenant_id() - and posted_data["groupClause"] == "AND" - and posted_data["pgNum"] == 0 - and posted_data["pgSize"] == 500 - and posted_data["srtKey"] == "key" - and posted_data["srtDirection"] == "ASC" - ) - - def test_get_rule_by_observer_id_posts_expected_data( - self, mock_connection, user_context, successful_post - ): - alert_service = AlertService(mock_connection, user_context) - for _ in alert_service.get_rule_by_observer_id("testid"): - break - - assert mock_connection.post.call_count == 1 - posted_data = mock_connection.post.call_args[1]["json"] - filter_group = posted_data["groups"][0]["filters"][0] - - assert filter_group["term"] == "ObserverRuleId" - assert filter_group["operator"] == "IS" - assert filter_group["value"] == "testid" - assert ( - mock_connection.post.call_args[0][0] - == "/svc/api/v1/rules/query-rule-metadata" - ) - assert ( - posted_data["tenantId"] == user_context.get_current_tenant_id() - and posted_data["groupClause"] == "AND" - and posted_data["pgNum"] == 0 - and posted_data["pgSize"] == 500 - and posted_data["srtKey"] == "CreatedAt" - and posted_data["srtDirection"] == "DESC" - ) - - def test_get_rules_page_calls_post_with_expected_url_and_data( - self, mock_connection, user_context, successful_post - ): - 
alert_service = AlertService(mock_connection, user_context) - alert_service.get_rules_page( - groups=["groups"], - page_num=1, - page_size=100, - sort_key="sort key", - sort_direction="direction", - ) - # Note that pgNum is -1 from what is given because of that API - data = { - "tenantId": TENANT_ID_FROM_RESPONSE, - "groups": ["groups"], - "groupClause": "AND", - "pgNum": 0, - "pgSize": 100, - "srtKey": "sort key", - "srtDirection": "direction", - } - mock_connection.post.assert_called_once_with( - "/svc/api/v1/rules/query-rule-metadata", json=data - ) - - def test_update_note_calls_post_with_expected_url_and_data( - self, mock_connection, user_context - ): - alert_service = AlertService(mock_connection, user_context) - alert_service.update_note("alert-id", note="Test Note") - assert mock_connection.post.call_args[0][0] == "/svc/api/v1/add-note" - post_data = mock_connection.post.call_args[1]["json"] - assert ( - post_data["tenantId"] == TENANT_ID_FROM_RESPONSE - and post_data["alertId"] == "alert-id" - and post_data["note"] == "Test Note" - ) - - def test_search_all_pages_posts_expected_data(self, mock_connection, user_context): - alert_service = AlertService(mock_connection, user_context) - _filter = AlertState.eq("OPEN") - query = AlertQuery(_filter) - - for _ in alert_service.search_all_pages(query): - break - - assert mock_connection.post.call_count == 1 - assert mock_connection.post.call_args[0][0] == "/svc/api/v1/query-alerts" - post_data = mock_connection.post.call_args[1]["json"] - assert ( - post_data["tenantId"] == TENANT_ID_FROM_RESPONSE - and post_data["groupClause"] == "AND" - and post_data["srtKey"] == "CreatedAt" - and post_data["srtDirection"] == "desc" - and post_data["pgSize"] == 500 - and post_data["pgNum"] == 0 - and post_data["groups"][0]["filterClause"] == "AND" - and post_data["groups"][0]["filters"][0]["operator"] == "IS" - and post_data["groups"][0]["filters"][0]["term"] == "state" - and post_data["groups"][0]["filters"][0]["value"] == "OPEN" - ) - - def test_search_posts_expected_data_overwrites_default_option_when_passed_page_num_and_page_size( - self, mock_connection, user_context - ): - alert_service = AlertService(mock_connection, user_context) - _filter = AlertState.eq("OPEN") - query = AlertQuery(_filter) - - alert_service.search(query, 10, 20) - - assert mock_connection.post.call_count == 1 - assert mock_connection.post.call_args[0][0] == "/svc/api/v1/query-alerts" - post_data = mock_connection.post.call_args[1]["json"] - assert ( - post_data["tenantId"] == TENANT_ID_FROM_RESPONSE - and post_data["groupClause"] == "AND" - and post_data["srtKey"] == "CreatedAt" - and post_data["srtDirection"] == "desc" - and post_data["pgSize"] == 20 - and post_data["pgNum"] == 9 - and post_data["groups"][0]["filterClause"] == "AND" - and post_data["groups"][0]["filters"][0]["operator"] == "IS" - and post_data["groups"][0]["filters"][0]["term"] == "state" - and post_data["groups"][0]["filters"][0]["value"] == "OPEN" - ) - - def test_get_aggregate_data_calls_post_with_expected_url_and_data( - self, mock_connection, user_context - ): - alert_service = AlertService(mock_connection, user_context) - alert_service.get_aggregate_data("alert-id") - assert ( - mock_connection.post.call_args[0][0] - == "/svc/api/v2/query-details-aggregate" - ) - post_data = mock_connection.post.call_args[1]["json"] - assert post_data["alertId"] == "alert-id" - - def test_get_aggregate_data_creates_alias_for_ffs_url( - self, mocker, mock_connection, user_context - ): - # This is to support the method 
when it once called the v1 api. - mock_connection.post.return_value = create_mock_response( - mocker, TEST_ALERT_AGGREGATE_RESPONSE - ) - alert_service = AlertService(mock_connection, user_context) - response = alert_service.get_aggregate_data("alert-id") - assert ( - response["alert"]["ffsUrl"] - == "https://ffs-url-test.example.com" - == response["alert"]["ffsUrlEndpoint"] - ) diff --git a/tests/services/test_archive.py b/tests/services/test_archive.py index 3c35c0922..a7fb60c9c 100644 --- a/tests/services/test_archive.py +++ b/tests/services/test_archive.py @@ -1,8 +1,8 @@ import pytest from tests.conftest import create_mock_response -import py42.settings -from py42.services.archive import ArchiveService +import pycpg.settings +from pycpg.services.archive import ArchiveService ARCHIVE_URI = "/api/v1/Archive" DEFAULT_GET_ARCHIVES_PARAMS = { @@ -70,7 +70,7 @@ def test_get_all_archives_from_value_calls_get_expected_number_of_times( mock_get_archives_empty_response, ): device_guid = "42" - py42.settings.items_per_page = 1 + pycpg.settings.items_per_page = 1 service = ArchiveService(mock_connection) mock_connection.get.side_effect = [ mock_get_archives_response, @@ -79,7 +79,7 @@ def test_get_all_archives_from_value_calls_get_expected_number_of_times( ] for _ in service.get_all_archives_from_value(device_guid, "backupSourceGuid"): pass - py42.settings.items_per_page = 500 + pycpg.settings.items_per_page = 500 assert mock_connection.get.call_count == 3 def test_get_by_value_calls_get_with_expected_uri_and_params(self, mock_connection): @@ -96,7 +96,7 @@ def test_get_all_restore_history_calls_get_expected_number_of_times( mock_get_all_restore_history_response, mock_get_all_restore_history_empty_response, ): - py42.settings.items_per_page = 1 + pycpg.settings.items_per_page = 1 service = ArchiveService(mock_connection) mock_connection.get.side_effect = [ mock_get_all_restore_history_response, @@ -105,7 +105,7 @@ def test_get_all_restore_history_calls_get_expected_number_of_times( ] for _ in service.get_all_restore_history(10, "orgId", "123"): pass - py42.settings.items_per_page = 500 + pycpg.settings.items_per_page = 500 assert mock_connection.get.call_count == 3 def test_get_web_restore_info_calls_get_with_expected_url_and_params( @@ -135,7 +135,7 @@ def test_get_all_org_cold_storage_archives_calls_get_expected_number_of_times( mock_get_all_org_cold_storage_response, mock_get_all_org_cold_storage_empty_response, ): - py42.settings.items_per_page = 1 + pycpg.settings.items_per_page = 1 service = ArchiveService(mock_connection) mock_connection.get.side_effect = [ mock_get_all_org_cold_storage_response, @@ -144,7 +144,7 @@ def test_get_all_org_cold_storage_archives_calls_get_expected_number_of_times( ] for _ in service.get_all_org_cold_storage_archives("orgId"): pass - py42.settings.items_per_page = 500 + pycpg.settings.items_per_page = 500 assert mock_connection.get.call_count == 3 def test_get_all_org_cold_storage_archives_calls_get_with_expected_uri_and_params( diff --git a/tests/services/test_auditlogs.py b/tests/services/test_auditlogs.py index c4df59063..ccf946adb 100644 --- a/tests/services/test_auditlogs.py +++ b/tests/services/test_auditlogs.py @@ -1,6 +1,6 @@ from datetime import datetime as dt -from py42.services.auditlogs import AuditLogsService +from pycpg.services.auditlogs import AuditLogsService class TestAuditLogService: @@ -52,7 +52,7 @@ def test_get_all_when_begin_and_end_times_are_given_passes_valid_date_range_para def 
test_get_all_calls_actor_names_with_list_of_user_names(self, mock_connection): service = AuditLogsService(mock_connection) - for _ in service.get_all(usernames=["test@test.com", "test@code42.com"]): + for _ in service.get_all(usernames=["test@test.com", "test@crashplan.com"]): pass expected_data = { "page": 0, @@ -60,7 +60,7 @@ def test_get_all_calls_actor_names_with_list_of_user_names(self, mock_connection "dateRange": {}, "eventTypes": [], "actorIds": [], - "actorNames": ["test@test.com", "test@code42.com"], + "actorNames": ["test@test.com", "test@crashplan.com"], "actorIpAddresses": [], "affectedUserIds": [], "affectedUserNames": [], @@ -72,7 +72,7 @@ def test_get_all_calls_actor_names_with_list_of_user_names(self, mock_connection def test_get_all_calls_all_params_in_valid_formats(self, mock_connection): service = AuditLogsService(mock_connection) for _ in service.get_all( - usernames=["test@test.com", "test@code42.com"], + usernames=["test@test.com", "test@crashplan.com"], user_ids=["1208", "12089"], event_types="abc", user_ip_addresses=["127.0.0.1", "0.0.0.0"], @@ -86,7 +86,7 @@ def test_get_all_calls_all_params_in_valid_formats(self, mock_connection): "dateRange": {}, "eventTypes": ["abc"], "actorIds": ["1208", "12089"], - "actorNames": ["test@test.com", "test@code42.com"], + "actorNames": ["test@test.com", "test@crashplan.com"], "actorIpAddresses": ["127.0.0.1", "0.0.0.0"], "affectedUserIds": [], "affectedUserNames": ["test_user@name.com"], @@ -133,7 +133,7 @@ def test_get_page_calls_expected_uri_and_params_in_valid_formats( page_size=300, begin_time=None, end_time=None, - usernames=["test@test.com", "test@code42.com"], + usernames=["test@test.com", "test@crashplan.com"], user_ids=["1208", "12089"], event_types="abc", user_ip_addresses=["127.0.0.1", "0.0.0.0"], @@ -146,7 +146,7 @@ def test_get_page_calls_expected_uri_and_params_in_valid_formats( "dateRange": {}, "eventTypes": ["abc"], "actorIds": ["1208", "12089"], - "actorNames": ["test@test.com", "test@code42.com"], + "actorNames": ["test@test.com", "test@crashplan.com"], "actorIpAddresses": ["127.0.0.1", "0.0.0.0"], "affectedUserIds": [], "affectedUserNames": ["test_user@name.com"], @@ -216,7 +216,7 @@ def test_get_page_when_format_is_specified_passes_csv_headers_and_params( page_size=300, begin_time=None, end_time=None, - usernames=["test@test.com", "test@code42.com"], + usernames=["test@test.com", "test@crashplan.com"], user_ids=["1208", "12089"], event_types="abc", user_ip_addresses=["127.0.0.1", "0.0.0.0"], @@ -229,7 +229,7 @@ def test_get_page_when_format_is_specified_passes_csv_headers_and_params( "dateRange": {}, "eventTypes": ["abc"], "actorIds": ["1208", "12089"], - "actorNames": ["test@test.com", "test@code42.com"], + "actorNames": ["test@test.com", "test@crashplan.com"], "actorIpAddresses": ["127.0.0.1", "0.0.0.0"], "affectedUserIds": [], "affectedUserNames": ["test_user@name.com"], @@ -250,7 +250,7 @@ def test_get_page_when_format_is_specified_passes_cef_headers_and_params( page_size=300, begin_time=None, end_time=None, - usernames=["test@test.com", "test@code42.com"], + usernames=["test@test.com", "test@crashplan.com"], user_ids=["1208", "12089"], event_types="abc", user_ip_addresses=["127.0.0.1", "0.0.0.0"], @@ -263,7 +263,7 @@ def test_get_page_when_format_is_specified_passes_cef_headers_and_params( "dateRange": {}, "eventTypes": ["abc"], "actorIds": ["1208", "12089"], - "actorNames": ["test@test.com", "test@code42.com"], + "actorNames": ["test@test.com", "test@crashplan.com"], "actorIpAddresses": ["127.0.0.1", 
"0.0.0.0"], "affectedUserIds": [], "affectedUserNames": ["test_user@name.com"], @@ -284,7 +284,7 @@ def test_get_page_when_invalid_format_is_specified_passes_no_headers_and_params( page_size=300, begin_time=None, end_time=None, - usernames=["test@test.com", "test@code42.com"], + usernames=["test@test.com", "test@crashplan.com"], user_ids=["1208", "12089"], event_types="abc", user_ip_addresses=["127.0.0.1", "0.0.0.0"], @@ -297,7 +297,7 @@ def test_get_page_when_invalid_format_is_specified_passes_no_headers_and_params( "dateRange": {}, "eventTypes": ["abc"], "actorIds": ["1208", "12089"], - "actorNames": ["test@test.com", "test@code42.com"], + "actorNames": ["test@test.com", "test@crashplan.com"], "actorIpAddresses": ["127.0.0.1", "0.0.0.0"], "affectedUserIds": [], "affectedUserNames": ["test_user@name.com"], diff --git a/tests/services/test_auth.py b/tests/services/test_auth.py index 8a4b57c6c..c15c08ddc 100644 --- a/tests/services/test_auth.py +++ b/tests/services/test_auth.py @@ -2,9 +2,9 @@ from requests import Request from tests.conftest import create_mock_response -from py42.services._auth import ApiClientAuth -from py42.services._auth import BearerAuth -from py42.services._auth import CustomJWTAuth +from pycpg.services._auth import ApiClientAuth +from pycpg.services._auth import BearerAuth +from pycpg.services._auth import CustomJWTAuth @pytest.fixture diff --git a/tests/services/test_cases.py b/tests/services/test_cases.py deleted file mode 100644 index 3b107a4d6..000000000 --- a/tests/services/test_cases.py +++ /dev/null @@ -1,337 +0,0 @@ -import pytest -from tests.conftest import create_mock_error -from tests.conftest import create_mock_response - -import py42.settings -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42CaseNameExistsError -from py42.exceptions import Py42DescriptionLimitExceededError -from py42.exceptions import Py42InvalidCaseUserError -from py42.exceptions import Py42UpdateClosedCaseError -from py42.services.cases import CasesService - -GET_ALL_TEST_RESPONSE = '{"cases":["test"], "totalCount":1}' -EMPTY_GET_ALL_TEST_RESPONSE = '{"cases": [], "totalCount":0}' -UPDATE_ERROR_RESPONSE = """{ - "timestamp": "2021-01-06T16:54:44.668+00:00", - "status": 400, - "error": "Bad Request", - "message": "NO_EDITS_ONCE_CLOSED", - "path": "/api/v1/case" -} -""" -GET_CASE_RESPONSE = """ -{ - "assignee": "string", - "assigneeUsername": "string", - "createdAt": "2021-01-04T08:09:58.832Z", - "createdByUserUid": "string", - "createdByUsername": "string", - "lastModifiedByUserUid": "string", - "lastModifiedByUsername": "string", - "name": "string", - "number": 0, - "status": "OPEN", - "subject": "string", - "subjectUsername": "string", - "updatedAt": "2021-01-04T08:09:58.832Z" -} -""" -NAME_EXISTS_ERROR_MSG = '{"problem":"NAME_EXISTS","description":null}' -DESCRIPTION_TOO_LONG_ERROR_MSG = '{"problem":"DESCRIPTION_TOO_LONG","description":null}' -UNKNOWN_ERROR_MSG = '{"problem":"SURPRISE!"}' -_TEST_CASE_NUMBER = 123456 -_BASE_URI = "/api/v1/case" - - -def _get_invalid_user_text(user_type): - return f'{{"problem":"INVALID_USER","description":"{user_type} validation failed"}}' - - -class TestCasesService: - @pytest.fixture - def mock_case_response(self, mocker): - return create_mock_response(mocker, GET_ALL_TEST_RESPONSE) - - @pytest.fixture - def mock_get_response(self, mocker): - return create_mock_response(mocker, GET_CASE_RESPONSE) - - @pytest.fixture - def mock_case_empty_response(self, mocker): - return create_mock_response(mocker, 
EMPTY_GET_ALL_TEST_RESPONSE) - - def test_create_called_with_expected_url_and_params(self, mock_connection): - cases_service = CasesService(mock_connection) - cases_service.create("name", "subject", "user uid", "description", "findings") - assert mock_connection.post.call_args[0][0] == "/api/v1/case" - data = { - "name": "name", - "subject": "subject", - "assignee": "user uid", - "description": "description", - "findings": "findings", - } - mock_connection.post.assert_called_once_with(_BASE_URI, json=data) - - def test_create_when_fails_with_name_exists_error_raises_custom_exception( - self, mocker, mock_connection - ): - cases_service = CasesService(mock_connection) - mock_connection.post.side_effect = create_mock_error( - Py42BadRequestError, mocker, NAME_EXISTS_ERROR_MSG - ) - with pytest.raises(Py42CaseNameExistsError) as err: - cases_service.create("Duplicate") - - assert ( - err.value.args[0] - == "Case name 'Duplicate' already exists, please set another name." - ) - - def test_create_when_fails_with_description_too_long_error_raises_custom_exception( - self, mocker, mock_connection - ): - cases_service = CasesService(mock_connection) - mock_connection.post.side_effect = create_mock_error( - Py42BadRequestError, mocker, DESCRIPTION_TOO_LONG_ERROR_MSG - ) - with pytest.raises(Py42DescriptionLimitExceededError) as err: - cases_service.create("test", description="supposedly too long") - - assert ( - err.value.args[0] - == "Description limit exceeded, max 250 characters allowed." - ) - - def test_create_when_fails_with_invalid_subject_raises_custom_exception( - self, mocker, mock_connection - ): - cases_service = CasesService(mock_connection) - mock_connection.post.side_effect = create_mock_error( - Py42BadRequestError, mocker, _get_invalid_user_text("subject") - ) - with pytest.raises(Py42InvalidCaseUserError) as err: - cases_service.create("test", subject="Not a person") - - assert err.value.args[0] == "The provided subject is not a valid user." - - def test_create_when_fails_with_invalid_assignee_raises_custom_exception( - self, mocker, mock_connection - ): - cases_service = CasesService(mock_connection) - mock_connection.post.side_effect = create_mock_error( - Py42BadRequestError, mocker, _get_invalid_user_text("assignee") - ) - with pytest.raises(Py42InvalidCaseUserError) as err: - cases_service.create("test", assignee="Not a person") - - assert err.value.args[0] == "The provided assignee is not a valid user." 
- - def test_create_when_fails_with_unknown_error_raises_exception( - self, mocker, mock_connection - ): - cases_service = CasesService(mock_connection) - mock_connection.post.side_effect = create_mock_error( - Py42BadRequestError, mocker, UNKNOWN_ERROR_MSG - ) - with pytest.raises(Py42BadRequestError): - cases_service.create("Case") - - def test_get_all_called_expected_number_of_times( - self, mock_connection, mock_case_response, mock_case_empty_response - ): - cases_service = CasesService(mock_connection) - py42.settings.items_per_page = 1 - items = [mock_case_response, mock_case_empty_response] - - mock_connection.get.side_effect = items - for _ in cases_service.get_all(): - pass - - assert mock_connection.get.call_count == 2 - py42.settings.items_per_page = 500 - - def test_get_all_called_with_expected_url_and_default_params( - self, mock_connection, mock_case_response, mock_case_empty_response - ): - cases_service = CasesService(mock_connection) - items = [ - mock_case_response, - ] - - mock_connection.get.side_effect = items - for _ in cases_service.get_all(): - pass - - expected_params = { - "name": None, - "subject": None, - "assignee": None, - "createdAt": None, - "updatedAt": None, - "status": None, - "pgNum": 1, - "pgSize": 500, - "srtDir": "asc", - "srtKey": "number", - } - mock_connection.get.assert_called_once_with(_BASE_URI, params=expected_params) - - def test_get_all_called_with_expected_url_and_params( - self, mock_connection, mock_case_response - ): - cases_service = CasesService(mock_connection) - mock_connection.get.side_effect = [ - mock_case_response, - mock_case_response, - ] - for _ in cases_service.get_all(name="test-case"): - continue - - expected_params = { - "name": "test-case", - "subject": None, - "assignee": None, - "createdAt": None, - "updatedAt": None, - "status": None, - "pgNum": 1, - "pgSize": 500, - "srtDir": "asc", - "srtKey": "number", - } - mock_connection.get.assert_called_once_with(_BASE_URI, params=expected_params) - - def test_get_all_called_with_expected_url_and_all_optional_params( - self, mock_connection, mock_case_response - ): - cases_service = CasesService(mock_connection) - mock_connection.get.side_effect = [ - mock_case_response, - mock_case_response, - ] - for _ in cases_service.get_all( - name="test-case", - subject="test", - assignee="user-uid", - updated_at="2010-04-30T001", - created_at="2010-01-03T002", - status="open", - ): - continue - - expected_params = { - "name": "test-case", - "subject": "test", - "assignee": "user-uid", - "createdAt": "2010-01-03T002", - "updatedAt": "2010-04-30T001", - "status": "open", - "pgNum": 1, - "pgSize": 500, - "srtDir": "asc", - "srtKey": "number", - } - mock_connection.get.assert_called_once_with(_BASE_URI, params=expected_params) - - def test_export_called_with_expected_url_and_params(self, mock_connection): - cases_service = CasesService(mock_connection) - cases_service.export_summary(_TEST_CASE_NUMBER) - assert ( - mock_connection.get.call_args[0][0] - == f"/api/v1/case/{_TEST_CASE_NUMBER}/export" - ) - - def test_get_called_with_expected_url_and_params(self, mock_connection): - cases_service = CasesService(mock_connection) - cases_service.get(_TEST_CASE_NUMBER) - assert ( - mock_connection.get.call_args[0][0] == f"/api/v1/case/{_TEST_CASE_NUMBER}" - ) - - def test_update_called_with_expected_url_and_params( - self, mock_connection, mock_get_response - ): - cases_service = CasesService(mock_connection) - mock_connection.get.return_value = mock_get_response - 
cases_service.update(_TEST_CASE_NUMBER, findings="x") - data = { - "name": "string", - "subject": "string", - "assignee": "string", - "description": None, - "status": "OPEN", - "findings": "x", - } - mock_connection.put.assert_called_once_with( - f"/api/v1/case/{_TEST_CASE_NUMBER}", json=data - ) - - def test_update_when_fails_with_name_exists_error_raises_custom_exception( - self, mocker, mock_connection - ): - cases_service = CasesService(mock_connection) - mock_connection.put.side_effect = create_mock_error( - Py42BadRequestError, mocker, NAME_EXISTS_ERROR_MSG - ) - with pytest.raises(Py42CaseNameExistsError) as err: - cases_service.update(_TEST_CASE_NUMBER, "Duplicate") - - assert ( - err.value.args[0] - == "Case name 'Duplicate' already exists, please set another name." - ) - - def test_update_when_case_is_closed_raises_custom_exception( - self, mocker, mock_connection, mock_get_response - ): - cases_service = CasesService(mock_connection) - mock_connection.get.return_value = mock_get_response - mock_connection.put.side_effect = create_mock_error( - Py42BadRequestError, mocker, UPDATE_ERROR_RESPONSE - ) - with pytest.raises(Py42UpdateClosedCaseError) as err: - cases_service.update(_TEST_CASE_NUMBER, findings="x") - - assert err.value.args[0] == "Cannot update a closed case." - - def test_update_when_fails_with_description_too_long_error_raises_custom_exception( - self, mocker, mock_connection, mock_get_response - ): - cases_service = CasesService(mock_connection) - mock_connection.get.return_value = mock_get_response - mock_connection.put.side_effect = create_mock_error( - Py42BadRequestError, mocker, DESCRIPTION_TOO_LONG_ERROR_MSG - ) - with pytest.raises(Py42DescriptionLimitExceededError) as err: - cases_service.update(_TEST_CASE_NUMBER, description="supposedly too long") - - assert ( - err.value.args[0] - == "Description limit exceeded, max 250 characters allowed." - ) - - def test_update_when_fails_with_invalid_subject_raises_custom_exception( - self, mocker, mock_connection - ): - cases_service = CasesService(mock_connection) - mock_connection.put.side_effect = create_mock_error( - Py42BadRequestError, mocker, _get_invalid_user_text("subject") - ) - with pytest.raises(Py42InvalidCaseUserError) as err: - cases_service.update(_TEST_CASE_NUMBER, subject="Not a person") - - assert err.value.args[0] == "The provided subject is not a valid user." - - def test_update_when_fails_with_invalid_assignee_raises_custom_exception( - self, mocker, mock_connection - ): - cases_service = CasesService(mock_connection) - mock_connection.put.side_effect = create_mock_error( - Py42BadRequestError, mocker, _get_invalid_user_text("assignee") - ) - with pytest.raises(Py42InvalidCaseUserError) as err: - cases_service.update(_TEST_CASE_NUMBER, assignee="Not a person") - - assert err.value.args[0] == "The provided assignee is not a valid user." 
diff --git a/tests/services/test_casesfileevents.py b/tests/services/test_casesfileevents.py deleted file mode 100644 index 45da4b2c4..000000000 --- a/tests/services/test_casesfileevents.py +++ /dev/null @@ -1,94 +0,0 @@ -import pytest -from tests.conftest import create_mock_error - -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42CaseAlreadyHasEventError -from py42.exceptions import Py42UpdateClosedCaseError -from py42.services.casesfileevents import CasesFileEventsService - -_TEST_CASE_NUMBER = 123456 -UPDATE_ERROR_RESPONSE = '{"problem":"CASE_IS_CLOSED"}' -ADDED_SAME_EVENT_AGAIN_ERROR = '{"problem":"CASE_ALREADY_HAS_EVENT"}' -UNKNOWN_ERROR = '{"problem":"SURPRISE!"}' - - -class TestCasesFileEventService: - def test_add_called_with_expected_url_and_params(self, mock_connection): - case_file_event_service = CasesFileEventsService(mock_connection) - case_file_event_service.add(_TEST_CASE_NUMBER, "event-id") - assert ( - mock_connection.post.call_args[0][0] - == f"/api/v1/case/{_TEST_CASE_NUMBER}/fileevent/event-id" - ) - - def test_delete_called_with_expected_url_and_params(self, mock_connection): - case_file_event_service = CasesFileEventsService(mock_connection) - case_file_event_service.delete(_TEST_CASE_NUMBER, "event-id") - assert ( - mock_connection.delete.call_args[0][0] - == f"/api/v1/case/{_TEST_CASE_NUMBER}/fileevent/event-id" - ) - - def test_get_called_with_expected_url_and_params(self, mock_connection): - case_file_event_service = CasesFileEventsService(mock_connection) - case_file_event_service.get(_TEST_CASE_NUMBER, "event-id") - assert ( - mock_connection.get.call_args[0][0] - == f"/api/v1/case/{_TEST_CASE_NUMBER}/fileevent/event-id" - ) - - def test_get_all_called_with_expected_url_and_params(self, mock_connection): - case_file_event_service = CasesFileEventsService(mock_connection) - case_file_event_service.get_all(_TEST_CASE_NUMBER) - assert ( - mock_connection.get.call_args[0][0] - == f"/api/v1/case/{_TEST_CASE_NUMBER}/fileevent" - ) - - def test_add_on_a_closed_case_raises_error(self, mocker, mock_connection): - mock_connection.post.side_effect = create_mock_error( - Py42BadRequestError, mocker, UPDATE_ERROR_RESPONSE - ) - case_file_event_service = CasesFileEventsService(mock_connection) - with pytest.raises(Py42UpdateClosedCaseError) as err: - case_file_event_service.add(_TEST_CASE_NUMBER, event_id="x") - - assert err.value.args[0] == "Cannot update a closed case." - - def test_delete_on_a_closed_case_raises_error(self, mocker, mock_connection): - case_file_event_service = CasesFileEventsService(mock_connection) - mock_connection.delete.side_effect = create_mock_error( - Py42BadRequestError, mocker, UPDATE_ERROR_RESPONSE - ) - with pytest.raises(Py42UpdateClosedCaseError) as err: - case_file_event_service.delete(_TEST_CASE_NUMBER, event_id="x") - - assert err.value.args[0] == "Cannot update a closed case." - - def test_add_when_same_event_is_added_multiple_times_raises_error( - self, mocker, mock_connection - ): - case_file_event_service = CasesFileEventsService(mock_connection) - mock_connection.post.side_effect = create_mock_error( - Py42BadRequestError, mocker, ADDED_SAME_EVENT_AGAIN_ERROR - ) - with pytest.raises(Py42CaseAlreadyHasEventError) as err: - case_file_event_service.add(_TEST_CASE_NUMBER, event_id="x") - - assert err.value.args[0] == "Event is already associated to the case." 
- - def test_add_when_unknown_error_raises_error(self, mocker, mock_connection): - case_file_event_service = CasesFileEventsService(mock_connection) - mock_connection.post.side_effect = create_mock_error( - Py42BadRequestError, mocker, UNKNOWN_ERROR - ) - with pytest.raises(Py42BadRequestError): - case_file_event_service.add(_TEST_CASE_NUMBER, event_id="x") - - def test_delete_when_unknown_error_raises_error(self, mocker, mock_connection): - case_file_event_service = CasesFileEventsService(mock_connection) - mock_connection.post.side_effect = create_mock_error( - Py42BadRequestError, mocker, UNKNOWN_ERROR - ) - with pytest.raises(Py42BadRequestError): - case_file_event_service.add(_TEST_CASE_NUMBER, event_id="x") diff --git a/tests/services/test_connection.py b/tests/services/test_connection.py index 5fa611a9a..28ca8005c 100644 --- a/tests/services/test_connection.py +++ b/tests/services/test_connection.py @@ -2,21 +2,21 @@ from requests import Response from tests.conftest import TEST_DEVICE_GUID -import py42.settings as settings -from py42.exceptions import Py42DeviceNotConnectedError -from py42.exceptions import Py42Error -from py42.exceptions import Py42FeatureUnavailableError -from py42.exceptions import Py42InternalServerError -from py42.exceptions import Py42UnauthorizedError -from py42.response import Py42Response -from py42.services._auth import C42RenewableAuth -from py42.services._connection import ConnectedServerHostResolver -from py42.services._connection import Connection -from py42.services._connection import HostResolver -from py42.services._connection import KnownUrlHostResolver -from py42.services._connection import MicroserviceKeyHostResolver -from py42.services._connection import MicroservicePrefixHostResolver -from py42.services._keyvaluestore import KeyValueStoreService +import pycpg.settings as settings +from pycpg.exceptions import PycpgDeviceNotConnectedError +from pycpg.exceptions import PycpgError +from pycpg.exceptions import PycpgFeatureUnavailableError +from pycpg.exceptions import PycpgInternalServerError +from pycpg.exceptions import PycpgUnauthorizedError +from pycpg.response import PycpgResponse +from pycpg.services._auth import CPGRenewableAuth +from pycpg.services._connection import ConnectedServerHostResolver +from pycpg.services._connection import Connection +from pycpg.services._connection import HostResolver +from pycpg.services._connection import KnownUrlHostResolver +from pycpg.services._connection import MicroserviceKeyHostResolver +from pycpg.services._connection import MicroservicePrefixHostResolver +from pycpg.services._keyvaluestore import KeyValueStoreService default_kwargs = { "timeout": 60, @@ -43,7 +43,7 @@ def mock_host_resolver(mocker): @pytest.fixture def mock_auth(mocker): - return mocker.MagicMock(spec=C42RenewableAuth) + return mocker.MagicMock(spec=CPGRenewableAuth) @pytest.fixture @@ -56,7 +56,7 @@ def mock_server_env_conn(mocker): mock_conn = mocker.MagicMock(spec=Connection) mock_response = mocker.MagicMock(spec=Response) mock_response.text = '{"stsBaseUrl": "sts-testsuffix"}' - mock_conn.get.return_value = Py42Response(mock_response) + mock_conn.get.return_value = PycpgResponse(mock_response) return mock_conn @@ -65,7 +65,7 @@ def mock_server_env_conn_missing_sts_base_url(mocker): mock_conn = mocker.MagicMock(spec=Connection) mock_response = mocker.MagicMock(spec=Response) mock_response.text = "{}" - mock_conn.get.return_value = Py42Response(mock_response) + mock_conn.get.return_value = PycpgResponse(mock_response) return 
mock_conn @@ -74,7 +74,7 @@ def mock_connected_server_conn(mocker): mock_conn = mocker.MagicMock(spec=Connection) mock_response = mocker.MagicMock(spec=Response) mock_response.text = f'{{"serverUrl": "{HOST_ADDRESS}"}}' - mock_conn.get.return_value = Py42Response(mock_response) + mock_conn.get.return_value = PycpgResponse(mock_response) return mock_conn @@ -83,7 +83,7 @@ def mock_not_connected_server_conn(mocker): mock_conn = mocker.MagicMock(spec=Connection) mock_response = mocker.MagicMock(spec=Response) mock_response.text = '{"serverUrl": null}' - mock_conn.get.return_value = Py42Response(mock_response) + mock_conn.get.return_value = PycpgResponse(mock_response) return mock_conn @@ -138,7 +138,7 @@ def test_get_host_address_when_sts_base_url_not_found_raises_feature_unavailable resolver = MicroservicePrefixHostResolver( mock_server_env_conn_missing_sts_base_url, "TESTPREFIX" ) - with pytest.raises(Py42FeatureUnavailableError): + with pytest.raises(PycpgFeatureUnavailableError): resolver.get_host_address() def test_get_host_address_calls_correct_server_env_url(self, mock_server_env_conn): @@ -164,7 +164,7 @@ def test_get_host_address_when_server_returns_none_raises_expected_error( resolver = ConnectedServerHostResolver( mock_not_connected_server_conn, TEST_DEVICE_GUID ) - with pytest.raises(Py42DeviceNotConnectedError) as err: + with pytest.raises(PycpgDeviceNotConnectedError) as err: resolver.get_host_address() expected_message = ( @@ -273,7 +273,7 @@ def test_connection_request_with_error_status_code_raises_http_error( self, mock_host_resolver, mock_auth, error_requests_session ): connection = Connection(mock_host_resolver, mock_auth, error_requests_session) - with pytest.raises(Py42InternalServerError): + with pytest.raises(PycpgInternalServerError): connection.get(URL) def test_connection_request_calls_auth_handler_when_making_first_request( @@ -299,17 +299,17 @@ def test_connection_request_raises_unauthorized_error_when_renewal_results_in_40 mock_host_resolver, mock_auth, unauthorized_requests_session ) - with pytest.raises(Py42UnauthorizedError): + with pytest.raises(PycpgUnauthorizedError): connection.get(URL) assert unauthorized_requests_session.send.call_count == 2 - def test_connection_request_when_session_returns_none_raises_py42_error( + def test_connection_request_when_session_returns_none_raises_pycpg_error( self, mock_host_resolver, mock_auth, success_requests_session ): success_requests_session.send.return_value = None connection = Connection(mock_host_resolver, mock_auth, success_requests_session) - with pytest.raises(Py42Error): + with pytest.raises(PycpgError): connection.get(URL) def test_connection_request_when_no_data_does_not_include_content_type_header( diff --git a/tests/services/test_devices.py b/tests/services/test_devices.py index 833ef2f43..12d02e4b4 100644 --- a/tests/services/test_devices.py +++ b/tests/services/test_devices.py @@ -4,14 +4,13 @@ from tests.conftest import create_mock_error from tests.conftest import create_mock_response -import py42 -from py42.clients.settings.device_settings import DeviceSettings -from py42.clients.settings.device_settings import IncydrDeviceSettings -from py42.exceptions import Py42ActiveLegalHoldError -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42OrgNotFoundError -from py42.response import Py42Response -from py42.services.devices import DeviceService +import pycpg +from pycpg.clients.settings.device_settings import DeviceSettings +from pycpg.exceptions import PycpgActiveLegalHoldError 
+from pycpg.exceptions import PycpgBadRequestError +from pycpg.exceptions import PycpgOrgNotFoundError +from pycpg.response import PycpgResponse +from pycpg.services.devices import DeviceService COMPUTER_URI = "/api/v1/Computer" UPGRADE_URI = "/api/v4/device-upgrade/upgrade-device" @@ -83,7 +82,7 @@ def test_get_by_id_calls_get_with_uri_and_params( def test_get_all_calls_get_expected_number_of_times( self, mock_connection, mock_get_all_response, mock_get_all_empty_response ): - py42.settings.items_per_page = 1 + pycpg.settings.items_per_page = 1 service = DeviceService(mock_connection) mock_connection.get.side_effect = [ mock_get_all_response, @@ -92,7 +91,7 @@ def test_get_all_calls_get_expected_number_of_times( ] for _ in service.get_all(): pass - py42.settings.items_per_page = 500 + pycpg.settings.items_per_page = 500 assert mock_connection.get.call_count == 3 def test_get_page_calls_get_with_expected_url_and_params(self, mock_connection): @@ -141,11 +140,11 @@ def side_effect(url, json): base_err = mocker.MagicMock(spec=HTTPError) base_err.response = mocker.MagicMock(spec=Response) base_err.response.text = "ACTIVE_LEGAL_HOLD" - raise Py42BadRequestError(base_err) + raise PycpgBadRequestError(base_err) mock_connection.post.side_effect = side_effect client = DeviceService(mock_connection) - with pytest.raises(Py42ActiveLegalHoldError) as err: + with pytest.raises(PycpgActiveLegalHoldError) as err: client.deactivate(1234) expected = "Cannot deactivate the device with ID 1234 as the device is involved in a legal hold matter." @@ -158,11 +157,11 @@ def test_get_page_when_org_not_found_raises_expected_error( ): text = '[{"name":"SYSTEM","description":"Unable to find org"}]' mock_connection.get.side_effect = create_mock_error( - Py42BadRequestError, mocker, text + PycpgBadRequestError, mocker, text ) service = DeviceService(mock_connection) - with pytest.raises(Py42OrgNotFoundError) as err: + with pytest.raises(PycpgOrgNotFoundError) as err: service.get_page(1, org_uid="TestOrgUid") assert "The organization with UID 'TestOrgUid' was not found." 
in str(err.value) @@ -184,32 +183,10 @@ def test_get_settings_returns_crashplan_settings_when_crashplan_service( requests_response = mocker.MagicMock(spec=Response) requests_response.text = text client = DeviceService(mock_connection) - mock_connection.get.return_value = Py42Response(requests_response) + mock_connection.get.return_value = PycpgResponse(requests_response) settings = client.get_settings("42") assert isinstance(settings, DeviceSettings) - def test_get_settings_returns_incydr_settings_when_artemis_service( - self, mocker, mock_connection - ): - text = """{"service": "Artemis"}""" - requests_response = mocker.MagicMock(spec=Response) - requests_response.text = text - client = DeviceService(mock_connection) - mock_connection.get.return_value = Py42Response(requests_response) - settings = client.get_settings("42") - assert isinstance(settings, IncydrDeviceSettings) - - def test_get_settings_returns_incydr_settings_when_unspecified_service( - self, mocker, mock_connection - ): - text = """{"service": ""}""" - requests_response = mocker.MagicMock(spec=Response) - requests_response.text = text - client = DeviceService(mock_connection) - mock_connection.get.return_value = Py42Response(requests_response) - settings = client.get_settings("42") - assert isinstance(settings, IncydrDeviceSettings) - def test_update_settings_calls_api_with_expected_params_when_crashplan( self, mock_connection ): @@ -228,14 +205,3 @@ def test_update_settings_calls_api_with_expected_params_when_crashplan( client.update_settings(settings) uri = f"/api/v1/Computer/{device_id}" mock_connection.put.assert_called_once_with(uri, json=settings) - - def test_update_settings_calls_api_with_expected_params_when_incydr( - self, mock_connection - ): - device_id = "123" - device_dict = {"computerId": device_id, "service": "Artemis"} - settings = IncydrDeviceSettings(device_dict) - client = DeviceService(mock_connection) - client.update_settings(settings) - uri = f"/api/v1/Computer/{device_id}" - mock_connection.put.assert_called_once_with(uri, json=settings) diff --git a/tests/services/test_fileevent.py b/tests/services/test_fileevent.py deleted file mode 100644 index 6980ae175..000000000 --- a/tests/services/test_fileevent.py +++ /dev/null @@ -1,138 +0,0 @@ -import json - -import pytest -import requests -from tests.conftest import create_mock_error - -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42InvalidPageTokenError -from py42.sdk.queries.fileevents.file_event_query import ( - FileEventQuery as FileEventQueryV1, -) -from py42.sdk.queries.fileevents.filters import FileName -from py42.sdk.queries.fileevents.v2.file_event_query import ( - FileEventQuery as FileEventQueryV2, -) -from py42.sdk.queries.fileevents.v2.filters.file import Name -from py42.services._connection import Connection -from py42.services.fileevent import FileEventService - -FILE_EVENT_URI = "/forensic-search/queryservice/api/v1/fileevent" -FILE_EVENT_URI_V2 = "/forensic-search/queryservice/api/v2/fileevent" - - -def _create_test_query(test_filename="*"): - return FileEventQueryV1(FileName.eq(test_filename)) - - -def _create_v2_test_query(test_filename="*"): - return FileEventQueryV2(Name.eq(test_filename)) - - -@pytest.fixture() -def mock_invalid_page_token_connection(mocker, connection): - connection.post.side_effect = create_mock_error( - Py42BadRequestError, mocker, "INVALID_PAGE_TOKEN" - ) - return connection - - -class TestFileEventService: - @pytest.fixture - def connection(self, mocker): - connection = 
mocker.MagicMock(spec=Connection) - connection._session = mocker.MagicMock(spec=requests.Session) - return connection - - def test_search_calls_post_with_uri_and_query( - self, connection, successful_response - ): - service = FileEventService(connection) - connection.post.return_value = successful_response - query = _create_test_query() - service.search(query) - connection.post.assert_called_once_with(FILE_EVENT_URI, json=dict(query)) - - def test_search_when_given_str_type_query_calls_post_with_uri_and_query( - self, connection, successful_response - ): - service = FileEventService(connection) - connection.post.return_value = successful_response - query = str(_create_test_query()) - service.search(query) - expected = json.loads(query) - connection.post.assert_called_once_with(FILE_EVENT_URI, json=expected) - - def test_search_when_given_page_token_and_bad_request_with_invalid_page_token_occurs_raises_invalid_page_token_error( - self, mock_invalid_page_token_connection - ): - query = _create_test_query() - query.page_token = "test_page_token" - service = FileEventService(mock_invalid_page_token_connection) - with pytest.raises(Py42InvalidPageTokenError) as err: - service.search(query) - - assert f'Invalid page token: "{query.page_token}".' in str(err.value) - assert err.value.page_token == "test_page_token" - - def test_search_when_bad_request_raised_and_token_not_in_query_raises_bad_request( - self, mock_invalid_page_token_connection - ): - query = _create_test_query() - query.page_token = None - service = FileEventService(mock_invalid_page_token_connection) - with pytest.raises(Py42BadRequestError): - service.search(query) - - def test_search_when_bad_request_raised_with_token_but_has_not_invalid_token_text_raises_bad_request( - self, mocker, connection - ): - connection.post.side_effect = create_mock_error( - Py42BadRequestError, mocker, "DIFFERENT_ERROR" - ) - query = _create_test_query() - query.page_token = "test_page_token" - service = FileEventService(connection) - with pytest.raises(Py42BadRequestError): - service.search(query) - - def test_unicode_query_search_calls_post_with_query( - self, connection, successful_response - ): - service = FileEventService(connection) - connection.post.return_value = successful_response - query = _create_test_query("我能吞") - expected = dict(query) - service.search(query) - connection.post.assert_called_once_with(FILE_EVENT_URI, json=expected) - - def test_get_file_location_detail_by_sha256_calls_get_with_hash( - self, connection, successful_response - ): - service = FileEventService(connection) - connection.get.return_value = successful_response - service.get_file_location_detail_by_sha256("abc") - connection.get.assert_called_once_with( - "/forensic-search/queryservice/api/v1/filelocations", - params={"sha256": "abc"}, - ) - - # V2 TESTS - def test_search_uses_v2_uri_and_query_if_v2_query( - self, connection, successful_response - ): - service = FileEventService(connection) - connection.post.return_value = successful_response - query = _create_v2_test_query() - service.search(query) - connection.post.assert_called_once_with(FILE_EVENT_URI_V2, json=dict(query)) - - def test_search_when_given_str_type_v2_query_calls_post_with_uri_and_query( - self, connection, successful_response - ): - service = FileEventService(connection) - connection.post.return_value = successful_response - query = str(_create_v2_test_query()) - service.search(query) - expected = json.loads(query) - connection.post.assert_called_once_with(FILE_EVENT_URI_V2, json=expected) diff 
--git a/tests/services/test_legalhold.py b/tests/services/test_legalhold.py index 9d7c80773..f696d10c3 100644 --- a/tests/services/test_legalhold.py +++ b/tests/services/test_legalhold.py @@ -2,13 +2,13 @@ from tests.conftest import create_mock_error from tests.conftest import create_mock_response -import py42 -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42ForbiddenError -from py42.exceptions import Py42LegalHoldCriteriaMissingError -from py42.exceptions import Py42LegalHoldNotFoundOrPermissionDeniedError -from py42.exceptions import Py42UserAlreadyAddedError -from py42.services.legalhold import LegalHoldService +import pycpg +from pycpg.exceptions import PycpgBadRequestError +from pycpg.exceptions import PycpgForbiddenError +from pycpg.exceptions import PycpgLegalHoldCriteriaMissingError +from pycpg.exceptions import PycpgLegalHoldNotFoundOrPermissionDeniedError +from pycpg.exceptions import PycpgUserAlreadyAddedError +from pycpg.services.legalhold import LegalHoldService LEGAL_HOLD_URI = "/api/v1/LegalHold" @@ -78,10 +78,10 @@ def test_get_matter_by_uid_when_forbidden_raises_legal_hold_permission_denied_er self, mocker, mock_connection, successful_response ): mock_connection.get.side_effect = create_mock_error( - Py42ForbiddenError, mocker, "" + PycpgForbiddenError, mocker, "" ) service = LegalHoldService(mock_connection) - with pytest.raises(Py42LegalHoldNotFoundOrPermissionDeniedError) as err: + with pytest.raises(PycpgLegalHoldNotFoundOrPermissionDeniedError) as err: service.get_matter_by_uid("matter") expected = "Matter with UID 'matter' can not be found. Your account may not have permission to view the matter." @@ -94,7 +94,7 @@ def test_get_all_matters_calls_get_expected_number_of_times( mock_get_all_matters_response, mock_get_all_matters_empty_response, ): - py42.settings.items_per_page = 1 + pycpg.settings.items_per_page = 1 service = LegalHoldService(mock_connection) mock_connection.get.side_effect = [ mock_get_all_matters_response, @@ -103,7 +103,7 @@ def test_get_all_matters_calls_get_expected_number_of_times( ] for _ in service.get_all_matters(): pass - py42.settings.items_per_page = 500 + pycpg.settings.items_per_page = 500 assert mock_connection.get.call_count == 3 def test_get_all_matter_custodians_calls_get_expected_number_of_times( @@ -112,7 +112,7 @@ def test_get_all_matter_custodians_calls_get_expected_number_of_times( mock_get_all_matter_custodians_response, mock_get_all_matter_custodians_empty_response, ): - py42.settings.items_per_page = 1 + pycpg.settings.items_per_page = 1 service = LegalHoldService(mock_connection) mock_connection.get.side_effect = [ mock_get_all_matter_custodians_response, @@ -121,7 +121,7 @@ def test_get_all_matter_custodians_calls_get_expected_number_of_times( ] for _ in service.get_all_matter_custodians(user="test"): pass - py42.settings.items_per_page = 500 + pycpg.settings.items_per_page = 500 assert mock_connection.get.call_count == 3 def test_get_all_events_calls_get_expected_number_of_times( @@ -130,7 +130,7 @@ def test_get_all_events_calls_get_expected_number_of_times( mock_get_all_events_response, mock_get_all_events_empty_response, ): - py42.settings.items_per_page = 1 + pycpg.settings.items_per_page = 1 service = LegalHoldService(mock_connection) mock_connection.get.side_effect = [ mock_get_all_events_response, @@ -139,7 +139,7 @@ def test_get_all_events_calls_get_expected_number_of_times( ] for _ in service.get_all_events(): pass - py42.settings.items_per_page = 500 + pycpg.settings.items_per_page 
= 500 assert mock_connection.get.call_count == 3 def test_get_matters_page_calls_get_with_expected_url_and_params( @@ -203,10 +203,10 @@ def test_get_custodians_page_raises_error_when_required_option_missing( "userUid, or userSearch" ) mock_connection.get.side_effect = create_mock_error( - Py42BadRequestError, mocker, text + PycpgBadRequestError, mocker, text ) service = LegalHoldService(mock_connection) - with pytest.raises(Py42LegalHoldCriteriaMissingError) as err: + with pytest.raises(PycpgLegalHoldCriteriaMissingError) as err: service.get_custodians_page(1) assert ( @@ -228,11 +228,11 @@ def test_add_to_matter_when_post_raises_bad_request_error_indicating_user_alread self, mocker, mock_connection ): mock_connection.post.side_effect = create_mock_error( - Py42BadRequestError, mocker, "USER_ALREADY_IN_HOLD" + PycpgBadRequestError, mocker, "USER_ALREADY_IN_HOLD" ) mock_connection.get.return_value = {"name": "NAME"} service = LegalHoldService(mock_connection) - with pytest.raises(Py42UserAlreadyAddedError) as err: + with pytest.raises(PycpgUserAlreadyAddedError) as err: service.add_to_matter("user", "legal") expected = ( diff --git a/tests/services/test_legalholdapiclient.py b/tests/services/test_legalholdapiclient.py index fcc224585..f7bc6c1f2 100644 --- a/tests/services/test_legalholdapiclient.py +++ b/tests/services/test_legalholdapiclient.py @@ -2,15 +2,15 @@ from tests.conftest import create_mock_error from tests.conftest import create_mock_response -import py42 -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42ForbiddenError -from py42.exceptions import Py42LegalHoldAlreadyActiveError -from py42.exceptions import Py42LegalHoldAlreadyDeactivatedError -from py42.exceptions import Py42LegalHoldCriteriaMissingError -from py42.exceptions import Py42LegalHoldNotFoundOrPermissionDeniedError -from py42.exceptions import Py42UserAlreadyAddedError -from py42.services.legalholdapiclient import LegalHoldApiClientService +import pycpg +from pycpg.exceptions import PycpgBadRequestError +from pycpg.exceptions import PycpgForbiddenError +from pycpg.exceptions import PycpgLegalHoldAlreadyActiveError +from pycpg.exceptions import PycpgLegalHoldAlreadyDeactivatedError +from pycpg.exceptions import PycpgLegalHoldCriteriaMissingError +from pycpg.exceptions import PycpgLegalHoldNotFoundOrPermissionDeniedError +from pycpg.exceptions import PycpgUserAlreadyAddedError +from pycpg.services.legalholdapiclient import LegalHoldApiClientService BASE_URI = "/api/v27" @@ -83,14 +83,14 @@ def test_get_policy_by_uid_calls_get_with_expected_url_and_params( } mock_connection.get.assert_called_once_with(uri, params=data) - def test_get_policy_by_uid_raises_py42_error_if_policy_uid_not_found_or_forbidden( + def test_get_policy_by_uid_raises_pycpg_error_if_policy_uid_not_found_or_forbidden( self, mocker, mock_connection ): service = LegalHoldApiClientService(mock_connection) mock_connection.get.side_effect = create_mock_error( - Py42ForbiddenError, mocker, "" + PycpgForbiddenError, mocker, "" ) - with pytest.raises(Py42LegalHoldNotFoundOrPermissionDeniedError) as err: + with pytest.raises(PycpgLegalHoldNotFoundOrPermissionDeniedError) as err: service.get_policy_by_uid(TEST_POLICY_UID) assert ( @@ -155,10 +155,10 @@ def test_get_matter_by_uid_when_forbidden_raises_legal_hold_permission_denied_er self, mocker, mock_connection, successful_response ): mock_connection.get.side_effect = create_mock_error( - Py42ForbiddenError, mocker, "" + PycpgForbiddenError, mocker, "" ) service = 
LegalHoldApiClientService(mock_connection) - with pytest.raises(Py42LegalHoldNotFoundOrPermissionDeniedError) as err: + with pytest.raises(PycpgLegalHoldNotFoundOrPermissionDeniedError) as err: service.get_matter_by_uid(TEST_MATTER_UID) expected = f"Matter with UID '{TEST_MATTER_UID}' can not be found. Your account may not have permission to view the matter." @@ -170,7 +170,7 @@ def test_get_all_matters_calls_get_expected_number_of_times( mock_get_all_matters_response, mock_get_all_matters_empty_response, ): - py42.settings.items_per_page = 1 + pycpg.settings.items_per_page = 1 service = LegalHoldApiClientService(mock_connection) mock_connection.get.side_effect = [ mock_get_all_matters_response, @@ -179,7 +179,7 @@ def test_get_all_matters_calls_get_expected_number_of_times( ] for _ in service.get_all_matters(): pass - py42.settings.items_per_page = 500 + pycpg.settings.items_per_page = 500 assert mock_connection.get.call_count == 3 def test_get_all_matter_custodians_calls_get_expected_number_of_times( @@ -188,7 +188,7 @@ def test_get_all_matter_custodians_calls_get_expected_number_of_times( mock_get_all_matter_custodians_response, mock_get_all_matter_custodians_empty_response, ): - py42.settings.items_per_page = 1 + pycpg.settings.items_per_page = 1 service = LegalHoldApiClientService(mock_connection) mock_connection.get.side_effect = [ mock_get_all_matter_custodians_response, @@ -197,7 +197,7 @@ def test_get_all_matter_custodians_calls_get_expected_number_of_times( ] for _ in service.get_all_matter_custodians(user="test"): pass - py42.settings.items_per_page = 500 + pycpg.settings.items_per_page = 500 assert mock_connection.get.call_count == 3 def test_get_matters_page_calls_get_with_expected_url_and_params( @@ -244,10 +244,10 @@ def test_get_custodians_page_raises_error_when_required_option_missing( "userUid, or userSearch" ) mock_connection.get.side_effect = create_mock_error( - Py42BadRequestError, mocker, text + PycpgBadRequestError, mocker, text ) service = LegalHoldApiClientService(mock_connection) - with pytest.raises(Py42LegalHoldCriteriaMissingError) as err: + with pytest.raises(PycpgLegalHoldCriteriaMissingError) as err: service.get_custodians_page(1) assert ( @@ -268,11 +268,11 @@ def test_add_to_matter_when_post_raises_bad_request_error_indicating_user_alread self, mocker, mock_connection ): mock_connection.post.side_effect = create_mock_error( - Py42BadRequestError, mocker, "USER_ALREADY_IN_HOLD" + PycpgBadRequestError, mocker, "USER_ALREADY_IN_HOLD" ) mock_connection.get.return_value = {"name": "NAME"} service = LegalHoldApiClientService(mock_connection) - with pytest.raises(Py42UserAlreadyAddedError) as err: + with pytest.raises(PycpgUserAlreadyAddedError) as err: service.add_to_matter("user", "legal") expected = ( @@ -280,14 +280,14 @@ def test_add_to_matter_when_post_raises_bad_request_error_indicating_user_alread ) assert expected in str(err.value) - def test_add_to_matter_raises_py42_error_if_membership_uid_not_found_or_forbidden( + def test_add_to_matter_raises_pycpg_error_if_membership_uid_not_found_or_forbidden( self, mocker, mock_connection ): service = LegalHoldApiClientService(mock_connection) mock_connection.post.side_effect = create_mock_error( - Py42ForbiddenError, mocker, "" + PycpgForbiddenError, mocker, "" ) - with pytest.raises(Py42LegalHoldNotFoundOrPermissionDeniedError) as err: + with pytest.raises(PycpgLegalHoldNotFoundOrPermissionDeniedError) as err: service.add_to_matter("user_uid", TEST_MATTER_UID) assert ( @@ -307,14 +307,14 @@ def 
test_remove_from_matter_calls_post_with_expected_url_and_params( } mock_connection.post.assert_called_once_with(uri, json=data) - def test_remove_from_matter_raises_py42_error_if_membership_uid_not_found_or_forbidden( + def test_remove_from_matter_raises_pycpg_error_if_membership_uid_not_found_or_forbidden( self, mocker, mock_connection ): service = LegalHoldApiClientService(mock_connection) mock_connection.post.side_effect = create_mock_error( - Py42ForbiddenError, mocker, "" + PycpgForbiddenError, mocker, "" ) - with pytest.raises(Py42LegalHoldNotFoundOrPermissionDeniedError) as err: + with pytest.raises(PycpgLegalHoldNotFoundOrPermissionDeniedError) as err: service.remove_from_matter(TEST_MEMBERSHIP_UID) assert ( @@ -334,14 +334,14 @@ def test_deactivate_matter_calls_post_with_expected_url_and_params( } mock_connection.post.assert_called_once_with(uri, json=data) - def test_deactivate_matter_raises_py42_error_if_matter_already_deactivated( + def test_deactivate_matter_raises_pycpg_error_if_matter_already_deactivated( self, mocker, mock_connection ): service = LegalHoldApiClientService(mock_connection) mock_connection.post.side_effect = create_mock_error( - Py42BadRequestError, mocker, '"problem":"ALREADY_DEACTIVATED' + PycpgBadRequestError, mocker, '"problem":"ALREADY_DEACTIVATED' ) - with pytest.raises(Py42LegalHoldAlreadyDeactivatedError) as err: + with pytest.raises(PycpgLegalHoldAlreadyDeactivatedError) as err: service.deactivate_matter(TEST_MATTER_UID) assert ( @@ -349,14 +349,14 @@ def test_deactivate_matter_raises_py42_error_if_matter_already_deactivated( == f"Legal Hold Matter with UID '{TEST_MATTER_UID}' has already been deactivated." ) - def test_deactivate_matter_raises_py42_error_if_matter_uid_not_found_or_forbidden( + def test_deactivate_matter_raises_pycpg_error_if_matter_uid_not_found_or_forbidden( self, mocker, mock_connection ): service = LegalHoldApiClientService(mock_connection) mock_connection.post.side_effect = create_mock_error( - Py42ForbiddenError, mocker, "" + PycpgForbiddenError, mocker, "" ) - with pytest.raises(Py42LegalHoldNotFoundOrPermissionDeniedError) as err: + with pytest.raises(PycpgLegalHoldNotFoundOrPermissionDeniedError) as err: service.deactivate_matter(TEST_MATTER_UID) assert ( @@ -376,14 +376,14 @@ def test_reactivate_matter_calls_post_with_expected_url_and_params( } mock_connection.post.assert_called_once_with(uri, json=data) - def test_reactivate_matter_raises_py42_error_if_matter_already_active( + def test_reactivate_matter_raises_pycpg_error_if_matter_already_active( self, mocker, mock_connection ): service = LegalHoldApiClientService(mock_connection) mock_connection.post.side_effect = create_mock_error( - Py42BadRequestError, mocker, '"problem":"ALREADY_ACTIVE' + PycpgBadRequestError, mocker, '"problem":"ALREADY_ACTIVE' ) - with pytest.raises(Py42LegalHoldAlreadyActiveError) as err: + with pytest.raises(PycpgLegalHoldAlreadyActiveError) as err: service.reactivate_matter(TEST_MATTER_UID) assert ( @@ -391,14 +391,14 @@ def test_reactivate_matter_raises_py42_error_if_matter_already_active( == f"Legal Hold Matter with UID '{TEST_MATTER_UID}' is already active." 
) - def test_reactivate_matter_raises_py42_error_if_matter_uid_not_found_or_forbidden( + def test_reactivate_matter_raises_pycpg_error_if_matter_uid_not_found_or_forbidden( self, mocker, mock_connection ): service = LegalHoldApiClientService(mock_connection) mock_connection.post.side_effect = create_mock_error( - Py42ForbiddenError, mocker, "" + PycpgForbiddenError, mocker, "" ) - with pytest.raises(Py42LegalHoldNotFoundOrPermissionDeniedError) as err: + with pytest.raises(PycpgLegalHoldNotFoundOrPermissionDeniedError) as err: service.reactivate_matter(TEST_MATTER_UID) assert ( diff --git a/tests/services/test_orgs.py b/tests/services/test_orgs.py index 56fd69746..cb9810465 100644 --- a/tests/services/test_orgs.py +++ b/tests/services/test_orgs.py @@ -4,9 +4,9 @@ from tests.conftest import create_mock_error from tests.conftest import create_mock_response -import py42.settings -from py42.exceptions import Py42InternalServerError -from py42.services.orgs import OrgService +import pycpg.settings +from pycpg.exceptions import PycpgInternalServerError +from pycpg.services.orgs import OrgService COMPUTER_URI = "/api/v1/Org" ORGS_V3_URI = "/api/v3/orgs" @@ -31,7 +31,7 @@ def mock_get_all_empty_response(self, mocker): yield create_mock_response(mocker, MOCK_EMPTY_GET_ORGS_RESPONSE) @patch.object( - py42.services.orgs.OrgService, + pycpg.services.orgs.OrgService, "_get_guid_by_id", return_value="org-guid-123", ) @@ -47,7 +47,7 @@ def test_get_org_by_id_calls_get_with_uri_and_params( def test_get_all_calls_get_expected_number_of_times( self, mock_connection, mock_get_all_response, mock_get_all_empty_response ): - py42.settings.items_per_page = 1 + pycpg.settings.items_per_page = 1 service = OrgService(mock_connection) mock_connection.get.side_effect = [ mock_get_all_response, @@ -56,7 +56,7 @@ def test_get_all_calls_get_expected_number_of_times( ] for _ in service.get_all(): pass - py42.settings.items_per_page = 500 + pycpg.settings.items_per_page = 500 assert mock_connection.get.call_count == 3 def test_get_page_calls_get_with_expected_url_and_params(self, mock_connection): @@ -86,7 +86,7 @@ def test_get_agent_full_disk_access_states_calls_get_agent_state_with_arguments( service.get_agent_state.assert_called_once_with("ORG_ID", "fullDiskAccess") @patch.object( - py42.services.orgs.OrgService, + pycpg.services.orgs.OrgService, "_get_guid_by_id", return_value="parent-guid-123", ) @@ -107,7 +107,7 @@ def test_create_org_calls_post_with_expected_uri_and_params(self, mock_connectio mock_connection.post.assert_called_once_with(ORGS_V3_URI, json=data) @patch.object( - py42.services.orgs.OrgService, + pycpg.services.orgs.OrgService, "_get_guid_by_id", return_value="org-guid-123", ) @@ -118,7 +118,7 @@ def test_get_by_uid_calls_get_with_expected_uri_and_params(self, mock_connection mock_connection.get.assert_called_once_with(uri, params={}) @patch.object( - py42.services.orgs.OrgService, + pycpg.services.orgs.OrgService, "_get_guid_by_id", return_value=TEST_ORG_GUID, ) @@ -129,7 +129,7 @@ def test_block_calls_post_with_expected_uri_and_params(self, mock_connection): mock_connection.post.assert_called_once_with(uri) @patch.object( - py42.services.orgs.OrgService, + pycpg.services.orgs.OrgService, "_get_guid_by_id", return_value=TEST_ORG_GUID, ) @@ -140,7 +140,7 @@ def test_unblock_calls_post_with_expected_uri_and_params(self, mock_connection): mock_connection.post.assert_called_once_with(uri) @patch.object( - py42.services.orgs.OrgService, + pycpg.services.orgs.OrgService, "_get_guid_by_id", 
return_value=TEST_ORG_GUID, ) @@ -151,7 +151,7 @@ def test_deactivate_calls_post_with_expected_uri_and_params(self, mock_connectio mock_connection.post.assert_called_once_with(uri) @patch.object( - py42.services.orgs.OrgService, + pycpg.services.orgs.OrgService, "_get_guid_by_id", return_value=TEST_ORG_GUID, ) @@ -172,16 +172,16 @@ def test_get_current_returns_note_about_api_client_support_when_internal_server_ ): service = OrgService(mock_connection) mock_connection.get.side_effect = create_mock_error( - Py42InternalServerError, mocker, "Server Error" + PycpgInternalServerError, mocker, "Server Error" ) - with pytest.raises(Py42InternalServerError) as err: + with pytest.raises(PycpgInternalServerError) as err: service.get_current() expected = "Please be aware that this method is incompatible with api client authentication." assert expected in err.value.args[0] @patch.object( - py42.services.orgs.OrgService, + pycpg.services.orgs.OrgService, "_get_guid_by_id", return_value=TEST_ORG_GUID, ) diff --git a/tests/services/test_pds.py b/tests/services/test_pds.py deleted file mode 100644 index fd5824818..000000000 --- a/tests/services/test_pds.py +++ /dev/null @@ -1,33 +0,0 @@ -import pytest - -from py42.services.preservationdata import PreservationDataService - - -class TestPreservationDataService: - @pytest.fixture - def mock_connection(self, mock_connection, successful_response): - mock_connection.post.return_value = successful_response - return mock_connection - - def test_find_file_version_posts_expected_data(self, mock_connection): - pds = PreservationDataService(mock_connection) - pds.find_file_version("abc", "adfadf", ["/path/path", "/path/path2"]) - - assert mock_connection.post.call_count == 1 - posted_data = mock_connection.post.call_args[1]["json"] - assert mock_connection.post.call_args[0][0] == "/api/v1/FindAvailableVersion" - assert ( - posted_data["fileSHA256"] == "adfadf" - and posted_data["fileMD5"] == "abc" - and posted_data["devicePaths"][0] == "/path/path" - and posted_data["devicePaths"][1] == "/path/path2" - ) - - def test_get_file_version_list_uses_expected_url(self, mock_connection): - pds = PreservationDataService(mock_connection) - pds.get_file_version_list("testguid", "testmd5", "testsha256", "/t/1 X") - qry = ( - "fileSHA256=testsha256&fileMD5=testmd5&deviceUid=testguid&filePath=/t/1%20X" - ) - expected = f"/api/v2/file-version-listing?{qry}" - mock_connection.get.assert_called_once_with(expected) diff --git a/tests/services/test_savedsearch.py b/tests/services/test_savedsearch.py deleted file mode 100644 index 2e4c2d573..000000000 --- a/tests/services/test_savedsearch.py +++ /dev/null @@ -1,176 +0,0 @@ -from tests.conftest import create_mock_response - -from py42 import settings -from py42.sdk.queries.fileevents.file_event_query import ( - FileEventQuery as FileEventQueryV1, -) -from py42.sdk.queries.fileevents.v2.file_event_query import ( - FileEventQuery as FileEventQueryV2, -) -from py42.services.fileevent import FileEventService -from py42.services.savedsearch import SavedSearchService - -SAVED_SEARCH_GET_RESPONSE = """ - {"searches": [{"groups": [] }]} -""" -FILE_EVENT_URI = "/forensic-search/queryservice/api/v1/fileevent" - - -class TestSavedSearchService: - def test_get_calls_get_with_expected_uri(self, mock_connection, mocker): - mock_connection.get.return_value = create_mock_response(mocker, "{}") - file_event_service = FileEventService(mock_connection) - saved_search_service = SavedSearchService(mock_connection, file_event_service) - 
saved_search_service.get() - assert mock_connection.get.call_count == 1 - assert ( - mock_connection.get.call_args[0][0] - == "/forensic-search/queryservice/api/v1/saved" - ) - - def test_get_by_id_calls_get_with_expected_uri(self, mock_connection, mocker): - mock_connection.get.return_value = create_mock_response(mocker, "{}") - file_event_service = FileEventService(mock_connection) - saved_search_service = SavedSearchService(mock_connection, file_event_service) - saved_search_service.get_by_id("test-id") - assert ( - mock_connection.get.call_args[0][0] - == "/forensic-search/queryservice/api/v1/saved/test-id" - ) - - def test_execute_calls_post_with_expected_uri(self, mock_connection, mocker): - response = create_mock_response(mocker, SAVED_SEARCH_GET_RESPONSE) - mock_connection.post.return_value = response - file_event_service = FileEventService(mock_connection) - saved_search_service = SavedSearchService(mock_connection, file_event_service) - saved_search_service.execute("test-id") - assert ( - mock_connection.post.call_args[0][0] - == "/forensic-search/queryservice/api/v1/fileevent" - ) - - def test_execute_calls_post_with_expected_query(self, mock_connection, mocker): - response = create_mock_response(mocker, SAVED_SEARCH_GET_RESPONSE) - mock_connection.get.return_value = response - file_event_service = FileEventService(mock_connection) - saved_search_service = SavedSearchService(mock_connection, file_event_service) - saved_search_service.execute("test-id") - assert mock_connection.post.call_count == 1 - posted_data = mock_connection.post.call_args[1]["json"] - assert ( - posted_data["pgSize"] == 500 - and posted_data["pgNum"] == 1 - and posted_data["groups"] == [] - ) - - def test_execute_calls_post_with_expected_setting_page_param( - self, mock_connection, mocker - ): - test_custom_page_num = 2 - settings.security_events_per_page = 5000 - - response = create_mock_response(mocker, SAVED_SEARCH_GET_RESPONSE) - mock_connection.get.return_value = response - file_event_service = FileEventService(mock_connection) - saved_search_client = SavedSearchService(mock_connection, file_event_service) - saved_search_client.execute( - "test-id", - page_number=test_custom_page_num, - ) - assert mock_connection.post.call_count == 1 - posted_data = mock_connection.post.call_args[1]["json"] - settings.security_events_per_page = 10000 - assert ( - posted_data["pgSize"] == 5000 - and posted_data["pgNum"] == 2 - and posted_data["groups"] == [] - ) - - def test_execute_calls_post_with_expected_page_params( - self, mock_connection, mocker - ): - test_custom_page_num = 2 - settings.security_events_per_page = 6000 - test_custom_page_size = 5000 - - response = create_mock_response(mocker, SAVED_SEARCH_GET_RESPONSE) - mock_connection.get.return_value = response - file_event_service = FileEventService(mock_connection) - saved_search_client = SavedSearchService(mock_connection, file_event_service) - saved_search_client.execute( - "test-id", - page_number=test_custom_page_num, - page_size=test_custom_page_size, - ) - assert mock_connection.post.call_count == 1 - posted_data = mock_connection.post.call_args[1]["json"] - settings.security_events_per_page = 10000 - assert ( - posted_data["pgSize"] == 5000 - and posted_data["pgNum"] == 2 - and posted_data["groups"] == [] - ) - - def test_get_query_calls_get_with_expected_uri(self, mock_connection, mocker): - response = create_mock_response(mocker, SAVED_SEARCH_GET_RESPONSE) - mock_connection.post.return_value = response - file_event_service = 
FileEventService(mock_connection) - saved_search_service = SavedSearchService(mock_connection, file_event_service) - saved_search_service.get_query("test-id") - assert ( - mock_connection.get.call_args[0][0] - == "/forensic-search/queryservice/api/v1/saved/test-id" - ) - - def test_get_query_builds_v1_query_if_not_use_v2_flag( - self, mock_connection, mocker - ): - response = create_mock_response(mocker, SAVED_SEARCH_GET_RESPONSE) - mock_connection.post.return_value = response - file_event_service = FileEventService(mock_connection) - saved_search_service = SavedSearchService(mock_connection, file_event_service) - query = saved_search_service.get_query("test-id") - assert isinstance(query, FileEventQueryV1) - assert query.version == "v1" - assert query.sort_key == "eventId" - - # V2 TESTS - def test_get_calls_with_v2_uri_if_use_v2_flag(self, mock_connection): - file_event_service = FileEventService(mock_connection) - saved_search_service = SavedSearchService(mock_connection, file_event_service) - saved_search_service.get(use_v2=True) - mock_connection.get.assert_called_once_with( - "/forensic-search/queryservice/api/v2/saved" - ) - - def test_get_by_id_calls_v2_uri_if_use_v2_flag(self, mock_connection): - file_event_service = FileEventService(mock_connection) - saved_search_service = SavedSearchService(mock_connection, file_event_service) - saved_search_service.get_by_id("test-id", use_v2=True) - mock_connection.get.assert_called_once_with( - "/forensic-search/queryservice/api/v2/saved/test-id" - ) - - def test_get_query_builds_v2_query_if_use_v2_flag(self, mock_connection, mocker): - response = create_mock_response(mocker, SAVED_SEARCH_GET_RESPONSE) - mock_connection.post.return_value = response - file_event_service = FileEventService(mock_connection) - saved_search_service = SavedSearchService(mock_connection, file_event_service) - query = saved_search_service.get_query("test-id", use_v2=True) - assert isinstance(query, FileEventQueryV2) - assert query.version == "v2" - assert query.sort_key == "event.id" - - def test_execute_calls_search_with_v2_uri_if_use_v2_flag( - self, mock_connection, mocker - ): - response = create_mock_response(mocker, SAVED_SEARCH_GET_RESPONSE) - mock_connection.post.return_value = response - - file_event_service = FileEventService(mock_connection) - saved_search_service = SavedSearchService(mock_connection, file_event_service) - saved_search_service.execute("test-id", use_v2=True) - expected_query = FileEventQueryV2.from_dict({"groups": []}) - mock_connection.post.assert_called_once_with( - "/forensic-search/queryservice/api/v2/fileevent", json=dict(expected_query) - ) diff --git a/tests/services/test_trustedactivities.py b/tests/services/test_trustedactivities.py deleted file mode 100644 index 716651106..000000000 --- a/tests/services/test_trustedactivities.py +++ /dev/null @@ -1,328 +0,0 @@ -import json - -import pytest -from tests.conftest import create_mock_error -from tests.conftest import create_mock_response - -import py42.settings -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42ConflictError -from py42.exceptions import Py42DescriptionLimitExceededError -from py42.exceptions import Py42NotFoundError -from py42.exceptions import Py42TrustedActivityConflictError -from py42.exceptions import Py42TrustedActivityIdNotFound -from py42.exceptions import Py42TrustedActivityInvalidCharacterError -from py42.services.trustedactivities import TrustedActivitiesService - -GET_ALL_TEST_RESPONSE = '{"trustResources":["test"], 
"totalCount":1}' -EMPTY_GET_ALL_TEST_RESPONSE = '{"trustResources": [], "totalCount":0}' -GET_TRUSTED_ACTIVITY_RESPONSE = { - "description": "test description", - "resourceId": "456", - "type": "DOMAIN", - "updatedAt": "2021-09-13T15:36:59.743Z", - "updatedByUserUid": "user uid", - "updatedByUsername": "username", - "value": "domain.com", -} - -_INVALID_RESOURCE_ID = 0 -_TEST_TRUSTED_ACTIVITY_RESOURCE_ID = 123 -_BASE_URI = "/api/v1/trusted-activities" -_DESCRIPTION_TOO_LONG_ERROR_MSG = ( - '{"problem":"DESCRIPTION_TOO_LONG","description":null}' -) -_CONFLICT_ERROR_MSG = '{"problem":"CONFLICT","description":null}' -_INVALID_CHANGE_ERROR_MSG = '{"problem":"INVALID_CHANGE","description":null}' -_INVALID_CHARACTER_ERROR_MSG = ( - '{"problem":"INVALID_CHARACTERS_IN_VALUE","description":null}' -) -_RESOURCE_ID_NOT_FOUND_ERROR_MSG = "" - - -@pytest.fixture -def mock_get_all_response(mocker): - return create_mock_response(mocker, GET_ALL_TEST_RESPONSE) - - -@pytest.fixture -def mock_get_all_empty_response(mocker): - return create_mock_response(mocker, EMPTY_GET_ALL_TEST_RESPONSE) - - -@pytest.fixture -def mock_get_response(mocker): - data = json.dumps(GET_TRUSTED_ACTIVITY_RESPONSE) - response = create_mock_response(mocker, data) - return response - - -@pytest.fixture -def mock_long_description_error(mocker): - return create_mock_error( - Py42BadRequestError, mocker, _DESCRIPTION_TOO_LONG_ERROR_MSG - ) - - -@pytest.fixture -def mock_conflict_error(mocker): - return create_mock_error(Py42ConflictError, mocker, _CONFLICT_ERROR_MSG) - - -@pytest.fixture -def mock_invalid_change_error(mocker): - return create_mock_error(Py42BadRequestError, mocker, _INVALID_CHANGE_ERROR_MSG) - - -@pytest.fixture -def mock_invalid_character_error(mocker): - return create_mock_error(Py42BadRequestError, mocker, _INVALID_CHARACTER_ERROR_MSG) - - -@pytest.fixture -def mock_resource_id_not_found_error(mocker): - return create_mock_error( - Py42NotFoundError, mocker, _RESOURCE_ID_NOT_FOUND_ERROR_MSG - ) - - -class TestTrustedActivitiesService: - def test_create_called_with_expected_url_and_params(self, mock_connection): - trusted_activities_service = TrustedActivitiesService(mock_connection) - trusted_activities_service.create( - "DOMAIN", - "test.com", - ) - assert mock_connection.post.call_args[0][0] == _BASE_URI - data = { - "type": "DOMAIN", - "value": "test.com", - "description": None, - } - mock_connection.post.assert_called_once_with(_BASE_URI, json=data) - - def test_create_called_with_expected_url_and_optional_params(self, mock_connection): - trusted_activities_service = TrustedActivitiesService(mock_connection) - trusted_activities_service.create("DOMAIN", "test.com", "description") - assert mock_connection.post.call_args[0][0] == _BASE_URI - data = { - "type": "DOMAIN", - "value": "test.com", - "description": "description", - } - mock_connection.post.assert_called_once_with(_BASE_URI, json=data) - - def test_create_when_fails_with_name_conflict_error_raises_custom_exception( - self, mock_connection, mock_conflict_error - ): - trusted_activities_service = TrustedActivitiesService(mock_connection) - mock_connection.post.side_effect = mock_conflict_error - with pytest.raises(Py42TrustedActivityConflictError) as err: - trusted_activities_service.create("DOMAIN", "duplicate-name") - - assert err.value.args[0] == ( - "Duplicate URL or workspace name, 'duplicate-name' already exists on your trusted list. 
" - "Please provide a unique value" - ) - - def test_create_when_fails_with_description_too_long_error_raises_custom_exception( - self, mock_connection, mock_long_description_error - ): - trusted_activities_service = TrustedActivitiesService(mock_connection) - mock_connection.post.side_effect = mock_long_description_error - with pytest.raises(Py42DescriptionLimitExceededError) as err: - trusted_activities_service.create( - "DOMAIN", "name", description="supposedly too long" - ) - - assert ( - err.value.args[0] - == "Description limit exceeded, max 250 characters allowed." - ) - - def test_create_when_fails_with_invalid_character_error_raises_custom_exception( - self, mock_connection, mock_invalid_character_error - ): - trusted_activities_service = TrustedActivitiesService(mock_connection) - mock_connection.post.side_effect = mock_invalid_character_error - with pytest.raises(Py42TrustedActivityInvalidCharacterError) as err: - trusted_activities_service.create("DOMAIN", "bad@name") - - assert ( - err.value.args[0] == "Invalid character in domain or Slack workspace name" - ) - - def test_get_all_called_expected_number_of_times( - self, mock_connection, mock_get_all_response, mock_get_all_empty_response - ): - trusted_activities_service = TrustedActivitiesService(mock_connection) - py42.settings.items_per_page = 1 - items = [mock_get_all_response, mock_get_all_empty_response] - - mock_connection.get.side_effect = items - for _ in trusted_activities_service.get_all(): - pass - - assert mock_connection.get.call_count == 2 - py42.settings.items_per_page = 500 - - def test_get_all_called_with_expected_url_and_params( - self, mock_connection, mock_get_all_response - ): - trusted_activities_service = TrustedActivitiesService(mock_connection) - mock_connection.get.side_effect = [mock_get_all_response] - - for _ in trusted_activities_service.get_all(): - pass - - assert mock_connection.get.call_args[0][0] == _BASE_URI - data = { - "type": None, - "pgNum": 1, - "pgSize": 500, - } - mock_connection.get.assert_called_once_with(_BASE_URI, params=data) - - def test_get_all_called_with_expected_url_and_all_optional_params( - self, mock_connection, mock_get_all_response - ): - trusted_activities_service = TrustedActivitiesService(mock_connection) - mock_connection.get.side_effect = [ - mock_get_all_response, - ] - - for _ in trusted_activities_service.get_all("DOMAIN"): - pass - - assert mock_connection.get.call_args[0][0] == _BASE_URI - data = { - "type": "DOMAIN", - "pgNum": 1, - "pgSize": 500, - } - mock_connection.get.assert_called_once_with(_BASE_URI, params=data) - - def test_get_called_with_expected_url_and_params(self, mock_connection): - trusted_activities_service = TrustedActivitiesService(mock_connection) - trusted_activities_service.get(_TEST_TRUSTED_ACTIVITY_RESOURCE_ID) - expected_url = f"{_BASE_URI}/{_TEST_TRUSTED_ACTIVITY_RESOURCE_ID}" - assert mock_connection.get.call_args[0][0] == expected_url - mock_connection.get.assert_called_once_with(expected_url) - - def test_get_when_fails_with_resource_id_not_found_error_raises_custom_exception( - self, mock_connection, mock_resource_id_not_found_error - ): - trusted_activities_service = TrustedActivitiesService(mock_connection) - mock_connection.get.side_effect = mock_resource_id_not_found_error - with pytest.raises(Py42TrustedActivityIdNotFound) as err: - trusted_activities_service.get(_INVALID_RESOURCE_ID) - - assert err.value.args[0] == f"Resource ID '{_INVALID_RESOURCE_ID}' not found." 
- - def test_update_called_with_expected_url_and_params( - self, mock_connection, mock_get_response - ): - trusted_activities_service = TrustedActivitiesService(mock_connection) - mock_connection.get.return_value = mock_get_response - trusted_activities_service.update(_TEST_TRUSTED_ACTIVITY_RESOURCE_ID) - expected_url = f"{_BASE_URI}/{_TEST_TRUSTED_ACTIVITY_RESOURCE_ID}" - assert mock_connection.put.call_args[0][0] == expected_url - data = { - "type": "DOMAIN", - "value": "domain.com", - "description": "test description", - } - mock_connection.put.assert_called_once_with(expected_url, json=data) - - def test_update_called_with_expected_url_and_optional_params( - self, mock_connection, mock_get_response - ): - trusted_activities_service = TrustedActivitiesService(mock_connection) - mock_connection.get.return_value = mock_get_response - trusted_activities_service.update( - _TEST_TRUSTED_ACTIVITY_RESOURCE_ID, "test.com", "description" - ) - expected_url = f"{_BASE_URI}/{_TEST_TRUSTED_ACTIVITY_RESOURCE_ID}" - assert mock_connection.put.call_args[0][0] == expected_url - data = { - "type": "DOMAIN", - "value": "test.com", - "description": "description", - } - mock_connection.put.assert_called_once_with(expected_url, json=data) - - def test_update_when_fails_with_name_conflict_error_raises_custom_exception( - self, mock_connection, mock_conflict_error, mock_get_response - ): - trusted_activities_service = TrustedActivitiesService(mock_connection) - mock_connection.get.return_value = mock_get_response - mock_connection.put.side_effect = mock_conflict_error - with pytest.raises(Py42TrustedActivityConflictError) as err: - trusted_activities_service.update( - _TEST_TRUSTED_ACTIVITY_RESOURCE_ID, value="duplicate-name" - ) - - assert err.value.args[0] == ( - "Duplicate URL or workspace name, 'duplicate-name' already exists on your trusted list. " - "Please provide a unique value" - ) - - def test_update_when_fails_with_description_too_long_error_raises_custom_exception( - self, mock_connection, mock_get_response, mock_long_description_error - ): - trusted_activities_service = TrustedActivitiesService(mock_connection) - mock_connection.get.return_value = mock_get_response - mock_connection.put.side_effect = mock_long_description_error - with pytest.raises(Py42DescriptionLimitExceededError) as err: - trusted_activities_service.update( - _TEST_TRUSTED_ACTIVITY_RESOURCE_ID, description="supposedly too long" - ) - - assert ( - err.value.args[0] - == "Description limit exceeded, max 250 characters allowed." 
- ) - - def test_update_when_fails_with_invalid_character_error_raises_custom_exception( - self, mock_connection, mock_get_response, mock_invalid_character_error - ): - trusted_activities_service = TrustedActivitiesService(mock_connection) - mock_connection.get.return_value = mock_get_response - mock_connection.put.side_effect = mock_invalid_character_error - with pytest.raises(Py42TrustedActivityInvalidCharacterError) as err: - trusted_activities_service.update( - _TEST_TRUSTED_ACTIVITY_RESOURCE_ID, value="bad@name" - ) - - assert ( - err.value.args[0] == "Invalid character in domain or Slack workspace name" - ) - - def test_update_when_fails_with_resource_id_not_found_error_raises_custom_exception( - self, mock_connection, mock_resource_id_not_found_error - ): - trusted_activities_service = TrustedActivitiesService(mock_connection) - mock_connection.put.side_effect = mock_resource_id_not_found_error - with pytest.raises(Py42TrustedActivityIdNotFound) as err: - trusted_activities_service.update( - _INVALID_RESOURCE_ID, description="This id should not be found." - ) - - assert err.value.args[0] == f"Resource ID '{_INVALID_RESOURCE_ID}' not found." - - def test_delete_called_with_expected_url_and_params(self, mock_connection): - trusted_activities_service = TrustedActivitiesService(mock_connection) - trusted_activities_service.delete(_TEST_TRUSTED_ACTIVITY_RESOURCE_ID) - expected_url = f"{_BASE_URI}/{_TEST_TRUSTED_ACTIVITY_RESOURCE_ID}" - assert mock_connection.delete.call_args[0][0] == expected_url - mock_connection.delete.assert_called_once_with(expected_url) - - def test_delete_when_fails_with_resource_id_not_found_error_raises_custom_exception( - self, mock_connection, mock_resource_id_not_found_error - ): - trusted_activities_service = TrustedActivitiesService(mock_connection) - mock_connection.delete.side_effect = mock_resource_id_not_found_error - with pytest.raises(Py42TrustedActivityIdNotFound) as err: - trusted_activities_service.delete(_INVALID_RESOURCE_ID) - - assert err.value.args[0] == f"Resource ID '{_INVALID_RESOURCE_ID}' not found." 
diff --git a/tests/services/test_userriskprofile.py b/tests/services/test_userriskprofile.py deleted file mode 100644 index 37c84813f..000000000 --- a/tests/services/test_userriskprofile.py +++ /dev/null @@ -1,305 +0,0 @@ -import json -from datetime import date -from datetime import datetime - -import pytest -from requests import Response -from tests.conftest import create_mock_error -from tests.conftest import create_mock_response - -import py42.settings -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42NotFoundError -from py42.response import Py42Response -from py42.services.userriskprofile import UserRiskProfileService - -USER_ID = "user-42" -USERNAME = "risk-user@code42.com" -GET_ALL_RESPONSE = '{"userRiskProfiles":["test"], "totalCount":1}' -GET_ALL_RESPONSE_EMPTY = '{"userRiskProfiles": [], "totalCount":0}' -GET_ALL_RESPONSE_POPULATED = { - "userRiskProfiles": [{"username": "risk-user@code42.com", "userId": "user-42"}], - "totalCount": 1, -} -CLOUD_ALIASES = ["test@code42.com", "user1@email.com"] - - -URI = "/v1/user-risk-profiles" - - -@pytest.fixture -def mock_get_all_response(mocker): - return create_mock_response(mocker, GET_ALL_RESPONSE) - - -@pytest.fixture -def mock_get_all_response_empty(mocker): - return create_mock_response(mocker, GET_ALL_RESPONSE_EMPTY) - - -@pytest.fixture -def mock_not_found_error(mocker): - return create_mock_error(Py42NotFoundError, mocker, "Not Found Error Msg") - - -class TestUserRiskProfileService: - def test_get_by_id_calls_get_with_expected_params(self, mock_connection): - user_risk_profile_service = UserRiskProfileService(mock_connection) - user_risk_profile_service.get_by_id(USER_ID) - mock_connection.get.assert_called_once_with(f"{URI}/{USER_ID}") - - def test_get_by_id_raises_py42_not_found_when_id_not_found( - self, mock_connection, mock_not_found_error - ): - user_risk_profile_service = UserRiskProfileService(mock_connection) - mock_connection.get.side_effect = mock_not_found_error - with pytest.raises(Py42NotFoundError) as err: - user_risk_profile_service.get_by_id("fake-id") - - assert ( - err.value.args[0] - == "User risk profile for user with the ID 'fake-id' not found." 
- ) - - def test_get_by_username_calls_get_with_expected_params( - self, mock_connection, mocker - ): - requests_response = mocker.MagicMock(spec=Response) - requests_response.text = json.dumps(GET_ALL_RESPONSE_POPULATED) - py42_response = Py42Response(requests_response) - mock_connection.get.return_value = py42_response - - user_risk_profile_service = UserRiskProfileService(mock_connection) - user_risk_profile_service.get_by_username(USERNAME) - mock_connection.get.assert_called_with(f"{URI}/{USER_ID}") - - def test_update_calls_patch_with_expected_params_when_all_fields_provided( - self, mock_connection - ): - user_risk_profile_service = UserRiskProfileService(mock_connection) - paths = ["startDate", "endDate", "notes"] - user_risk_profile_service.update( - USER_ID, - start_date="2010-07-01", - end_date="2022-01-04", - notes="this is a test note.", - ) - params = {"paths": ", ".join(paths)} - data = { - "endDate": {"day": 4, "month": 1, "year": 2022}, - "notes": "this is a test note.", - "startDate": {"day": 1, "month": 7, "year": 2010}, - } - mock_connection.patch.assert_called_once_with( - f"{URI}/{USER_ID}", json=data, params=params - ) - - def test_update_calls_patch_with_expected_params_when_datetime_provided( - self, mock_connection - ): - user_risk_profile_service = UserRiskProfileService(mock_connection) - paths = ["startDate", "endDate", "notes"] - user_risk_profile_service.update( - USER_ID, - start_date=date(2010, 7, 1), - end_date=datetime(2022, 1, 4), - notes="this is a test note.", - ) - params = {"paths": ", ".join(paths)} - data = { - "endDate": {"day": 4, "month": 1, "year": 2022}, - "notes": "this is a test note.", - "startDate": {"day": 1, "month": 7, "year": 2010}, - } - mock_connection.patch.assert_called_once_with( - f"{URI}/{USER_ID}", json=data, params=params - ) - - def test_update_calls_patch_with_expected_params_when_empty_strings_provided( - self, mock_connection - ): - user_risk_profile_service = UserRiskProfileService(mock_connection) - paths = ["startDate", "endDate", "notes"] - user_risk_profile_service.update(USER_ID, end_date="", start_date="", notes="") - params = {"paths": ", ".join(paths)} - data = { - "endDate": None, - "notes": None, - "startDate": None, - } - mock_connection.patch.assert_called_once_with( - f"{URI}/{USER_ID}", json=data, params=params - ) - - def test_get_page_calls_get_with_expected_params(self, mock_connection): - user_risk_profile_service = UserRiskProfileService(mock_connection) - user_risk_profile_service.get_page(page_num=1, page_size=10) - data = { - "page": 1, - "page_size": 10, - "manager_id": None, - "title": None, - "division": None, - "department": None, - "employment_type": None, - "country": None, - "region": None, - "locality": None, - "active": None, - "deleted": None, - "support_user": None, - } - mock_connection.get.assert_called_once_with(URI, params=data) - - def test_get_page_calls_get_with_optional_params(self, mock_connection): - user_risk_profile_service = UserRiskProfileService(mock_connection) - user_risk_profile_service.get_page( - page_num=1, - page_size=10, - manager_id="manager-1", - title="engineer", - division="division", - department="engineering", - employment_type="full-time", - country="usa", - region="midwest", - locality="local", - active=True, - deleted=False, - support_user=False, - ) - data = { - "page": 1, - "page_size": 10, - "manager_id": "manager-1", - "title": "engineer", - "division": "division", - "department": "engineering", - "employment_type": "full-time", - "country": "usa", - 
"region": "midwest", - "locality": "local", - "active": True, - "deleted": False, - "support_user": False, - } - mock_connection.get.assert_called_once_with(URI, params=data) - - def test_get_all_calls_get_expected_number_of_times( - self, mock_connection, mock_get_all_response, mock_get_all_response_empty - ): - user_risk_profile_service = UserRiskProfileService(mock_connection) - mock_connection.get.side_effect = [ - mock_get_all_response, - mock_get_all_response, - mock_get_all_response_empty, - ] - - py42.settings.items_per_page = 1 - for _ in user_risk_profile_service.get_all(): - pass - - py42.settings.items_per_page = 500 - assert mock_connection.get.call_count == 3 - - def test_get_all_calls_get_with_expected_uri_and_params( - self, mock_connection, mock_get_all_response_empty - ): - user_risk_profile_service = UserRiskProfileService(mock_connection) - mock_connection.get.side_effect = [mock_get_all_response_empty] - - for _ in user_risk_profile_service.get_all( - manager_id="manager-1", - title="engineer", - division="division", - department="engineering", - employment_type="full-time", - country="usa", - region="midwest", - locality="local", - active=True, - deleted=False, - support_user=False, - ): - pass - - data = { - "page": 1, - "page_size": 500, - "manager_id": "manager-1", - "title": "engineer", - "division": "division", - "department": "engineering", - "employment_type": "full-time", - "country": "usa", - "region": "midwest", - "locality": "local", - "active": True, - "deleted": False, - "support_user": False, - } - mock_connection.get.assert_called_once_with(URI, params=data) - - def test_add_cloud_aliases_calls_post_with_expected_params(self, mock_connection): - user_risk_profile_service = UserRiskProfileService(mock_connection) - user_risk_profile_service.add_cloud_aliases(USER_ID, CLOUD_ALIASES) - data = {"cloudAliases": CLOUD_ALIASES, "userId": USER_ID} - mock_connection.post.assert_called_once_with( - f"{URI}/{USER_ID}/add-cloud-aliases", json=data - ) - - def test_add_cloud_aliases_raises_py42_error_when_id_not_found( - self, mock_connection, mock_not_found_error - ): - user_risk_profile_service = UserRiskProfileService(mock_connection) - mock_connection.post.side_effect = mock_not_found_error - with pytest.raises(Py42NotFoundError) as err: - user_risk_profile_service.add_cloud_aliases("fake-id", "cloud-alias") - - assert ( - err.value.args[0] - == "User risk profile for user with the ID 'fake-id' not found." - ) - - def test_add_cloud_aliases_raises_py42_error_when_alias_limit_reached( - self, mock_connection, mocker - ): - user_risk_profile_service = UserRiskProfileService(mock_connection) - mock_alias_limit_error = create_mock_error( - Py42BadRequestError, - mocker, - "Cloud usernames must be less than or equal to 2", - ) - mock_connection.post.side_effect = mock_alias_limit_error - with pytest.raises(Py42BadRequestError) as err: - user_risk_profile_service.add_cloud_aliases( - "fake-id", ["too", "many", "aliases"] - ) - - assert ( - err.value.args[0] - == "Cloud alias limit exceeded. A max of 2 cloud aliases are allowed." 
- ) - - def test_delete_cloud_aliases_calls_post_with_expected_params( - self, mock_connection - ): - user_risk_profile_service = UserRiskProfileService(mock_connection) - user_risk_profile_service.delete_cloud_aliases(USER_ID, CLOUD_ALIASES) - data = {"cloudAliases": CLOUD_ALIASES, "userId": USER_ID} - mock_connection.post.assert_called_once_with( - f"{URI}/{USER_ID}/delete-cloud-aliases", json=data - ) - - def test_delete_cloud_aliases_raises_py42_error_when_id_not_found( - self, mock_connection, mock_not_found_error - ): - user_risk_profile_service = UserRiskProfileService(mock_connection) - mock_connection.post.side_effect = mock_not_found_error - with pytest.raises(Py42NotFoundError) as err: - user_risk_profile_service.delete_cloud_aliases("fake-id", "cloud-alias") - - assert ( - err.value.args[0] - == "User risk profile for user with the ID 'fake-id' not found." - ) diff --git a/tests/services/test_users.py b/tests/services/test_users.py index 9dcd416ba..46971edcb 100644 --- a/tests/services/test_users.py +++ b/tests/services/test_users.py @@ -5,18 +5,18 @@ from tests.conftest import create_mock_error from tests.conftest import create_mock_response -import py42.settings -from py42.exceptions import Py42ActiveLegalHoldError -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42InternalServerError -from py42.exceptions import Py42InvalidEmailError -from py42.exceptions import Py42InvalidPasswordError -from py42.exceptions import Py42InvalidUsernameError -from py42.exceptions import Py42NotFoundError -from py42.exceptions import Py42OrgNotFoundError -from py42.exceptions import Py42UserAlreadyExistsError -from py42.exceptions import Py42UsernameMustBeEmailError -from py42.services.users import UserService +import pycpg.settings +from pycpg.exceptions import PycpgActiveLegalHoldError +from pycpg.exceptions import PycpgBadRequestError +from pycpg.exceptions import PycpgInternalServerError +from pycpg.exceptions import PycpgInvalidEmailError +from pycpg.exceptions import PycpgInvalidPasswordError +from pycpg.exceptions import PycpgInvalidUsernameError +from pycpg.exceptions import PycpgNotFoundError +from pycpg.exceptions import PycpgOrgNotFoundError +from pycpg.exceptions import PycpgUserAlreadyExistsError +from pycpg.exceptions import PycpgUsernameMustBeEmailError +from pycpg.services.users import UserService USER_URI = "/api/v1/User" USER_URI_V3 = "/api/v3/users" @@ -44,8 +44,8 @@ "userId": 12345, "userUid": TEST_USER_UID, "status": "Active", - "username": "test@code42.com", - "email": "test@code42.com", + "username": "test@crashplan.com", + "email": "test@crashplan.com", "firstName": "test", "lastName": "mctest", } @@ -127,36 +127,36 @@ def put_api_mock_response(self, mocker): @pytest.fixture def internal_server_error(self, mocker): - return create_mock_error(Py42InternalServerError, mocker, "") + return create_mock_error(PycpgInternalServerError, mocker, "") @pytest.fixture def user_duplicate_error_response(self, mocker): return create_mock_error( - Py42InternalServerError, mocker, MOCK_USER_DUPLICATE_ERROR_TEXT + PycpgInternalServerError, mocker, MOCK_USER_DUPLICATE_ERROR_TEXT ) @pytest.fixture def username_must_be_email_error_response(self, mocker): return create_mock_error( - Py42InternalServerError, mocker, MOCK_USERNAME_MUST_BE_EMAIL_TEXT + PycpgInternalServerError, mocker, MOCK_USERNAME_MUST_BE_EMAIL_TEXT ) @pytest.fixture def invalid_email_error_response(self, mocker): return create_mock_error( - Py42InternalServerError, mocker, 
MOCK_INVALID_EMAIL_TEXT + PycpgInternalServerError, mocker, MOCK_INVALID_EMAIL_TEXT ) @pytest.fixture def invalid_password_error_response(self, mocker): return create_mock_error( - Py42InternalServerError, mocker, MOCK_INVALID_PASSWORD_TEXT + PycpgInternalServerError, mocker, MOCK_INVALID_PASSWORD_TEXT ) @pytest.fixture def invalid_username_error_response(self, mocker): return create_mock_error( - Py42InternalServerError, mocker, MOCK_INVALID_USERNAME_TEXT + PycpgInternalServerError, mocker, MOCK_INVALID_USERNAME_TEXT ) def test_create_user_calls_post_with_expected_url_and_params( @@ -194,7 +194,7 @@ def test_create_user_calls_post_and_returns_user_duplicate_error( password = "password" name = "TESTNAME" note = "Test Note" - with pytest.raises(Py42UserAlreadyExistsError): + with pytest.raises(PycpgUserAlreadyExistsError): user_service.create_user( org_uid, username, username, password, name, name, note ) @@ -204,7 +204,7 @@ def test_create_user_when_get_unhandled_internal_server_error_raises_base_error( ): user_service = UserService(mock_connection) mock_connection.post.side_effect = internal_server_error - with pytest.raises(Py42InternalServerError): + with pytest.raises(PycpgInternalServerError): user_service.create_user("123", "123@example.com", "123@example.com") def test_get_all_calls_get_with_uri_and_params( @@ -240,7 +240,7 @@ def test_get_user_by_id_calls_get_with_uri_and_params( def test_get_all_calls_get_expected_number_of_times( self, mock_connection, mock_get_users_response, mock_get_users_empty_response ): - py42.settings.items_per_page = 1 + pycpg.settings.items_per_page = 1 service = UserService(mock_connection) mock_connection.get.side_effect = [ mock_get_users_response, @@ -249,7 +249,7 @@ def test_get_all_calls_get_expected_number_of_times( ] for _ in service.get_all(): pass - py42.settings.items_per_page = 500 + pycpg.settings.items_per_page = 500 assert mock_connection.get.call_count == 3 def test_get_scim_data_by_uid_calls_get_with_expected_uri_and_params( @@ -267,7 +267,7 @@ def test_get_available_roles_calls_get_with_expected_uri(self, mock_connection): mock_connection.get.assert_called_once_with(uri) @patch.object( - py42.services.users.UserService, + pycpg.services.users.UserService, "_get_user_uid_by_id", return_value=TEST_USER_UID, ) @@ -284,7 +284,7 @@ def test_get_role_ids_calls_get_roles_and_returns_role_ids( user_id = 12345 with patch.object( - py42.services.users.UserService, + pycpg.services.users.UserService, "get_roles", return_value=mock_get_roles_response, ) as mock_get_roles: @@ -295,7 +295,7 @@ def test_get_role_ids_calls_get_roles_and_returns_role_ids( assert role_ids == ["desktop-user", "proe-user", "customer-cloud-admin"] @patch.object( - py42.services.users.UserService, + pycpg.services.users.UserService, "_get_user_uid_by_id", return_value=TEST_USER_UID, ) @@ -388,11 +388,11 @@ def test_get_page_when_org_not_found_raises_expected_error( ): text = '[{"name":"SYSTEM","description":"Organization was not found"}]' mock_connection.get.side_effect = create_mock_error( - Py42BadRequestError, mocker, text + PycpgBadRequestError, mocker, text ) service = UserService(mock_connection) - with pytest.raises(Py42OrgNotFoundError) as err: + with pytest.raises(PycpgOrgNotFoundError) as err: service.get_page(1, org_uid="TestOrgUid") assert "The organization with UID 'TestOrgUid' was not found." 
in str(err.value) @@ -400,21 +400,21 @@ def test_get_page_when_org_not_found_raises_expected_error( def test_get_page_when_bad_request_raises(self, mocker, mock_connection): mock_connection.get.side_effect = create_mock_error( - Py42BadRequestError, mocker, "BAD REQUEST" + PycpgBadRequestError, mocker, "BAD REQUEST" ) service = UserService(mock_connection) - with pytest.raises(Py42BadRequestError): + with pytest.raises(PycpgBadRequestError): service.get_page(1, org_uid="TestOrgUid") def test_deactivate_when_user_in_legal_hold_raises_active_legal_hold_error( self, mocker, mock_connection ): mock_connection.post.side_effect = create_mock_error( - Py42BadRequestError, mocker, "ACTIVE_LEGAL_HOLD" + PycpgBadRequestError, mocker, "ACTIVE_LEGAL_HOLD" ) client = UserService(mock_connection) - with pytest.raises(Py42ActiveLegalHoldError) as err: + with pytest.raises(PycpgActiveLegalHoldError) as err: client.deactivate(1234) expected = ( @@ -485,7 +485,7 @@ def test_update_user_when_get_internal_server_error_containing_username_must_be_ ): user_service = UserService(mock_connection) mock_connection.put.side_effect = username_must_be_email_error_response - with pytest.raises(Py42UsernameMustBeEmailError) as err: + with pytest.raises(PycpgUsernameMustBeEmailError) as err: user_service.update_user("123", username="foo") assert str(err.value) == "Username must be an email address." @@ -495,7 +495,7 @@ def test_update_user_when_get_internal_server_error_containing_email_invalid_rai ): user_service = UserService(mock_connection) mock_connection.put.side_effect = invalid_email_error_response - with pytest.raises(Py42InvalidEmailError) as err: + with pytest.raises(PycpgInvalidEmailError) as err: user_service.update_user("123", username="foo", email="test") assert "'test' is not a valid email." in str(err.value) @@ -506,7 +506,7 @@ def test_update_user_when_get_internal_server_error_containing_password_invalid_ ): user_service = UserService(mock_connection) mock_connection.put.side_effect = invalid_password_error_response - with pytest.raises(Py42InvalidPasswordError) as err: + with pytest.raises(PycpgInvalidPasswordError) as err: user_service.update_user("123", username="foo", password="test") assert str(err.value) == "Invalid password." @@ -516,7 +516,7 @@ def test_update_user_when_get_internal_server_error_containing_username_invalid_ ): user_service = UserService(mock_connection) mock_connection.put.side_effect = invalid_username_error_response - with pytest.raises(Py42InvalidUsernameError) as err: + with pytest.raises(PycpgInvalidUsernameError) as err: user_service.update_user("123", username="foo") assert str(err.value) == "Invalid username." 
@@ -526,11 +526,11 @@ def test_update_user_when_get_unhandled_internal_server_error_raises_base_error( ): user_service = UserService(mock_connection) mock_connection.put.side_effect = internal_server_error - with pytest.raises(Py42InternalServerError): + with pytest.raises(PycpgInternalServerError): user_service.update_user("123", username="foo") @patch.object( - py42.services.users.UserService, + pycpg.services.users.UserService, "_get_user_uid_by_id", return_value=TEST_USER_UID, ) @@ -542,7 +542,7 @@ def test_block_calls_post_with_expected_uri(self, mock_connection): mock_connection.post.assert_called_once_with(uri) @patch.object( - py42.services.users.UserService, + pycpg.services.users.UserService, "_get_user_uid_by_id", return_value=TEST_USER_UID, ) @@ -554,7 +554,7 @@ def test_unblock_calls_post_with_expected_uri(self, mock_connection): mock_connection.post.assert_called_once_with(uri) @patch.object( - py42.services.users.UserService, + pycpg.services.users.UserService, "_get_user_uid_by_id", return_value=TEST_USER_UID, ) @@ -567,7 +567,7 @@ def test_deactivate_calls_post_with_expected_url_and_params(self, mock_connectio mock_connection.post.assert_called_once_with(uri, json=data) @patch.object( - py42.services.users.UserService, + pycpg.services.users.UserService, "_get_user_uid_by_id", return_value=TEST_USER_UID, ) @@ -580,7 +580,7 @@ def test_reactivate_calls_post_with_expected_url_and_params(self, mock_connectio mock_connection.post.assert_called_once_with(uri, json=data) @patch.object( - py42.services.users.UserService, + pycpg.services.users.UserService, "_get_user_uid_by_id", return_value=TEST_USER_UID, ) @@ -617,11 +617,11 @@ def test_get_current_raises_error_about_api_clients_if_user_not_found( ): service = UserService(mock_connection) mock_connection.get.side_effect = create_mock_error( - Py42NotFoundError, + PycpgNotFoundError, mocker, """[{"name":"SYSTEM","description":"User not found"}]""", ) - with pytest.raises(Py42NotFoundError) as err: + with pytest.raises(PycpgNotFoundError) as err: service.get_current() assert ( "User not found. Please be aware that this method is incompatible with api client authentication." 
diff --git a/tests/services/test_watchlists.py b/tests/services/test_watchlists.py deleted file mode 100644 index 8b5c2cda0..000000000 --- a/tests/services/test_watchlists.py +++ /dev/null @@ -1,495 +0,0 @@ -import json - -import pytest -from requests import Response -from tests.conftest import create_mock_error -from tests.conftest import create_mock_response - -import py42.settings -from py42.clients.watchlists import WatchlistType -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42Error -from py42.exceptions import Py42InvalidWatchlistType -from py42.exceptions import Py42NotFoundError -from py42.exceptions import Py42WatchlistNotFound -from py42.exceptions import Py42WatchlistOrUserNotFound -from py42.response import Py42Response -from py42.services.watchlists import WatchlistsService - -WATCHLIST_ID = "42-code-123" -WATCHLIST_TYPE = "DEPARTING_EMPLOYEE" -GET_ALL_RESPONSE = '{"watchlists":["test"], "totalCount":1}' -GET_ALL_RESPONSE_EMPTY = '{"watchlists": [], "totalCount":0}' -GET_ALL_INCLUDED_USERS_RESPONSE = '{"includedUsers":["test"], "totalCount":1}' -GET_ALL_INCLUDED_USERS_RESPONSE_EMPTY = '{"includedUsers": [], "totalCount":0}' -GET_ALL_MEMBERS_RESPONSE = '{"watchlistMembers":["test"], "totalCount":2}' -GET_ALL_MEMBERS_RESPONSE_EMPTY = '{"watchlistMembers": [], "totalCount":0}' -WATCHLIST_RESPONSE = { - "listType": WATCHLIST_TYPE, - "stats": {"includedUsersCount": 0}, - "tenantId": "1-tenant", - "watchlistId": WATCHLIST_ID, -} - -URI = "/v1/watchlists" - - -@pytest.fixture -def mock_not_found_error(mocker): - return create_mock_error(Py42NotFoundError, mocker, "Not Found Error Msg") - - -@pytest.fixture -def mock_user_not_found_error(mocker): - return create_mock_error(Py42BadRequestError, mocker, "User not found") - - -@pytest.fixture -def mock_watchlist_not_found_error(mocker): - return create_mock_error(Py42BadRequestError, mocker, "Watchlist not found") - - -@pytest.fixture -def mock_get_all_included_users_response(mocker): - return create_mock_response(mocker, GET_ALL_INCLUDED_USERS_RESPONSE) - - -@pytest.fixture -def mock_get_all_included_users_response_empty(mocker): - return create_mock_response(mocker, GET_ALL_INCLUDED_USERS_RESPONSE_EMPTY) - - -@pytest.fixture -def mock_get_all_members_response(mocker): - return create_mock_response(mocker, GET_ALL_MEMBERS_RESPONSE) - - -@pytest.fixture -def mock_get_all_members_response_empty(mocker): - return create_mock_response(mocker, GET_ALL_MEMBERS_RESPONSE_EMPTY) - - -@pytest.fixture -def mock_get_all_response(mocker): - return create_mock_response(mocker, GET_ALL_RESPONSE) - - -@pytest.fixture -def mock_get_all_response_empty(mocker): - return create_mock_response(mocker, GET_ALL_RESPONSE_EMPTY) - - -class TestWatchlistsService: - def test_get_calls_get_with_expected_params(self, mock_connection): - watchlists_service = WatchlistsService(mock_connection) - watchlists_service.get(WATCHLIST_ID) - mock_connection.get.assert_called_once_with(f"{URI}/{WATCHLIST_ID}") - - def test_get_raises_py42_not_found_when_id_not_found( - self, mock_connection, mock_not_found_error - ): - watchlists_service = WatchlistsService(mock_connection) - mock_connection.get.side_effect = mock_not_found_error - with pytest.raises(Py42WatchlistNotFound) as err: - watchlists_service.get("invalid-id") - - assert err.value.args[0] == "Watchlist ID 'invalid-id' not found." 
- - def test_delete_calls_delete_with_expected_params_and_updates_dict( - self, mock_connection, mocker - ): - watchlists_service = WatchlistsService(mock_connection) - watchlists_service._watchlist_type_id_map = {} - watchlists_service.watchlist_type_id_map[WATCHLIST_TYPE] = WATCHLIST_ID - - # mock delete response - requests_response = mocker.MagicMock(spec=Response) - requests_response.text = json.dumps(WATCHLIST_RESPONSE) - type(requests_response).status_code = mocker.PropertyMock(return_value=200) - mock_connection.delete.return_value = Py42Response(requests_response) - - watchlists_service.delete(WATCHLIST_ID) - mock_connection.delete.assert_called_once_with(f"{URI}/{WATCHLIST_ID}") - assert WATCHLIST_TYPE not in watchlists_service.watchlist_type_id_map - - def test_delete_raises_py42_not_found_when_id_not_found( - self, mock_connection, mock_not_found_error - ): - watchlists_service = WatchlistsService(mock_connection) - mock_connection.delete.side_effect = mock_not_found_error - with pytest.raises(Py42WatchlistNotFound) as err: - watchlists_service.delete("invalid-id") - - assert err.value.args[0] == "Watchlist ID 'invalid-id' not found." - - def test_get_page_calls_get_with_expected_params(self, mock_connection): - watchlists_service = WatchlistsService(mock_connection) - watchlists_service.get_page() - mock_connection.get.assert_called_once_with( - URI, params={"page": 1, "page_size": None} - ) - - def test_get_page_calls_get_with_optional_params(self, mock_connection): - watchlists_service = WatchlistsService(mock_connection) - watchlists_service.get_page(page_num=1, page_size=10) - data = { - "page": 1, - "page_size": 10, - } - mock_connection.get.assert_called_once_with(URI, params=data) - - def test_get_all_calls_get_page_expected_number_of_times( - self, mock_connection, mock_get_all_response, mock_get_all_response_empty - ): - watchlists_service = WatchlistsService(mock_connection) - mock_connection.get.side_effect = [ - mock_get_all_response, - mock_get_all_response, - mock_get_all_response_empty, - ] - - py42.settings.items_per_page = 1 - for _ in watchlists_service.get_all(): - pass - py42.settings.items_per_page = 500 - - assert mock_connection.get.call_count == 3 - - def test_create_calls_post_with_expected_params_and_updates_dictionary( - self, mocker, mock_connection - ): - watchlists_service = WatchlistsService(mock_connection) - watchlists_service._watchlist_type_id_map = {} - - # mock create call response - requests_response = mocker.MagicMock(spec=Response) - requests_response.text = json.dumps(WATCHLIST_RESPONSE) - mock_connection.post.return_value = Py42Response(requests_response) - - watchlists_service.create(WatchlistType.DEPARTING) - - mock_connection.post.assert_called_once_with( - URI, - json={ - "watchlistType": "DEPARTING_EMPLOYEE", - "title": None, - "description": None, - }, - ) - assert ( - watchlists_service.watchlist_type_id_map["DEPARTING_EMPLOYEE"] - == WATCHLIST_ID - ) - - def test_create_raises_py42_invalid_type_when_invalid_watchlist_type( - self, mock_connection, mocker - ): - watchlists_service = WatchlistsService(mock_connection) - mock_invalid_type_error = create_mock_error( - Py42BadRequestError, - mocker, - "Error converting value \\\"INVALID-WATCHLIST-TYPE\\\" to type 'WatchlistSdk.Model.WatchlistType'.", - ) - mock_connection.post.side_effect = mock_invalid_type_error - with pytest.raises(Py42InvalidWatchlistType) as err: - watchlists_service.create("INVALID-WATCHLIST-TYPE") - - assert ( - "'INVALID-WATCHLIST-TYPE' cannot be converted to 
a valid watchlist type" - in err.value.args[0] - ) - - def test_get_page_included_users_calls_get_with_expected_params( - self, mock_connection - ): - watchlists_service = WatchlistsService(mock_connection) - watchlists_service.get_page_included_users(WATCHLIST_ID) - data = {"page": 1, "page_size": None} - mock_connection.get.assert_called_once_with( - f"{URI}/{WATCHLIST_ID}/included-users", params=data - ) - - def test_get_page_included_users_calls_get_with_optional_params( - self, mock_connection - ): - - watchlists_service = WatchlistsService(mock_connection) - watchlists_service.get_page_included_users(WATCHLIST_ID, 1, 10) - data = { - "page": 1, - "page_size": 10, - } - mock_connection.get.assert_called_once_with( - f"{URI}/{WATCHLIST_ID}/included-users", params=data - ) - - def test_get_all_included_users_calls_get_page_expected_number_of_times( - self, - mock_connection, - mock_get_all_included_users_response, - mock_get_all_included_users_response_empty, - ): - watchlists_service = WatchlistsService(mock_connection) - mock_connection.get.side_effect = [ - mock_get_all_included_users_response, - mock_get_all_included_users_response, - mock_get_all_included_users_response_empty, - ] - - py42.settings.items_per_page = 1 - for _ in watchlists_service.get_all_included_users(watchlist_id=WATCHLIST_ID): - pass - py42.settings.items_per_page = 500 - - assert mock_connection.get.call_count == 3 - - def test_add_included_users_by_watchlist_id_calls_post_with_expected_params( - self, mock_connection - ): - watchlists_service = WatchlistsService(mock_connection) - user_ids = ["user@email.com", "test@code42.com"] - watchlists_service.add_included_users_by_watchlist_id( - watchlist_id=WATCHLIST_ID, user_ids=user_ids - ) - data = {"userIds": user_ids, "watchlistId": WATCHLIST_ID} - mock_connection.post.assert_called_once_with( - f"{URI}/{WATCHLIST_ID}/included-users/add", json=data - ) - - def test_add_included_users_by_watchlist_id_raises_py42_not_found_when_id_not_found( - self, mock_connection, mock_watchlist_not_found_error - ): - watchlists_service = WatchlistsService(mock_connection) - mock_connection.post.side_effect = mock_watchlist_not_found_error - with pytest.raises(Py42WatchlistNotFound) as err: - watchlists_service.add_included_users_by_watchlist_id( - watchlist_id="invalid-id", user_ids=["user1", "user2"] - ) - - assert err.value.args[0] == "Watchlist ID 'invalid-id' not found." 
- - def test_add_included_users_by_watchlist_id_raises_py42_not_found_when_user_id_not_found( - self, mock_connection, mock_user_not_found_error - ): - watchlists_service = WatchlistsService(mock_connection) - mock_connection.post.side_effect = mock_user_not_found_error - with pytest.raises(Py42NotFoundError) as err: - watchlists_service.add_included_users_by_watchlist_id( - watchlist_id="invalid-id", user_ids=["user1", "user2"] - ) - - assert err.value.args[0] == "User not found" - - def test_add_included_users_by_watchlist_type_calls_add_with_expected_params_when_watchlist_exists( - self, mock_connection - ): - watchlists_service = WatchlistsService(mock_connection) - user_ids = ["user@email.com", "test@code42.com"] - - # set watchlist dict - watchlists_service._watchlist_type_id_map = {} - watchlists_service.watchlist_type_id_map[WATCHLIST_TYPE] = WATCHLIST_ID - - watchlists_service.add_included_users_by_watchlist_type( - user_ids, WATCHLIST_TYPE - ) - data = {"userIds": user_ids, "watchlistId": WATCHLIST_ID} - mock_connection.post.assert_called_once_with( - f"{URI}/{WATCHLIST_ID}/included-users/add", json=data - ) - - def test_add_included_users_by_watchlist_type_creates_watchlist_and_calls_with_expected_params_when_watchlist_does_not_exist( - self, mock_connection, mocker - ): - watchlists_service = WatchlistsService(mock_connection) - user_ids = ["user@email.com", "test@code42.com"] - watchlists_service._watchlist_type_id_map = {} - - # mock create call response - requests_response = mocker.MagicMock(spec=Response) - requests_response.text = json.dumps(WATCHLIST_RESPONSE) - mock_connection.post.return_value = Py42Response(requests_response) - - watchlists_service.add_included_users_by_watchlist_type( - user_ids, WATCHLIST_TYPE - ) - - assert ( - watchlists_service.watchlist_type_id_map["DEPARTING_EMPLOYEE"] - == WATCHLIST_ID - ) - data = {"userIds": user_ids, "watchlistId": WATCHLIST_ID} - mock_connection.post.assert_called_with( - f"{URI}/{WATCHLIST_ID}/included-users/add", json=data - ) - - def test_add_included_users_by_watchlist_type_raises_py42_not_found_when_user_id_not_found( - self, mock_connection, mock_user_not_found_error - ): - watchlists_service = WatchlistsService(mock_connection) - - # set watchlist dict - watchlists_service._watchlist_type_id_map = {} - watchlists_service.watchlist_type_id_map[WATCHLIST_TYPE] = WATCHLIST_ID - - mock_connection.post.side_effect = mock_user_not_found_error - with pytest.raises(Py42NotFoundError) as err: - watchlists_service.add_included_users_by_watchlist_type( - user_ids=["user1", "user2"], watchlist_type=WATCHLIST_TYPE - ) - - assert err.value.args[0] == "User not found" - - def test_delete_included_users_by_watchlist_id_calls_post_with_expected_params_and_updates_dict( - self, mock_connection - ): - watchlists_service = WatchlistsService(mock_connection) - user_ids = ["user@email.com", "test@code42.com"] - watchlists_service.delete_included_users_by_watchlist_id( - watchlist_id=WATCHLIST_ID, user_ids=user_ids - ) - data = {"userIds": user_ids, "watchlistId": WATCHLIST_ID} - mock_connection.post.assert_called_once_with( - f"{URI}/{WATCHLIST_ID}/included-users/delete", json=data - ) - - def test_delete_included_users_by_watchlist_id_raises_py42_not_found_when_id_not_found( - self, mock_connection, mock_watchlist_not_found_error - ): - watchlists_service = WatchlistsService(mock_connection) - mock_connection.post.side_effect = mock_watchlist_not_found_error - with pytest.raises(Py42WatchlistNotFound) as err: - 
watchlists_service.delete_included_users_by_watchlist_id( - watchlist_id="invalid-id", user_ids=["user1", "user2"] - ) - - assert err.value.args[0] == "Watchlist ID 'invalid-id' not found." - - def test_delete_included_users_by_watchlist_id_raises_py42_not_found_when_user_id_not_found( - self, mock_connection, mock_user_not_found_error - ): - watchlists_service = WatchlistsService(mock_connection) - mock_connection.post.side_effect = mock_user_not_found_error - with pytest.raises(Py42NotFoundError) as err: - watchlists_service.delete_included_users_by_watchlist_id( - watchlist_id="invalid-id", user_ids=["user1", "user2"] - ) - - assert err.value.args[0] == "User not found" - - def test_delete_included_users_by_watchlist_type_calls_post_with_expected_params_when_watchlist_exists( - self, mock_connection - ): - watchlists_service = WatchlistsService(mock_connection) - user_ids = ["user@email.com", "test@code42.com"] - - # set watchlist dict - watchlists_service._watchlist_type_id_map = {} - watchlists_service.watchlist_type_id_map[WATCHLIST_TYPE] = WATCHLIST_ID - - watchlists_service.delete_included_users_by_watchlist_type( - user_ids, WATCHLIST_TYPE - ) - - data = {"userIds": user_ids, "watchlistId": WATCHLIST_ID} - mock_connection.post.assert_called_once_with( - f"{URI}/{WATCHLIST_ID}/included-users/delete", json=data - ) - - def test_delete_included_users_by_watchlist_type_raises_error_when_watchlist_does_not_exist( - self, mock_connection - ): - watchlists_service = WatchlistsService(mock_connection) - user_ids = ["user@email.com", "test@code42.com"] - - with pytest.raises(Py42Error) as err: - watchlists_service.delete_included_users_by_watchlist_type( - user_ids, WATCHLIST_TYPE - ) - - assert ( - err.value.args[0] == f"Couldn't find watchlist of type:'{WATCHLIST_TYPE}'." 
- ) - - def test_delete_included_users_by_watchlist_type_raises_py42_not_found_when_user_id_not_found( - self, mock_connection, mock_user_not_found_error - ): - watchlists_service = WatchlistsService(mock_connection) - - # set watchlist dict - watchlists_service._watchlist_type_id_map = {} - watchlists_service.watchlist_type_id_map[WATCHLIST_TYPE] = WATCHLIST_ID - - mock_connection.post.side_effect = mock_user_not_found_error - with pytest.raises(Py42NotFoundError) as err: - watchlists_service.delete_included_users_by_watchlist_type( - user_ids=["user1", "user2"], watchlist_type=WATCHLIST_TYPE - ) - - assert err.value.args[0] == "User not found" - - def test_get_page_members_calls_get_with_expected_params(self, mock_connection): - watchlists_service = WatchlistsService(mock_connection) - watchlists_service.get_page_watchlist_members(WATCHLIST_ID) - data = { - "page": 1, - "page_size": None, - } - mock_connection.get.assert_called_once_with( - f"{URI}/{WATCHLIST_ID}/members", params=data - ) - - def test_get_page_members_calls_get_with_optional_params(self, mock_connection): - watchlists_service = WatchlistsService(mock_connection) - watchlists_service.get_page_watchlist_members( - WATCHLIST_ID, page_num=1, page_size=10 - ) - data = { - "page": 1, - "page_size": 10, - } - mock_connection.get.assert_called_once_with( - f"{URI}/{WATCHLIST_ID}/members", params=data - ) - - def test_get_all_members_calls_get_page_expected_number_of_times( - self, - mock_connection, - mock_get_all_members_response, - mock_get_all_members_response_empty, - ): - watchlists_service = WatchlistsService(mock_connection) - mock_connection.get.side_effect = [ - mock_get_all_members_response, - mock_get_all_members_response, - mock_get_all_members_response_empty, - ] - - py42.settings.items_per_page = 1 - for _ in watchlists_service.get_all_watchlist_members(WATCHLIST_ID): - pass - py42.settings.items_per_page = 500 - - assert mock_connection.get.call_count == 3 - - def test_get_watchlist_member_calls_get_with_expected_params(self, mock_connection): - watchlists_service = WatchlistsService(mock_connection) - watchlists_service.get_watchlist_member(WATCHLIST_ID, "user-42") - mock_connection.get.assert_called_once_with( - f"{URI}/{WATCHLIST_ID}/members/user-42" - ) - - def test_get_watchlist_member_raises_py42_not_found_when_id_not_found( - self, mock_connection, mock_not_found_error - ): - watchlists_service = WatchlistsService(mock_connection) - mock_connection.get.side_effect = mock_not_found_error - with pytest.raises(Py42WatchlistOrUserNotFound) as err: - watchlists_service.get_watchlist_member( - "invalid-watchlist-id", "invalid-user-id" - ) - - assert ( - err.value.args[0] - == "Watchlist ID 'invalid-watchlist-id' or User ID 'invalid-user-id' not found." 
- ) diff --git a/tests/settings/test_debug.py b/tests/settings/test_debug.py index 262840dc2..390fcd851 100644 --- a/tests/settings/test_debug.py +++ b/tests/settings/test_debug.py @@ -2,41 +2,41 @@ import pytest -import py42 -from py42.settings import debug +import pycpg +from pycpg.settings import debug @pytest.fixture def none_enabled(): - py42.settings.debug.level = debug.NONE + pycpg.settings.debug.level = debug.NONE @pytest.fixture def warning_enabled(): - py42.settings.debug.level = logging.WARNING + pycpg.settings.debug.level = logging.WARNING yield warning_enabled - py42.settings.debug.level = debug.NONE + pycpg.settings.debug.level = debug.NONE @pytest.fixture def info_enabled(): - py42.settings.debug.level = debug.INFO + pycpg.settings.debug.level = debug.INFO yield info_enabled - py42.settings.debug.level = debug.NONE + pycpg.settings.debug.level = debug.NONE @pytest.fixture def debug_enabled(): - py42.settings.debug.level = debug.DEBUG + pycpg.settings.debug.level = debug.DEBUG yield debug_enabled - py42.settings.debug.level = debug.NONE + pycpg.settings.debug.level = debug.NONE @pytest.fixture def trace_enabled(): - py42.settings.debug.level = debug.TRACE + pycpg.settings.debug.level = debug.TRACE yield trace_enabled - py42.settings.debug.level = debug.NONE + pycpg.settings.debug.level = debug.NONE test_logger_name = "test" @@ -44,10 +44,10 @@ def trace_enabled(): @pytest.fixture def custom_logger(): - default_logger = py42.settings.debug.logger - py42.settings.debug.logger = logging.getLogger(test_logger_name) + default_logger = pycpg.settings.debug.logger + pycpg.settings.debug.logger = logging.getLogger(test_logger_name) yield custom_logger - py42.settings.debug.logger = default_logger + pycpg.settings.debug.logger = default_logger def test_setting_debug_level_to_warning_sets_default_logger_to_warning(warning_enabled): @@ -95,5 +95,5 @@ def test_setting_debug_level_to_trace_sets_custom_logger_to_debug( def test_debug_settings_class_creates_default_logger(): - assert debug.logger.name == "py42" + assert debug.logger.name == "pycpg" assert debug.level == logging.NOTSET diff --git a/tests/settings/test_settings.py b/tests/settings/test_settings.py index 89a33f265..35c9dab23 100644 --- a/tests/settings/test_settings.py +++ b/tests/settings/test_settings.py @@ -2,10 +2,10 @@ import pytest -import py42.settings as settings -from py42.__version__ import __version__ +import pycpg.settings as settings +from pycpg.__version__ import __version__ -DEFAULT_USER_AGENT_FORMAT = "py42/{0} python/{1}" +DEFAULT_USER_AGENT_FORMAT = "pycpg/{0} python/{1}" @pytest.fixture @@ -13,8 +13,8 @@ def default_user_agent(): python_version = ( f"{sys.version_info[0]}.{sys.version_info[1]}.{sys.version_info[2]}" ) - py42_version = __version__ - default_user_agent = DEFAULT_USER_AGENT_FORMAT.format(py42_version, python_version) + pycpg_version = __version__ + default_user_agent = DEFAULT_USER_AGENT_FORMAT.format(pycpg_version, python_version) return default_user_agent diff --git a/tests/settings/test_util.py b/tests/settings/test_util.py index ed863e1e1..61a86e7d6 100644 --- a/tests/settings/test_util.py +++ b/tests/settings/test_util.py @@ -1,8 +1,8 @@ import pytest from tests.conftest import create_mock_response -import py42.settings as settings -from py42.services.util import get_all_pages +import pycpg.settings as settings +from pycpg.services.util import get_all_pages @pytest.fixture diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index c27503785..26b967ae9 100644 --- 
a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -1,80 +1,80 @@ import pytest from tests.conftest import REQUEST_EXCEPTION_MESSAGE -from py42.exceptions import Py42BadRequestError -from py42.exceptions import Py42ConflictError -from py42.exceptions import Py42ForbiddenError -from py42.exceptions import Py42HTTPError -from py42.exceptions import Py42InternalServerError -from py42.exceptions import Py42NotFoundError -from py42.exceptions import Py42ResponseError -from py42.exceptions import Py42TooManyRequestsError -from py42.exceptions import Py42UnauthorizedError -from py42.exceptions import raise_py42_error +from pycpg.exceptions import PycpgBadRequestError +from pycpg.exceptions import PycpgConflictError +from pycpg.exceptions import PycpgForbiddenError +from pycpg.exceptions import PycpgHTTPError +from pycpg.exceptions import PycpgInternalServerError +from pycpg.exceptions import PycpgNotFoundError +from pycpg.exceptions import PycpgResponseError +from pycpg.exceptions import PycpgTooManyRequestsError +from pycpg.exceptions import PycpgUnauthorizedError +from pycpg.exceptions import raise_pycpg_error -class TestPy42Errors: - def test_raise_py42_error_raises_bad_request_error(self, error_response): +class TestPycpgErrors: + def test_raise_pycpg_error_raises_bad_request_error(self, error_response): error_response.response.status_code = 400 - with pytest.raises(Py42BadRequestError): - raise_py42_error(error_response) + with pytest.raises(PycpgBadRequestError): + raise_pycpg_error(error_response) - def test_raise_py42_error_raises_unauthorized_error(self, error_response): + def test_raise_pycpg_error_raises_unauthorized_error(self, error_response): error_response.response.status_code = 401 - with pytest.raises(Py42UnauthorizedError, match=REQUEST_EXCEPTION_MESSAGE): - raise_py42_error(error_response) + with pytest.raises(PycpgUnauthorizedError, match=REQUEST_EXCEPTION_MESSAGE): + raise_pycpg_error(error_response) - def test_raise_py42_error_raises_forbidden_error(self, error_response): + def test_raise_pycpg_error_raises_forbidden_error(self, error_response): error_response.response.status_code = 403 - with pytest.raises(Py42ForbiddenError): - raise_py42_error(error_response) + with pytest.raises(PycpgForbiddenError): + raise_pycpg_error(error_response) - def test_raise_py42_error_raises_not_found_error(self, error_response): + def test_raise_pycpg_error_raises_not_found_error(self, error_response): error_response.response.status_code = 404 - with pytest.raises(Py42NotFoundError): - raise_py42_error(error_response) + with pytest.raises(PycpgNotFoundError): + raise_pycpg_error(error_response) - def test_raise_py42_error_raises_conflict_error(self, error_response): + def test_raise_pycpg_error_raises_conflict_error(self, error_response): error_response.response.status_code = 409 - with pytest.raises(Py42ConflictError): - raise_py42_error(error_response) + with pytest.raises(PycpgConflictError): + raise_pycpg_error(error_response) - def test_raise_py42_error_raises_internal_server_error(self, error_response): + def test_raise_pycpg_error_raises_internal_server_error(self, error_response): error_response.response.status_code = 500 - with pytest.raises(Py42InternalServerError): - raise_py42_error(error_response) + with pytest.raises(PycpgInternalServerError): + raise_pycpg_error(error_response) - def test_raise_py42_error_raises_py42_http_error(self, error_response): + def test_raise_pycpg_error_raises_pycpg_http_error(self, error_response): error_response.response.status_code = 600 
- with pytest.raises(Py42HTTPError): - raise_py42_error(error_response) + with pytest.raises(PycpgHTTPError): + raise_pycpg_error(error_response) - def test_raise_py42_error_raises_too_many_requests_error(self, error_response): + def test_raise_pycpg_error_raises_too_many_requests_error(self, error_response): error_response.response.status_code = 429 - with pytest.raises(Py42TooManyRequestsError): - raise_py42_error(error_response) + with pytest.raises(PycpgTooManyRequestsError): + raise_pycpg_error(error_response) @pytest.mark.parametrize("status_code", [400, 401, 403, 404, 409, 429, 500, 600]) - def test_raise_py42_http_error_has_correct_response_type( + def test_raise_pycpg_http_error_has_correct_response_type( self, error_response, status_code ): error_response.response.status_code = status_code try: - raise_py42_error(error_response) + raise_pycpg_error(error_response) except Exception as e: assert isinstance(e.response, type(error_response.response)) - def test_raise_py42_error_when_has_unexpected_error_returns_api_error_response( + def test_raise_pycpg_error_when_has_unexpected_error_returns_api_error_response( self, mock_error_response, mocker ): mock_error_response.response.status_code = 410 error_message = '{"error": { "message": "error"}}' mock_error_response.response.text = error_message - mock_method = mocker.patch.object(Py42ResponseError, "__init__", autospec=True) - with pytest.raises(Py42HTTPError): - raise_py42_error(mock_error_response) + mock_method = mocker.patch.object(PycpgResponseError, "__init__", autospec=True) + with pytest.raises(PycpgHTTPError): + raise_pycpg_error(mock_error_response) mock_method.assert_called_with( - Py42HTTPError(mock_error_response), + PycpgHTTPError(mock_error_response), mock_error_response.response, "Failure in HTTP call {}. 
Response content: {}".format( str(mock_error_response), error_message diff --git a/tests/test_response.py b/tests/test_response.py index b6169eafb..7b2d3ea96 100644 --- a/tests/test_response.py +++ b/tests/test_response.py @@ -1,8 +1,8 @@ import pytest from requests import Response -from py42.exceptions import Py42Error -from py42.response import Py42Response +from pycpg.exceptions import PycpgError +from pycpg.response import PycpgResponse JSON_LIST_WITH_DATA_NODE = ( '{"data": {"item_list_key": [{"foo": "foo_val"}, {"bar": "bar_val"}]}}' @@ -16,7 +16,7 @@ PLAIN_TEXT = "TEST_PLAIN_TEXT" -class TestPy42Response: +class TestPycpgResponse: @pytest.fixture def mock_response_list_data_node(self, mocker): mock_response = mocker.MagicMock(spec=Response) @@ -64,93 +64,93 @@ def mock_response_not_json(self, mocker): def test_getitem_returns_list_items_with_data_node( self, mock_response_list_data_node ): - response = Py42Response(mock_response_list_data_node) + response = PycpgResponse(mock_response_list_data_node) assert type(response["item_list_key"]) == list def test_getitem_returns_dict_keys_with_data_node( self, mock_response_dict_data_node ): - response = Py42Response(mock_response_dict_data_node) + response = PycpgResponse(mock_response_dict_data_node) assert type(response["item_list_key"]) == dict def test_getitem_returns_list_items_no_data_node( self, mock_response_list_no_data_node ): - response = Py42Response(mock_response_list_no_data_node) + response = PycpgResponse(mock_response_list_no_data_node) assert type(response["item_list_key"]) == list def test_getitem_returns_dict_keys_no_data_node( self, mock_response_dict_no_data_node ): - response = Py42Response(mock_response_dict_no_data_node) + response = PycpgResponse(mock_response_dict_no_data_node) assert type(response["item_list_key"]) == dict def test_getitem_returns_empty_list_empty_data_node( self, mock_response_dict_empty_data_node ): - response = Py42Response(mock_response_dict_empty_data_node) + response = PycpgResponse(mock_response_dict_empty_data_node) assert response.data == [] def test_setitem_modifies_dict_keys_with_data_node_to_expected_value( self, mock_response_dict_data_node ): - response = Py42Response(mock_response_dict_data_node) + response = PycpgResponse(mock_response_dict_data_node) response["item_list_key"]["foo"] = "newfooval" assert response["item_list_key"]["foo"] == "newfooval" def test_setitem_modifies_dict_keys_with_no_data_node_to_expected_value( self, mock_response_dict_no_data_node ): - response = Py42Response(mock_response_dict_no_data_node) + response = PycpgResponse(mock_response_dict_no_data_node) response["item_list_key"]["foo"] = "newfooval" assert response["item_list_key"]["foo"] == "newfooval" def test_setitem_modifies_list_items_with_data_node_to_expected_value( self, mock_response_list_data_node ): - response = Py42Response(mock_response_list_data_node) + response = PycpgResponse(mock_response_list_data_node) response["item_list_key"][0] = "testmodifylistitem" assert response["item_list_key"][0] == "testmodifylistitem" def test_setitem_modifies_list_items_with_no_data_node_to_expected_value( self, mock_response_list_no_data_node ): - response = Py42Response(mock_response_list_no_data_node) + response = PycpgResponse(mock_response_list_no_data_node) response["item_list_key"][0] = "testmodifylistitem" assert response["item_list_key"][0] == "testmodifylistitem" def test_text_json_no_data_node_returns_raw_json( self, mock_response_list_no_data_node ): - response = 
Py42Response(mock_response_list_no_data_node) + response = PycpgResponse(mock_response_list_no_data_node) assert response.text == JSON_LIST_NO_DATA_NODE def test_raw_text_with_data_node_returns_raw_json_with_data_node( self, mock_response_list_data_node ): - response = Py42Response(mock_response_list_data_node) + response = PycpgResponse(mock_response_list_data_node) assert response.raw_text == JSON_LIST_WITH_DATA_NODE def test_raw_text_no_data_node_returns_raw_json_no_data_node( self, mock_response_not_json ): - response = Py42Response(mock_response_not_json) + response = PycpgResponse(mock_response_not_json) assert response.raw_text == PLAIN_TEXT def test_status_code_returns_expected_value(self, mock_response_not_json): - response = Py42Response(mock_response_not_json) + response = PycpgResponse(mock_response_not_json) assert response.status_code == 200 def test_status_code_returns_expected_url(self, mock_response_not_json): - response = Py42Response(mock_response_not_json) + response = PycpgResponse(mock_response_not_json) assert response.url == "http://www.example.com" def test_headers_returns_request_headers(self, mock_response_not_json): - response = Py42Response(mock_response_not_json) + response = PycpgResponse(mock_response_not_json) assert response.headers == mock_response_not_json.headers def test_iter_content_calls_request_iter_content(self, mock_response_not_json): - response = Py42Response(mock_response_not_json) + response = PycpgResponse(mock_response_not_json) response.iter_content(128, True) mock_response_not_json.iter_content.assert_called_once_with( chunk_size=128, decode_unicode=True @@ -159,7 +159,7 @@ def test_iter_content_calls_request_iter_content(self, mock_response_not_json): def test_iter_can_be_looped_over_multiple_times( self, mock_response_dict_no_data_node ): - response = Py42Response(mock_response_dict_no_data_node) + response = PycpgResponse(mock_response_dict_no_data_node) items = 0 for _ in response["item_list_key"]: items += 1 @@ -171,58 +171,58 @@ def test_iter_can_be_looped_over_multiple_times( items += 1 assert items == 2 - def test_setitem_raises_py42_error_on_invalid_assignment( + def test_setitem_raises_pycpg_error_on_invalid_assignment( self, mock_response_not_json ): - response = Py42Response(mock_response_not_json) - with pytest.raises(Py42Error): + response = PycpgResponse(mock_response_not_json) + with pytest.raises(PycpgError): response[0] = "test" - def test_getitem_raises_py42_error_on_invalid_subscript( + def test_getitem_raises_pycpg_error_on_invalid_subscript( self, mock_response_not_json ): - response = Py42Response(mock_response_not_json) - with pytest.raises(Py42Error): + response = PycpgResponse(mock_response_not_json) + with pytest.raises(PycpgError): response["test"] def test_content_dict_no_data_node_returns_expected_dict( self, mock_response_dict_no_data_node ): - response = Py42Response(mock_response_dict_no_data_node) + response = PycpgResponse(mock_response_dict_no_data_node) assert response.content == JSON_DICT_NO_DATA_NODE.encode("utf-8") def test_content_dict_data_node_returns_expected_dict( self, mock_response_dict_data_node ): - response = Py42Response(mock_response_dict_data_node) + response = PycpgResponse(mock_response_dict_data_node) assert response.content == JSON_DICT_WITH_DATA_NODE.encode("utf-8") def test_content_list_no_data_node_returns_expected_list( self, mock_response_list_no_data_node ): - response = Py42Response(mock_response_list_no_data_node) + response = PycpgResponse(mock_response_list_no_data_node) 
assert response.content == JSON_LIST_NO_DATA_NODE.encode("utf-8") def test_content_list_data_node_returns_expected_list( self, mock_response_list_data_node ): - response = Py42Response(mock_response_list_data_node) + response = PycpgResponse(mock_response_list_data_node) assert response.content == JSON_LIST_WITH_DATA_NODE.encode("utf-8") def test_data_with_data_node_returns_list_items(self, mock_response_list_data_node): - response = Py42Response(mock_response_list_data_node) + response = PycpgResponse(mock_response_list_data_node) assert type(response.data["item_list_key"]) == list def test_data_with_data_node_returns_dict_keys(self, mock_response_dict_data_node): - response = Py42Response(mock_response_dict_data_node) + response = PycpgResponse(mock_response_dict_data_node) assert type(response.data["item_list_key"]) == dict def test_data_no_data_node_returns_list_items_( self, mock_response_list_no_data_node ): - response = Py42Response(mock_response_list_no_data_node) + response = PycpgResponse(mock_response_list_no_data_node) assert type(response.data["item_list_key"]) == list def test_data_no_data_node_returns_dict_keys(self, mock_response_dict_no_data_node): - response = Py42Response(mock_response_dict_no_data_node) + response = PycpgResponse(mock_response_dict_no_data_node) assert type(response.data["item_list_key"]) == dict diff --git a/tests/test_usercontext.py b/tests/test_usercontext.py index 28854314c..dfaebe525 100644 --- a/tests/test_usercontext.py +++ b/tests/test_usercontext.py @@ -1,8 +1,8 @@ import pytest from tests.conftest import create_mock_response -from py42.services.administration import AdministrationService -from py42.usercontext import UserContext +from pycpg.services.administration import AdministrationService +from pycpg.usercontext import UserContext _GET_CURRENT_USER = """ { diff --git a/tests/test_util.py b/tests/test_util.py index 272669c23..2d3908178 100644 --- a/tests/test_util.py +++ b/tests/test_util.py @@ -1,6 +1,6 @@ from datetime import datetime -import py42.util as util +import pycpg.util as util def test_convert_timestamp_to_str_returns_expected_str(): diff --git a/tox.ini b/tox.ini index aad002bff..a242c0fc6 100644 --- a/tox.ini +++ b/tox.ini @@ -19,12 +19,12 @@ commands = # --tb=short: short traceback print mode # --strict: marks not registered in configuration file raise errors # --ignore=tests/integration: exclude integration tests - pytest --cov=py42 --cov-report xml --cov-report term -v -rsxX -l --tb=short --strict --ignore=tests/integration + pytest --cov=pycpg --cov-report xml --cov-report term -v -rsxX -l --tb=short --strict-markers --ignore=tests/integration [testenv:docs] deps = - sphinx == 8.1.3 - myst-parser == 4.0.0 + sphinx == 8.2.3 + myst-parser == 4.0.1 sphinx_rtd_theme == 3.0.2 docutils == 0.21.2 @@ -46,18 +46,11 @@ commands = [pytest] markers = integration: mark test as a integration test -alert_id = 1cae9f92-5fd7-4504-b363-9bc45015adaa device_id = 251691 -observer_rule_id = d52cbfe0-f9de-468e-afbe-3c91037322da -# securitydata -md5_hash = 202cb962ac59075b964b07152d234b70 -sha256_hash = a665a45920422f9d417e4867efdc4fb8a04a1f3fff1fa07e998e86f7f7a27ae3 user_uid = 984118686188300065 -file_data = 123 + # archive device_guid = 935873453596901068 destination_device_guid = 673679195225718785 archive_guid = 912100293346985227 path = C:/ -# case -case_event_id = 0_1d71796f-af5b-4231-9d8e-df6434da4663_984418168383179707_986472569691401170_12