diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1fc7838..3011d45 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,7 +7,7 @@ repos: - id: isort stages: [commit,push] name: isort - entry: poetry run isort -rc + entry: poetry run isort language: system types: [python] - id: black diff --git a/dbtc/cli.py b/dbtc/cli.py index f629f3f..75dcaae 100644 --- a/dbtc/cli.py +++ b/dbtc/cli.py @@ -995,6 +995,20 @@ def trigger_job( 'job.' ), ), + mode: str = typer.Option( + 'standard', + help=( + 'Possible values are ["standard", "restart_from_failure", "autoscale"] ' + 'standard: runs existing job as-is ' + 'restart_from_failure: determine whether the last run of the target job ' + ' exited with an error. If yes, restart from the point of failure ' + 'autoscale: determine with the target job is currently running ' + ' If yes, create and then run the clone.' + ), + ), + autoscale_delete_post_run: bool = typer.Option( + True, help=('Delete job created via autoscaling after it finishes running') + ), ): """Trigger job to run.""" _dbt_cloud_request( @@ -1007,6 +1021,8 @@ def trigger_job( poll_interval=poll_interval, restart_from_failure=restart_from_failure, trigger_on_failure_only=trigger_on_failure_only, + mode=mode, + autoscale_delete_post_run=autoscale_delete_post_run, ) diff --git a/dbtc/client/cloud/base.py b/dbtc/client/cloud/base.py index 0226dc9..d9b9783 100644 --- a/dbtc/client/cloud/base.py +++ b/dbtc/client/cloud/base.py @@ -1,8 +1,8 @@ # stdlib import argparse -import enum import shlex import time +from datetime import datetime from functools import partial, wraps from typing import Dict, Iterable, List @@ -11,32 +11,13 @@ # first party from dbtc.client.base import _Client - - -class JobRunStatus(enum.IntEnum): - QUEUED = 1 - STARTING = 2 - RUNNING = 3 - SUCCESS = 10 - ERROR = 20 - CANCELLED = 30 - - -RUN_COMMANDS = ['build', 'run', 'test', 'seed', 'snapshot'] -GLOBAL_CLI_ARGS = { - 'warn_error': {'flags': ('--warn-error',), 'action': 'store_true'}, - 'use_experimental_parser': { - 'flags': ('--use-experimental-parser',), - 'action': 'store_true', - }, -} -SUB_COMMAND_CLI_ARGS = { - 'vars': {'flags': ('--vars',)}, - 'args': {'flags': ('--args',)}, - 'fail_fast': {'flags': ('-x', '--fail-fast'), 'action': 'store_true'}, - 'full_refresh': {'flags': ('--full-refresh',), 'action': 'store_true'}, - 'store_failures': {'flags': ('--store-failures',), 'action': 'store_true'}, -} +from dbtc.client.cloud import models +from dbtc.client.cloud.configs.dbt_core_cli import ( + global_cli_args, + run_commands, + sub_command_cli_args, +) +from dbtc.client.cloud.configs.enums import JobRunModes, JobRunStatus def _version_decorator(func, version): @@ -49,6 +30,7 @@ def wrapper(self, *args, **kwargs): return wrapper +# Version Decorators v2 = partial(_version_decorator, version='v2') v3 = partial(_version_decorator, version='v3') v4 = partial(_version_decorator, version='v4') @@ -60,7 +42,7 @@ def __init__(self, **kwargs): self.session = requests.Session() self.session.headers = self.headers self.parser = argparse.ArgumentParser() - all_cli_args = {**GLOBAL_CLI_ARGS, **SUB_COMMAND_CLI_ARGS} + all_cli_args = {**global_cli_args, **sub_command_cli_args} for arg_specs in all_cli_args.values(): flags = arg_specs['flags'] self.parser.add_argument( @@ -77,10 +59,28 @@ def _header_property(self): return 'api_key' + def _clone_resource(self, resource: str, **kwargs): + create_args = kwargs.pop('create_args', None) + payload = getattr(self, f'get_{resource}')(**kwargs)['data'] + 
+ # Can't recreate a resource with an ID + payload.pop('id', None) + if create_args is not None: + kwargs = {k: v for k, v in kwargs.items() if k in create_args} + kwargs['payload'] = payload + return getattr(self, f'create_{resource}')(**kwargs) + def _make_request( self, path: str, *, method: str = 'get', **kwargs ) -> requests.Response: """Make request to API.""" + + # Model is not an argument that the request method accepts, needs to be removed + model = kwargs.pop('model', None) + if model is not None: + + # This will validate the payload as well as add any optional fields + kwargs['json'] = model(**kwargs['json']).dict() full_url = self.full_url(path) response = self.session.request(method=method, url=full_url, **kwargs) return response @@ -130,6 +130,12 @@ def _get_by_name(self, items: List, item_name: str, value: str = 'name'): obj = None return obj + def _validate_job_run_mode(self, mode): + if mode not in JobRunModes: + return False + + return True + @v3 def assign_group_permissions( self, account_id: int, group_id: int, payload: Dict @@ -194,6 +200,27 @@ def cancel_run(self, account_id: int, run_id: int) -> Dict: method='post', ) + @v2 + def clone_job( + self, + account_id: int, + job_id: int, + ): + + """Create a job using the configuration of another + + !!! tip + If a job is currently running, replicate the job definition to a new job, + and trigger + + Args: + account_id (int): Numeric ID of the account to retrieve + job_id (int): Numeric ID of the job to trigger + """ + return self._clone_resource( + 'job', account_id=account_id, job_id=job_id, create_args=['account_id'] + ) + @v3 def create_adapter(self, account_id: int, project_id: int, payload: Dict) -> Dict: """Create an adapter @@ -291,6 +318,7 @@ def create_job(self, account_id: int, payload: Dict) -> Dict: f'accounts/{account_id}/jobs/', method='post', json=payload, + model=models.Job, ) @v3 @@ -302,7 +330,10 @@ def create_project(self, account_id: int, payload: Dict) -> Dict: payload (dict): Dictionary representing the project to create """ return self._simple_request( - f'accounts/{account_id}/projects/', method='post', json=payload + f'accounts/{account_id}/projects/', + method='post', + json=payload, + model=models.Project, ) @v3 @@ -796,7 +827,11 @@ def list_invited_users(self, account_id: int) -> Dict: @v2 def list_jobs( - self, account_id: int, *, order_by: str = None, project_id: int = None + self, + account_id: int, + *, + order_by: str = None, + project_id: int = None, ) -> Dict: """List jobs in an account or specific project. @@ -1002,6 +1037,123 @@ def test_connection(self, account_id: int, payload: Dict) -> Dict: f'accounts/{account_id}/connections/test/', method='post', json=payload ) + @v2 + def _get_restart_job_definition( + self, + account_id: int, + job_id: int, + payload: Dict, + ): + + """Identifies whether there was a failure on the previous run of the job. + When failures are identified, returns an updated job definition to + restart from the point of failure. 
+ + Args: + account_id (int): Numeric ID of the account to retrieve + job_id (int): Numeric ID of the job to trigger + payload (dict): Payload required for post request + """ + + def parse_args(cli_args: Iterable[str], namespace: argparse.Namespace): + string = '' + for arg in cli_args: + value = getattr(namespace, arg, None) + if value: + arg = arg.replace('_', '-') + if isinstance(value, bool): + string += f' --{arg}' + else: + string += f" --{arg} '{value}'" + return string + + has_failures = False + + last_run_data = self.list_runs( + account_id=account_id, + include_related=['run_steps'], + job_definition_id=job_id, + order_by='-id', + limit=1, + )['data'][0] + + last_run_status = last_run_data['status_humanized'].lower() + last_run_id = last_run_data['id'] + + if last_run_status == 'error': + rerun_steps = [] + + for run_step in last_run_data['run_steps']: + status = run_step['status_humanized'].lower() + # Skipping cloning, profile setup, and dbt deps - always + # the first three steps in any run + if run_step['index'] <= 3 or status == 'success': + self.console.log( + f'Skipping rerun for command "{run_step["name"]}" ' + 'as it does not need to be repeated.' + ) + + else: + + # get the dbt command used within this step + command = run_step['name'].partition('`')[2].partition('`')[0] + namespace, remaining = self.parser.parse_known_args( + shlex.split(command) + ) + sub_command = remaining[1] + + if ( + sub_command not in run_commands + and status in ['error', 'cancelled', 'skipped'] + ) or (sub_command in run_commands and status == 'skipped'): + rerun_steps.append(command) + + # errors and failures are when we need to inspect to figure + # out the point of failure + else: + + # get the run results scoped to the step which had an error + # an error here indicates that either: + # 1) the fail-fast flag was set, in which case + # the run_results.json file was never created; or + # 2) there was a problem on dbt Cloud's side saving + # this artifact + try: + step_results = self.get_run_artifact( + account_id=account_id, + run_id=last_run_id, + path='run_results.json', + step=run_step['index'], + )['results'] + + # If the artifact isn't found, the API returns a 404 with + # no json. The ValueError will catch the JSONDecodeError + except ValueError: + rerun_steps.append(command) + else: + rerun_nodes = ' '.join( + [ + record['unique_id'].split('.')[2] + for record in step_results + if record['status'] in ['error', 'skipped', 'fail'] + ] + ) + global_args = parse_args(global_cli_args.keys(), namespace) + sub_command_args = parse_args( + sub_command_cli_args.keys(), namespace + ) + modified_command = f'dbt{global_args} {sub_command} -s {rerun_nodes}{sub_command_args}' # noqa: E501 + rerun_steps.append(modified_command) + self.console.log( + f'Modifying command "{command}" as an error ' + 'or failure was encountered.' + ) + if len(rerun_steps) > 0: + has_failures = True + payload.update({"steps_override": rerun_steps}) + + return payload, has_failures + @v2 def trigger_job( self, @@ -1013,6 +1165,8 @@ def trigger_job( poll_interval: int = 10, restart_from_failure: bool = False, trigger_on_failure_only: bool = False, + mode: str = 'standard', + autoscale_delete_post_run: bool = True, ): """Trigger a job by its ID @@ -1030,7 +1184,16 @@ def trigger_job( restart_from_failure to True. This has the effect of only triggering the job when the prior invocation was not successful. Otherwise, the function will exit prior to triggering the job. 
- + mode (str, optional): Must be one of ['standard', 'restart_from_failure', + 'autoscaling']. + - standard mode triggers the job to run as-is. + - restart_from_failure checks for errors on the prior invocation and, + if found, restarts failed models only. + - autoscale checks whether the job_id is actively running. If so, + creates a copy of the running job + autoscale_delete_post_run (bool, optional): Only relevant when + mode = 'autoscale' + Remove a job replicated via autoscaling after it finishes running. """ def run_status_formatted(run: Dict, time: float) -> str: @@ -1046,125 +1209,75 @@ def run_status_formatted(run: Dict, time: float) -> str: f', View here: {url}' ) - def parse_args(cli_args: Iterable[str], namespace: argparse.Namespace): - string = '' - for arg in cli_args: - value = getattr(namespace, arg, None) - if value: - arg = arg.replace('_', '-') - if isinstance(value, bool): - string += f' --{arg}' - else: - string += f" --{arg} '{value}'" - return string - + # this is here to not break existing stuff 09.26.2022 if restart_from_failure: - self.console.log(f'Restarting job {job_id} from last failed state.') - last_run_data = self.list_runs( - account_id=account_id, - include_related=['run_steps'], - job_definition_id=job_id, - order_by='-id', - limit=1, - )['data'][0] - - last_run_status = last_run_data['status_humanized'].lower() - last_run_id = last_run_data['id'] - - if last_run_status == 'error': - rerun_steps = [] - - for run_step in last_run_data['run_steps']: + mode = 'restart_from_failure' - status = run_step['status_humanized'].lower() - # Skipping cloning, profile setup, and dbt deps - always - # the first three steps in any run - if run_step['index'] <= 3 or status == 'success': - self.console.log( - f'Skipping rerun for command "{run_step["name"]}" ' - 'as it does not need to be repeated.' - ) + mode_is_valid = self._validate_job_run_mode(mode) + if not mode_is_valid: + raise Exception( + f'mode: {mode} is not one of ' + '["standard", "restart_from_failure", "autoscale"]' + ) - else: + if mode == 'restart_from_failure': + self.console.log(f'Restarting job {job_id} from last failed state.') + payload, has_failures = self._get_restart_job_definition( + account_id=account_id, job_id=job_id, payload=payload + ) - # get the dbt command used within this step - command = run_step['name'].partition('`')[2].partition('`')[0] - namespace, remaining = self.parser.parse_known_args( - shlex.split(command) - ) - sub_command = remaining[1] - - if ( - sub_command not in RUN_COMMANDS - and status in ['error', 'cancelled', 'skipped'] - ) or (sub_command in RUN_COMMANDS and status == 'skipped'): - rerun_steps.append(command) + if trigger_on_failure_only and not has_failures: + self.console.log( + 'Process triggered with trigger_on_failure_only set to True but ' + 'no failed run steps found. Terminating.' + ) + return None - # errors and failures are when we need to inspect to figure - # out the point of failure - else: + elif mode == 'autoscale': + self.console.log( + 'Triggered with autoscaling set to True. 
' + 'Detecting any running instances' + ) + most_recent_job_run = self.list_runs( + account_id=account_id, job_definition_id=job_id, limit=1, order_by='-id' + )['data'][0] + most_recent_job_run_status = most_recent_job_run['status_humanized'] - # get the run results scoped to the step which had an error - # an error here indicates that either: - # 1) the fail-fast flag was set, in which case - # the run_results.json file was never created; or - # 2) there was a problem on dbt Cloud's side saving - # this artifact - try: - step_results = self.get_run_artifact( - account_id=account_id, - run_id=last_run_id, - path='run_results.json', - step=run_step['index'], - )['results'] - - # If the artifact isn't found, the API returns a 404 with - # no json. The ValueError will catch the JSONDecodeError - except ValueError: - rerun_steps.append(command) - else: - rerun_nodes = ' '.join( - [ - record['unique_id'].split('.')[2] - for record in step_results - if record['status'] - in ['error', 'skipped', 'fail'] - ] - ) - global_args = parse_args( - GLOBAL_CLI_ARGS.keys(), namespace - ) - sub_command_args = parse_args( - SUB_COMMAND_CLI_ARGS.keys(), namespace - ) - modified_command = f'dbt{global_args} {sub_command} -s {rerun_nodes}{sub_command_args}' # noqa: E501 - rerun_steps.append(modified_command) - self.console.log( - f'Modifying command "{command}" as an error ' - 'or failure was encountered.' - ) + self.console.log( + f'Status for most recent run of job {job_id} ' + f'is {most_recent_job_run_status}.' + ) - payload.update({"steps_override": rerun_steps}) + if most_recent_job_run_status not in ['Queued', 'Starting', 'Running']: self.console.log( - f'Triggering modified job to re-run failed steps: {rerun_steps}' + f'autoscale set to true but base job with id {job_id} is free ' + 'triggering base job and ignoring autoscale configuration.' ) + autoscale_delete_post_run = False else: - self.console.log( - 'Process triggered with restart_from_failure set to True but no ' - 'failed run steps found.' + self.console.log(f'job_id {job_id} has an active run. Cloning job.') + + new_job_definition = self.clone_job( + account_id=account_id, job_id=job_id ) - if trigger_on_failure_only: - self.console.log( - 'Not triggering job because prior run was successful.' - ) - return + + # TODO: need to figure out the best way to disambiguate replicated jobs. + creation_time = datetime.now().strftime('%Y-%m-%d-%H-%M-%S') + new_job_name = '-'.join([new_job_definition['name'], creation_time]) + new_job_definition['name'] = new_job_name + job_id = self.create_job( + account_id=account_id, payload=new_job_definition + )['data']['id'] + + self.console.log(f'Created new job with job_id: {job_id}') run = self._simple_request( f'accounts/{account_id}/jobs/{job_id}/run/', method='post', json=payload, ) + if not run['status']['is_success']: self.console.log(f'Run NOT triggered for job {job_id}. 
See run response.') return run @@ -1185,6 +1298,9 @@ def parse_args(cli_args: Iterable[str], namespace: argparse.Namespace): ]: break + if mode == 'autoscale' and autoscale_delete_post_run: + self.delete_job(account_id=account_id, job_id=job_id) + return run @v3 @@ -1200,7 +1316,7 @@ def update_connection( payload (dict): Dictionary representing the connection to update """ return self._simple_request( - f'accounts/{account_id}/projects/{project_id}/connections/{connection_id}/', + f'accounts/{account_id}/projects/{project_id}/connections/{connection_id}/', # noqa: E501 method='post', json=payload, ) @@ -1218,7 +1334,7 @@ def update_credentials( payload (dict): Dictionary representing the credentials to update """ return self._simple_request( - f'accounts/{account_id}/projects/{project_id}/credentials/{credentials_id}/', # noqa: E50 + f'accounts/{account_id}/projects/{project_id}/credentials/{credentials_id}/', # noqa: E501 method='post', json=payload, ) diff --git a/dbtc/client/cloud/configs/__init__.py b/dbtc/client/cloud/configs/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/dbtc/client/cloud/configs/dbt_cloud_api.py b/dbtc/client/cloud/configs/dbt_cloud_api.py new file mode 100644 index 0000000..c980c06 --- /dev/null +++ b/dbtc/client/cloud/configs/dbt_cloud_api.py @@ -0,0 +1,46 @@ +# stdlib +from typing import Dict + + +class dbtCloudAPIRequestFactory(object): + def __init__(self, **kwargs): + for key, value in kwargs.items(): + setattr(self, key, value) + + def _create_job_request(self) -> Dict: + """Minimal set of required fields needed to create a new dbt Cloud job, + including default values + """ + return { + 'name': None, + 'id': None, + 'execution': None, + 'account_id': None, + 'project_id': None, + 'environment_id': None, + 'dbt_version': None, + 'execute_steps': None, + 'state': None, + 'deferring_job_definition_id': None, + 'triggers': None, + 'settings': None, + 'schedule': None, + } + + def create_job_request(self, data={}) -> Dict: + """Completes the _create_job_request template with values from data and + overrides + + Args: + data (dict): payload to create the initial request. 
Typically, this will be + the result of a GET on the job definition from an existing job to be used + for dbt Cloud migrations + """ + # copy everything EXCEPT for the existing dbt Cloud job ID + result = self._create_job_request() + if data != {}: + for key in result.keys(): + if key != 'id': + result[key] = data[key] + + return result diff --git a/dbtc/client/cloud/configs/dbt_core_cli.py b/dbtc/client/cloud/configs/dbt_core_cli.py new file mode 100644 index 0000000..bc9df6a --- /dev/null +++ b/dbtc/client/cloud/configs/dbt_core_cli.py @@ -0,0 +1,17 @@ +run_commands = ['build', 'run', 'test', 'seed', 'snapshot'] + +global_cli_args = { + 'warn_error': {'flags': ('--warn-error',), 'action': 'store_true'}, + 'use_experimental_parser': { + 'flags': ('--use-experimental-parser',), + 'action': 'store_true', + }, +} + +sub_command_cli_args = { + 'vars': {'flags': ('--vars',)}, + 'args': {'flags': ('--args',)}, + 'fail_fast': {'flags': ('-x', '--fail-fast'), 'action': 'store_true'}, + 'full_refresh': {'flags': ('--full-refresh',), 'action': 'store_true'}, + 'store_failures': {'flags': ('--store-failures',), 'action': 'store_true'}, +} diff --git a/dbtc/client/cloud/configs/enums.py b/dbtc/client/cloud/configs/enums.py new file mode 100644 index 0000000..e909e3f --- /dev/null +++ b/dbtc/client/cloud/configs/enums.py @@ -0,0 +1,17 @@ +# stdlib +import enum + + +class JobRunStatus(enum.IntEnum): + QUEUED = 1 + STARTING = 2 + RUNNING = 3 + SUCCESS = 10 + ERROR = 20 + CANCELLED = 30 + + +class JobRunModes(str, enum.Enum): + STANDARD = 'standard' + RESTART = 'restart_from_failure' + AUTOSCALE = 'autoscale' diff --git a/dbtc/client/cloud/models/__init__.py b/dbtc/client/cloud/models/__init__.py new file mode 100644 index 0000000..93852cb --- /dev/null +++ b/dbtc/client/cloud/models/__init__.py @@ -0,0 +1,2 @@ +from .job import Job # noqa: F401 +from .project import Project # noqa: F401 diff --git a/dbtc/client/cloud/models/constants.py b/dbtc/client/cloud/models/constants.py new file mode 100644 index 0000000..e861841 --- /dev/null +++ b/dbtc/client/cloud/models/constants.py @@ -0,0 +1,7 @@ +# stdlib +import enum + + +class State(enum.IntEnum): + active = 1 + deleted = 2 diff --git a/dbtc/client/cloud/models/job.py b/dbtc/client/cloud/models/job.py new file mode 100644 index 0000000..2f19d7e --- /dev/null +++ b/dbtc/client/cloud/models/job.py @@ -0,0 +1,63 @@ +# stdlib +from typing import List, Literal, Optional + +# third party +from pydantic import BaseModel + +from .constants import State + + +class _JobExecution(BaseModel): + timeout_seconds: int + + +class _JobSchedule(BaseModel): + cron: str + date: Literal['custom_cron', 'days_of_week', 'every_day'] + time: Literal['every_hour', 'at_exact_hours'] + + +class _JobSettings(BaseModel): + threads: int + target_name: str + + +class _JobTrigger(BaseModel): + github_webhook: bool + schedule: bool + git_provider_webhook: Optional[bool] = None + + +class Job(BaseModel): + + # Required + account_id: int + environment_id: int + generate_docs: bool + name: str + project_id: int + run_generate_sources: bool + state: Literal[State.active, State.deleted] + + # Optional + dbt_version: Optional[str] = None + deactivated: bool = False + deferring_job_definiton_id: Optional[int] = None + execute_steps: Optional[List[str]] = None + execution: Optional[_JobExecution] = None + id: Optional[int] = None + is_deferrable: Optional[bool] = False + run_failure_count: int = 0 + schedule: Optional[_JobSchedule] = None + settings: Optional[_JobSettings] = None + triggers: 
Optional[_JobTrigger] = None + + def __init__(self, **data): + schedule = data.get('schedule', {}) + date = schedule.get('date', {}).get('type', None) + time = schedule.get('time', {}).get('type', None) + if date is not None: + data['schedule']['date'] = date + if time is not None: + data['schedule']['time'] = time + super().__init__(**data) diff --git a/dbtc/client/cloud/models/project.py b/dbtc/client/cloud/models/project.py new file mode 100644 index 0000000..0136288 --- /dev/null +++ b/dbtc/client/cloud/models/project.py @@ -0,0 +1,23 @@ +# stdlib +from typing import Optional + +# third party +from pydantic import BaseModel + +from .constants import State + + +class Project(BaseModel): + + # Required + account_id: int + name: str + + # Optional + id: Optional[int] = None + connection_id: Optional[int] = None + dbt_project_subdirectory: Optional[str] = None + docs_job_id: Optional[int] = None + freshness_job_id: Optional[int] = None + repository_id: Optional[int] = None + state: int = State.active diff --git a/poetry.lock b/poetry.lock index 261a20a..47a2e4f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -600,6 +600,21 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "pydantic" +version = "1.10.2" +description = "Data validation and settings management using python type hints" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +typing-extensions = ">=4.1.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + [[package]] name = "pyflakes" version = "2.4.0" @@ -938,7 +953,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "02325e1d8e94719b09b921bed4f10cec541135cb29e02514367fa34594346b7b" +content-hash = "ecbd26f1cd1c50c8dd6a3199923a7c4bc888401255ea46a61097f8c3578ddb90" [metadata.files] appnope = [ @@ -1178,6 +1193,7 @@ pycodestyle = [ {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, ] +pydantic = [] pyflakes = [ {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, diff --git a/pyproject.toml b/pyproject.toml index c23c73d..c0fc77e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,6 +13,7 @@ python = "^3.8" sgqlc = "^15.0" requests = "^2.27.1" typer = {extras = ["all"], version = "^0.6.1"} +pydantic = "^1.10.2" [tool.poetry.dev-dependencies] black = "^22.1.0"
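
For reference, a minimal usage sketch of the new trigger modes introduced in this change. It assumes the package's existing dbtCloudClient entry point with credentials supplied via its usual keyword arguments or environment variables; the account ID, job ID, and cause strings are illustrative placeholders, not values from the diff.

# Hedged usage sketch for the new `mode` and `autoscale_delete_post_run`
# arguments added to trigger_job in this change. IDs and cause strings are
# placeholders; adjust to your own account and job.
from dbtc import dbtCloudClient

client = dbtCloudClient()  # credentials via environment variables or keyword arguments

# Standard mode: identical to the prior behavior of trigger_job.
run = client.cloud.trigger_job(
    account_id=1,
    job_id=100,
    payload={'cause': 'Triggered via API'},
)

# Restart from failure: re-runs only the steps that errored, failed, or were
# skipped on the job's most recent run (the same path restart_from_failure=True
# now routes through).
run = client.cloud.trigger_job(
    account_id=1,
    job_id=100,
    payload={'cause': 'Retry failed steps'},
    mode='restart_from_failure',
)

# Autoscale: if the job already has an active run, clone the job definition,
# trigger the clone, and delete the clone once it finishes running.
run = client.cloud.trigger_job(
    account_id=1,
    job_id=100,
    payload={'cause': 'Autoscaled run'},
    mode='autoscale',
    autoscale_delete_post_run=True,
)

The same options are exposed on the CLI's trigger-job command; with typer's default flag naming they should surface as --mode and --autoscale-delete-post-run.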