From 91a4568db1145fe18010360e4642864348d6e716 Mon Sep 17 00:00:00 2001 From: RicardoJDaleprane Date: Thu, 18 Sep 2025 15:52:28 -0300 Subject: [PATCH] Commit Inicial --- .dockerignore | 9 + .idea/.gitignore | 8 + .idea/AdminUuidPostgreSql.iml | 15 + .idea/dictionaries/project.xml | 11 + .../inspectionProfiles/profiles_settings.xml | 6 + .idea/misc.xml | 7 + .idea/modules.xml | 8 + .idea/vcs.xml | 6 + Dockerfile | 26 + alembic.ini | 118 + alembic/README | 1 + alembic/env.py | 300 +++ alembic/env_padrao.py | 100 + alembic/iniciar-bd-multitenat.txt | 53 + alembic/script.py.mako | 27 + app/__init__.py | 0 app/config.py | 18 + app/database/RelationalTableRepository.py | 592 +++++ app/database/RepositoryBase.py | 474 ++++ app/database/TratamentoErros.py | 109 + app/database/__init__.py | 0 app/database/audit_log.py | 468 ++++ app/database/formatar_retorno_bd.py | 76 + app/database/models.py | 655 ++++++ app/database/session.py | 143 ++ app/main.py | 110 + app/multi_tenant/__init__.py | 0 app/multi_tenant/criar_tenant.py | 117 + app/multi_tenant/tenant.py | 47 + app/multi_tenant/tenant_utils.py | 47 + app/rbac/README.txt | 33 + app/rbac/__init__.py | 0 app/rbac/auth.py | 46 + app/rbac/bkp_classes_customizadas.py | 43 + app/rbac/classes_customizadas.py | 103 + app/rbac/modelos.txt | 60 + app/rbac/permissions.py | 43 + app/rbac/rbac.py | 24 + app/rbac/routes_login.py | 63 + app/rbac/routes_usuario_logado.py | 15 + app/rbac/schemas.py | 44 + app/routers/__init__.py | 0 app/routers/dependencies.py | 56 + app/routers/rotas.py | 87 + app/routers/rotas_dinamicas.py | 281 +++ app/routers/router_pessoa.py | 211 ++ app/routers/router_registry.py | 13 + app/s3/RepositoryS3.py | 271 +++ app/s3/__init__.py | 0 app/s3/router_s3.py | 170 ++ app/s3/router_s3_sem_repository.py | 122 + app/s3/schema_s3.py | 17 + app/schemas/__init__.py | 5 + app/schemas/endereco_schemas.py | 64 + app/schemas/papel_shemas.py | 46 + app/schemas/pessoa_schemas.py | 118 + app/schemas/tipo_endereco_schemas.py | 44 + app/schemas/usuario_schemas.py | 68 + app/schemas/utils.py | 29 + app/schemas/validacoes.py | 139 ++ app/scripts/__init__.py | 0 app/scripts/bkp_create_initial_users.py | 41 + app/scripts/create_initial_user.py | 37 + app/scripts/create_initial_users.py | 76 + app/scripts/initialize_permissions.py | 214 ++ app/scripts/initialize_permissions_roles.py | 231 ++ app/scripts/initialize_pessoa.py | 60 + app/scripts/initizalize_financeiro.py | 48 + assets/style.css | 319 +++ atualizar_tabelas_inquilinos.py | 43 + check_db.py | 55 + docker.txt | 20 + iniciar.txt | 6 + iniciar_multi_tenant.txt | 14 + iniciar_permissoes_e_papeis.py | 50 + novo_inquilino.py | 45 + poetry.lock | 2014 +++++++++++++++++ pyproject.toml | 36 + pytest.ini | 5 + start.sh | 31 + test_main.http | 11 + teste.txt | 8 + tests/__init__.py | 3 + tests/_test_client.py | 6 + tests/anterior.py | 190 ++ tests/bkp_conftest.py | 251 ++ tests/conftest.py | 335 +++ tests/init_db_pytest.py | 44 + tests/test_000_usuario.py | 0 tests/test_003_pessoa.py | 1414 ++++++++++++ tests/test_004_endereco.py | 334 +++ 91 files changed, 11707 insertions(+) create mode 100644 .dockerignore create mode 100644 .idea/.gitignore create mode 100644 .idea/AdminUuidPostgreSql.iml create mode 100644 .idea/dictionaries/project.xml create mode 100644 .idea/inspectionProfiles/profiles_settings.xml create mode 100644 .idea/misc.xml create mode 100644 .idea/modules.xml create mode 100644 .idea/vcs.xml create mode 100644 Dockerfile create mode 100644 alembic.ini create mode 100644 alembic/README create 
mode 100644 alembic/env.py create mode 100644 alembic/env_padrao.py create mode 100644 alembic/iniciar-bd-multitenat.txt create mode 100644 alembic/script.py.mako create mode 100644 app/__init__.py create mode 100644 app/config.py create mode 100644 app/database/RelationalTableRepository.py create mode 100644 app/database/RepositoryBase.py create mode 100644 app/database/TratamentoErros.py create mode 100644 app/database/__init__.py create mode 100644 app/database/audit_log.py create mode 100644 app/database/formatar_retorno_bd.py create mode 100644 app/database/models.py create mode 100644 app/database/session.py create mode 100644 app/main.py create mode 100644 app/multi_tenant/__init__.py create mode 100644 app/multi_tenant/criar_tenant.py create mode 100644 app/multi_tenant/tenant.py create mode 100644 app/multi_tenant/tenant_utils.py create mode 100644 app/rbac/README.txt create mode 100644 app/rbac/__init__.py create mode 100644 app/rbac/auth.py create mode 100644 app/rbac/bkp_classes_customizadas.py create mode 100644 app/rbac/classes_customizadas.py create mode 100644 app/rbac/modelos.txt create mode 100644 app/rbac/permissions.py create mode 100644 app/rbac/rbac.py create mode 100644 app/rbac/routes_login.py create mode 100644 app/rbac/routes_usuario_logado.py create mode 100644 app/rbac/schemas.py create mode 100644 app/routers/__init__.py create mode 100644 app/routers/dependencies.py create mode 100644 app/routers/rotas.py create mode 100644 app/routers/rotas_dinamicas.py create mode 100644 app/routers/router_pessoa.py create mode 100644 app/routers/router_registry.py create mode 100644 app/s3/RepositoryS3.py create mode 100644 app/s3/__init__.py create mode 100644 app/s3/router_s3.py create mode 100644 app/s3/router_s3_sem_repository.py create mode 100644 app/s3/schema_s3.py create mode 100644 app/schemas/__init__.py create mode 100644 app/schemas/endereco_schemas.py create mode 100644 app/schemas/papel_shemas.py create mode 100644 app/schemas/pessoa_schemas.py create mode 100644 app/schemas/tipo_endereco_schemas.py create mode 100644 app/schemas/usuario_schemas.py create mode 100644 app/schemas/utils.py create mode 100644 app/schemas/validacoes.py create mode 100644 app/scripts/__init__.py create mode 100644 app/scripts/bkp_create_initial_users.py create mode 100644 app/scripts/create_initial_user.py create mode 100644 app/scripts/create_initial_users.py create mode 100644 app/scripts/initialize_permissions.py create mode 100644 app/scripts/initialize_permissions_roles.py create mode 100644 app/scripts/initialize_pessoa.py create mode 100644 app/scripts/initizalize_financeiro.py create mode 100644 assets/style.css create mode 100644 atualizar_tabelas_inquilinos.py create mode 100644 check_db.py create mode 100644 docker.txt create mode 100644 iniciar.txt create mode 100644 iniciar_multi_tenant.txt create mode 100644 iniciar_permissoes_e_papeis.py create mode 100644 novo_inquilino.py create mode 100644 poetry.lock create mode 100644 pyproject.toml create mode 100644 pytest.ini create mode 100644 start.sh create mode 100644 test_main.http create mode 100644 teste.txt create mode 100644 tests/__init__.py create mode 100644 tests/_test_client.py create mode 100644 tests/anterior.py create mode 100644 tests/bkp_conftest.py create mode 100644 tests/conftest.py create mode 100644 tests/init_db_pytest.py create mode 100644 tests/test_000_usuario.py create mode 100644 tests/test_003_pessoa.py create mode 100644 tests/test_004_endereco.py diff --git a/.dockerignore b/.dockerignore new 
file mode 100644 index 0000000..b664492 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,9 @@ +iniciar.txt +iniciar_multi_tenant.txt +docker.txt +__pycache__/ +*.py[cod] +*.pyo +*.pyd +.env +.git diff --git a/.idea/.gitignore b/.idea/.gitignore new file mode 100644 index 0000000..13566b8 --- /dev/null +++ b/.idea/.gitignore @@ -0,0 +1,8 @@ +# Default ignored files +/shelf/ +/workspace.xml +# Editor-based HTTP Client requests +/httpRequests/ +# Datasource local storage ignored files +/dataSources/ +/dataSources.local.xml diff --git a/.idea/AdminUuidPostgreSql.iml b/.idea/AdminUuidPostgreSql.iml new file mode 100644 index 0000000..92aeffe --- /dev/null +++ b/.idea/AdminUuidPostgreSql.iml @@ -0,0 +1,15 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/dictionaries/project.xml b/.idea/dictionaries/project.xml new file mode 100644 index 0000000..a942b73 --- /dev/null +++ b/.idea/dictionaries/project.xml @@ -0,0 +1,11 @@ + + + + expiracao + maximos + minimos + registrados + validacoes + + + \ No newline at end of file diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml new file mode 100644 index 0000000..105ce2d --- /dev/null +++ b/.idea/inspectionProfiles/profiles_settings.xml @@ -0,0 +1,6 @@ + + + + \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml new file mode 100644 index 0000000..a503786 --- /dev/null +++ b/.idea/misc.xml @@ -0,0 +1,7 @@ + + + + + + \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml new file mode 100644 index 0000000..bd486ce --- /dev/null +++ b/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml new file mode 100644 index 0000000..94a25f7 --- /dev/null +++ b/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..dfd76ea --- /dev/null +++ b/Dockerfile @@ -0,0 +1,26 @@ +FROM python:3.12-slim + +ENV POETRY_VIRTUALENVS_CREATE=false + +WORKDIR /code + +# Copie os arquivos do Poetry para o contêiner +COPY pyproject.toml poetry.lock ./ + +RUN pip install poetry + +RUN poetry config installer.max-workers 10 +RUN poetry install --no-interaction --no-ansi --no-root + +COPY ./app /code/app +COPY ./alembic /code/alembic +COPY iniciar_permissoes_e_papeis.py /code/ +COPY alembic.ini /code/ +COPY check_db.py /code/ +COPY start.sh /code/ + +# Dar permissão de execução aos scripts +RUN chmod +x /code/start.sh + +#CMD ["fastapi", "run", "app/main.py", "--port", "80", "--workers", "4"] +CMD ["./start.sh"] diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000..114a8a4 --- /dev/null +++ b/alembic.ini @@ -0,0 +1,118 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = alembic + +version_path_separator = newline + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library. 
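Back in the Dockerfile, CMD delegates to start.sh, and check_db.py is copied into the image beforehand, which suggests the container waits for PostgreSQL before migrating and booting the app. A minimal sketch of such a readiness probe, assuming it reuses the URL_BD setting from app/config.py (the real check_db.py may differ):

import asyncio

from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine

from app.config import URL_BD


async def wait_for_db(retries: int = 30, delay: float = 2.0) -> None:
    # Retry a trivial query until the database accepts connections.
    engine = create_async_engine(URL_BD)
    try:
        for attempt in range(1, retries + 1):
            try:
                async with engine.connect() as conn:
                    await conn.execute(text("SELECT 1"))
                print("Database is up.")
                return
            except Exception as exc:
                print(f"Waiting for database ({attempt}/{retries}): {exc}")
                await asyncio.sleep(delay)
        raise RuntimeError("Database never became available.")
    finally:
        await engine.dispose()


if __name__ == "__main__":
    asyncio.run(wait_for_db())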
+# Any required deps can installed by adding `alembic1[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic1/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:alembic1/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic1.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +#version_path_separator = os + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +;level = DEBUG +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +;level = INFO +level = DEBUG +handlers = console +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/README b/alembic/README new file mode 100644 index 0000000..e0d0858 --- /dev/null +++ b/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration with an async dbapi. 
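For reference, the placeholder sqlalchemy.url above is never used as-is: both env.py variants overwrite it from application settings and then build an async engine from this very ini section, roughly like this (sketch; NullPool keeps migration connections from being reused across event loops):

from sqlalchemy import pool
from sqlalchemy.ext.asyncio import async_engine_from_config

# The same resolution env.py performs: ini section -> async engine.
section = {"sqlalchemy.url": "postgresql+asyncpg://user:pass@localhost/dbname"}
engine = async_engine_from_config(section, prefix="sqlalchemy.", poolclass=pool.NullPool)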
\ No newline at end of file diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 0000000..4f3446f --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,300 @@ +# import asyncio +# import logging +# from logging.config import fileConfig +# from alembic import context +# from sqlalchemy import text, pool +# from sqlalchemy.ext.asyncio import async_engine_from_config +# from sqlalchemy.ext.asyncio import AsyncEngine +# from app.database.models import Base +# from app.config import URL_BD +# +# config = context.config +# config.set_main_option("sqlalchemy.url", URL_BD) +# +# if config.config_file_name is not None: +# fileConfig(config.config_file_name) +# +# target_metadata = Base.metadata +# +# logging.basicConfig() +# # logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO) +# +# +# logging.getLogger("sqlalchemy.engine").setLevel(logging.DEBUG) +# +# +# def run_migrations_offline() -> None: +# raise NotImplementedError("Modo offline não implementado.") +# +# +# async def run_async_migrations() -> None: +# connectable: AsyncEngine = async_engine_from_config( +# config.get_section(config.config_ini_section, {}), +# prefix="sqlalchemy.", +# poolclass=pool.NullPool, +# ) +# +# args = context.get_x_argument(as_dictionary=True) +# special_schema = args.get("special_schema") +# current_tenant = args.get("tenant") +# +# if current_tenant is None and special_schema is None: +# raise Exception("Você deve fornecer 'tenant' ou 'special_schema' como argumento.") +# elif current_tenant is not None and special_schema is not None: +# raise Exception("'tenant' e 'special_schema' não podem ser usados simultaneamente.") +# +# async with connectable.connect() as async_connection: +# await async_connection.run_sync( +# lambda connection: run_migrations_online_internal(connection, current_tenant, special_schema) +# ) +# +# await connectable.dispose() +# +# +# def run_migrations_online_internal(connection, current_tenant: str, special_schema: str) -> None: +# if special_schema == "shared": +# script_location = "alembic/versions/shared" +# schema_name = "shared" +# """ +# Esquema da Tabela definido como o mesmo valor do esquema a serem criadas as Tabelas +# Como a tabelas comuns já tem o esquema definido nelas essa configuração garante que a tabela versão será +# criada no mesmo esquma das tabelas compartilhadas +# """ +# schema_table = special_schema +# +# else: +# script_location = "alembic/versions/tenants" +# schema_name = current_tenant +# """ +# Esquema da Tabela definido como None +# Como a tabelas dos inquilinos serão cadastradas cada um em um esquema diferente elas saõ configuradas como None +# Se nesse ponto já definirmos o esquema como o do inquilino a migração junto como o search_path exclui a tablea +# de versão no script de migração por isso é necessáiro configurar o esquema como None no Upgrade junto com +# o search_path a tabela de versão vai ser criada no mesmo esquema do inquilino +# """ +# schema_table = None +# +# context.script.version_locations = [script_location] +# +# create_schema_if_not_exists(connection, schema_name) +# # connection.execute(text('set search_path to "%s"' % schema_name)) +# # connection.commit() +# # connection.dialect.default_schema_name = schema_name +# # print("Default schema set to:", connection.dialect.default_schema_name) +# +# +# if current_tenant: +# connection.execute(text(f'SET search_path TO "{current_tenant}"')) +# # connection.execute(text('set search_path to "%s"' % current_tenant)) +# connection.commit() +# else: +# print("set schema") +# 
connection.execute(text(f'SET search_path TO "shared"')) +# # connection.execute(text('set search_path to "%s"' % current_tenant)) +# connection.commit() +# +# # Verificar o search_path configurado +# result = connection.execute(text("SHOW search_path")) +# current_search_path = result.scalar() +# print(f"Current search_path: {current_search_path}") +# +# def include_object(object, name, type_, reflected, compare_to): +# +# schema = getattr(object, "schema", None) +# +# if special_schema == "shared": +# # Sobrescreve o schema para None apenas se for 'shared' +# # object.schema = None +# # Inclusçaõ na Migração apenas as tabelas compartilhadas +# if schema == "shared": +# return True +# else: +# return False +# +# if special_schema is None: +# # Inclusão na Migração apenas as tabelas dos inquilinos +# if schema is None: +# return True +# else: +# return False +# +# # Exclui por padrão se não atender aos critérios +# return False +# +# # def include_name(name, type_, parent_names): +# # if type_ == "table": +# # return name in target_metadata.tables +# # else: +# # return True +# +# context.configure( +# connection=connection, +# target_metadata=target_metadata, +# include_object=include_object, +# version_table_schema=schema_table, +# include_schemas=True, +# # dialect_opts={"paramstyle": "named"}, +# # include_name=include_name, +# +# +# +# ) +# +# with context.begin_transaction(): +# context.run_migrations() +# +# +# def create_schema_if_not_exists(connection, schema_name: str): +# query = text(f'CREATE SCHEMA IF NOT EXISTS "{schema_name}"') +# connection.execute(query) +# connection.commit() +# +# +# def run_migrations_online() -> None: +# asyncio.run(run_async_migrations()) +# +# +# if context.is_offline_mode(): +# run_migrations_offline() +# else: +# run_migrations_online() +import asyncio +import logging +from logging.config import fileConfig +from alembic import context +from sqlalchemy import text, pool +from sqlalchemy.ext.asyncio import async_engine_from_config +from sqlalchemy.ext.asyncio import AsyncEngine +from app.database.models import Base +from app.config import URL_BD +from sqlalchemy.orm import clear_mappers + +clear_mappers() +config = context.config +config.set_main_option("sqlalchemy.url", URL_BD) + +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +target_metadata = Base.metadata + +logging.basicConfig() +logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO) + + +def run_migrations_offline() -> None: + raise NotImplementedError("Modo offline não implementado.") + + +async def run_async_migrations() -> None: + connectable: AsyncEngine = async_engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + args = context.get_x_argument(as_dictionary=True) + special_schema = args.get("special_schema") + current_tenant = args.get("tenant") + + if current_tenant is None and special_schema is None: + raise Exception("Você deve fornecer 'tenant' ou 'special_schema' como argumento.") + elif current_tenant is not None and special_schema is not None: + raise Exception("'tenant' e 'special_schema' não podem ser usados simultaneamente.") + + async with connectable.connect() as async_connection: + await async_connection.run_sync( + lambda connection: run_migrations_online_internal(connection, current_tenant, special_schema) + ) + + await connectable.dispose() + + +def run_migrations_online_internal(connection, current_tenant: str, special_schema: str) -> None: + if 
special_schema == "shared":
+        script_location = "alembic/versions/shared"
+        schema_name = "shared"
+        """
+        The version table schema is set to the same schema in which the tables
+        are created. Since the shared tables already carry their schema on the
+        models, this setting guarantees the alembic_version table is created in
+        the same schema as the shared tables.
+        """
+        schema_table = "shared"
+    else:
+        script_location = "alembic/versions/tenants"
+        schema_name = current_tenant
+        """
+        The version table schema is set to None. Tenant tables are created in a
+        different schema per tenant, so no schema is fixed on the models. If the
+        tenant schema were already set here, the migration combined with the
+        search_path would leave the version table out of the migration script;
+        with None set here and the search_path applied during the upgrade, the
+        alembic_version table is created inside the tenant's own schema.
+        """
+        schema_table = None
+
+    context.script.version_locations = [script_location]
+
+    create_schema_if_not_exists(connection, schema_name)
+
+    if current_tenant:
+        # connection.execute(text(f'SET search_path TO "{current_tenant}"'))
+        connection.execute(text('set search_path to "%s"' % current_tenant))
+        connection.commit()
+        connection.dialect.default_schema_name = current_tenant
+
+    if special_schema:
+        # connection.execute(text(f'SET search_path TO "{special_schema}"'))
+        connection.execute(text('set search_path to "%s"' % special_schema))
+        connection.commit()
+        connection.dialect.default_schema_name = special_schema
+
+    def include_object(object, name, type_, reflected, compare_to):
+
+        if special_schema == "shared":
+            # Include only the shared tables in this migration
+            if (type_ == "table" and (name.startswith("rbac_") or name == "inquilinos")) or type_ == "column" \
+                    or type_ == "foreign_key_constraint":
+                print(f"Table included: {name}")
+                return True
+            else:
+                return False
+
+        if special_schema is None:
+            # Include only the tenant tables in this migration
+            if (type_ == "table" and not (name.startswith("rbac_") or name == "inquilinos")) or type_ == "column" \
+                    or type_ == "foreign_key_constraint":
+                return True
+            else:
+                return False
+
+        # Excluded by default when no criterion matches
+        return False
+
+    context.configure(
+        connection=connection,
+        target_metadata=target_metadata,
+        include_object=include_object,
+        version_table_schema=schema_table,
+        # include_schemas=True,
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def create_schema_if_not_exists(connection, schema_name: str):
+    query = text(f'CREATE SCHEMA IF NOT EXISTS "{schema_name}"')
+    connection.execute(query)
+    connection.commit()
+
+
+def run_migrations_online() -> None:
+    asyncio.run(run_async_migrations())
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
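env.py above requires exactly one of -x tenant=<schema> or -x special_schema=shared on the command line. A sketch of driving those migrations from Python instead of the shell, assuming alembic.ini sits in the working directory; the tenant names are placeholders, and scripts such as novo_inquilino.py / atualizar_tabelas_inquilinos.py in this commit presumably automate something similar:

from argparse import Namespace

from alembic import command
from alembic.config import Config


def upgrade_schema(x_arg: str) -> None:
    # cmd_opts stands in for the CLI's -x flag; env.py reads it back
    # through context.get_x_argument(as_dictionary=True).
    cfg = Config("alembic.ini", cmd_opts=Namespace(x=[x_arg]))
    command.upgrade(cfg, "head")


# Shared tables first, then every tenant schema. Call this from a
# synchronous context: env.py itself runs asyncio.run() internally.
upgrade_schema("special_schema=shared")
for tenant in ["inquilino_a", "inquilino_b"]:
    upgrade_schema(f"tenant={tenant}")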
diff --git a/alembic/env_padrao.py b/alembic/env_padrao.py
new file mode 100644
index 0000000..cc3ca93
--- /dev/null
+++ b/alembic/env_padrao.py
@@ -0,0 +1,100 @@
+import asyncio
+from logging.config import fileConfig
+
+from sqlalchemy import pool
+from sqlalchemy.engine import Connection
+from sqlalchemy.ext.asyncio import async_engine_from_config
+
+from alembic import context
+
+from Apagar.config import config as app_config
+from app.database.session import Base
+
+# Import the initialization function
+from Apagar.init_permissions_bkp1 import init_permissions
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+config.set_main_option("sqlalchemy.url", app_config.DB_CONFIG)
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+if config.config_file_name is not None:
+    fileConfig(config.config_file_name)
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+target_metadata = Base.metadata
+
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline() -> None:
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well. By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(
+        url=url,
+        target_metadata=target_metadata,
+        literal_binds=True,
+        dialect_opts={"paramstyle": "named"},
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def do_run_migrations(connection: Connection) -> None:
+    context.configure(connection=connection, target_metadata=target_metadata)
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+async def run_async_migrations() -> None:
+    """In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+
+    connectable = async_engine_from_config(
+        config.get_section(config.config_ini_section, {}),
+        prefix="sqlalchemy.",
+        poolclass=pool.NullPool,
+    )
+
+    async with connectable.connect() as connection:
+        await connection.run_sync(do_run_migrations)
+
+    await connectable.dispose()
+
+    # Call the initialization script
+    init_permissions()
+
+
+def run_migrations_online() -> None:
+    """Run migrations in 'online' mode."""
+
+    asyncio.run(run_async_migrations())
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
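Because version_table_schema and the search_path pin a separate alembic_version table inside each schema, every tenant and the shared schema track their own revision. A minimal sketch for auditing which revision each schema is on (schema names are placeholders; assumes the URL_BD setting from app/config.py):

import asyncio

from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine

from app.config import URL_BD


async def versions_per_schema(schemas: list[str]) -> dict[str, str | None]:
    # Read each schema's own alembic_version table; None means no row yet.
    engine = create_async_engine(URL_BD)
    out: dict[str, str | None] = {}
    async with engine.connect() as conn:
        for schema in schemas:
            result = await conn.execute(
                text(f'SELECT version_num FROM "{schema}".alembic_version')
            )
            out[schema] = result.scalar()
    await engine.dispose()
    return out


print(asyncio.run(versions_per_schema(["shared", "inquilino_a"])))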
diff --git a/alembic/iniciar-bd-multitenat.txt b/alembic/iniciar-bd-multitenat.txt
new file mode 100644
index 0000000..f9a38a6
--- /dev/null
+++ b/alembic/iniciar-bd-multitenat.txt
@@ -0,0 +1,53 @@
+- First, either use the copy of the file or create an empty migration
+    alembic revision -m "Criar tabelas do schema shared"
+    if you copy the file, the remaining steps are unnecessary
+
+- Adjust the migration imports
+    from typing import Sequence, Union
+    from alembic import op
+    import sqlalchemy as sa
+    import fastapi_users_db_sqlalchemy
+    from sqlalchemy import MetaData, schema
+    from app.database.models import Base
+
+- Add the function that filters the 'shared' schema tables so only they are created
+    def get_shared_metadata():
+        """Filters the tables of the 'shared' schema."""
+        meta = MetaData()
+        for table in Base.metadata.tables.values():
+            if table.schema != "tenant":  # Keeps only the 'shared' schema tables
+                table.to_metadata(meta)
+        return meta
+
+- Adjust the upgrade function
+    """Creation of the 'shared' schema and its tables."""
+    conn = op.get_bind()
+
+    # Create the 'shared' schema if it does not exist
+    schema_exists_query = sa.text(
+        "SELECT schema_name FROM information_schema.schemata WHERE schema_name = 'shared';"
+    )
+    result = conn.execute(schema_exists_query)
+    schema_exists = result.scalar() is not None
+
+    if not schema_exists:
+        op.execute(schema.CreateSchema("shared"))
+        print("Schema 'shared' created successfully.")
+
+    # Create the 'shared' schema tables
+    metadata = get_shared_metadata()
+    metadata.create_all(bind=conn)  # Creates the shared tables
+    print("'shared' schema tables created successfully.")
+
+- Adjust the downgrade function
+    """Removal of the 'shared' schema and its tables."""
+    conn = op.get_bind()
+
+    # Drop the 'shared' schema tables
+    metadata = get_shared_metadata()
+    metadata.drop_all(bind=conn)
+    print("'shared' schema tables removed successfully.")
+
+    # Drop the 'shared' schema
+    op.execute("DROP SCHEMA IF EXISTS shared CASCADE")
+    print("Schema 'shared' removed successfully.")
diff --git a/alembic/script.py.mako b/alembic/script.py.mako
new file mode 100644
index 0000000..50fb31e
--- /dev/null
+++ b/alembic/script.py.mako
@@ -0,0 +1,27 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import fastapi_users_db_sqlalchemy
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade() -> None:
+    ${downgrades if downgrades else "pass"}
diff --git a/app/__init__.py b/app/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/config.py b/app/config.py
new file mode 100644
index 0000000..0d48d98
--- /dev/null
+++ b/app/config.py
@@ -0,0 +1,18 @@
+import os
+
+
+def str_to_bool(value: str) -> bool:
+    return value.lower() in ['true', '1', 't', 'yes', 'y']
+
+
+COLUNA = os.getenv('COLUNA', 'uuid')
+URL_BD = os.getenv('URL_BD', 'postgresql+asyncpg://sonora:sonora@192.168.0.11:5432/pytest')
+URL_BD_TESTE = os.getenv('URL_BD_TESTE', 'postgresql+asyncpg://sonora:sonora@192.168.0.11:5432/pytest')
+SECRET = os.getenv('SECRET', '6be9ce93ea990b59f4448f5e84b37d785d7585245dbf2cc81e340389c2fdb4af')
+ECHO = str_to_bool(os.getenv('ECHO', 'False'))
+ENV = os.getenv('ENV', 'teste')
+S3_ACCESS_KEY_ID = os.getenv('S3_ACCESS_KEY_ID', 'JFqmuTx4qh51kuGIzSZI')
+S3_SECRET_ACCESS_KEY = os.getenv('S3_SECRET_ACCESS_KEY', 'ZjjvaDGpwDWpYO6zxgOhI0T9ibrRe7JnNl7AXyjH')
+S3_BUCKET_NAME = os.getenv('S3_BUCKET_NAME', 'sistema')
+S3_REGION_NAME = os.getenv('S3_REGION_NAME', 'br-vilavelha')
+S3_ENDPOINT_URL = os.getenv('S3_ENDPOINT_URL', 'https://s3-api.sonoraav.com.br')
diff --git a/app/database/RelationalTableRepository.py b/app/database/RelationalTableRepository.py
new file mode 100644
index 0000000..f6982bb
--- /dev/null
+++ b/app/database/RelationalTableRepository.py
@@ -0,0 +1,592 @@
+# Standard library imports
+from typing import Generic, TypeVar, Dict, Type, Union, Any, List
+from uuid import UUID
+import copy
+
+# Third-party imports
+from sqlalchemy import BinaryExpression, literal
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.inspection import inspect
+from sqlalchemy.future import select
+from sqlalchemy import delete
+from pydantic import BaseModel
+from fastapi import HTTPException
+from app.database.audit_log import audit_log
+
+# Project imports
+from app.database.RepositoryBase import (
+    RepositoryBase,
IntegrityError, IntegrityConflictException, + SnippetException, NotFoundException +) +from app.database import models + +Model = TypeVar("Model", bound=models.Base) +Schema = TypeVar("Schema", bound=BaseModel) + + +class RelationalTableRepository(RepositoryBase[Model], Generic[Model]): + def __init__(self, model: type[Model], session: AsyncSession) -> None: + super().__init__(model, session) + + async def filter( + self, + *expressions: BinaryExpression, + ) -> list[Model]: + query = select(self.model) + if expressions: + query = query.where(*expressions) + return list(await self.session.scalars(query)) + + async def create(self, data_one: Schema, *args: Any, **kwargs: Any) -> Model: + try: + if kwargs.get("related_info_extra_columns"): + db_model = await self.process_related_items_with_extra_columns(data_one, **kwargs) + else: + db_model = await self.process_related_items(data_one, **kwargs) + + # Commit e refresh do modelo + await self.session.commit() + await self.session.refresh(db_model) + return db_model + + except HTTPException as e: + # Repassa a HTTPException específica + raise e + + except Exception as e: + raise HTTPException(status_code=500, detail="Erro ao criar relação com entidades relacionadas") from e + + async def create_many(self, data: List[Schema], return_models: bool = False, *args: Any, **kwargs: Any) -> ( + list[Model] | bool): + db_models = [] + if kwargs.get("related_info_extra_columns"): + for single_data in data: + db_model = await self.process_related_items_with_extra_columns(single_data, **kwargs) + db_models.append(db_model) + else: + for single_data in data: + db_model = await self.process_related_items(single_data, **kwargs) + db_models.append(db_model) + + try: + self.session.add_all(db_models) + await self.session.commit() + + except IntegrityError: + raise IntegrityConflictException( + f"Na tabela {self.model.__tablename__} existe conflito com dados já cadastrados em campo único.", + ) + except Exception as e: + raise SnippetException(f"Unknown error occurred: {e}") from e + + if not return_models: + return True + + for m in db_models: + await self.session.refresh(m) + + return db_models + + # async def update_by_id(self, update: Schema, coluna: str, *args: Any, **kwargs: Any) -> Model: + # related_info_append = kwargs.get('related_info_append', []) + # related_info_add = kwargs.get('related_info_add', []) + # + # # Prepara as variáveis para os relacionamentos simples e muitos para muitos + # simple_relationship_fields = { + # info["key"]: {"related_model": info["related_model"], "exclude_field": info["foreign_key"]} + # for info in related_info_add + # } + # many_to_many_relationship_fields = { + # info["key"]: info["related_model"] for info in related_info_append + # } + # + # uuid = str(update.uuid) + # db_model = await self.get_one_by_id(uuid, coluna, with_for_update=True) + # + # if not db_model: + # raise NotFoundException( + # f"{self.model.__tablename__} {coluna}={uuid} não encontrada." 
+ # ) + # + # values = update.model_dump(exclude_unset=True) + # + # # Atualiza campos simples + # for k, v in values.items(): + # if k not in simple_relationship_fields and k not in many_to_many_relationship_fields: + # setattr(db_model, k, v) + # + # # Atualiza relacionamentos simples + # for field, details in simple_relationship_fields.items(): + # related_model = details["related_model"] + # foreign_key = details["exclude_field"] # Isso é o "fk_pessoa_uuid" + # + # # Se a chave não estiver presente ou estiver com uma lista vazia, remove os itens relacionados + # if field not in values or values.get(field) == []: + # foreign_key_column = getattr(related_model, foreign_key) + # + # # Garantimos que a comparação é feita entre expressões SQL válidas + # await self.session.execute( + # delete(related_model).where(foreign_key_column == literal(db_model.uuid)) + # ) + # await self.session.flush() + # else: + # # Faz a Atualização de valores da lista e exclusão do que não estão na lista + # related_items = values.get(field) + # await self.update_simple_relationship(db_model=db_model, field=field, related_items=related_items, + # related_model=related_model, session=self.session, + # foreign_key=foreign_key) + # + # # Atualiza relacionamentos muitos para muitos + # for field, related_model in many_to_many_relationship_fields.items(): + # # Se a chave não estiver presente ou estiver com uma lista vazia, remove os itens relacionados + # if field not in values or values.get(field) == []: + # setattr(db_model, field, []) + # else: + # related_item_ids = values.get(field) + # await self.update_many_to_many_relationship(db_model, field, related_item_ids, related_model, + # coluna=coluna) + # + # try: + # await self.session.commit() + # await self.session.refresh(db_model) + # return db_model + # except IntegrityError: + # raise IntegrityConflictException( + # f"{self.model.__tablename__} {coluna}={uuid} conflito com dados existentes." 
+ # ) + + # @audit_log + async def update_by_id(self, update: Schema, coluna: str, *args: Any, **kwargs: Any) -> Model: + # Obtém as configurações dos relacionamentos + related_info_append = kwargs.get('related_info_append', []) + related_info_add = kwargs.get('related_info_add', []) + related_info_extra = kwargs.get('related_info_extra_columns', []) + + # Preparação dos relacionamentos simples (one-to-many) e muitos-para-muitos simples + simple_relationship_fields = { + info["key"]: {"related_model": info["related_model"], "exclude_field": info["foreign_key"]} + for info in related_info_add + } + many_to_many_relationship_fields = { + info["key"]: info["related_model"] for info in related_info_append + } + # Para relacionamentos com extra columns, armazenamos a configuração completa + extra_relationship_fields = {info["key"]: info for info in related_info_extra} + + # Busca o objeto base a ser atualizado + uuid = str(update.uuid) + db_model = await self.get_one_by_id(uuid, coluna, with_for_update=True) + if not db_model: + raise NotFoundException(f"{self.model.__tablename__} {coluna}={uuid} não encontrada.") + + # Guardar o estado atual antes da modificação (cópia profunda para evitar problemas com lazy attributes) + original_model = copy.deepcopy(db_model) + + values = update.model_dump(exclude_unset=True) + + # Atualiza os campos simples (excluindo os campos que representam relacionamentos) + all_relationship_keys = set(simple_relationship_fields.keys()) | set( + many_to_many_relationship_fields.keys()) | set(extra_relationship_fields.keys()) + for k, v in values.items(): + if k not in all_relationship_keys: + setattr(db_model, k, v) + + # Atualiza relacionamentos simples (one-to-many) + for field, details in simple_relationship_fields.items(): + related_model = details["related_model"] + foreign_key = details["exclude_field"] # por exemplo, "fk_pessoa_uuid" + if field not in values or values.get(field) == []: + foreign_key_column = getattr(related_model, foreign_key) + await self.session.execute( + delete(related_model).where(foreign_key_column == literal(db_model.uuid)) + ) + await self.session.flush() + else: + related_items = values.get(field) + await self.update_simple_relationship( + db_model=db_model, + field=field, + related_items=related_items, + related_model=related_model, + session=self.session, + foreign_key=foreign_key + ) + + # Atualiza relacionamentos muitos-para-muitos simples + for field, related_model in many_to_many_relationship_fields.items(): + if field not in values or values.get(field) == []: + setattr(db_model, field, []) + else: + related_item_ids = values.get(field) + await self.update_many_to_many_relationship( + db_model=db_model, + field=field, + related_item_ids=related_item_ids, + related_model=related_model, + coluna=coluna) + + # Atualiza relacionamentos muitos-para-muitos com campos extras + for field, config in extra_relationship_fields.items(): + # Se o campo não foi enviado no update, ignore a atualização desse relacionamento. 
+ if field not in values: + continue + + new_items = values.get(field) # Espera-se que seja uma lista (pode ser vazia) + await self.update_many_to_many_extra_relationship( + db_model=db_model, + field=field, + new_items=new_items, + association_model=config["association_model"], + base_foreign_key=config["base_foreign_key"], + related_foreign_key=config["related_foreign_key"], + extra_fields=config.get("extra_fields", []) + ) + + try: + await self.session.commit() + await self.session.refresh(db_model) + return db_model + except IntegrityError: + raise IntegrityConflictException( + f"{self.model.__tablename__} {coluna}={uuid} conflito com dados existentes." + ) + + async def update_many_by_ids(self, *args, **kwargs): + raise NotImplementedError("Update many não implementado para relacionamentos.") + + @staticmethod + def _create_base_model(data: Schema, db_data, + related_info_append: List[Dict[str, Any]], + related_info_add: List[Dict[str, Any]], + related_info_extra_columns: List[Dict[str, Any]]) -> Model: + # Inicia com as chaves dos relacionamentos simples (append e add) + exclude_keys = {info["key"] for info in related_info_append + related_info_add + related_info_extra_columns} + + return db_data(**data.model_dump(exclude=exclude_keys)) + + async def _collect_related_items(self, data: Schema, related_info_append: List[Dict[str, Any]]) -> ( + Dict[str, List[Any]]): + related_items_to_append = {} + + for info in related_info_append: + + key = info["key"] + related_model = info["related_model"] + foreign_key_field = info["foreign_key_field"] + related_items_to_append[key] = [] + + related_data = getattr(data, key, []) + + if not related_data: + continue # Pula para a próxima iteração se não houver dados para este campo + + for related_item in related_data: + + related_item_id = related_item[foreign_key_field] if isinstance(related_item, dict) else getattr( + related_item, foreign_key_field) + + related_item_instance = await self.session.get(related_model, related_item_id) + + if not related_item_instance: + raise HTTPException(status_code=400, detail=f"ID {related_item_id} inválido") + related_items_to_append[key].append(related_item_instance) + + return related_items_to_append + + async def process_related_items(self, data: Schema, **kwargs: Any) -> Model: + # Obtendo argumentos adicionais + db_data = kwargs.get('db_data') + related_info_append = kwargs.get('related_info_append', []) + related_info_add = kwargs.get('related_info_add', []) + related_info_extra_columns = [] + # Criação do modelo base + db_model = self._create_base_model(data, db_data, related_info_append, related_info_add, + related_info_extra_columns) + # Processamento de related_info_append, se presente + if related_info_append: + related_items_to_append = await self._collect_related_items(data, related_info_append) + if related_items_to_append: # Verifica se há itens para serem relacionados + self._append_related_items(db_model, related_items_to_append) + # Adiciona o modelo à sessão + self.session.add(db_model) + await self.session.flush() + # Processamento de related_info_add, se presente + if related_info_add: + await self._add_related_items(data, db_model, related_info_add) + + return db_model + + async def process_related_items_with_extra_columns(self, data: Schema, **kwargs: Any) -> Model: + """ + Processa os relacionamentos onde a tabela de associação possui colunas extras. + + Espera-se que em kwargs seja informado: + - db_data: a classe/modelo base a ser instanciado com os dados. 
+ - related_info_extra_columns: uma lista de dicionários com as seguintes chaves: + * key: nome do atributo no schema contendo os dados do relacionamento. + * association_model: o modelo da tabela de associação (com colunas extras). + * base_foreign_key: nome da coluna que referencia o modelo base. + * related_foreign_key: nome da coluna que referencia a entidade relacionada. + * related_model: modelo da entidade relacionada (para validação, por exemplo). + * extra_fields: (opcional) lista de nomes dos campos extras que deverão ser extraídos. + """ + # Cria o modelo base usando os dados enviados no schema + + db_data = kwargs.get('db_data') + related_info_append = [] + related_info_extra_columns = kwargs.get('related_info_extra_columns', []) + related_info_add = kwargs.get('related_info_add', []) + + db_model = self._create_base_model(data, db_data, related_info_append, related_info_add, + related_info_extra_columns, + ) + + self.session.add(db_model) + await self.session.flush() # Garante que db_model possua sua PK definida (ex.: uuid) + # Processa os relacionamentos com colunas extras + related_info_extra = kwargs.get('related_info_extra_columns', []) + + for info in related_info_extra: + key = info.get("key") + association_model = info.get("association_model") + base_foreign_key = info.get("base_foreign_key") + related_foreign_key = info.get("related_foreign_key") + related_model = info.get("related_model") # Para validação do item relacionado + extra_fields = info.get("extra_fields", []) + + # Obtém os dados do relacionamento a partir do schema + related_items = getattr(data, key, []) + + if not related_items: + continue + + for item in related_items: + + # Verifica se o identificador da entidade relacionada está presente no item + if not hasattr(item, related_foreign_key): + raise HTTPException( + status_code=400, + detail=f"O campo '{related_foreign_key}' é obrigatório em '{key}'." + ) + related_item_id = item[related_foreign_key] if isinstance(item, dict) else getattr(item, + related_foreign_key) + + # Valida se o item relacionado existe (usando o modelo relacionado) + if not related_model: + raise HTTPException( + status_code=400, + detail="Não foi definido o modelo relacionado para o relacionamento." 
+ ) + related_instance = await self.session.get(related_model, related_item_id) + + if not related_instance: + raise HTTPException( + status_code=400, + detail=f"ID {related_item_id} inválido para {related_model.__tablename__}" + ) + + # Extrai os valores dos campos extras, se existirem + extra_data = {field: getattr(item, field) for field in extra_fields if hasattr(item, field)} + + # Cria a instância da tabela de associação, populando as FKs e os campos extras + association_instance = association_model(**{ + base_foreign_key: db_model.uuid, + related_foreign_key: related_item_id, + **extra_data + }) + + self.session.add(association_instance) + + # Realiza um flush para persistir os itens da associação antes de continuar + await self.session.flush() + + # Se houver relacionamentos do tipo um-para-muitos (add), processa-os também + related_info_add = kwargs.get('related_info_add', []) + if related_info_add: + await self._add_related_items(data, db_model, related_info_add) + + return db_model + + @staticmethod + def _append_related_items(db_model: Model, related_items_to_append: Dict[str, List[Model]]) -> None: + for key, items in related_items_to_append.items(): + getattr(db_model, key).extend(items) + + async def _add_related_items(self, data: Schema, db_model: Model, related_info_add: List[Dict[str, Any]]) -> None: + + for info in related_info_add: + key = info["key"] + foreign_key = info["foreign_key"] + related_model = info["related_model"] + relations = info.get("relations", []) # Pode ser uma lista de relações + + related_items = getattr(data, key, []) + if not related_items: + continue # Pula para a próxima iteração se não houver dados para este campo + + for related_item in related_items: + for relation in relations: + related_model_fk = relation.get("related_model_fk") + foreign_key_fk = relation.get("foreign_key_fk") + + if related_model_fk and foreign_key_fk: + fk = getattr(related_item, foreign_key_fk) + relacao = await self.session.get(related_model_fk, fk) + if not relacao: + raise HTTPException(status_code=404, + detail=f"{related_model_fk.__name__} com UUID {fk} não encontrado") + + new_item = related_model(**related_item.model_dump(), **{foreign_key: db_model.uuid}) + self.session.add(new_item) + + @staticmethod + async def update_simple_relationship( + db_model: Model, + field: str, + related_items: List[Dict[str, Any]], + related_model: Type[Model], + session: AsyncSession, + foreign_key: str + ) -> None: + current_items = getattr(db_model, field, []) + + # Lista de UUIDs dos itens enviados no JSON + new_item_uuids = {str(item['uuid']) for item in related_items if 'uuid' in item} + + # Remover itens que não estão mais presentes no JSON + for item in current_items: + if str(item.uuid) not in new_item_uuids: + foreign_key_column = getattr(related_model, foreign_key) + await session.execute( + delete(related_model).where(foreign_key_column == literal(db_model.uuid)).where( + related_model.uuid == literal(item.uuid) + ) + ) + await session.flush() + + # Atualizar ou adicionar novos itens + for related_item in related_items: + if 'uuid' in related_item: + item_uuid = str(related_item['uuid']) + db_item = next((item for item in current_items if str(item.uuid) == item_uuid), None) + if db_item: + for k, v in related_item.items(): + setattr(db_item, k, v) + else: + raise NotFoundException(f"Related item {field} with UUID {item_uuid} not found.") + else: + new_item = related_model(**related_item) + setattr(new_item, db_model.__class__.__name__.lower() + "_uuid", db_model.uuid) + 
session.add(new_item) + current_items.append(new_item) + + setattr(db_model, field, current_items) + await session.flush() + + async def update_many_to_many_relationship( + self, + db_model: Model, + field: str, + related_item_ids: List[Union[str, UUID, Dict[str, Any]]], # Aceita "strings", UUIDs ou dicionários + related_model: Type[Model], # Adicionando o modelo relacionado + coluna: str = "uuid" # Adicionando a coluna para uso dinâmico + ) -> None: + current_items = getattr(db_model, field, []) + current_item_ids = {str(getattr(item, coluna)) for item in current_items} + + # Extrair "IDs" dos dicionários, se necessário + new_item_ids = set() + for item in related_item_ids: + if isinstance(item, dict) and coluna in item: + new_item_ids.add(str(item[coluna])) + else: + new_item_ids.add(str(item)) + + # Remover itens que não estão mais relacionados + items_to_remove = current_item_ids - new_item_ids + for item in current_items: + if str(getattr(item, coluna)) in items_to_remove: + current_items.remove(item) + + # Adicionar novos itens + items_to_add = new_item_ids - current_item_ids + for item_id in items_to_add: + related_item = await self.session.get(related_model, item_id) + if related_item: + current_items.append(related_item) + else: + raise NotFoundException(f"Related item {field} with UUID {item_id} not found.") + + setattr(db_model, field, current_items) + + async def update_many_to_many_extra_relationship( + self, + db_model: Model, + field: str, + new_items: List[Union[Dict[str, Any], Any]], # Aceita dicionários ou objetos com .dict() + association_model: Type[Model], + base_foreign_key: str, + related_foreign_key: str, + extra_fields: List[str] = [], + ) -> None: + """ + Atualiza um relacionamento muitos-para-muitos com colunas extras. + + Parâmetros: + - db_model: objeto base que possui o relacionamento. + - field: nome do atributo em db_model que contém a lista de associações. + - new_items: lista de novos itens para o relacionamento (pode ser dicionário ou objeto com .dict()). + - association_model: classe mapeada da tabela de associação. + - base_foreign_key: nome do atributo da associação que referencia db_model (ex: 'fk_manutencao_uuid'). + - related_foreign_key: nome do atributo da associação que referencia o objeto relacionado (ex: 'fk_itens_equipamentos_uuid'). + - extra_fields: lista de campos extras que também devem ser atualizados ou definidos. + """ + current_assocs = getattr(db_model, field, []) + + if not new_items: + # Se o payload enviar uma lista vazia, remova todas as associações + for assoc in current_assocs: + await self.session.delete(assoc) + setattr(db_model, field, []) + await self.session.flush() + return + + new_assoc_instances = [] + for new_item in new_items: + # Suporta tanto dicionários quanto objetos com método .dict() + new_item_data = new_item if isinstance(new_item, dict) else new_item.dict() + + if related_foreign_key not in new_item_data: + raise HTTPException( + status_code=400, + detail=f"O campo '{related_foreign_key}' é obrigatório em '{field}'." 
+ ) + new_related_id = new_item_data[related_foreign_key] + + # Procura uma associação existente que corresponda ao valor do foreign key + existing_assoc = next( + (assoc for assoc in current_assocs if str(getattr(assoc, related_foreign_key)) == str(new_related_id)), + None + ) + + if existing_assoc: + # Atualiza os campos extras se houver valores informados + for ef in extra_fields: + if ef in new_item_data: + setattr(existing_assoc, ef, new_item_data[ef]) + new_assoc_instances.append(existing_assoc) + else: + # Cria uma nova associação, incluindo os campos extras + extra_data = {ef: new_item_data.get(ef) for ef in extra_fields if ef in new_item_data} + new_assoc = association_model(**{ + base_foreign_key: db_model.uuid, + related_foreign_key: new_related_id, + **extra_data + }) + self.session.add(new_assoc) + await self.session.flush() + new_assoc_instances.append(new_assoc) + + # Mescla as associações atuais com as novas (ou atualizadas) + merged_assocs = list({*current_assocs, *new_assoc_instances}) + setattr(db_model, field, merged_assocs) diff --git a/app/database/RepositoryBase.py b/app/database/RepositoryBase.py new file mode 100644 index 0000000..443e4c2 --- /dev/null +++ b/app/database/RepositoryBase.py @@ -0,0 +1,474 @@ +# Importações de bibliotecas padrão +from uuid import UUID +from typing import Generic, TypeVar, Any, List, Optional, Dict, Union + +# Importações de bibliotecas de terceiros +from sqlalchemy import select, delete, or_ +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.exc import IntegrityError, SQLAlchemyError +from pydantic import BaseModel +from fastapi import HTTPException, status +from sqlalchemy.orm import joinedload +from sqlalchemy import and_ + +# Importações do seu próprio projeto +from app.database import models +from app.database.audit_log import audit_log +from app.database.TratamentoErros import ErrorHandler + +Model = TypeVar("Model", bound=models.Base) +Schema = TypeVar("Schema", bound=BaseModel) + + +class SnippetException(Exception): + pass + + +class IntegrityConflictException(Exception): + pass + + +class NotFoundException(Exception): + pass + + +class RepositoryBase(Generic[Model]): + """Repository for performing database queries.""" + + def __init__( + self, model: type[Model], + session: AsyncSession, + default_order_by: str = None, + ascending: bool = True) -> None: + self.model = model + self.session = session + self.default_order_by = default_order_by + self.ascending = ascending + + async def create(self, data_one: Schema) -> Model: + + try: + db_model = self.model(**data_one.model_dump()) + self.session.add(db_model) + await self.session.commit() + await self.session.refresh(db_model) + return db_model.__dict__ + + except SQLAlchemyError as e: + handler = ErrorHandler() + handler.handle_error(e) + + # Testado ok + async def create_many(self, data: List[Schema], return_models: bool = False) -> list[Model] | bool: + + # Cria instâncias dos modelos a partir dos dados fornecidos + db_models = [self.model(**d.model_dump()) for d in data] + + try: + self.session.add_all(db_models) + await self.session.commit() + except SQLAlchemyError as e: + handler = ErrorHandler() + handler.handle_error(e) + + if not return_models: + return True + + for m in db_models: + await self.session.refresh(m) + + return db_models + + async def get_one_by_id(self, uuid: str | UUID, coluna: str, with_for_update: bool = False, ) -> Model: + + try: + q = select(self.model).where(getattr(self.model, coluna) == uuid) + + except AttributeError: + raise 
HTTPException(status_code=400, detail=f"A Coluna {coluna} não existe em: {self.model.__tablename__}.") + + # Verifica se o modelo tem a coluna 'ativo' e adiciona a condição + if hasattr(self.model, 'ativo'): + q = q.where(self.model.ativo.is_(True)) + + if with_for_update: + q = q.with_for_update() + + results = await self.session.execute(q) + result = results.unique().scalar_one_or_none() + + if result is None: + raise HTTPException(status_code=404, + detail=f"Registro com {coluna}={uuid} não encontrado na tabela " + f"{self.model.__tablename__}.") + return result + + async def get_many_by_ids(self, coluna: str, uuids: List[str | UUID] = None, with_for_update: bool = False, + order_by: str = None, ascending: bool = True): + try: + q = select(self.model) + + if uuids: + try: + q = q.where(getattr(self.model, coluna).in_(uuids)) + except AttributeError: + raise HTTPException( + status_code=400, + detail=f"A coluna '{coluna}' não existe em: {self.model.__tablename__}." + ) + # Verifica se o modelo tem a coluna 'ativo' e adiciona a condição + if hasattr(self.model, 'ativo'): + q = q.where(self.model.ativo.is_(True)) + + if with_for_update: + q = q.with_for_update() + + # Verificar se a ordenação foi solicitada e aplicar à consulta + if order_by: + order_by_column = getattr(self.model, order_by, None) + if not order_by_column: + raise HTTPException( + status_code=400, + detail=f"A coluna de ordenação '{order_by}' não foi encontrada na tabela " + f"{self.model.__tablename__}." + ) + q = q.order_by(order_by_column.asc() if ascending else order_by_column.desc()) + + rows = await self.session.execute(q) + + return rows.unique().scalars().all() + + except SQLAlchemyError as e: + handler = ErrorHandler() + handler.handle_error(e) + + async def get_filter( + self, + coluna: str, + uuids: Optional[List[Union[str, UUID]]] = None, + filters: Optional[List[Dict[str, Any]]] = None, + relationships: Optional[List[str]] = None, + order_by: Optional[Union[str, List[str]]] = None, # Aceita str ou List[str] + ascending: Optional[List[bool]] = None + ): + + try: + query = select(self.model) + + # Adicionar relacionamentos com joinedload para otimizar carregamento + if relationships: + for relation in relationships: + try: + relation_attr = getattr(self.model, relation) + query = query.options(joinedload(relation_attr)) + except AttributeError: + raise ValueError( + f"Relacionamento '{relation}' não encontrado no modelo '{self.model.__name__}'.") + + # Aplicar filtros dinâmicos com suporte a múltiplos níveis de relacionamento + if filters: + # Inicializamos um conjunto para controlar os joins já visitados + visited_joins = set() + + # Acumular condições separadas por tipo lógico + and_conditions = [] + or_conditions = [] + + for condition in filters: # Iteramos diretamente sobre a lista de condições + + column_path = getattr(condition, "column") # Acessamos diretamente os atributos + operator = getattr(condition, "operator", "==") # Operador padrão é igualdade + value = getattr(condition, "value") + logical = getattr(condition, "logical", "AND") # Operador lógico padrão é AND + + path_parts = column_path.split(".") + current_model = self.model + + for i, part in enumerate(path_parts[:-1]): # Navegar pelos relacionamentos no caminho + if part not in visited_joins: # Verificar se o relacionamento já foi adicionado + # Obtemos o relacionamento usando o modelo atual + try: + related_table = getattr(current_model, part).property.mapper.class_ + except AttributeError as e: + raise ValueError( + f"Relacionamento 
'{part}' não encontrado em '{current_model.__name__}'." + f"Erro: {str(e)}" + ) + + # Adicionamos o relacionamento à query com um join + query = query.join(related_table, isouter=True) + + # Registramos o relacionamento no conjunto para evitar duplicação + visited_joins.add(part) + else: + # Atualizar o modelo mesmo que o join seja pulado + try: + related_table = getattr(current_model, part).property.mapper.class_ + except AttributeError as e: + raise ValueError( + f"Relacionamento '{part}' não encontrado em '{current_model.__name__}'." + f"Erro: {str(e)}" + ) + + # Atualizamos o modelo atual para o próximo relacionamento + current_model = related_table + + # Obtém a coluna final no caminho para aplicar o filtro + try: + final_column = getattr(current_model, path_parts[-1]) + except AttributeError as e: + raise ValueError( + f"Coluna '{path_parts[-1]}' não encontrada em '{current_model.__name__}'." + f"Erro: {str(e)}" + ) + + # Mapear operadores para SQLAlchemy + operator_mapping = { + "==": final_column == value, + "!=": final_column != value, + ">": final_column > value, + "<": final_column < value, + ">=": final_column >= value, + "<=": final_column <= value, + "IN": final_column.in_(value) if isinstance(value, list) else final_column == value, + "NOT IN": final_column.notin_(value) if isinstance(value, list) else final_column != value, + "LIKE": final_column.like(f"%{value}%"), + "ILIKE": final_column.ilike(f"%{value}%"), # Apenas para PostgreSQL + "IS NULL": final_column.is_(None), + "IS NOT NULL": final_column.isnot(None), + "BETWEEN": final_column.between(value[0], value[1]) if isinstance(value, list) and len( + value) == 2 else None, + } + + # Adiciona a condição à lista apropriada (AND ou OR) + if operator in operator_mapping: + condition_expression = operator_mapping[operator] + if logical.upper() == "AND": + and_conditions.append(condition_expression) + elif logical.upper() == "OR": + or_conditions.append(condition_expression) + else: + raise ValueError(f"Operador '{operator}' não suportado.") + + # Aplicar condições acumuladas na query + if and_conditions: + query = query.filter(and_(*and_conditions)) # Aplica todos os AND combinados + if or_conditions: + query = query.filter(or_(*or_conditions)) # Aplica todos os OR combinados + + # Filtrar por IDs + if uuids: + query = query.where(getattr(self.model, coluna).in_(uuids)) + + # Verifica se o modelo tem a coluna 'ativo' e adiciona a condição + if hasattr(self.model, 'ativo'): + query = query.where(self.model.ativo.is_(True)) + + # Ordenação + # Resolução de colunas no contexto do modelo e seus relacionamentos + if order_by: + if ascending is None: + ascending = [True] * len(order_by) # Define `True` para todas as colunas por padrão + + if filters: # Caso existam filtros, usamos o formato atual + for i, order_col in enumerate(order_by): + path_parts = order_col.split(".") + column = self.model + + # Percorrer os relacionamentos + for part in path_parts[:-1]: + column = getattr(column, part).property.mapper.class_ + + # Resgatar a coluna final + final_column = getattr(column, path_parts[-1], None) + if final_column is None: + raise ValueError(f"Coluna de ordenação '{order_col}' não encontrada.") + + # Adicionar a ordenação na consulta + query = query.order_by( + final_column.asc() if ascending[i] else final_column.desc() + ) + else: # Caso não existam filtros, usamos o formato simples + order_by_column = getattr(self.model, order_by, None) + if not order_by_column: + raise ValueError( + f"A coluna de ordenação '{order_by}' não foi 
encontrada na tabela " + f"{self.model.__tablename__}." + ) + query = query.order_by( + order_by_column.asc() if ascending else order_by_column.desc() + ) + + # Executar a consulta + result = await self.session.execute(query) + return result.scalars().all() + + except SQLAlchemyError as e: + # Lidar com erros do SQLAlchemy + http_exception = HTTPException( + status_code=400, + detail=f"Erro interno do servidor ao acessar o banco de dados: {str(e)}" + ) + + raise http_exception + + @audit_log + async def update_by_id(self, update: Schema, coluna: str) -> dict: + uuid = str(update.uuid) + db_model = await self.get_one_by_id(uuid, coluna, with_for_update=True) + + if not db_model: + raise HTTPException(status_code=404, + detail=f"{self.model.__tablename__}{coluna}={uuid} não encontrada.") + + # Guardar o estado atual antes da modificação + original_model = db_model.__dict__.copy() + + values = update.model_dump(exclude_unset=True) + for k, v in values.items(): + setattr(db_model, k, v) + + try: + + return {"db_models": db_model, "original_models": original_model, "operation": "UPDATE"} + except IntegrityError: + raise IntegrityConflictException( + f"{self.model.__tablename__} {coluna}={uuid} conflito com dados existentes." + ) + + @audit_log + async def update_many_by_ids(self, updates: List[Schema], coluna: str, return_models: bool = False) -> dict: + uuids = [str(update.uuid) for update in updates] + db_models = await self.get_many_by_ids(coluna, uuids, with_for_update=True) + + if not db_models: + raise HTTPException(status_code=404, + detail=f"{self.model.__tablename__} {coluna}={uuids} não encontrada.") + try: + # Capturar o estado original dos modelos antes da modificação + original_models = [db_model.__dict__.copy() for db_model in db_models] + + # Aplicar as atualizações + for db_model in db_models: + update_data = next((item for item in updates if item.uuid == getattr(db_model, coluna)), None) + if update_data: + values = update_data.model_dump(exclude_unset=True) + for k, v in values.items(): + setattr(db_model, k, v) + + # Retornar os modelos atualizados e os originais para o audit_log + return { + "db_models": db_models, # Lista de modelos que foram modificados + "original_models": original_models, # Lista de estados originais + "operation": "UPDATE" + } + except SQLAlchemyError as e: + handler = ErrorHandler() + handler.handle_error(e) + + @audit_log + async def remove_by_id(self, uuid: str | UUID, coluna: str) -> dict: + if not uuid: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, + detail="Não foi informado nenhum UUID") + + # Tentar buscar o objeto antes de removê-lo para auditoria + db_model = await self.get_one_by_id(uuid, coluna) + if not db_model: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, + detail=f"{self.model.__tablename__} com {coluna}={uuid} não encontrado.") + + try: + query = delete(self.model).where(getattr(self.model, coluna) == uuid) + except AttributeError: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, + detail=f"A Coluna {coluna} não existe em: {self.model.__tablename__}.") + + try: + rows = await self.session.execute(query) + await self.session.flush() # Confirma a exclusão, mas não comita ainda + + if rows.rowcount is None or rows.rowcount == 0: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, ) + + # Retorna o modelo deletado e o rowcount + return {"db_models": db_model, "rowcount": rows.rowcount, "operation": "DELETE"} + except SQLAlchemyError as e: + handler = ErrorHandler() + 
handler.handle_error(e)
+
+    @audit_log
+    async def remove_many_by_ids(self, uuids: List[str | UUID], coluna: str) -> dict:
+        if not uuids:
+            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
+                                detail="Não foi informado nenhum UUID")
+
+        # Obter os modelos antes de deletá-los para fins de auditoria
+        db_models = await self.get_many_by_ids(coluna, uuids)
+
+        if not db_models:
+            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
+                                detail=f"{self.model.__tablename__} {coluna}={uuids} não encontrados.")
+
+        try:
+            query = delete(self.model).where(getattr(self.model, coluna).in_(uuids))
+            rows = await self.session.execute(query)
+        except IntegrityError:
+            await self.session.rollback()
+            raise IntegrityConflictException(
+                f"Erro ao deletar registros em {self.model.__tablename__}."
+            )
+
+        if rows.rowcount is None or rows.rowcount == 0:
+            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
+
+        # Retornar os modelos deletados e a contagem de linhas para o audit_log
+        return {
+            "db_models": db_models,  # Modelos que foram deletados
+            "operation": "DELETE",  # Especifica que é uma operação de deleção
+            "rowcount": rows.rowcount  # Número de registros deletados
+        }
+
+    async def remove_by_column(self, column_name: str, value: str | UUID) -> dict:
+        """
+        Remove um registro com base no nome da coluna e no valor correspondente.
+        """
+        try:
+            # Verificar se a coluna existe no modelo
+            if not hasattr(self.model, column_name):
+                raise HTTPException(
+                    status_code=404,
+                    detail=f"A coluna {column_name} não existe em {self.model.__tablename__}."
+                )
+
+            # Executar a exclusão
+            query = delete(self.model).where(getattr(self.model, column_name) == value)
+            rows = await self.session.execute(query)
+            await self.session.flush()
+
+            if rows.rowcount is None or rows.rowcount == 0:
+                raise HTTPException(
+                    status_code=404,
+                    detail=f"Nenhum registro encontrado em {self.model.__tablename__} com {column_name}={value}."
+                )
+
+            return {"rowcount": rows.rowcount, "operation": "DELETE", "column": column_name, "value": value}
+
+        except SQLAlchemyError as e:
+            handler = ErrorHandler()
+            handler.handle_error(e)
+
+    async def ativar_registro(self, update: Schema, coluna: str) -> dict:
+        """
+        Ativa um registro atualizando o campo 'ativo' para True.
+        """
+        update.ativo = True
+        return await self.update_by_id(update, coluna)
+
+    async def desativar_registro(self, update: Schema, coluna: str) -> dict:
+        """
+        Desativa um registro atualizando o campo 'ativo' para False.
+        """
+        update.ativo = False
+        return await self.update_by_id(update, coluna)
diff --git a/app/database/TratamentoErros.py b/app/database/TratamentoErros.py
new file mode 100644
index 0000000..f1c39e2
--- /dev/null
+++ b/app/database/TratamentoErros.py
@@ -0,0 +1,109 @@
+from fastapi import HTTPException, status
+from sqlalchemy.exc import IntegrityError, DataError, OperationalError, TimeoutError
+from sqlalchemy.orm.exc import NoResultFound, StaleDataError
+
+
+class ErrorHandler:
+
+    def handle_error(self, exception):
+        """
+        Método principal para tratar exceções do SQLAlchemy e gerar respostas apropriadas.
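+
+        Exemplo de uso (esboço ilustrativo, como feito nos repositórios deste projeto):
+
+            try:
+                await self.session.commit()
+            except SQLAlchemyError as e:
+                ErrorHandler().handle_error(e)  # levanta a HTTPException correspondente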
+ """ + if isinstance(exception, IntegrityError): + return self.handle_integrity_error(exception) + elif isinstance(exception, DataError): + return self.handle_data_error(exception) + elif isinstance(exception, OperationalError): + return self.handle_operational_error(exception) + elif isinstance(exception, TimeoutError): + return self.handle_timeout_error(exception) + elif isinstance(exception, StaleDataError): + return self.handle_concurrency_error(exception) + elif isinstance(exception, NoResultFound): + return self.handle_no_result_found(exception) + else: + return self.handle_generic_error(exception) + + @staticmethod + def handle_integrity_error(exception): + """ + Trata erros de integridade, como violações de chaves únicas ou campos not-null. + """ + if 'not-null constraint' in str(exception.orig): + column_name = str(exception.orig).split('column "')[1].split('"')[0] + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"O campo '{column_name}' não pode ser nulo. Por favor, forneça um valor válido." + ) + elif 'unique constraint' in str(exception.orig): + column_name = str(exception.orig).split('constraint "')[1].split('"')[0] + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Violação de unicidade: O valor do campo '{column_name}' " + f"já está em uso. Por favor, use um valor único." + ) + else: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Erro de integridade no banco de dados." + ) + + @staticmethod + def handle_data_error(exception): + """ + Trata erros de dados, como formatação ou valores fora dos limites. + """ + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Erro de dados: {str(exception.orig)}" + ) + + @staticmethod + def handle_operational_error(exception): + """ + Trata erros de conexão ou operacionais com o banco de dados. + """ + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Erro de conexão com o banco de dados. Por favor, tente novamente mais tarde." + ) + + @staticmethod + def handle_timeout_error(exception): + """ + Trata erros de timeout em transações com o banco de dados. + """ + raise HTTPException( + status_code=status.HTTP_408_REQUEST_TIMEOUT, + detail="Ocorreu um timeout durante a operação. Por favor, tente novamente." + ) + + @staticmethod + def handle_concurrency_error(exception): + """ + Trata erros de concorrência quando há múltiplas transações tentando modificar o mesmo dado. + """ + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail="Erro de concorrência. O dado foi modificado por outra transação." + ) + + @staticmethod + def handle_no_result_found(exception): + """ + Trata erros de busca sem resultado no banco de dados. + """ + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Nenhum resultado encontrado." + ) + + @staticmethod + def handle_generic_error(exception): + """ + Trata erros genéricos de SQLAlchemy e gera uma resposta padrão de erro. + """ + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Ocorreu um erro inesperado no banco de dados. Por favor, tente novamente mais tarde." 
+        )
diff --git a/app/database/__init__.py b/app/database/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/database/audit_log.py b/app/database/audit_log.py
new file mode 100644
index 0000000..cf8c316
--- /dev/null
+++ b/app/database/audit_log.py
@@ -0,0 +1,468 @@
+from sqlalchemy import inspect
+from app.database.models import HistoricoAlteracoes, HistoricoDelete, HistoricoUpdate
+
+from datetime import datetime, timezone
+
+
+def audit_log(func):
+    async def wrapper(*args, **kwargs):
+        self = args[0]  # 'self' é a instância do repositório
+        session = self.session
+
+        # Se auditoria não estiver habilitada, processa sem auditoria
+        if not getattr(self.model, 'log_auditoria_habilitado', False):
+            result = await func(*args, **kwargs)
+            await session.commit()
+
+            db_models = result.get("db_models")
+            operation = result.get("operation")
+            rowcount = result.get("rowcount")
+
+            if operation == "UPDATE" and db_models:
+                if isinstance(db_models, list):
+                    for db_model in db_models:
+                        await session.refresh(db_model)
+                else:
+                    await session.refresh(db_models)
+            # Retorna exatamente o que a função original produziu
+            if operation == "UPDATE":
+                return db_models
+            else:
+                return rowcount
+
+        # Auditoria habilitada: chama a função original e captura o retorno
+        result = await func(*args, **kwargs)
+        db_models = result.get("db_models")
+        original_models = result.get("original_models")
+        operation = result.get("operation")
+        rowcount = result.get("rowcount")
+
+        # Variável de controle: se o retorno for um objeto único, single_update será True.
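+        # update_by_id retorna um único objeto; update_many_by_ids retorna uma lista:
+        # normalizamos para lista aqui e desfazemos a normalização no retorno final do wrapper.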
+ single_update = False + original_db_model = None + if not isinstance(db_models, list): + single_update = True + original_db_model = db_models # Guarda o objeto único original + db_models = [db_models] # Encapsula para processamento da auditoria + original_models = [original_models] + + # Processamento da auditoria para UPDATE + if operation == "UPDATE" and db_models: + timestamp = datetime.now(timezone.utc).replace(tzinfo=None) + for db_model, original_model in zip(db_models, original_models): + inspector = inspect(db_model) + primary_key = str(inspect(db_model).identity[0]) + log_entry_update = HistoricoAlteracoes( + tabela=db_model.__tablename__, + data_modificacao=timestamp, + action='UPDATE', + usuario_id=kwargs.get('user_id'), + registro_id=primary_key + ) + session.add(log_entry_update) + + # Itera pelos atributos mapeados e registra alterações + for attr in inspector.attrs: + old_value = original_model.get(attr.key, None) + new_value = getattr(db_model, attr.key, None) + if old_value != new_value: + log_update = HistoricoUpdate( + coluna=attr.key, + valor_antigo=str(old_value) if old_value is not None else None, + valor_novo=str(new_value) if new_value is not None else None, + alteracao=log_entry_update + ) + session.add(log_update) + + # Processamento da auditoria para DELETE + elif operation == "DELETE": + _timestamp_naive = datetime.now(timezone.utc).replace(tzinfo=None) + if isinstance(db_models, list): # Caso seja delete_many + for db_model in db_models: + primary_key = str(inspect(db_model).identity[0]) + + def is_relationship(attr_name, model): + mapper = inspect(model.__class__) + return attr_name in mapper.relationships + + deletado = { + attr: value + for attr, value in db_model.__dict__.items() + if + not isinstance(value, list) and not attr.startswith('_') and not is_relationship(attr, db_model) + } + log_entry_delete = HistoricoAlteracoes( + tabela=db_model.__tablename__, + data_modificacao=_timestamp_naive, + action='DELETE', + usuario_id=kwargs.get('user_id'), + registro_id=primary_key + ) + session.add(log_entry_delete) + log_delete = HistoricoDelete( + registro_deletado=str(deletado), + alteracao=log_entry_delete + ) + session.add(log_delete) + else: # Caso delete_one + primary_key = str(inspect(db_models).identity[0]) + + def is_relationship(attr_name, model): + mapper = inspect(model.__class__) + return attr_name in mapper.relationships + + deletado = { + attr: value + for attr, value in db_models.__dict__.items() + if not isinstance(value, list) and not attr.startswith('_') and not is_relationship(attr, db_models) + } + log_entry_delete = HistoricoAlteracoes( + tabela=db_models.__tablename__, + data_modificacao=_timestamp_naive, + action='DELETE', + usuario_id=kwargs.get('user_id'), + registro_id=primary_key + ) + session.add(log_entry_delete) + log_delete = HistoricoDelete( + registro_deletado=str(deletado), + alteracao=log_entry_delete + ) + session.add(log_delete) + + # Realiza o commit final após registrar a auditoria + await session.commit() + + # Para operações de UPDATE, faz refresh dos objetos + if operation == "UPDATE" and db_models: + for db_model in db_models: + await session.refresh(db_model) + + # Retorno final: se for update e se for update_by_id (single_update=True), + # retorna o objeto único; caso contrário, retorna a lista, mantendo o contrato original. 
+ if operation == "UPDATE": + return original_db_model if single_update else db_models + else: + return rowcount + + return wrapper diff --git a/app/database/formatar_retorno_bd.py b/app/database/formatar_retorno_bd.py new file mode 100644 index 0000000..0ece0ec --- /dev/null +++ b/app/database/formatar_retorno_bd.py @@ -0,0 +1,76 @@ +def format_itens_equipamentos(data): + """ + Formata os dados dos itens de equipamentos em um formato específico. + Retorna o formato: + [ + { + "equipamento_nome": "string", + "tipo_equipamento_nome": "string", + "setor_nome": "string", + "uuid": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "itens": [ + { + "itens_equipamentos_ns": "string", + "itens_equipamentos_patrimonio": "string", + "itens_equipamentos_data_compra": "2024-11-15", + "itens_equipamentos_prazo_garantia": "2024-11-15", + "itens_equipamentos_voltagem": "0", + "itens_equipamentos_valor_aquisicao": 1, + "itens_equipamentos_rfid_uid": "stringst", + "uuid": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "itens_equipamentos_manutencao": true/false, + } + ] + } + ] + """ + formatted_response = [] + + # Mapeia os dados em uma estrutura agrupada + for item in data: + equipamento = item.relacao_equipamento + tipo_equipamento = equipamento.relacao_tipo_equipamento + setor = tipo_equipamento.relacao_setor + + # Procura se já existe uma entrada para o equipamento + equipamento_entry = next( + (entry for entry in formatted_response if entry["equipamento_uuid"] == equipamento.uuid), + None + ) + + # Cria uma nova entrada caso não exista + if not equipamento_entry: + equipamento_entry = { + "equipamento_nome": equipamento.equipamento_nome, + "equipamento_uuid": equipamento.uuid, + "equipamento_fk_tipo_equipamento": equipamento.fk_tipo_equipamento_uuid, + "tipo_equipamento_nome": tipo_equipamento.tipo_equipamento_nome, + "tipo_equipamento_uuid": tipo_equipamento.uuid, + "tipo_equipamento_fk_setor": tipo_equipamento.fk_setor_uuid, + "setor_nome": setor.setor_nome, + "setor_uuid": setor.uuid, + "itens": [] + } + formatted_response.append(equipamento_entry) + + # Adiciona o item à lista de itens do equipamento + equipamento_entry["itens"].append({ + "itens_equipamentos_ns": item.itens_equipamentos_ns, + "itens_equipamentos_patrimonio": item.itens_equipamentos_patrimonio, + "itens_equipamentos_data_compra": item.itens_equipamentos_data_compra, + "itens_equipamentos_prazo_garantia": item.itens_equipamentos_prazo_garantia, + "itens_equipamentos_voltagem": item.itens_equipamentos_voltagem, + "itens_equipamentos_valor_aquisicao": item.itens_equipamentos_valor_aquisicao, + "itens_equipamentos_rfid_uid": item.itens_equipamentos_rfid_uid, + "itens_equipamentos_manutencao": item.itens_equipamentos_manutencao, + "uuid": item.uuid + }) + + return formatted_response + + +# Dicionário de mapeamento para os formatadores +formatters_map = { + "itens_equipamentos": format_itens_equipamentos, + # Adicione outros formatadores aqui conforme necessário +} diff --git a/app/database/models.py b/app/database/models.py new file mode 100644 index 0000000..022538a --- /dev/null +++ b/app/database/models.py @@ -0,0 +1,655 @@ +# Importações de bibliotecas padrão +from __future__ import annotations +import enum +from datetime import datetime +from typing import List, Annotated +from uuid import UUID as UuidType +# from fastapi_users.db import SQLAlchemyBaseUserTable +from fastapi_users_db_sqlalchemy import SQLAlchemyBaseUserTableUUID + +# Importações de bibliotecas de terceiros +from sqlalchemy import Column, String, Boolean, ForeignKey, Table, 
Integer, Date, Text, Numeric, Enum
+from sqlalchemy import DateTime, FetchedValue, func
+from sqlalchemy.dialects.postgresql import UUID as UuidColumn
+from sqlalchemy.dialects.postgresql import UUID
+from sqlalchemy.ext.compiler import compiles
+from sqlalchemy.orm import relationship, Mapped, mapped_column, with_polymorphic
+from sqlalchemy.sql.expression import FunctionElement
+from sqlalchemy.sql import expression
+from uuid6 import uuid7
+
+# Importações do seu próprio projeto
+from .session import Base
+
+
+uuid = Annotated[UuidType, mapped_column(primary_key=True)]
+str1 = Annotated[str, mapped_column(String(1), nullable=True)]
+str2 = Annotated[str, mapped_column(String(2), nullable=True)]
+str8 = Annotated[str, mapped_column(String(8), nullable=True)]
+str10 = Annotated[str, mapped_column(String(10), nullable=True)]
+str10_not_null = Annotated[str, mapped_column(String(10), nullable=False)]
+str11 = Annotated[str, mapped_column(String(11), nullable=True)]
+str14 = Annotated[str, mapped_column(String(14), nullable=True)]
+str20 = Annotated[str, mapped_column(String(20), nullable=True)]
+str30_not_null = Annotated[str, mapped_column(String(30), nullable=False)]
+str39_uid = Annotated[str, mapped_column(String(39), nullable=True)]
+str36_uuid = Annotated[str, mapped_column(String(36), nullable=False)]
+str36_uuid_null = Annotated[str, mapped_column(String(36), nullable=True)]
+str50 = Annotated[str, mapped_column(String(50), nullable=True)]
+str50_null = Annotated[str, mapped_column(String(50), nullable=False)]
+str100 = Annotated[str, mapped_column(String(100), nullable=True)]
+str150 = Annotated[str, mapped_column(String(150), nullable=True)]
+str200 = Annotated[str, mapped_column(String(200), nullable=True)]
+url = Annotated[str, mapped_column(String(2083), nullable=True)]
+intpk = Annotated[int, mapped_column(primary_key=True, index=True)]
+valor_monetario = Annotated[float, mapped_column(Numeric(precision=10, scale=2), nullable=True)]
+valor_monetario_not_null = Annotated[float, mapped_column(Numeric(precision=10, scale=2), nullable=False)]
+data = Annotated[Date, mapped_column(Date, nullable=True)]
+data_null = Annotated[Date, mapped_column(Date, nullable=True)]
+data_idx = Annotated[Date, mapped_column(Date, index=True)]
+text = Annotated[Text, mapped_column(Text)]
+text_null = Annotated[Text, mapped_column(Text, nullable=True)]
+boleano = Annotated[Boolean, mapped_column(Boolean, default=True)]
+boleano_false = Annotated[Boolean, mapped_column(Boolean, server_default=expression.false())]
+
+
+# ------------------------------------------------------INICIO MIXIN----------------------------------------------------
+class utcnow(FunctionElement):
+    type = DateTime()
+    inherit_cache = True
+
+
+@compiles(utcnow, "postgresql")
+def pg_utcnow(element, compiler, **kw):
+    return "TIMEZONE('utc', CURRENT_TIMESTAMP)"
+
+
+class UuidMixin:
+    uuid: Mapped[UuidType] = mapped_column(
+        "uuid",
+        UuidColumn(as_uuid=True),
+
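# default=uuid7 (abaixo) gera UUIDs ordenáveis por tempo (pacote uuid6)
+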
primary_key=True, + default=uuid7, + nullable=False, + ) + + +class IdMixin: + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True, nullable=False) + + +class AtivoMixin: + ativo = Column(Boolean, nullable=False, server_default=expression.true(), index=True) + data_ativacao: Mapped[datetime] = mapped_column( + DateTime, + nullable=False, + server_default=utcnow(), + ) + data_desativacao: Mapped[datetime] = mapped_column( + DateTime, + nullable=True, + index=True, + onupdate=func.now(), + server_default=utcnow(), + server_onupdate=FetchedValue(), + ) + + +class TimestampMixin: + created_at: Mapped[datetime] = mapped_column( + DateTime, + nullable=False, + server_default=utcnow(), + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime, + nullable=True, + index=True, + onupdate=func.now(), + server_default=utcnow(), + server_onupdate=FetchedValue(), + ) + + +# ------------------------------------------------------FIM MIXIN---------------------------------------------------- + +# ------------------------------------------------------MULTI TENNAT---------------------------------------------------- +class Inquilino(UuidMixin, Base): + __tablename__ = "inquilinos" + __table_args__ = ({"schema": "shared"}) + + nome = Column(String(100), nullable=False, unique=False) + cpf_cnpj = Column(String(14), nullable=False, unique=True) + pessoa_celular = Column(String(20), nullable=True, unique=False) + + # Relacionamento com a tabela de usuários + usuario: Mapped[List["RbacUser"]] = relationship(back_populates="inquilino", passive_deletes=True, + lazy="selectin", + join_depth=2) + + +# ---------------------------------------------------FIM MULTI TENNAT--------------------------------------------------- + + +comercial_relacionamento_pessoa_empresa = Table('comercial_relacionamento_pessoa_empresa', + Base.metadata, + Column('relacao_comercial_uuid', UUID(as_uuid=True), + ForeignKey('comercial_relacoes_comercial.uuid', + ondelete="CASCADE")), + Column('pessoa_uuid', UUID(as_uuid=True), + ForeignKey('comercial_pessoas.uuid', ondelete="CASCADE")), + ) + + +class ComercialTransacaoComercialEnum(enum.Enum): + PAGAMENTO = "PAGAMENTO" + RECEBIMENTO = "RECEBIMENTO" + AMBOS = "AMBOS" + + +class ComercialRelacaoComercial(Base, TimestampMixin, UuidMixin, AtivoMixin): + __tablename__ = "comercial_relacoes_comercial" + log_auditoria_habilitado = False + + descricao_relacao_comercial: str = Column(String(30), nullable=False) + transacao_comercial: Mapped[ComercialTransacaoComercialEnum] = mapped_column(Enum( + ComercialTransacaoComercialEnum, + inherit_schema=True + ), + nullable=False) + pessoa_relacao: Mapped[List["ComercialPessoa"]] = relationship(secondary=comercial_relacionamento_pessoa_empresa, + back_populates='rc', + passive_deletes=True, + lazy="selectin", + join_depth=1, ) + + +class ComercialTipoEndereco(Base, TimestampMixin, UuidMixin, AtivoMixin): + __tablename__ = "comercial_tipos_endereco" + log_auditoria_habilitado = False + + tipo_endereco_descricao: str = Column(String(30), nullable=False) + relacao_endereco_tp: Mapped[List["ComercialEndereco"]] = relationship(back_populates="relacao_tipo_endereco", + passive_deletes=True, + lazy="selectin", + cascade="all, delete-orphan", + join_depth=1) + + +class ComercialPessoa(UuidMixin, Base, TimestampMixin, AtivoMixin): + __tablename__ = "comercial_pessoas" + log_auditoria_habilitado = False + + pessoa_status: bool = Column(Boolean, unique=False, default=True) + pessoa_telefone: Mapped[str20] + pessoa_celular: Mapped[str20] + 
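# Coluna discriminadora da herança polimórfica (ver "polymorphic_on" em __mapper_args__ abaixo)
+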
pessoa_tipo: Mapped[str1] + pessoa_email: Mapped[str150] + pessoa_local_evento: bool = Column(Boolean, unique=False, default=False) + + rc: Mapped[List["ComercialRelacaoComercial"]] = relationship(secondary=comercial_relacionamento_pessoa_empresa, + back_populates='pessoa_relacao', + passive_deletes=True, + lazy="selectin", join_depth=1) + + enderecos: Mapped[List["ComercialEndereco"]] = relationship(back_populates="pessoa", + passive_deletes=True, + lazy="selectin", + join_depth=2) + # usuario: Mapped[List["RbacUser"]] = relationship(back_populates="pessoa", passive_deletes=True, + # lazy="selectin", + # join_depth=2) + + relacao_conta: Mapped[List["FinanceiroConta"]] = relationship(back_populates="relacao_pessoa", + passive_deletes=True, + lazy="selectin", + cascade="all, delete-orphan", + join_depth=1) + + __mapper_args__ = {"polymorphic_identity": "comercial_pessoas", + "polymorphic_on": "pessoa_tipo", + } + + +class ComercialJuridica(ComercialPessoa): + __tablename__ = "comercial_juridicas" + log_auditoria_habilitado = False + + uuid: Mapped[uuid] = mapped_column(ForeignKey("comercial_pessoas.uuid", ondelete="CASCADE"), primary_key=True) + + juridica_cnpj: Mapped[str14] + juridica_email_fiscal: Mapped[str100] + juridica_insc_est: Mapped[str50] + juridica_ins_mun: Mapped[str50] + juridica_razao_social: Mapped[str200] + juridica_representante: Mapped[str100] + + __mapper_args__ = { + "polymorphic_identity": "0", + "polymorphic_load": "selectin" + } + + +class ComercialFisica(ComercialPessoa): + __tablename__ = "comercial_fisicas" + log_auditoria_habilitado = False + + uuid: Mapped[uuid] = mapped_column(ForeignKey("comercial_pessoas.uuid", ondelete="CASCADE"), primary_key=True) + + fisica_cpf: Mapped[str11] + fisica_rg: Mapped[str20] + fisica_genero: Mapped[str2] + fisica_nome: Mapped[str100] + + __mapper_args__ = { + "polymorphic_identity": "1", + "polymorphic_load": "selectin" + } + + +class ComercialEndereco(Base, TimestampMixin, UuidMixin): + __tablename__ = "comercial_enderecos" + log_auditoria_habilitado = False + + endereco_pessoa_status: bool = Column(Boolean, unique=False, default=True) + endereco_pessoa_descricao: Mapped[str50] + endereco_pessoa_numero: Mapped[str8] + endereco_pessoa_complemento: Mapped[str50] + endereco_pessoa_cep: Mapped[str8] + + fk_pessoa_uuid: Mapped[UuidType] = mapped_column(ForeignKey("comercial_pessoas.uuid", + ondelete="CASCADE"), + nullable=False) + pessoa: Mapped["ComercialPessoa"] = relationship(back_populates="enderecos", + lazy="selectin", + join_depth=1) + + fk_tipo_endereco_uuid: Mapped[UuidType] = mapped_column(ForeignKey("comercial_tipos_endereco.uuid", + ondelete="CASCADE"), nullable=False) + relacao_tipo_endereco: Mapped["ComercialTipoEndereco"] = relationship(back_populates="relacao_endereco_tp", + lazy="selectin", + join_depth=2) + + +# __________________________________________USUÁRIOS E PERMISSÃO DE ACESSO______________________________________________ +rbac_papeis_usuario = Table( + 'rbac_papeis_usuario', Base.metadata, + Column('user_uuid', UUID(as_uuid=True), ForeignKey('shared.rbac_usuarios.id'), primary_key=True), + Column('papel_uuid', UUID(as_uuid=True), ForeignKey('shared.rbac_papeis.uuid'), primary_key=True), + # Multi Tennat + schema='shared' +) + +rbac_papel_permissoes = Table( + 'rbac_papel_permissoes', Base.metadata, + Column('papel_uuid', UUID(as_uuid=True), ForeignKey('shared.rbac_papeis.uuid'), primary_key=True), + Column('permissao_id', Integer, ForeignKey('shared.rbac_permissoes.id'), primary_key=True), + # Multi Tennat 
+ schema='shared' + +) + + +class RbacPermissao(Base): + __tablename__ = "rbac_permissoes" + log_auditoria_habilitado = False + id = Column(Integer, primary_key=True, index=True) + nome: Mapped[str50] = mapped_column(nullable=False, unique=True) + papeis: Mapped[List["RbacPapel"]] = relationship(secondary=rbac_papel_permissoes, + back_populates='permissoes', + passive_deletes=True, + lazy="selectin", + join_depth=1, ) + + # Multi Tennat + __table_args__ = ({"schema": "shared"},) + + +class RbacPapel(UuidMixin, Base): + __tablename__ = "rbac_papeis" + log_auditoria_habilitado = False + # id = Column(Integer, primary_key=True, index=True, autoincrement=True) + nome: Mapped[str50] = mapped_column(nullable=False, unique=True) + permissoes: Mapped[List["RbacPermissao"]] = relationship(secondary=rbac_papel_permissoes, + back_populates='papeis', + passive_deletes=True, + lazy="selectin", + join_depth=2, ) + usuarios: Mapped[List["RbacUser"]] = relationship(secondary=rbac_papeis_usuario, + back_populates='papeis', + passive_deletes=True, + lazy="selectin", + join_depth=1, ) + # Multi Tennat + __table_args__ = ({"schema": "shared"},) + + +class RbacUser(SQLAlchemyBaseUserTableUUID, Base): + __tablename__ = "rbac_usuarios" + log_auditoria_habilitado = False + + def __init__(self, **kwargs): + super().__init__(**kwargs) + + papeis: Mapped[List[RbacPapel]] = relationship(secondary=rbac_papeis_usuario, + back_populates='usuarios', + passive_deletes=True, + lazy="selectin", + join_depth=1) + # Multi Tennat + fk_inquilino_uuid: Mapped[UuidType] = mapped_column(ForeignKey("shared.inquilinos.uuid", ondelete="CASCADE"), + nullable=False) + inquilino: Mapped["Inquilino"] = relationship(back_populates="usuario", + lazy="selectin", + passive_deletes=True, + join_depth=1) + nome_completo: Mapped[str100] + __table_args__ = ({"schema": "shared"},) + + +# __________________________________________FINAL USUÁRIOS E PERMISSÃO DE ACESSO________________________________________ + + +# ________________________________________________CONTAS A PAGAR E RECEBER______________________________________________ + +class FinanceiroTipoContaEnum(enum.Enum): + PAGAR = "PAGAR" + RECEBER = "RECEBER" + + +class FinanceiroTipoMovimentacaoEnum(enum.Enum): + CREDITO = "CREDITO" + DEBITO = "DEBITO" + + +class FinanceiroStatus(Base, IdMixin): + __tablename__ = "financeiro_status" + log_auditoria_habilitado = False + + status_nome_status: Mapped[str20] + status_descricao: Mapped[str200] + relacao_conta: Mapped[List["FinanceiroConta"]] = relationship(back_populates="relacao_status", + passive_deletes=True, + lazy="selectin", cascade="all, delete-orphan", + join_depth=1) + relacao_parcelas: Mapped[List["FinanceiroParcela"]] = relationship(back_populates="relacao_status", + passive_deletes=True, + lazy="selectin", cascade="all, delete-orphan", + join_depth=1) + + +class FinanceiroForma_Pagamento(Base, IdMixin): + __tablename__ = "financeiro_formas_pagamento" + log_auditoria_habilitado = False + + formas_pagamento_descricao: Mapped[str20] + relacao_pagamentos: Mapped[List["FinanceiroPagamento"]] = relationship(back_populates="relacao_formas_pagamento", + passive_deletes=True, lazy="selectin", + cascade="all, delete-orphan", join_depth=1) + + +class FinanceiroCategoria(Base, IdMixin): + __tablename__ = "financeiro_categorias" + log_auditoria_habilitado = False + + categorias_nome: Mapped[str20] + + relacao_conta: Mapped[List["FinanceiroConta"]] = relationship(back_populates="relacao_categorias", + passive_deletes=True, + lazy="selectin", 
cascade="all, delete-orphan", + join_depth=1) + + +class FinanceiroCentro_Custo(Base, IdMixin): + __tablename__ = "financeiro_centros_custo" + log_auditoria_habilitado = False + + centros_custo_nome: Mapped[str20] + centros_custo_descricao: Mapped[str100] + relacao_conta: Mapped[List["FinanceiroConta"]] = relationship(back_populates="relacao_centros_custo", + passive_deletes=True, + lazy="selectin", cascade="all, delete-orphan", + join_depth=1) + + +class FinanceiroParcela(Base, IdMixin): + __tablename__ = "financeiro_parcelas" + log_auditoria_habilitado = True + + parcelas_numero_parcela: Mapped[int] = mapped_column(Integer, nullable=False) + parcelas_valor_parcela: Mapped[valor_monetario] + parcelas_valor_juros: Mapped[valor_monetario] + parcelas_valor_multa: Mapped[valor_monetario] + parcelas_valor_desconto: Mapped[valor_monetario] + parcelas_data_vencimento: Mapped[data_idx] + fk_contas_id: Mapped[int] = mapped_column(ForeignKey("financeiro_contas.id", ondelete="CASCADE"), nullable=False) + relacao_conta: Mapped["FinanceiroConta"] = relationship(back_populates="relacao_parcelas", lazy="selectin", + join_depth=2) + fk_status_id: Mapped[int] = mapped_column(ForeignKey("financeiro_status.id", ondelete="CASCADE"), + nullable=False) + relacao_status: Mapped["FinanceiroStatus"] = relationship(back_populates="relacao_parcelas", lazy="selectin", + join_depth=2) + relacao_pagamentos: Mapped[List["FinanceiroPagamento"]] = relationship(back_populates="relacao_parcela", + passive_deletes=True, + lazy="selectin", + cascade="all, delete-orphan", + join_depth=1) + + +class FinanceiroPagamento(Base, IdMixin): + __tablename__ = "financeiro_pagamentos" + log_auditoria_habilitado = True + + data_pagamento: Mapped[data_idx] + valor_pago: Mapped[valor_monetario] + observacao: Mapped[text] + fk_parcelas_id: Mapped[int] = mapped_column(ForeignKey("financeiro_parcelas.id", ondelete="CASCADE"), + nullable=False) + relacao_parcela: Mapped["FinanceiroParcela"] = relationship(back_populates="relacao_pagamentos", lazy="selectin", + join_depth=2) + fk_contas_corrente_id: Mapped[int] = mapped_column(ForeignKey("financeiro_contas_corrente.id", + ondelete="CASCADE"), nullable=False) + relacao_contas_corrente: Mapped["FinanceiroConta_Corrente"] = relationship(back_populates="relacao_pagamentos", + lazy="selectin", join_depth=2) + fk_formas_pagamento_id: Mapped[int] = mapped_column(ForeignKey("financeiro_formas_pagamento.id", + ondelete="CASCADE"), nullable=False) + relacao_formas_pagamento: Mapped["FinanceiroForma_Pagamento"] = relationship(back_populates="relacao_pagamentos", + lazy="selectin", join_depth=2) + relacao_movimentacoes_conta: Mapped[List[ + "FinanceiroMovimentacao_Conta"]] = relationship(back_populates="relacao_pagamentos", + passive_deletes=True, + lazy="selectin", + cascade="all, delete-orphan", + join_depth=1) + + +class FinanceiroConta(Base, IdMixin): + __tablename__ = "financeiro_contas" + log_auditoria_habilitado = True + + contas_tipo_conta: Mapped[FinanceiroTipoContaEnum] = mapped_column(Enum(FinanceiroTipoContaEnum, + inherit_schema=True + ), + nullable=False) + contas_data_emissao: Mapped[data] + contas_data_vencimento: Mapped[data] + contas_valor_total: Mapped[valor_monetario] + contas_valor_juros: Mapped[valor_monetario] + contas_valor_multa: Mapped[valor_monetario] + contas_valor_desconto: Mapped[valor_monetario] + contas_descricao: Mapped[str200] + fk_pessoas_uuid: Mapped[int] = mapped_column(ForeignKey("comercial_pessoas.uuid", ondelete="CASCADE"), + nullable=False) + relacao_pessoa: 
Mapped["ComercialPessoa"] = relationship(back_populates="relacao_conta", lazy="selectin", + join_depth=2) + fk_status_id: Mapped[int] = mapped_column(ForeignKey("financeiro_status.id", ondelete="CASCADE"), + nullable=False) + relacao_status: Mapped["FinanceiroStatus"] = relationship(back_populates="relacao_conta", lazy="selectin", + join_depth=2) + fk_categorias_id: Mapped[int] = mapped_column(ForeignKey("financeiro_categorias.id", ondelete="CASCADE"), + nullable=False) + relacao_categorias: Mapped["FinanceiroCategoria"] = relationship(back_populates="relacao_conta", lazy="selectin", + join_depth=2) + fk_centros_custo_id: Mapped[int] = mapped_column(ForeignKey("financeiro_centros_custo.id", ondelete="CASCADE"), + nullable=False) + relacao_centros_custo: Mapped["FinanceiroCentro_Custo"] = relationship(back_populates="relacao_conta", + lazy="selectin", + join_depth=2) + relacao_parcelas: Mapped[List["FinanceiroParcela"]] = relationship(back_populates="relacao_conta", + passive_deletes=True, + lazy="selectin", cascade="all, delete-orphan", + join_depth=1) + + +class FinanceiroConta_Corrente(Base, IdMixin): + __tablename__ = "financeiro_contas_corrente" + log_auditoria_habilitado = True + + contas_corrente_nome_conta: Mapped[str50] + contas_corrente_saldo_inicial: Mapped[valor_monetario] + contas_corrente_data_criacao: Mapped[data_idx] + contas_corrente_descricao: Mapped[str100] + relacao_movimentacoes_conta: Mapped[List["FinanceiroMovimentacao_Conta"]] = relationship( + back_populates="relacao_contas_corrente", + passive_deletes=True, lazy="selectin", + cascade="all, delete-orphan", join_depth=1) + relacao_pagamentos: Mapped[List["FinanceiroPagamento"]] = relationship(back_populates="relacao_contas_corrente", + passive_deletes=True, lazy="selectin", + cascade="all, delete-orphan", join_depth=1) + + +class FinanceiroMovimentacao_Conta(Base, IdMixin): + __tablename__ = "financeiro_movimentacoes_conta" + log_auditoria_habilitado = True + + movimentacoes_conta_tipo_movimentacao: Mapped[FinanceiroTipoMovimentacaoEnum] = mapped_column( + Enum(FinanceiroTipoMovimentacaoEnum, + # Multi Tennat + inherit_schema=True + # /Multi Tennat + ), + nullable=False, + + ) + movimentacoes_conta_valor_movimentacao: Mapped[valor_monetario] + movimentacoes_conta_data_movimentacao: Mapped[data_idx] + movimentacoes_conta_descricao: Mapped[str200] + fk_contas_corrente_id: Mapped[int] = mapped_column(ForeignKey("financeiro_contas_corrente.id", ondelete="CASCADE"), + nullable=False) + relacao_contas_corrente: Mapped[ + "FinanceiroConta_Corrente"] = relationship(back_populates="relacao_movimentacoes_conta", + lazy="selectin", join_depth=2) + fk_pagamentos_id: Mapped[int] = mapped_column(ForeignKey("financeiro_pagamentos.id", ondelete="CASCADE"), + nullable=False) + relacao_pagamentos: Mapped["FinanceiroPagamento"] = relationship(back_populates="relacao_movimentacoes_conta", + lazy="selectin", join_depth=2) + + +financeiro_conta_manutencao_equipamentos = Table('financeiro_conta_manutencao_equipamentos', Base.metadata, + Column('manutencao_uuid', UUID(as_uuid=True), + ForeignKey('estoque_manutencoes_equipamentos.uuid'), + primary_key=True), + Column('conta_id', Integer, + ForeignKey('financeiro_contas.id'), primary_key=True) + ) + + +# ______________________________________________FIM CONTAS A PAGAR E RECEBER____________________________________________ + +# _____________________________________________________TABLEAS DE LOG___________________________________________________ + +class HistoricoAlteracoes(Base, UuidMixin): + 
__tablename__ = 'historico_alteracoes' + + tabela: Mapped[str100] + data_modificacao: Mapped[datetime] = mapped_column(DateTime, nullable=False) + action: Mapped[str10] # 'update', 'delete' + usuario_id: Mapped[str36_uuid_null] # Assumindo que o "ID" do usuário é uma "string" + registro_id: Mapped[str36_uuid] # "ID" do registro que está sendo alterado + + # Relacionamentos + updates: Mapped[List["HistoricoUpdate"]] = relationship(back_populates="alteracao", passive_deletes=True, + lazy="selectin", cascade="all, delete-orphan", + join_depth=1) + deletes: Mapped["HistoricoDelete"] = relationship(back_populates="alteracao", passive_deletes=True, + lazy="selectin", cascade="all, delete-orphan", + join_depth=1) + + +class HistoricoUpdate(Base, UuidMixin): + __tablename__ = 'historico_update' + + fk_historico_alteracoes_uuid: Mapped[UuidType] = mapped_column(ForeignKey("historico_alteracoes.uuid", + ondelete="CASCADE"), nullable=False) + coluna: Mapped[str100] + valor_antigo: Mapped[str200] + valor_novo: Mapped[str200] + # Relacionamento com a tabela principal + alteracao: Mapped["HistoricoAlteracoes"] = relationship(back_populates="updates", lazy="selectin", join_depth=2) + + +class HistoricoDelete(Base, UuidMixin): + __tablename__ = 'historico_delete' + + fk_historico_alteracoes_uuid: Mapped[UuidType] = mapped_column(ForeignKey("historico_alteracoes.uuid", + ondelete="CASCADE"), nullable=False) + registro_deletado: Mapped[text] # Aqui armazenamos o estado completo da linha deletada como JSON + # Relacionamento com a tabela principal + alteracao: Mapped["HistoricoAlteracoes"] = relationship(back_populates="deletes", lazy="selectin", join_depth=2) + + +# ___________________________________________________FIM TABLEAS DE LOG_________________________________________________ + +# ______________________________________________LOCALIZAÇÃO ARQUIVOS S3_________________________________________________ +class S3Arquivo(Base, UuidMixin, TimestampMixin): + __tablename__ = "s3_arquivos" + + arquivos_nome_original: Mapped[str200] + arquivos_nome_armazenado: Mapped[str200] + + associacoes: Mapped["S3ArquivoAssociacao"] = relationship(back_populates="arquivos", passive_deletes=True, + lazy="selectin", cascade="all, delete-orphan", + join_depth=1) + + +class S3ArquivoAssociacao(Base, UuidMixin, TimestampMixin): + __tablename__ = "s3_arquivo_associacoes" + + fk_arquivo_uuid: Mapped[UuidType] = mapped_column(ForeignKey("s3_arquivos.uuid", + ondelete="CASCADE"), nullable=False) + arquivo_associacoes_tabela_relacionada: Mapped[str100] # Nome da tabela associada + arquivo_associacoes_linha_uuid: Mapped[UuidType] # Uuid da linha na tabela associada + + arquivos: Mapped["S3Arquivo"] = relationship(back_populates="associacoes", lazy="selectin", join_depth=2) + + +# ____________________________________________FIM LOCALIZAÇÃO ARQUIVOS S3_______________________________________________ + +# __________________________________________________CONFIGURAÇÃO HERANÇA________________________________________________ +# ____________________________________________NECESSÁRIO FICA NO FIM DO CÓDIGO__________________________________________ + +# Definindo a consulta polimórfica +PESSOA_POLY = with_polymorphic( + ComercialPessoa, # Modelo base + [ComercialFisica, ComercialJuridica] # Subclasses +) +# ________________________________________________FIM CONFIGURAÇÃO HERANÇA______________________________________________ diff --git a/app/database/session.py b/app/database/session.py new file mode 100644 index 0000000..4ef1564 --- /dev/null 
+# ________________________________________________FIM CONFIGURAÇÃO HERANÇA______________________________________________
diff --git a/app/database/session.py b/app/database/session.py
new file mode 100644
index 0000000..4ef1564
--- /dev/null
+++ b/app/database/session.py
@@ -0,0 +1,143 @@
+# Importações de bibliotecas padrão
+import contextlib
+from typing import AsyncIterator, Optional
+
+from fastapi import Depends
+# Importações de bibliotecas de terceiros
+from sqlalchemy import MetaData
+from sqlalchemy.ext.asyncio import (
+    AsyncConnection,
+    AsyncEngine,
+    AsyncSession,
+    async_sessionmaker,
+    create_async_engine,
+    AsyncAttrs,
+)
+from sqlalchemy.orm import DeclarativeBase
+from app.config import ECHO
+import traceback
+
+EXTENSIONS = ["uuid-ossp", "postgis", "postgis_topology"]
+
+naming_convention = {
+    "ix": "ix_ct_%(table_name)s_%(column_0_N_name)s",
+    "uq": "uq_ct_%(table_name)s_%(column_0_N_name)s",
+    "ck": "ck_ct_%(table_name)s_%(constraint_name)s",
+    "fk": "fk_ct_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
+    "pk": "pk_ct_%(table_name)s",
+}
+
+
+class Base(DeclarativeBase, AsyncAttrs):
+    metadata = MetaData(naming_convention=naming_convention)
+
+
+class DatabaseSessionManager:
+    def __init__(self):
+        self._engine: AsyncEngine | None = None
+        self._sessionmaker: async_sessionmaker | None = None
+
+    def init(self, host: str):
+        self._engine = create_async_engine(host, echo=ECHO)
+        # O parâmetro autocommit não existe mais no SQLAlchemy 2.0; o comportamento
+        # padrão da sessão já é transacional.
+        self._sessionmaker = async_sessionmaker(bind=self._engine)
+
+    async def close(self):
+        if self._engine is None:
+            raise Exception("DatabaseSessionManager is not initialized")
+        await self._engine.dispose()
+        self._engine = None
+        self._sessionmaker = None
+
+    def is_initialized(self) -> bool:
+        """Verifica se o engine foi inicializado"""
+        return self._engine is not None
+
+    @contextlib.asynccontextmanager
+    async def connect(self) -> AsyncIterator[AsyncConnection]:
+        if self._engine is None:
+            raise Exception("DatabaseSessionManager is not initialized")
+
+        async with self._engine.begin() as connection:
+            try:
+                yield connection
+            except Exception:
+                await connection.rollback()
+                raise
+
+    @contextlib.asynccontextmanager
+    async def session(self) -> AsyncIterator[AsyncSession]:
+        if self._sessionmaker is None:
+            raise Exception("DatabaseSessionManager is not initialized")
+
+        session = self._sessionmaker()
+        try:
+            yield session
+        except Exception:
+            await session.rollback()
+            raise
+        finally:
+            await session.close()
+
+    @contextlib.asynccontextmanager
+    async def session_with_tenant(self, tenant_schema: Optional[str]) -> AsyncIterator[AsyncSession]:
+
+        if self._engine is None:
+            raise Exception("DatabaseSessionManager is not initialized")
+
+        # Verifica e configura o schema_translate_map
+        if tenant_schema:
+            schema_engine = self._engine.execution_options(
+                schema_translate_map={None: str(tenant_schema)}
+            )
+        else:
+            schema_engine = self._engine.execution_options(
+                schema_translate_map=None
+            )
+
+        # Cria a sessão vinculada ao schema_engine
+        try:
+            session = self._sessionmaker(bind=schema_engine)
+        except Exception as e:
+            print(f"Erro ao configurar o bind da sessão: {e}")
+            raise
+
+        try:
+            yield session
+        except Exception as e:
+            await session.rollback()
+            print(f"Erro encontrado: {e}. 
Traceback: {traceback.format_exc()}") + raise + finally: + await session.close() + # print("Sessão encerrada") + + async def create_all(self, connection: AsyncConnection): + await connection.run_sync(Base.metadata.create_all) + + async def drop_all(self, connection: AsyncConnection): + await connection.run_sync(Base.metadata.drop_all) + + # Multi Tenant + def get_engine(self): + if not self.is_initialized(): + raise Exception("DatabaseSessionManager is not initialized") + return self._engine + + def get_sessionmaker(self): + if self._sessionmaker is None: + raise Exception("DatabaseSessionManager is not initialized") + return self._sessionmaker + + # Multi Tenant + + +sessionmanager = DatabaseSessionManager() + + +async def get_db(): + async with sessionmanager.session() as session: + yield session diff --git a/app/main.py b/app/main.py new file mode 100644 index 0000000..9e6a7ce --- /dev/null +++ b/app/main.py @@ -0,0 +1,110 @@ +import asyncio +from fastapi import FastAPI, Request + +from contextlib import asynccontextmanager + +from fastapi.exceptions import ResponseValidationError, RequestValidationError +from pydantic import ValidationError + +from starlette.exceptions import HTTPException as StarletteHTTPException +from fastapi.responses import JSONResponse +from app.database.session import sessionmanager + +from app.routers import rotas + +# Importação das Rotas + + +from app.config import URL_BD +from app.routers.router_registry import RouterRegistry + +from fastapi.middleware.cors import CORSMiddleware +# from starlette.middleware.cors import CORSMiddleware + + +def init_app(init_db=True): + lifespan = None + + if init_db: + + sessionmanager.init(URL_BD) + + @asynccontextmanager + async def lifespan(app_init: FastAPI): + yield + if sessionmanager.is_initialized(): # Usa o método público para checar o engine + await sessionmanager.close() + + server = FastAPI(title="Sonora Tecnologia Admin", lifespan=lifespan) + + server.add_middleware( + CORSMiddleware, + allow_origins=["http://localhost:4200", + "http://srv-captain--sistema/", + "https://srv-captain--sistema/", + "https://app.sonoraav.com.br", + "http://app.sonoraav.com.br", + ], # Domínio do seu frontend + allow_credentials=True, + allow_methods=["*"], # Permitir todos os métodos (ou especifique "POST" se necessário) + allow_headers=["*"], + ) + + + # Registro dinâmico das rotas + router_registry = RouterRegistry(server, rotas.routers) + router_registry.register_routers() + + return server + + +# Definindo o objeto "app" para estar disponível em todos os contextos +app = init_app() + + +@app.exception_handler(ResponseValidationError) +async def response_validation_exception_handler(request: Request, exc: ResponseValidationError): + return JSONResponse( + status_code=500, + content={"message": "Erro ao validar a resposta do servidor."}, + ) + + +@app.exception_handler(RequestValidationError) +async def validation_exception_handler(request: Request, exc: RequestValidationError): + return JSONResponse( + status_code=422, + content={"detail": exc.errors()}, + ) + + +@app.exception_handler(StarletteHTTPException) +async def starlette_http_exception_handler(request: Request, exc: StarletteHTTPException): + return JSONResponse( + status_code=exc.status_code, + content={"detail": "API Administrativa Eventos"}, + ) + + +@app.exception_handler(ValidationError) +async def pydantic_validation_exception_handler(request: Request, exc: ValidationError): + return JSONResponse( + status_code=422, + content={"detail": exc.errors()}, + ) + + 
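+# Exemplo ilustrativo do corpo retornado pelos handlers de validação acima
+# (formato de exc.errors() do Pydantic; os campos exatos variam conforme o payload):
+#
+#   {
+#       "detail": [
+#           {"loc": ["body", "email"], "msg": "Field required", "type": "missing"}
+#       ]
+#   }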
+@app.exception_handler(asyncio.TimeoutError) +async def timeout_error_handler(request: Request, exc: asyncio.TimeoutError): + return JSONResponse( + status_code=504, + content={"detail": "A operação excedeu o tempo limite."}, + ) + + +if __name__ == "__main__": + # Código de inicialização que só roda se o arquivo for executado diretamente + app = init_app() + # Exemplo de inicialização do servidor (se necessário) + # import uvicorn + # uvicorn.run("app", host="0.0.0.0", port=8000) diff --git a/app/multi_tenant/__init__.py b/app/multi_tenant/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/multi_tenant/criar_tenant.py b/app/multi_tenant/criar_tenant.py new file mode 100644 index 0000000..64d933d --- /dev/null +++ b/app/multi_tenant/criar_tenant.py @@ -0,0 +1,117 @@ +from alembic.runtime.migration import MigrationContext +from alembic.config import Config +from alembic.script import ScriptDirectory +from sqlalchemy.ext.asyncio import AsyncEngine +from sqlalchemy import select, insert +from app.database.models import Inquilino, RbacUser, RbacPapel, rbac_papeis_usuario +from app.database.session import sessionmanager +from app.rbac.auth import get_user_db, get_user_manager +from app.rbac.schemas import UserCreate +from fastapi_users.exceptions import UserAlreadyExists + + +async def check_migrations(engine: AsyncEngine): + """ + Verifica se todas as migrações foram aplicadas. + """ + alembic_config = Config("alembic.ini") + script = ScriptDirectory.from_config(alembic_config) + + async with engine.begin() as conn: + def sync_check_migrations(sync_conn): + context = MigrationContext.configure(sync_conn) + current_revision = context.get_current_revision() + latest_revision = script.get_current_head() + + if current_revision != latest_revision: + raise RuntimeError( + f"Database is not up-to-date. Current: {current_revision}, Latest: {latest_revision}. " + "Execute migrations before adding new tenants." + ) + + await conn.run_sync(sync_check_migrations) + + +async def create_user(session, fk_inquilino_uuid, email, password, is_superuser=False): + """ + Cria um usuário no sistema utilizando o gerenciador de usuários do FastAPI Users. + """ + try: + + user_db = await get_user_db(session).__anext__() + user_manager = await get_user_manager(user_db).__anext__() + + try: + user = await user_manager.create( + UserCreate( + email=email, + password=password, + is_superuser=is_superuser, + is_active=True, + fk_inquilino_uuid=fk_inquilino_uuid, + + ) + ) + return user.id + except UserAlreadyExists: + result_user = await session.execute(select(RbacUser).filter_by(email=email)) + existing_user = result_user.scalars().first() + raise RuntimeError(f"Usuário '{email}' já existe, seu ID é {existing_user.id}") + except Exception as e: + raise RuntimeError(f"Erro ao criar usuário '{email}': {e}") + + +async def tenant_create(nome: str, email: str, password: str, cpf_cnpj: str): + """ + Cria um novo tenant (inquilino) no sistema, configura o schema específico + e registra um usuário inicial relacionado ao inquilino. + """ + async with sessionmanager.session() as db: + try: + # Verificar se o tenant já existe + result = await db.execute(select(Inquilino).filter_by(cpf_cnpj=cpf_cnpj)) + existing_tenant = result.scalars().first() + + if existing_tenant: + raise RuntimeError( + f"Tenant com CPF/CNPJ '{cpf_cnpj}' já existe. 
Nome: {existing_tenant.nome}, "
+                    f"UUID: {existing_tenant.uuid}"
+                )
+
+            # Criar o registro do tenant
+            tenant = Inquilino(nome=nome, cpf_cnpj=cpf_cnpj)
+            db.add(tenant)
+            await db.commit()
+            await db.refresh(tenant)
+
+            # Criar o usuário inicial
+            user_id = await create_user(
+                session=db,
+                fk_inquilino_uuid=tenant.uuid,
+                email=email,
+                password=password,
+                is_superuser=True,
+            )
+
+            # Nova sessão para associar o papel ao usuário
+            async with sessionmanager.session() as new_db:
+                # Buscar o papel "Super Administrador"
+                result_papel = await new_db.execute(select(RbacPapel).filter_by(nome="Super Administrador"))
+                papel = result_papel.scalars().first()
+                if not papel:
+                    raise RuntimeError("Papel 'Super Administrador' não encontrado.")
+
+                # Relacionar o papel ao usuário
+                await new_db.execute(
+                    insert(rbac_papeis_usuario).values(
+                        user_uuid=user_id,
+                        papel_uuid=papel.uuid,
+                    )
+                )
+                await new_db.commit()
+
+            return tenant.uuid
+        except RuntimeError:
+            # Erros já conhecidos (tenant duplicado, papel ausente etc.) sobem sem re-embrulhar
+            raise
+        except Exception as e:
+            raise RuntimeError(f"Erro inesperado durante a criação do cliente '{nome}': {e}")
diff --git a/app/multi_tenant/tenant.py b/app/multi_tenant/tenant.py
new file mode 100644
index 0000000..95e03fa
--- /dev/null
+++ b/app/multi_tenant/tenant.py
@@ -0,0 +1,47 @@
+from contextlib import asynccontextmanager
+from typing import Optional
+from app.database.session import sessionmanager
+from sqlalchemy import MetaData
+from app.database.models import Base
+
+
+@asynccontextmanager
+async def with_db(tenant_schema: Optional[str] = None):
+    """
+    Gerencia uma sessão do banco de dados com suporte a schema_translate_map.
+
+    Args:
+        tenant_schema (Optional[str]): Nome do schema do tenant (opcional).
+
+    Yields:
+        AsyncSession: Sessão configurada para o schema especificado.
+    """
+    if not sessionmanager.is_initialized():
+        raise Exception("DatabaseSessionManager is not initialized")
+
+    # Configura o schema_translate_map para o inquilino
+    if tenant_schema:
+        schema_translate_map = {None: tenant_schema}
+    else:
+        schema_translate_map = None
+
+    # Configura a conexão com o schema correto
+    connectable = sessionmanager.get_engine().execution_options(schema_translate_map=schema_translate_map)
+
+    # Cria uma sessão vinculada ao connectable configurado: o sessionmaker é
+    # obtido primeiro e só então chamado com bind=... (get_sessionmaker não
+    # aceita argumentos); rollback e close são corrotinas em AsyncSession.
+    session = sessionmanager.get_sessionmaker()(bind=connectable)
+    try:
+        yield session
+    except Exception:
+        await session.rollback()
+        raise
+    finally:
+        await session.close()
+
+
+def get_tenant_specific_metadata():
+    meta = MetaData(schema=None)
+    for table in Base.metadata.tables.values():
+        if table.schema is None:
+            table.to_metadata(meta)
+    return meta
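+
+# Exemplo de uso (esboço ilustrativo, assumindo um schema de tenant já criado):
+#
+#   async def exemplo():
+#       async with with_db("nome_do_schema") as session:
+#           ...  # consultas aqui executam com as tabelas traduzidas para o schema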
+ """ + + if not user: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Usuário não autenticado.", + ) + + # Obtém o UUID do inquilino (assumindo que está no atributo fk_inquilino_uuid) + tenant_uuid = user.fk_inquilino_uuid + + if not tenant_uuid: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Inquilino não associado ao usuário.", + ) + + # Configura o schema do tenant usando `session_with_tenant` + async with sessionmanager.session_with_tenant(tenant_uuid) as session: + yield session + + # Retorna o usuário autenticado para o verify_permissions + # return user + + +async def get_tenant_session( + tenant_schema: str = Depends(get_tenant_schema) +) -> AsyncIterator[AsyncSession]: + """ + Obtém uma sessão configurada para o schema do tenant. + """ + async with sessionmanager.session_with_tenant(tenant_schema) as session: + yield session diff --git a/app/rbac/README.txt b/app/rbac/README.txt new file mode 100644 index 0000000..9d0d22a --- /dev/null +++ b/app/rbac/README.txt @@ -0,0 +1,33 @@ +Instruções para integração do módulo RBAC: + +1. Copie a pasta 'rbac' para o diretório raiz do seu projeto. + +2. Instale a dependencias + - pip install fastapi-users[sqlalchemy] + +3. Adicione o modelo 'User' ao seu projeto: + - Copie o conteúdo do arquivo 'user_model_snippet.txt' para o seu arquivo de modelos. + +4. Inclua as rotas protegidas no seu 'main.py': + - Importe e inclua as rotas: + from rbac.routes import router as rbac_router + app.include_router(rbac_router) + +5. Configure as dependências e sessões do banco de dados conforme necessário. + +6. Exemplo de como adicionar a verificação de permissão nas rotas: +```python +from fastapi import APIRouter, Depends +from rbac.permissions import verify_permissions + +router = APIRouter() + +@router.get("/admin-only") +async def admin_only_route(user: User = Depends(verify_permissions([1]))): + return {"message": "This is an admin-only route"} + +7. 
diff --git a/app/rbac/__init__.py b/app/rbac/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/rbac/auth.py b/app/rbac/auth.py
new file mode 100644
index 0000000..57f6c43
--- /dev/null
+++ b/app/rbac/auth.py
@@ -0,0 +1,46 @@
+from fastapi import Depends
+from fastapi_users import FastAPIUsers, UUIDIDMixin, BaseUserManager
+from fastapi_users.authentication import BearerTransport, AuthenticationBackend, JWTStrategy
+from fastapi_users_db_sqlalchemy import SQLAlchemyUserDatabase
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from app.database import session
+from app.database.models import RbacUser
+import uuid
+from app.config import SECRET
+
+bearer_transport = BearerTransport(tokenUrl="autenticacao/login")
+
+
+def get_jwt_strategy() -> JWTStrategy:
+    return JWTStrategy(
+        secret=SECRET,  # Segredo vindo de app.config, o mesmo usado nos tokens de reset/verificação
+        lifetime_seconds=3600,
+    )
+
+
+auth_backend = AuthenticationBackend(
+    name="jwt",
+    transport=bearer_transport,
+    get_strategy=get_jwt_strategy,
+)
+
+
+async def get_user_db(session_get_user_db: AsyncSession = Depends(session.get_db)):
+    yield SQLAlchemyUserDatabase(session_get_user_db, RbacUser)
+
+
+class UserManager(UUIDIDMixin, BaseUserManager[RbacUser, uuid.UUID]):
+    reset_password_token_secret = SECRET
+    verification_token_secret = SECRET
+
+
+async def get_user_manager(user_db: SQLAlchemyUserDatabase = Depends(get_user_db)):
+    yield UserManager(user_db)
+
+
+fastapi_users = FastAPIUsers[RbacUser, uuid.UUID](get_user_manager, [auth_backend])
+
+current_active_user = fastapi_users.current_user(active=True)
+
diff --git a/app/rbac/bkp_classes_customizadas.py b/app/rbac/bkp_classes_customizadas.py
new file mode 100644
index 0000000..1e33859
--- /dev/null
+++ b/app/rbac/bkp_classes_customizadas.py
@@ -0,0 +1,43 @@
+import jwt
+from fastapi_users.authentication import JWTStrategy
+from fastapi_users.jwt import generate_jwt, decode_jwt
+from fastapi_users import exceptions
+from app.config import SECRET
+
+
+class CustomJWTStrategy(JWTStrategy):
+    async def write_token(self, user) -> str:
+        # Coletar todas as permissões do usuário a partir de seus papéis
+        todas_as_permissoes = set()  # Usamos um set para evitar duplicatas
+
+        if hasattr(user, 'papeis'):
+            for papel in user.papeis:
+                if hasattr(papel, 'permissoes'):
+                    for permissao in papel.permissoes:
+                        todas_as_permissoes.add(permissao.id)  # Usar o ID da permissão
+
+        # Transformar o set em uma lista para o payload do token
+        lista_de_permissoes = list(todas_as_permissoes)
+        print("user id")
+        print(user.id)
+
+        # Aqui, adicionamos as claims personalizadas ao payload
+        data = {
+            "sub": str(user.id),
+            "permissions": lista_de_permissoes,  # Lista de IDs de permissões coletadas
+            "aud": self.token_audience,  # Audiência, conforme o padrão
+        }
+        token = generate_jwt(
+            data, self.encode_key, self.lifetime_seconds, algorithm=self.algorithm
+        )
+
+        return token
+
+    async def read_token(self, token: str, user_manager):
+        # Decodifica o token JWT usando a função padrão decode_jwt do fastapi_users
+        try:
+            payload = decode_jwt(token, SECRET, audience=self.token_audience)
+            return payload
+        except Exception as e:
+            raise ValueError(f"Token inválido: {str(e)}")
+
diff --git a/app/rbac/classes_customizadas.py b/app/rbac/classes_customizadas.py
new file mode 100644
index 0000000..12894f5
---
/dev/null +++ b/app/rbac/classes_customizadas.py @@ -0,0 +1,103 @@ +# import jwt +# from fastapi_users.jwt import decode_jwt, generate_jwt, SecretType +# from fastapi_users.manager import BaseUserManager +# from fastapi_users.authentication.strategy import JWTStrategy +# from fastapi_users import models, exceptions +# from typing import Optional, List +# +# +# class CustomJWTStrategy(JWTStrategy[models.UP, models.ID]): +# def __init__( +# self, +# secret: SecretType, +# lifetime_seconds: Optional[int], +# token_audience: List[str] = ["fastapi-users:auth"], +# algorithm: str = "HS256", +# public_key: Optional[SecretType] = None +# ): +# super().__init__(secret, lifetime_seconds, token_audience, algorithm, public_key) +# +# async def write_token(self, user: models.UP) -> str: +# todas_as_permissoes = set() +# if hasattr(user, 'papeis'): +# for papel in user.papeis: +# if hasattr(papel, 'permissoes'): +# todas_as_permissoes.update(permissao.id for permissao in papel.permissoes) +# +# data = { +# "sub": str(user.id), +# "permissions": list(todas_as_permissoes), +# "aud": self.token_audience, +# } +# return generate_jwt(data, self.encode_key, self.lifetime_seconds, algorithm=self.algorithm) +# +# async def read_token( +# self, token: Optional[str], user_manager: BaseUserManager[models.UP, models.ID] +# ) -> Optional[models.UP]: +# if token is None: +# return None +# +# try: +# data = decode_jwt( +# token, self.decode_key, self.token_audience, algorithms=[self.algorithm] +# ) +# user_id = data.get("sub") +# if user_id is None: +# return None +# +# permissions = data.get("permissions", []) +# +# except jwt.PyJWTError: +# return None +# +# try: +# parsed_id = user_manager.parse_id(user_id) +# user = await user_manager.get(parsed_id) +# if user: +# user.permissions = permissions +# return user +# except (exceptions.UserNotExists, exceptions.InvalidID): +# return None + +import jwt +from fastapi_users.authentication import JWTStrategy +from fastapi_users.jwt import generate_jwt, decode_jwt +from fastapi_users import exceptions +from app.config import SECRET + + +class CustomJWTStrategy(JWTStrategy): + async def write_token(self, user) -> str: + # Coletar todas as permissões do usuário a partir de seus papéis + todas_as_permissoes = set() # Usamos um set para evitar duplicatas + + if hasattr(user, 'papeis'): + for papel in user.papeis: + if hasattr(papel, 'permissoes'): + for permissao in papel.permissoes: + todas_as_permissoes.add(permissao.id) # Usar o ID da permissão + + # Transformar o set em uma lista para o payload do token + lista_de_permissoes = list(todas_as_permissoes) + print("user id") + print(user.id) + + # Aqui, adicionamos as claims personalizadas ao payload + data = { + "sub": str(user.id), + "permissions": lista_de_permissoes, # Acessa diretamente a lista de IDs de permissões coletadas + "aud": self.token_audience, # Audiência, conforme o padrão + } + token = generate_jwt( + data, self.encode_key, self.lifetime_seconds, algorithm=self.algorithm + ) + + return token + + async def read_token(self, token: str, user_manager): + # Decodifica o token JWT usando a função padrão decode_jwt do fastapi_users + try: + payload = decode_jwt(token, SECRET, audience=self.token_audience) + return payload + except Exception as e: + raise ValueError(f"Token inválido: {str(e)}") diff --git a/app/rbac/modelos.txt b/app/rbac/modelos.txt new file mode 100644 index 0000000..1188dae --- /dev/null +++ b/app/rbac/modelos.txt @@ -0,0 +1,60 @@ +from sqlalchemy import Column, String, Table, ForeignKey, Integer, 
Boolean
+from sqlalchemy.dialects.postgresql import UUID
+from sqlalchemy.orm import relationship, declarative_base, Mapped, mapped_column
+from fastapi_users.db import SQLAlchemyBaseUserTableUUID, SQLAlchemyBaseUserTable
+from typing import List
+
+Base = declarative_base()
+
+
+class Permissao(Base):
+    __tablename__ = "permissoes"
+    id = Column(Integer, primary_key=True, index=True)
+    nome: Mapped[str] = mapped_column(String(50), nullable=False, unique=True)
+    papeis: Mapped[List["Papel"]] = relationship(secondary='papel_permissoes',
+                                                 back_populates='permissoes',
+                                                 passive_deletes=True,
+                                                 lazy="selectin",
+                                                 join_depth=1)
+
+
+class Papel(Base):
+    __tablename__ = "papeis"
+    id = Column(UUID(as_uuid=True), primary_key=True, index=True)
+    nome: Mapped[str] = mapped_column(String(50), nullable=False, unique=True)
+    permissoes: Mapped[List[Permissao]] = relationship(secondary='papel_permissoes',
+                                                       back_populates='papeis',
+                                                       passive_deletes=True,
+                                                       lazy="selectin",
+                                                       join_depth=1)
+    usuarios: Mapped[List["User"]] = relationship(secondary='papeis_usuario',
+                                                  back_populates='papeis',
+                                                  passive_deletes=True,
+                                                  lazy="selectin",
+                                                  join_depth=1)
+
+
+class User(SQLAlchemyBaseUserTable, Base):
+    __tablename__ = "user"
+    id = Column(UUID(as_uuid=True), primary_key=True, index=True)
+    username = Column(String, nullable=False, unique=True)
+    papeis: Mapped[List[Papel]] = relationship(secondary='papeis_usuario',
+                                               back_populates='usuarios',
+                                               passive_deletes=True,
+                                               lazy="selectin",
+                                               join_depth=1)
+    is_active = Column(Boolean, default=True, nullable=False)
+    is_superuser = Column(Boolean, default=False, nullable=False)
+
+
+papeis_usuario = Table(
+    'papeis_usuario', Base.metadata,
+    Column('user_uuid', UUID(as_uuid=True), ForeignKey('user.id'), primary_key=True),
+    Column('papel_uuid', UUID(as_uuid=True), ForeignKey('papeis.id'), primary_key=True)
+)
+
+papel_permissoes = Table(
+    'papel_permissoes', Base.metadata,
+    Column('papel_uuid', UUID(as_uuid=True), ForeignKey('papeis.id'), primary_key=True),
+    Column('permissao_id', Integer, ForeignKey('permissoes.id'), primary_key=True)
+)
diff --git a/app/rbac/permissions.py b/app/rbac/permissions.py
new file mode 100644
index 0000000..003d526
--- /dev/null
+++ b/app/rbac/permissions.py
@@ -0,0 +1,43 @@
+from typing import List
+
+from fastapi import Depends, HTTPException, status
+from fastapi_users import models
+
+from app.rbac.auth import current_active_user
+
+
+def verify_permissions(required_permissions: List[int]):
+    async def permission_dependency(
+            user: models.UP = Depends(current_active_user),
+    ):
+        if user is None:
+            raise HTTPException(
+                status_code=status.HTTP_401_UNAUTHORIZED,
+                detail="Usuário não autenticado.",
+            )
+
+        # Coleta todas as permissões do usuário
+        user_permissions = [perm.id for papel in user.papeis for perm in papel.permissoes]
+
+        # Se não houver permissões específicas necessárias, qualquer usuário autenticado tem acesso
+        if not required_permissions:
+            return user
+
+        # Verifica se o usuário possui pelo menos uma das permissões necessárias
+        if not any(perm in user_permissions for perm in required_permissions):
+            raise HTTPException(
+                status_code=status.HTTP_403_FORBIDDEN,
+                detail="Você não tem as permissões necessárias para acessar este recurso.",
+            )
+
return user # Retorna o objeto `user` para acesso em outras funções + + return permission_dependency diff --git a/app/rbac/rbac.py b/app/rbac/rbac.py new file mode 100644 index 0000000..f8a29ba --- /dev/null +++ b/app/rbac/rbac.py @@ -0,0 +1,24 @@ +# from models import User, Permissao, Papel + + +from app.database.models import RbacUser, RbacPermissao, RbacPapel, rbac_papel_permissoes, rbac_papeis_usuario +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + + +class RBAC: + @staticmethod + async def has_permission(user: RbacUser, permission_id: int, session: AsyncSession) -> bool: + # Carregar permissões associadas aos papéis do usuário + result = await session.execute( + select(RbacPermissao) + .join(rbac_papel_permissoes) + .join(RbacPapel) + .join(rbac_papeis_usuario) + .where(rbac_papeis_usuario.c.user_uuid == user.id) + .where(rbac_papel_permissoes.c.permissao_id == permission_id) + ) + permissoes = result.scalars().all() + + # Verificar se a permissão está presente + return len(permissoes) > 0 diff --git a/app/rbac/routes_login.py b/app/rbac/routes_login.py new file mode 100644 index 0000000..8d2c200 --- /dev/null +++ b/app/rbac/routes_login.py @@ -0,0 +1,63 @@ +from fastapi import APIRouter, Depends, HTTPException, status +from fastapi_users.router import ErrorCode +from sqlalchemy.ext.asyncio import AsyncSession +from .auth import fastapi_users, auth_backend, get_user_manager +from app.database.models import RbacPapel +from app.rbac.schemas import UserRead, UserCreate, UserRoles +from fastapi_users.exceptions import UserAlreadyExists, InvalidPasswordException +from app.database.session import get_db + +router = APIRouter( + prefix="/autenticacao", + tags=["Autenticação"], ) + + +# Rotas de autenticação + +@router.post("/register", response_model=UserRead) +async def register(user: UserCreate, roles: UserRoles, session: AsyncSession = Depends(get_db), + user_manager=Depends(get_user_manager)): + try: + created_user = await user_manager.create(user) + # Associação dos papéis ao usuário criado + for papel_id in roles.papeis: + papel = await session.get(RbacPapel, papel_id) + if papel: + created_user.papeis.append(papel) + else: + raise HTTPException(status_code=404, detail=f"Papel com ID {papel_id} não encontrado") + + await session.commit() + await session.refresh(created_user) + return created_user + + except UserAlreadyExists: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ErrorCode.REGISTER_USER_ALREADY_EXISTS, + + ) + + except InvalidPasswordException as e: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail={ + "code": ErrorCode.REGISTER_INVALID_PASSWORD, + "reason": e.reason, + }, + + ) + + +router.include_router( + fastapi_users.get_auth_router(auth_backend) +) + +router.include_router( + fastapi_users.get_reset_password_router(), + +) +router.include_router( + fastapi_users.get_verify_router(UserRead), + +) diff --git a/app/rbac/routes_usuario_logado.py b/app/rbac/routes_usuario_logado.py new file mode 100644 index 0000000..5771fc8 --- /dev/null +++ b/app/rbac/routes_usuario_logado.py @@ -0,0 +1,15 @@ +from fastapi import APIRouter +from .auth import fastapi_users +from app.rbac.schemas import UserRead, UserUpdate + +router = APIRouter( +) + +# Rotas de autenticação + + +router.include_router( + fastapi_users.get_users_router(UserRead, UserUpdate), + prefix="/usuarios", + tags=["Usuários"], +) diff --git a/app/rbac/schemas.py b/app/rbac/schemas.py new file mode 100644 index 0000000..cec03cf 
--- /dev/null +++ b/app/rbac/schemas.py @@ -0,0 +1,44 @@ +from pydantic import BaseModel, Field +from typing import Optional +from uuid import UUID +from fastapi_users import schemas +import uuid +from app.schemas.pessoa_schemas import PessoaBaseResponse + + +class PermissionRead(BaseModel): + id: int = Field(..., description="Unique identifier for the permission") + nome: str = Field(..., max_length=50, description="Name of the permission") + + class Config: + from_attributes = True + + +class RoleRead(BaseModel): + uuid: UUID = Field(..., description="Unique identifier for the role") + nome: str = Field(..., max_length=50, description="Name of the role") + + permissoes: list[PermissionRead] = Field(..., description="List of permissions associated with the role") + + class Config: + from_attributes = True + + +class UserRead(schemas.BaseUser[uuid.UUID]): + nome_completo: str = Field(min_length=3, max_length=100) + fk_inquilino_uuid: UUID + papeis: list[RoleRead] + # pessoa: PessoaBaseResponse + + +class UserCreate(schemas.BaseUserCreate): + fk_inquilino_uuid: UUID + nome: str = Field(min_length=3, max_length=100) + + +class UserUpdate(schemas.BaseUserUpdate): + pass + + +class UserRoles(BaseModel): + papeis: list[UUID] diff --git a/app/routers/__init__.py b/app/routers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/routers/dependencies.py b/app/routers/dependencies.py new file mode 100644 index 0000000..399389f --- /dev/null +++ b/app/routers/dependencies.py @@ -0,0 +1,56 @@ +# Importações de bibliotecas padrão +from collections.abc import Callable +from typing import Type, TypeVar + +# Importações de bibliotecas de terceiros +from fastapi import Depends +from sqlalchemy.ext.asyncio import AsyncSession + +# Importações do seu próprio projeto +from app.database import models, RelationalTableRepository +from app.multi_tenant.tenant_utils import get_tenant_schema + + +# def get_repository_simple_table( +# model: type[models.Base], +# ) -> Callable[[AsyncSession], RepositoryBase.RepositoryBase]: +# def func(session_simple_table: AsyncSession = Depends(get_tenant_schema)): # Sem parênteses +# return RepositoryBase.RepositoryBase(model, session_simple_table) +# +# return func + + +def get_repository_relational_table( + model: type[models.Base], +) -> Callable[[AsyncSession], RelationalTableRepository.RelationalTableRepository]: + def func(session_relational_table: AsyncSession = Depends(get_tenant_schema)): + return RelationalTableRepository.RelationalTableRepository(model, session_relational_table) + + return func + + +# def get_repository_s3( +# model: type[models.Base], +# ) -> Callable[[AsyncSession], RepositoryS3.RepositoryS3]: +# def func(session_relational_table: AsyncSession = Depends(get_tenant_schema)): +# return RepositoryS3.RepositoryS3(model, session_relational_table) +# +# return func + + +# Tipo genérico para repositórios +T = TypeVar("T") + + +def get_repository( + model: Type[models.Base], + repository_class: Type[T] +) -> Callable[[AsyncSession], T]: + """ + Função genérica para criar dependências de repositórios. 
+ """ + + def func(session: AsyncSession = Depends(get_tenant_schema)) -> T: + return repository_class(model, session) + + return func diff --git a/app/routers/rotas.py b/app/routers/rotas.py new file mode 100644 index 0000000..190f942 --- /dev/null +++ b/app/routers/rotas.py @@ -0,0 +1,87 @@ +from app.database import models +from app.routers.rotas_dinamicas import create_dynamic_router +from app.routers.router_pessoa import router as pessoa +from app.s3.router_s3 import router as s3 +from app import schemas +from app.rbac.routes_login import router as fastapi_user +from app.rbac.routes_usuario_logado import router as fastapi_logado + +# Instanciando o DynamicRouter com o repositório injetado diretamente nas funções + +tipo_endereco = create_dynamic_router( + rota_prefix="/api/tipo_endereco", + rota_tags=["Tipo Endereço"], + db_model=models.ComercialTipoEndereco, + create=schemas.tipo_endereco_schemas.Create, + request=schemas.tipo_endereco_schemas.Request, + request_formatado=schemas.tipo_endereco_schemas.Request, + id_many=schemas.tipo_endereco_schemas.IdsRequest, + id_one=schemas.tipo_endereco_schemas.IdRequest, + update=schemas.tipo_endereco_schemas.UpdateRequest, + updates=schemas.tipo_endereco_schemas.UpdateManyRequest, + ativo=schemas.utils.RegistroAtivo, + permissao_total=1, + permissao_setor=2, + permissao_endpoint=31, + permissao_especifica_inicial=201, + ordenacao="tipo_endereco_descricao" + +) + +endereco = create_dynamic_router( + rota_prefix="/api/endereco", + rota_tags=["Endereço"], + db_model=models.ComercialEndereco, + create=schemas.endereco_schemas.Create, + request=schemas.endereco_schemas.Request, + request_formatado=schemas.endereco_schemas.Request, + id_many=schemas.endereco_schemas.IdsRequest, + id_one=schemas.endereco_schemas.IdRequest, + update=schemas.endereco_schemas.UpdateRequest, + updates=schemas.endereco_schemas.UpdateManyRequest, + ativo=schemas.utils.RegistroAtivo, + permissao_total=1, + permissao_setor=2, + permissao_endpoint=32, + permissao_especifica_inicial=301 + +) + + + +papel = create_dynamic_router( + rota_prefix="/api/papeis", + rota_tags=["Papeis"], + db_model=models.RbacPapel, + create=schemas.papel_shemas.Create, + request=schemas.papel_shemas.Request, + request_formatado=schemas.papel_shemas.Request, + id_many=schemas.papel_shemas.IdsRequest, + id_one=schemas.papel_shemas.IdRequest, + update=schemas.papel_shemas.UpdateRequest, + updates=schemas.papel_shemas.UpdateManyRequest, + permissao_total=1, + permissao_setor=3, + permissao_endpoint=41, + permissao_especifica_inicial=1201 + +) + + + +# Rotas não dinâmicas +pessoa_router = pessoa +fastapi_user_router = fastapi_user +fastapi_logado_router = fastapi_logado +s3_router = s3 +# Lista de roteadores para serem registrados +routers = [ + fastapi_user_router, + fastapi_logado_router, + tipo_endereco, + endereco, + pessoa_router, + papel, + s3_router, + +] diff --git a/app/routers/rotas_dinamicas.py b/app/routers/rotas_dinamicas.py new file mode 100644 index 0000000..85d5842 --- /dev/null +++ b/app/routers/rotas_dinamicas.py @@ -0,0 +1,281 @@ +# Importações de bibliotecas padrão +from typing import Annotated, Any, Sequence, List, Optional, Dict, Union +from enum import Enum + +# Importações de bibliotecas de terceiros +from fastapi import APIRouter, Depends +from starlette import status +from starlette.responses import Response +from sqlalchemy import Row, RowMapping +from pydantic import BaseModel, Field + +# Importações do seu próprio projeto +from app.routers.dependencies import get_repository 
+from app.database.RepositoryBase import RepositoryBase
+from app.database.RelationalTableRepository import RelationalTableRepository
+from app.rbac.permissions import verify_permissions
+from app.database.models import RbacUser
+from app.config import COLUNA
+from app.database.formatar_retorno_bd import formatters_map
+
+
+# Esquema para o corpo da requisição
+class FilterCondition(BaseModel):
+    """
+    Representa uma condição de filtro individual.
+    """
+    column: str = Field(..., description="Caminho completo da coluna (exemplo: 'relacao_setor.setor_nome').")
+    value: Any = Field(..., description="Valor a ser usado no filtro.")
+    operator: str = Field("==", description="Operador lógico (exemplo: '==', '!=', '<', '>', '>=', '<=').")
+    logical: str = Field("AND", description="Operador lógico para combinações ('AND', 'OR' etc).")
+
+
+class FilterRequest(BaseModel):
+    """
+    Representa a requisição contendo filtros dinâmicos.
+    """
+    filters: Optional[List[FilterCondition]] = Field(
+        None,
+        description="Lista de condições de filtro. Cada condição contém coluna, operador, valor, e operador lógico."
+    )
+    order_by: Optional[List[str]] = Field(
+        None, description="Lista de colunas para ordenação dos registros."
+    )
+    ascending: Optional[List[bool]] = Field(
+        None, description="Lista de direções da ordenação. True para ascendente, False para descendente. "
+                          "Deve corresponder ao tamanho de 'order_by'."
+    )
+    relationships: Optional[List[str]] = Field(
+        None, description="Lista de nomes de relacionamentos para incluir na consulta."
+    )
+
+
+def create_dynamic_router(
+        rota_prefix: str,
+        create,
+        request,
+        id_many,
+        id_one,
+        update,
+        updates,
+        db_model,
+        ativo: Optional[type[BaseModel]] = None,
+        rota_tags: Optional[List[Union[str, Enum]]] = None,
+        request_formatado: Optional[type[BaseModel]] = None,
+        permissao_total: Optional[int] = None,
+        permissao_endpoint: Optional[int] = None,
+        permissao_setor: Optional[int] = None,
+        permissao_especifica_inicial: Optional[int] = None,
+        ordenacao: Optional[str] = None,
+        formatter_keys: Optional[dict[str, str]] = None,
+        related_info_append: Optional[List[Dict[str, Any]]] = None,  # Dados extras para append
+        related_info_add: Optional[List[Dict[str, Any]]] = None,  # Dados extras para add
+        related_info_extra_columns: Optional[List[Dict[str, Any]]] = None  # Dados extras para extra_columns
+):
+    # Verifica automaticamente se deve usar RelationalTableRepository
+    use_complex_repo = bool(related_info_append or related_info_add or related_info_extra_columns)
+
+    # Define o repositório com base na necessidade de relacionamentos complexos
+    if use_complex_repo:
+        repository_base = Annotated[
+            RelationalTableRepository[db_model],
+            Depends(get_repository(db_model, RelationalTableRepository)),
+        ]
+    else:
+        repository_base = Annotated[
+            RepositoryBase[db_model],
+            Depends(get_repository(db_model, RepositoryBase)),
+        ]
+
+    router = APIRouter(
+        prefix=rota_prefix,
+        tags=rota_tags,
+        responses={404: {"description": "Not found"}},
+    )
+
+    permissao_especifica_atual = permissao_especifica_inicial or 0
+
+    def get_permission_list():
+        """Retorna a lista de permissões atualizada e incrementa a permissão específica."""
+        nonlocal permissao_especifica_atual
+
+        # Construir a lista de permissões baseada nos valores informados
+        permission_list = [
+            permissao_total,
+            permissao_endpoint,
+            permissao_setor,
+            permissao_especifica_atual if permissao_especifica_inicial is not None else None
+        ]
+        # Incrementa a permissão específica somente se foi inicialmente definida
+        if permissao_especifica_inicial is not None:
+            permissao_especifica_atual += 1
+
+        # Filtrar permissões nulas
+        final_permissions = [perm for perm in permission_list if perm is not None]
+
+        return final_permissions
+
+    @router.post("/add_one", status_code=status.HTTP_201_CREATED, response_model=request,
+                 # dependencies=[Depends(verify_permissions(get_permission_list()))]
+                 )
+    async def create_one(data: create, repository: repository_base,
+                         _user: RbacUser = Depends(verify_permissions(get_permission_list()))) -> db_model:
+        if use_complex_repo:
+            resultado = await repository.create(
+                data_one=data,
+                db_data=db_model,
+                related_info_append=related_info_append,
+                related_info_add=related_info_add,
+                related_info_extra_columns=related_info_extra_columns
+            )
+        else:
+            resultado = await repository.create(data_one=data)
+
+        return resultado
+
+    @router.post("/add_many", status_code=status.HTTP_201_CREATED, response_model=list[request])
+    async def create_many(datas: List[create], repository: repository_base,
+                          _user: RbacUser = Depends(verify_permissions(get_permission_list()))) -> (
+            list[db_model] | bool):
+        if use_complex_repo:
+            resultado = await repository.create_many(
+                data=datas,
+                db_data=db_model,
+                return_models=True,
+                related_info_append=related_info_append,
+                related_info_add=related_info_add,
+                related_info_extra_columns=related_info_extra_columns
+            )
+        else:
+            resultado = await repository.create_many(data=datas, return_models=True)
+        return resultado
+
+    permission_list_iguais = get_permission_list()  # Obtém a lista de permissões para as funções get_all e get_filter
+
+    if request_formatado and formatter_keys and "get_all" in formatter_keys:
+        formatter_function = formatters_map[formatter_keys["get_all"]]
+
+        @router.post("/get_all", status_code=status.HTTP_200_OK, response_model=list[request_formatado])
+        async def get_all(repository: repository_base,
+                          _user: RbacUser = Depends(verify_permissions(permission_list_iguais))) \
+                -> Sequence[Row[Any] | RowMapping | Any]:
+            resultado = await repository.get_many_by_ids(coluna=COLUNA, order_by=ordenacao)
+            formatado = formatter_function(resultado)
+            return formatado
+
+        @router.post("/get_filter", status_code=status.HTTP_200_OK, response_model=list[request_formatado])
+        async def get_filter(repository: repository_base, filtro: Optional[FilterRequest] = None,
+                             _user: RbacUser = Depends(verify_permissions(permission_list_iguais))) \
+                -> Sequence[Row[Any] | RowMapping | Any]:
+
+            if not filtro:
+                resultado = await repository.get_many_by_ids(coluna=COLUNA, order_by=ordenacao)
+                formatado = formatter_function(resultado)
+            # Chamando a função do repositório com os filtros e ordenação
+            else:
+                resultado = await repository.get_filter(
+                    coluna=COLUNA,
+                    filters=filtro.filters,
+                    order_by=filtro.order_by,
+                    ascending=filtro.ascending
+                )
+                formatado = formatter_function(resultado)
+            return formatado
+
+    else:
+        @router.post("/get_all", status_code=status.HTTP_200_OK, response_model=list[request])
+        async def get_all(repository: repository_base,
+                          _user: RbacUser = Depends(verify_permissions(permission_list_iguais))) \
+                -> Sequence[Row[Any] | RowMapping | Any]:
+            resultado = await repository.get_many_by_ids(coluna=COLUNA, order_by=ordenacao)
+            return resultado
+
+        @router.post("/get_filter", status_code=status.HTTP_200_OK, response_model=list[request])
+        async def get_filter(repository: repository_base, filtro: Optional[FilterRequest] = None,
+                             _user: RbacUser = Depends(verify_permissions(permission_list_iguais))) \
+                -> Sequence[Row[Any] | RowMapping | Any]:
+
+            if not
filtro: + resultado = await repository.get_many_by_ids(coluna=COLUNA, order_by=ordenacao) + # Chamando a função do repositório com os filtros e ordenação + else: + resultado = await repository.get_filter( + coluna=COLUNA, + filters=filtro.filters, + order_by=filtro.order_by, + ascending=filtro.ascending + ) + return resultado + + @router.post("/get_many", status_code=status.HTTP_200_OK, response_model=List[request]) + async def get_many(data: id_many, repository: repository_base, + _user: RbacUser = Depends(verify_permissions(get_permission_list()))) -> ( + Sequence[Row[Any] | RowMapping | Any]): + resultado = await repository.get_many_by_ids(uuids=data.uuids, coluna=COLUNA, order_by=ordenacao) + return resultado + + @router.post("/get_one", status_code=status.HTTP_200_OK, response_model=request) + async def get_one(data: id_one, repository: repository_base, + _user: RbacUser = Depends(verify_permissions(get_permission_list()))) -> db_model: + + resultado = await repository.get_one_by_id(uuid=data.uuid, coluna=COLUNA) + return resultado + + @router.put("/update_one", status_code=status.HTTP_201_CREATED, response_model=request) + async def update_one(data: update, repository: repository_base, + _user: RbacUser = Depends(verify_permissions(get_permission_list()))) -> db_model: + if use_complex_repo: + resultado = await repository.update_by_id( + update=data, + coluna=COLUNA, + db_data=db_model, + related_info_append=related_info_append, + related_info_add=related_info_add, + related_info_extra_columns=related_info_extra_columns + ) + else: + resultado = await repository.update_by_id(update=data, coluna=COLUNA) + return resultado + + if not use_complex_repo: + @router.put("/update_many", status_code=status.HTTP_201_CREATED, response_model=List[request]) + async def update_many(data: List[updates], repository: repository_base, + _user: RbacUser = Depends(verify_permissions(get_permission_list()))) -> ( + list[db_model] | db_model): + resultado = await repository.update_many_by_ids(updates=data, coluna=COLUNA, return_models=True) + return resultado + else: + # Não registramos a rota update_many, mas consumimos o efeito colateral da chamada + # de get_permission_list() para que o contador seja incrementado. + _loopPermissoes = get_permission_list() + + if ativo is not None: + @router.put("/desativar", status_code=status.HTTP_201_CREATED, response_model=request) + async def desativar(data: ativo, repository: repository_base, + _user: RbacUser = Depends(verify_permissions(get_permission_list()))) -> db_model: + resultado = await repository.desativar_registro(update=data, coluna=COLUNA) + return resultado + + @router.put("/ativar", status_code=status.HTTP_201_CREATED, response_model=request) + async def ativar(data: ativo, repository: repository_base, + _user: RbacUser = Depends(verify_permissions([permissao_total]))) -> db_model: + resultado = await repository.ativar_registro(update=data, coluna=COLUNA) + return resultado + else: + # Não registramos a rota desativar, mas consumimos o efeito colateral da chamada + # de get_permission_list() para que o contador seja incrementado. 
+        _loopPermissoes = get_permission_list()
+
+    @router.delete("/delete_one", status_code=status.HTTP_204_NO_CONTENT)
+    async def delete_one(data: id_one, repository: repository_base,
+                         _user: RbacUser = Depends(verify_permissions([permissao_total]))) -> Response:
+        await repository.remove_by_id(uuid=data.uuid, coluna=COLUNA)
+        return Response(status_code=status.HTTP_204_NO_CONTENT)
+
+    @router.delete("/delete_many", status_code=status.HTTP_204_NO_CONTENT)
+    async def delete_many(data: id_many, repository: repository_base,
+                          _user: RbacUser = Depends(verify_permissions([permissao_total]))) -> Response:
+        await repository.remove_many_by_ids(uuids=data.uuids, coluna=COLUNA)
+        return Response(status_code=status.HTTP_204_NO_CONTENT)
+
+    return router
diff --git a/app/routers/router_pessoa.py b/app/routers/router_pessoa.py
new file mode 100644
index 0000000..4d2046a
--- /dev/null
+++ b/app/routers/router_pessoa.py
@@ -0,0 +1,211 @@
+# Importações de bibliotecas padrão
+from typing import Union, Annotated, List, Any, Sequence
+
+# Importações de bibliotecas de terceiros
+from fastapi import APIRouter, Depends
+from starlette import status
+from sqlalchemy import Row, RowMapping
+
+# Importações do seu próprio projeto
+from app.database.RelationalTableRepository import RelationalTableRepository
+from app.database.models import RbacUser
+from app.rbac.permissions import verify_permissions
+from app.schemas import pessoa_schemas
+from starlette.responses import Response
+from app.routers.dependencies import get_repository
+from app.database import models as db_models
+from app.config import COLUNA
+
+# Variáveis comuns
+FILE_SCHEMA = pessoa_schemas
+PAYLOAD_PJ = FILE_SCHEMA.JuridicaCreate
+PAYLOAD_PF = FILE_SCHEMA.FisicaCreate
+UPDATE_REQUEST_SCHEMA_PJ = FILE_SCHEMA.PessoaJuridicaUpdate
+UPDATE_REQUEST_SCHEMA_PF = FILE_SCHEMA.PessoaFisicaUpdate
+ID_REQUEST_SCHEMA = FILE_SCHEMA.IdRequestPessoa
+IDS_REQUEST_SCHEMA = FILE_SCHEMA.IdsRequestPessoas
+RESPONSE_SCHEMA_PJ = FILE_SCHEMA.RequestPJ
+RESPONSE_SCHEMA_PF = FILE_SCHEMA.RequestPF
+RESPONSE_SCHEMA_PESSOA = FILE_SCHEMA.RequestPessoa
+VALIDA_GET_ALL = FILE_SCHEMA.ValidaGetAll
+DB_MODEL_PJ = db_models.ComercialJuridica
+DB_MODEL_PF = db_models.ComercialFisica
+DB_MODEL_ENDERECO = db_models.ComercialEndereco
+DB_MODEL_PESSOA = db_models.ComercialPessoa
+DB_MODEL_TIPO_ENDERECO = db_models.ComercialTipoEndereco
+DB_MODEL_RELACAO_COMERCIAL = db_models.ComercialRelacaoComercial
+DB_MODEL_PESSOA_POLY = db_models.PESSOA_POLY
+
+ROTA_PREFIX = "/api/pessoa"
+ROTA_TAGS = "Cadastro Pessoa"
+MENSAGEM_ERRO = "Erro ao consultar pessoa"
+MENSAGEM_SUCESSO = "Pessoa deletada com sucesso"
+
+# Variáveis tabelas Relacionadas
+
+related_info_append = [
+    {"key": "rc", "related_model": DB_MODEL_RELACAO_COMERCIAL, "foreign_key_field": "uuid"},
+
+    # outros relacionamentos que precisam de append
+
+]
+related_info_add = [
+    {
+        "key": 'enderecos',
+        "foreign_key": "fk_pessoa_uuid",
+        "related_model": DB_MODEL_ENDERECO,
+        "relations": [
+            {"related_model_fk": DB_MODEL_TIPO_ENDERECO, "foreign_key_fk": "fk_tipo_endereco_uuid"}
+        ]
+    },
+
+    # outros relacionamentos que precisam de add
+]
+
+PjRepository = Annotated[
+    RelationalTableRepository[DB_MODEL_PJ],
+    Depends(get_repository(DB_MODEL_PJ, RelationalTableRepository))
+]
+
+PfRepository = Annotated[
+    RelationalTableRepository[DB_MODEL_PF],
+    Depends(get_repository(DB_MODEL_PF, RelationalTableRepository))
+]
+
+PessoaPolyRepository = Annotated[
+    RelationalTableRepository[DB_MODEL_PESSOA_POLY],
Depends(get_repository(DB_MODEL_PESSOA_POLY, RelationalTableRepository)) +] + +PessoaRepository = Annotated[ + RelationalTableRepository[DB_MODEL_PESSOA], + Depends(get_repository(DB_MODEL_PESSOA, RelationalTableRepository)) +] + +# RelacaoComercialRepository = Annotated[ +# RelationalTableRepository[DB_MODEL_RELACAO_COMERCIAL], +# Depends(get_repository(DB_MODEL_RELACAO_COMERCIAL, RelationalTableRepository)) +# ] +# +# TipoEnderecoRepository = Annotated[ +# RelationalTableRepository[DB_MODEL_TIPO_ENDERECO], +# Depends(get_repository(DB_MODEL_TIPO_ENDERECO, RelationalTableRepository)) +# ] + +router = APIRouter( + prefix=ROTA_PREFIX, + tags=[ROTA_TAGS], + responses={404: {"description": "Not found"}}, +) + + +@router.post("/add_one", status_code=201, response_model=RESPONSE_SCHEMA_PESSOA) +async def create_one(data: Union[PAYLOAD_PJ, PAYLOAD_PF], repository_pj: PjRepository, + repository_pf: PfRepository, + _user: RbacUser = Depends(verify_permissions([1, 2, 33, 401]))): + if isinstance(data, PAYLOAD_PJ): + + pessoa = await repository_pj.create(data_one=data, db_data=DB_MODEL_PJ, + related_info_add=related_info_add, + related_info_append=related_info_append) + return pessoa + elif isinstance(data, PAYLOAD_PF): + pessoa = await repository_pf.create(data_one=data, db_data=DB_MODEL_PF, + related_info_add=related_info_add, + related_info_append=related_info_append) + return pessoa + + +@router.post("/add_many", status_code=status.HTTP_201_CREATED, response_model=list[RESPONSE_SCHEMA_PESSOA]) +async def create_many(datas: List[Union[PAYLOAD_PJ, PAYLOAD_PF]], repository_pj: PjRepository, + repository_pf: PfRepository, + _user: RbacUser = Depends(verify_permissions([1, 2, 33, 402]))): + # Separando dados em listas de pessoas jurídicas e físicas + data_pj = [data for data in datas if isinstance(data, PAYLOAD_PJ)] + data_pf = [data for data in datas if isinstance(data, PAYLOAD_PF)] + + pessoas_fisicas_dto = [] + pessoas_juridicas_dto = [] + + # Criando pessoas físicas + if data_pf: + pessoas_fisicas = await repository_pf.create_many(data=data_pf, return_models=True, + db_data=DB_MODEL_PF, + related_info_add=related_info_add, + related_info_append=related_info_append) + + pessoas_fisicas_dto = [RESPONSE_SCHEMA_PF.model_validate(pf, from_attributes=True) for pf in + pessoas_fisicas] + + # Criando pessoas jurídicas + if data_pj: + pessoas_juridicas = await repository_pj.create_many(data=data_pj, return_models=True, + db_data=DB_MODEL_PJ, + related_info_add=related_info_add, + related_info_append=related_info_append) + + pessoas_juridicas_dto = [RESPONSE_SCHEMA_PJ.model_validate(pj, from_attributes=True) for pj in + pessoas_juridicas] + + resultado = pessoas_fisicas_dto + pessoas_juridicas_dto + return resultado + + +@router.post("/get_all", status_code=status.HTTP_200_OK, response_model=list[RESPONSE_SCHEMA_PESSOA]) +async def get_all(repository: PessoaPolyRepository, + _user: RbacUser = Depends(verify_permissions([1, 2, 33, 403]))) -> ( + Sequence)[Row[Any] | RowMapping | Any]: + resultado = await repository.get_many_by_ids(coluna=COLUNA) + return resultado + + +# Testado ok +@router.post("/get_many", status_code=status.HTTP_200_OK, response_model=List[RESPONSE_SCHEMA_PESSOA]) +async def get_many(data: IDS_REQUEST_SCHEMA, repository: PessoaPolyRepository, + _user: RbacUser = Depends(verify_permissions([1, 2, 33, 404])) + ) -> Sequence[Row[Any] | RowMapping | Any]: + resultado = await repository.get_many_by_ids(uuids=data.uuids, coluna=COLUNA) + return resultado + + +# Testado ok 
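+# Exemplo ilustrativo de chamada (o corpo segue o schema IdRequestPessoa; o uuid é fictício):
+#   POST /api/pessoa/get_one
+#   {"uuid": "123e4567-e89b-12d3-a456-426614174000"}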
+@router.post("/get_one", status_code=status.HTTP_200_OK, response_model=RESPONSE_SCHEMA_PESSOA) +async def get_one(data: ID_REQUEST_SCHEMA, repository: PessoaPolyRepository, + _user: RbacUser = Depends(verify_permissions([1, 2, 33, 405])) + ) -> RESPONSE_SCHEMA_PESSOA: + resultado = await repository.get_one_by_id(uuid=data.uuid, coluna=COLUNA) + return resultado + + +@router.put("/update_one", status_code=status.HTTP_201_CREATED, response_model=RESPONSE_SCHEMA_PESSOA) +async def update_one(data: Union[UPDATE_REQUEST_SCHEMA_PJ, UPDATE_REQUEST_SCHEMA_PF], repository_pj: PjRepository, + repository_pf: PfRepository, + _user: RbacUser = Depends(verify_permissions([1, 2, 33, 406]))): + if isinstance(data, UPDATE_REQUEST_SCHEMA_PJ): + + resultado = await repository_pj.update_by_id(update=data, coluna=COLUNA, + db_data=DB_MODEL_PJ, + related_info_add=related_info_add, + related_info_append=related_info_append) + return resultado + elif isinstance(data, UPDATE_REQUEST_SCHEMA_PF): + resultado = await repository_pf.update_by_id(update=data, coluna=COLUNA, + db_data=DB_MODEL_PF, + related_info_add=related_info_add, + related_info_append=related_info_append) + return resultado + + +@router.delete("/delete_one", status_code=status.HTTP_204_NO_CONTENT) +async def delete_one(data: ID_REQUEST_SCHEMA, repository: PessoaRepository, + _user: RbacUser = Depends(verify_permissions([1, 2, 33, 408]))) -> Response: + await repository.remove_by_id(uuid=data.uuid, coluna=COLUNA) + return Response(status_code=status.HTTP_204_NO_CONTENT) + + +@router.delete("/delete_many", status_code=status.HTTP_204_NO_CONTENT) +async def delete_many(data: IDS_REQUEST_SCHEMA, repository: PessoaRepository, + _user: RbacUser = Depends(verify_permissions([1, 2, 33, 409]))) -> Response: + await repository.remove_many_by_ids(uuids=data.uuids, coluna=COLUNA) + return Response(status_code=status.HTTP_204_NO_CONTENT) diff --git a/app/routers/router_registry.py b/app/routers/router_registry.py new file mode 100644 index 0000000..70edc6c --- /dev/null +++ b/app/routers/router_registry.py @@ -0,0 +1,13 @@ +# app/utils/router_registry.py +from fastapi import FastAPI, APIRouter +from typing import List + + +class RouterRegistry: + def __init__(self, app: FastAPI, routers: List[APIRouter]): + self.app = app + self.routers = routers + + def register_routers(self): + for router in self.routers: + self.app.include_router(router) diff --git a/app/s3/RepositoryS3.py b/app/s3/RepositoryS3.py new file mode 100644 index 0000000..f7b5200 --- /dev/null +++ b/app/s3/RepositoryS3.py @@ -0,0 +1,271 @@ +# Importações de bibliotecas padrão +from typing import Generic, TypeVar +from uuid import uuid4, UUID +import os + +# Importações de bibliotecas de terceiros +from sqlalchemy.ext.asyncio import AsyncSession +from pydantic import BaseModel +from fastapi import HTTPException + +# Importações integração S3 +import boto3 +import app.config as config +from starlette.responses import StreamingResponse +from botocore.exceptions import ClientError +from boto3.exceptions import Boto3Error +from app.s3 import schema_s3 + +# Importações do seu próprio projeto +from app.database.RepositoryBase import ( + RepositoryBase, +) +from app.database import models + +Model = TypeVar("Model", bound=models.Base) +Schema = TypeVar("Schema", bound=BaseModel) + +s3_client = boto3.client( + "s3", + aws_access_key_id=config.S3_ACCESS_KEY_ID, + aws_secret_access_key=config.S3_SECRET_ACCESS_KEY, + endpoint_url=config.S3_ENDPOINT_URL, + region_name=config.S3_REGION_NAME, +) + + +def 
get_s3_path(inquilino: UUID, nome_arquivo: str) -> str:
+    """
+    Gera o caminho completo no S3 para um arquivo de um inquilino específico.
+    """
+
+    return f"{inquilino}/{nome_arquivo}"
+
+
+class RepositoryS3(RepositoryBase[Model], Generic[Model]):
+    def __init__(self, model: type[Model], session: AsyncSession) -> None:
+        super().__init__(model, session)
+
+    async def get_file_record_by_uuid(self, uuid: UUID) -> models.S3Arquivo:
+        """
+        Busca o registro de um arquivo pelo UUID no banco de dados.
+        Levanta uma exceção HTTP 404 se o arquivo não for encontrado.
+        """
+        try:
+            # Chamar a função herdada para buscar o arquivo pelo UUID
+            arquivo = await self.get_one_by_id(uuid, coluna="uuid")
+        except HTTPException:
+            # Propaga exceções HTTP já tratadas sem convertê-las em erro 500
+            raise
+        except Exception as e:
+            raise HTTPException(status_code=500, detail=f"Erro ao buscar o arquivo no banco: {str(e)}")
+        if not arquivo:
+            # O 404 é levantado fora do try para não ser capturado pelo except genérico
+            raise HTTPException(status_code=404, detail="Arquivo não encontrado no banco de dados")
+        return arquivo
+
+    async def upload_to_s3(self, conteudo, nome_original, tipo_conteudo, inquilino: UUID):
+        """
+        Faz upload do conteúdo para o S3 e salva apenas o nome do arquivo no banco.
+        Apaga o arquivo do S3 em caso de erro ao salvar no banco.
+        O nome do arquivo é um UUID com a extensão do arquivo original.
+        """
+        s3_path = None  # Inicializa a variável no escopo superior
+
+        try:
+            # Obter a extensão do arquivo original
+            _, file_extension = os.path.splitext(nome_original)
+            file_extension = file_extension.lower()  # Garantir que a extensão esteja em minúsculo
+
+            # Gerar um nome único para o arquivo (UUID + extensão)
+            unique_filename = f"{uuid4()}{file_extension}"
+
+            # Gerar o caminho completo no S3
+            s3_path = get_s3_path(inquilino, unique_filename)
+
+            # Fazer upload do arquivo para o S3
+            s3_client.upload_fileobj(
+                conteudo,
+                config.S3_BUCKET_NAME,
+                s3_path,
+                ExtraArgs={
+                    "ContentType": tipo_conteudo,
+                    # Nenhum metadado adicional salvo
+                },
+            )
+
+            # Salvar apenas o nome do arquivo no banco
+            arquivo_data = schema_s3.ArquivoCreate(
+                arquivos_nome_original=nome_original,
+                arquivos_nome_armazenado=unique_filename
+            )
+            novo_arquivo = await self.create(arquivo_data)
+
+            return novo_arquivo
+
+        except Boto3Error as e:
+            raise HTTPException(status_code=500, detail=f"Erro no S3: {str(e)}")
+        except Exception as e:
+            # Apagar o arquivo do S3 em caso de erro no banco de dados
+            if s3_path:  # Verifica se s3_path foi atribuído
+                try:
+                    s3_client.delete_object(Bucket=config.S3_BUCKET_NAME, Key=s3_path)
+                except Exception as delete_error:
+                    print(f"Erro ao apagar arquivo do S3: {str(delete_error)}")
+            # Levantar um erro HTTP preservando a mensagem da exceção original
+            raise HTTPException(status_code=500, detail=f"Erro inesperado: {str(e)}")
+
+    async def get_presigned_url(self, uuid: UUID, inquilino: UUID) -> dict:
+        """
+        Gera uma URL pré-assinada para download do arquivo, com o nome original configurado.
+ """ + try: + # Buscar o registro do arquivo usando a função intermediária + arquivo = await self.get_file_record_by_uuid(uuid) + + # Obter o nome armazenado e original do arquivo + file_name = arquivo.arquivos_nome_armazenado + original_filename = arquivo.arquivos_nome_original + + # Gerar o caminho completo no S3 + s3_path = get_s3_path(inquilino, file_name) + + # Gerar a URL pré-assinada para download + presigned_url = s3_client.generate_presigned_url( + "get_object", + Params={ + "Bucket": config.S3_BUCKET_NAME, + "Key": s3_path, + "ResponseContentDisposition": f'attachment; filename="{original_filename}"' + }, + ExpiresIn=3600, # URL válida por 1 hora + ) + + return {"url": presigned_url} + + except ClientError as e: + if e.response["Error"]["Code"] == "NoSuchKey": + raise HTTPException(status_code=404, detail="Arquivo não encontrado no S3") + raise HTTPException(status_code=500, detail=f"Erro ao gerar URL: {str(e)}") + except Exception as e: + raise HTTPException(status_code=500, detail=f"Erro inesperado: {str(e)}") + + async def generate_presigned_url(self, uuid: UUID, inquilino: UUID) -> dict: + """ + Gera uma URL pré-assinada para acessar o arquivo no S3 (sem download automático). + """ + try: + # Buscar o registro do arquivo usando a função intermediária + arquivo = await self.get_file_record_by_uuid(uuid) + + # Obter o nome armazenado do arquivo + file_name = arquivo.arquivos_nome_armazenado + + # Gerar o caminho completo no S3 + s3_path = get_s3_path(inquilino, file_name) + + # Gerar uma URL pré-assinada + presigned_url = s3_client.generate_presigned_url( + "get_object", + Params={ + "Bucket": config.S3_BUCKET_NAME, + "Key": s3_path + }, + ExpiresIn=3600, # URL válida por 1 hora + ) + + return {"url": presigned_url} + + except ClientError as e: + if e.response["Error"]["Code"] == "NoSuchKey": + raise HTTPException(status_code=404, detail="Arquivo não encontrado no S3") + raise HTTPException(status_code=500, detail=f"Erro ao gerar URL: {str(e)}") + except Exception as e: + raise HTTPException(status_code=500, detail=f"Erro inesperado: {str(e)}") + + async def get_file(self, uuid: UUID, inquilino: UUID) -> StreamingResponse: + """ + Retorna um arquivo específico para download com o nome original configurado. + """ + try: + # Buscar o registro do arquivo usando a função intermediária + arquivo = await self.get_file_record_by_uuid(uuid) + + # Obter o nome armazenado e original do arquivo + file_name = arquivo.arquivos_nome_armazenado + original_filename = arquivo.arquivos_nome_original + + # Gerar o caminho completo no S3 + s3_path = get_s3_path(inquilino, file_name) + + # Obter o objeto do S3 + response = s3_client.get_object(Bucket=config.S3_BUCKET_NAME, Key=s3_path) + + # Retornar o arquivo como um fluxo + return StreamingResponse( + response["Body"], + media_type=response["ContentType"], # Tipo do conteúdo, ex.: image/jpeg + headers={"Content-Disposition": f'attachment; filename="{original_filename}"'} + ) + + except ClientError as e: + if e.response["Error"]["Code"] == "NoSuchKey": + raise HTTPException(status_code=404, detail="Arquivo não encontrado no S3") + raise HTTPException(status_code=500, detail=f"Erro ao acessar o arquivo: {str(e)}") + except Exception as e: + raise HTTPException(status_code=500, detail=f"Erro inesperado: {str(e)}") + + async def get_file_inline(self, uuid: UUID, inquilino: UUID) -> StreamingResponse: + """ + Retorna um arquivo específico para exibição inline (sem download automático). 
+ """ + try: + # Buscar o registro do arquivo usando a função intermediária + arquivo = await self.get_file_record_by_uuid(uuid) + + # Obter o nome armazenado do arquivo + file_name = arquivo.arquivos_nome_armazenado + + # Gerar o caminho completo no S3 + s3_path = get_s3_path(inquilino, file_name) + + # Obter o objeto do S3 + response = s3_client.get_object(Bucket=config.S3_BUCKET_NAME, Key=s3_path) + + # Retornar o arquivo como um fluxo para exibição inline + return StreamingResponse( + response["Body"], + media_type=response["ContentType"], # Tipo do conteúdo, ex.: image/jpeg + ) + + except ClientError as e: + if e.response["Error"]["Code"] == "NoSuchKey": + raise HTTPException(status_code=404, detail="Arquivo não encontrado no S3") + raise HTTPException(status_code=500, detail=f"Erro ao acessar o arquivo: {str(e)}") + except Exception as e: + raise HTTPException(status_code=500, detail=f"Erro inesperado: {str(e)}") + + async def delete_file_from_s3(self, uuid: UUID, inquilino: UUID): + """ + Remove um arquivo do S3 e o registro correspondente no banco de dados, usando o UUID como identificador. + """ + try: + # Buscar o registro do arquivo usando o UUID + arquivo = await self.get_file_record_by_uuid(uuid) + + # Obter o nome armazenado do arquivo + file_name = arquivo.arquivos_nome_armazenado + + # Gerar o caminho completo no S3 + s3_path = get_s3_path(inquilino, file_name) + + # Deletar o arquivo do S3 + s3_client.delete_object(Bucket=config.S3_BUCKET_NAME, Key=s3_path) + + # Remover o registro do banco de dados + result = await self.remove_by_id(uuid, coluna="uuid") + + return {"message": "Arquivo deletado com sucesso", "details": result} + + except ClientError as e: + if e.response["Error"]["Code"] == "NoSuchKey": + raise HTTPException(status_code=404, detail="Arquivo não encontrado no S3") + raise HTTPException(status_code=500, detail=f"Erro ao deletar arquivo: {str(e)}") + except Exception as e: + raise HTTPException(status_code=500, detail=f"Erro inesperado: {str(e)}") diff --git a/app/s3/__init__.py b/app/s3/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/s3/router_s3.py b/app/s3/router_s3.py new file mode 100644 index 0000000..485331e --- /dev/null +++ b/app/s3/router_s3.py @@ -0,0 +1,170 @@ +# Importações de bibliotecas padrão +from typing import Annotated + +# Importações de bibliotecas de terceiros +from fastapi import APIRouter, Depends, UploadFile, File, HTTPException +from starlette import status + +# Importações do seu próprio projeto +from app.routers.dependencies import get_repository +from app.s3.RepositoryS3 import RepositoryS3 +from app.rbac.permissions import verify_permissions +from app.database.models import RbacUser, S3Arquivo +from app.s3 import schema_s3 + +db_model = S3Arquivo +rota_prefix = "/api/s3/arquivos" +rota_tags = "S3 Arquivos" + +repository_base = Annotated[ + RepositoryS3[db_model], + Depends(get_repository(db_model, RepositoryS3)), +] + +router = APIRouter( + prefix=rota_prefix, + tags=[rota_tags], + responses={404: {"description": "Not found"}}, +) + + +@router.post("/upload", status_code=status.HTTP_200_OK, response_model=schema_s3.ArquivoRetorno) +async def upload_to_s3( + repository: repository_base, + file: UploadFile = File(...), + # repository: S3Repository[db_model] = Depends(get_repository_simple_table(S3Arquivo)), + user: RbacUser = Depends(verify_permissions([1, 2, 33, 401]))): + """ + Faz upload de uma imagem para o bucket restrito e registra no banco de dados. 
+ """ + try: + # Chamar a função do repositório para realizar o upload e salvar no banco + resultado = await repository.upload_to_s3( + conteudo=file.file, + nome_original=file.filename, + tipo_conteudo=file.content_type, + inquilino=user.fk_inquilino_uuid + ) + return resultado + except HTTPException as e: + raise e # Relevanta a exceção HTTP já tratada no repositório + except Exception as e: + raise HTTPException(status_code=500, detail=f"Erro inesperado: {str(e)}") + + +@router.post("/url/") +async def get_presigned_url( + request: schema_s3.FileNameRequest, + repository: repository_base, + user: RbacUser = Depends(verify_permissions([1, 2, 33, 401])), # Permissões necessárias +): + """ + Gera uma URL pré-assinada para download do arquivo, com o nome original configurado. + """ + try: + + # Chamar a função do repositório para gerar a URL pré-assinada + presigned_url = await repository.get_presigned_url( + uuid=request.uuid, + inquilino=user.fk_inquilino_uuid + ) + return {"url": presigned_url} + + except HTTPException as e: + raise e # Relança exceções HTTP já tratadas no repositório + except Exception as e: + raise HTTPException(status_code=500, detail=f"Erro inesperado: {str(e)}") + + +@router.post("/url/simple/") +async def generate_presigned_url( + request: schema_s3.FileNameRequest, + repository: repository_base, + user: RbacUser = Depends(verify_permissions([1, 2, 33, 401])), # Permissões necessárias +): + """ + Gera uma URL pré-assinada para acessar o arquivo no MinIO (sem download automático). + """ + try: + # Chamar a função do repositório para gerar a URL pré-assinada + presigned_url = await repository.generate_presigned_url( + uuid=request.uuid, + inquilino=user.fk_inquilino_uuid # Passa diretamente o UUID do inquilino + ) + return {"url": presigned_url} + + except HTTPException as e: + raise e # Relança exceções HTTP já tratadas no repositório + except Exception as e: + raise HTTPException(status_code=500, detail=f"Erro inesperado: {str(e)}") + + +@router.post("/") +async def get_file( + request: schema_s3.FileNameRequest, + repository: repository_base, + user: RbacUser = Depends(verify_permissions([1, 2, 33, 401])), # Permissões necessárias +): + """ + Retorna uma imagem específica para download. + O usuário precisa estar autenticado para acessar. + """ + try: + # Chamar a função do repositório para obter a imagem como streaming + response = await repository.get_file( + uuid=request.uuid, + inquilino=user.fk_inquilino_uuid # Passa diretamente o UUID do inquilino + ) + return response + + except HTTPException as e: + raise e # Relança exceções HTTP já tratadas no repositório + except Exception as e: + raise HTTPException(status_code=500, detail=f"Erro inesperado: {str(e)}") + + +@router.post("/inline/") +async def get_file_inline( + request: schema_s3.FileNameRequest, + repository: repository_base, + user: RbacUser = Depends(verify_permissions([1, 2, 33, 401])), # Permissões necessárias +): + """ + Retorna uma imagem ou arquivo específico para exibição inline (sem download automático). + O usuário precisa estar autenticado para acessar. 
+ """ + try: + # Chamar a função do repositório para obter o streaming da imagem + response = await repository.get_file_inline( + uuid=request.uuid, + inquilino=user.fk_inquilino_uuid # Passa diretamente o UUID do inquilino + ) + return response + + except HTTPException as e: + raise e # Relança exceções HTTP já tratadas no repositório + except Exception as e: + raise HTTPException(status_code=500, detail=f"Erro inesperado: {str(e)}") + + +@router.delete("/") +async def delete_file( + request: schema_s3.FileNameRequest, + repository: repository_base, + user: RbacUser = Depends(verify_permissions([1, 2, 33, 401])), # Permissões necessárias +): + """ + Exclui um arquivo do S3 e remove seu registro do banco de dados. + """ + try: + # Chamar a função do repositório para excluir o arquivo + result = await repository.delete_file_from_s3( + uuid=request.uuid, + inquilino=user.fk_inquilino_uuid # Passa diretamente o UUID do inquilino + ) + return result + + except HTTPException as e: + raise e # Relança exceções HTTP já tratadas no repositório + except Exception as e: + raise HTTPException(status_code=500, detail=f"Erro inesperado: {str(e)}") diff --git a/app/s3/router_s3_sem_repository.py b/app/s3/router_s3_sem_repository.py new file mode 100644 index 0000000..f58b6aa --- /dev/null +++ b/app/s3/router_s3_sem_repository.py @@ -0,0 +1,122 @@ +import boto3 +import app.config as config +import uuid +from fastapi import APIRouter, UploadFile, HTTPException +from starlette import status +from starlette.responses import StreamingResponse +from botocore.exceptions import ClientError +from app.s3 import schema_s3 + +s3_client = boto3.client( + "s3", + aws_access_key_id=config.S3_ACCESS_KEY_ID, + aws_secret_access_key=config.S3_SECRET_ACCESS_KEY, + endpoint_url=config.S3_ENDPOINT_URL, + region_name=config.S3_REGION_NAME, +) + +router = APIRouter( + prefix="/api/s3", + tags=["S3 Manipulação Arquivos"], + responses={404: {"description": "Not found"}}, +) + + +@router.post("/upload", status_code=status.HTTP_200_OK) +async def upload_to_s3(file: UploadFile): + """ + Faz upload de uma imagem para o bucket restrito. + """ + try: + unique_filename = f"{uuid.uuid4()}-{file.filename}" + s3_client.upload_fileobj( + file.file, + config.S3_BUCKET_NAME, + unique_filename, + ExtraArgs={ + "ContentType": file.content_type, + "Metadata": {"original_filename": file.filename} + }, + ) + return {"message": "Arquivo salvo com sucesso", "nome do arquivo": unique_filename} + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post("/images/url/") +async def get_presigned_url(request: schema_s3.FileNameRequest): + """ + Gera uma URL pré-assinada para download do arquivo, com o nome original configurado. 
+ """ + try: + file_name = request.file_name # Nome do arquivo com UUID + + # Obter os metadados do arquivo para o nome original + response = s3_client.head_object(Bucket=config.S3_BUCKET_NAME, Key=file_name) + metadata = response.get("Metadata", {}) + original_filename = metadata.get("original_filename", file_name) + + # Gerar uma URL pré-assinada para download + presigned_url = s3_client.generate_presigned_url( + "get_object", + Params={ + "Bucket": config.S3_BUCKET_NAME, + "Key": file_name, + "ResponseContentDisposition": f'attachment; filename="{original_filename}"' + }, + ExpiresIn=3600, # URL válida por 1 hora + ) + + return {"url": presigned_url} + except ClientError as e: + if e.response["Error"]["Code"] == "NoSuchKey": + raise HTTPException(status_code=404, detail="Arquivo não encontrado") + raise HTTPException(status_code=500, detail="Erro ao gerar URL") + + +@router.post("/images/url/simple/") +async def generate_presigned_url(request: schema_s3.FileNameRequest): + """ + Gera uma URL pré-assinada para acessar o arquivo no MinIO (sem download automático). + """ + try: + file_name = request.file_name # Nome do arquivo com UUID + + # Gerar uma URL pré-assinada sem configurar o download automático + presigned_url = s3_client.generate_presigned_url( + "get_object", + Params={ + "Bucket": config.S3_BUCKET_NAME, + "Key": file_name + }, + ExpiresIn=3600, # URL válida por 1 hora + ) + + return {"url": presigned_url} + except ClientError as e: + if e.response["Error"]["Code"] == "NoSuchKey": + raise HTTPException(status_code=404, detail="File not found") + raise HTTPException(status_code=500, detail="Error generating presigned URL") + + +@router.post("/images/") +async def get_image(request: schema_s3.FileNameRequest): + """ + Retorna uma imagem específica para download. + O usuário precisa estar autenticado para acessar. + """ + try: + file_name = request.file_name + # Baixar o objeto do MinIO como um fluxo + response = s3_client.get_object(Bucket=config.S3_BUCKET_NAME, Key=file_name) + metadata = response.get("Metadata", {}) + original_filename = metadata.get("original_filename", file_name) # Nome original ou o atual + return StreamingResponse( + response["Body"], + media_type=response["ContentType"], # Tipo de arquivo, ex.: image/jpeg + headers={"Content-Disposition": f'attachment; filename="{original_filename}"'} + ) + except ClientError as e: + if e.response["Error"]["Code"] == "NoSuchKey": + raise HTTPException(status_code=404, detail="File not found") + raise HTTPException(status_code=500, detail="Error accessing file") diff --git a/app/s3/schema_s3.py b/app/s3/schema_s3.py new file mode 100644 index 0000000..41185ab --- /dev/null +++ b/app/s3/schema_s3.py @@ -0,0 +1,17 @@ +from pydantic import BaseModel, ConfigDict +from uuid import UUID + + +# Modelo para a entrada do nome do arquivo +class FileNameRequest(BaseModel): + uuid: UUID + + +class ArquivoCreate(BaseModel): + model_config = ConfigDict(from_attributes=True) + arquivos_nome_original: str + arquivos_nome_armazenado: str + + +class ArquivoRetorno(ArquivoCreate): + uuid: UUID diff --git a/app/schemas/__init__.py b/app/schemas/__init__.py new file mode 100644 index 0000000..72c9ebe --- /dev/null +++ b/app/schemas/__init__.py @@ -0,0 +1,5 @@ +from . import endereco_schemas +from . import papel_shemas +from . import pessoa_schemas +from . import tipo_endereco_schemas +from . 
import utils diff --git a/app/schemas/endereco_schemas.py b/app/schemas/endereco_schemas.py new file mode 100644 index 0000000..12cf3d6 --- /dev/null +++ b/app/schemas/endereco_schemas.py @@ -0,0 +1,64 @@ +# Importações de bibliotecas padrão +from typing import Annotated, Optional +from uuid import UUID + +# Importações de bibliotecas de terceiros +from pydantic import BaseModel, Field, ConfigDict +from .utils import UuidMixinSchema, TimestampMixinSchema, UuidsMixinSchema +from app.schemas.tipo_endereco_schemas import Consulta as ConsultaSchema + + +class EnderecoCreate(BaseModel): + model_config = ConfigDict(from_attributes=True) + endereco_pessoa_status: bool + endereco_pessoa_descricao: str = Field(min_length=3, max_length=50) + endereco_pessoa_numero: str = Field(min_length=1, max_length=8) + endereco_pessoa_complemento: str = Field(min_length=3, max_length=50, default="S/N") + endereco_pessoa_cep: str = Field(min_length=8, max_length=8) + fk_tipo_endereco_uuid: UUID + + +class Create(EnderecoCreate): + fk_pessoa_uuid: UUID + + +class Request(TimestampMixinSchema, Create, UuidMixinSchema): + uuid: UUID | None = None + fk_pessoa_uuid: UUID | None = None + relacao_tipo_endereco: ConsultaSchema | None = None + # relacao_tipo_endereco: UUIDSchema + + +class EnderecoRequest(Request): + pass + + +class UpdateSchema(BaseModel): + model_config = ConfigDict(from_attributes=True) + uuid: UUID | None = None + endereco_pessoa_status: bool + endereco_pessoa_descricao: Optional[Annotated[str, Field(max_length=50)]] = None + endereco_pessoa_numero: Optional[Annotated[str, Field(max_length=8)]] = None + endereco_pessoa_complemento: Optional[Annotated[str, Field(max_length=50)]] = None + endereco_pessoa_cep: Optional[Annotated[str, Field(max_length=8)]] = None + fk_tipo_endereco_uuid: UUID | None = None + + +class EnderecoBaseUpdate(UpdateSchema): + pass + + +class IdRequest(UuidMixinSchema): + pass + + +class IdsRequest(UuidsMixinSchema): + pass + + +class UpdateRequest(UpdateSchema, IdRequest): + pass + + +class UpdateManyRequest(UpdateSchema, IdRequest): + pass diff --git a/app/schemas/papel_shemas.py b/app/schemas/papel_shemas.py new file mode 100644 index 0000000..08c460a --- /dev/null +++ b/app/schemas/papel_shemas.py @@ -0,0 +1,46 @@ +# Importações de bibliotecas padrão +from datetime import datetime +from typing import Optional, List +from uuid import UUID as UuidType + +# Importações de bibliotecas de terceiros +from pydantic import BaseModel, ConfigDict + +# Importações do seu próprio projeto +from .utils import UuidMixinSchema, TimestampMixinSchema, UuidsMixinSchema + + +class PermissaoModel(BaseModel): + model_config = ConfigDict(from_attributes=True) + id: int + + +class Create(BaseModel): + model_config = ConfigDict(from_attributes=True) + + nome: str + permissoes: List[PermissaoModel] + + +class Request(Create): + permissoes: List[PermissaoModel] + + +class UpdateSchema(BaseModel): + nome: Optional[str] = None + + +class IdRequest(UuidMixinSchema): + pass + + +class IdsRequest(UuidsMixinSchema): + pass + + +class UpdateRequest(UpdateSchema, IdRequest): + pass + + +class UpdateManyRequest(UpdateSchema, IdRequest): + pass diff --git a/app/schemas/pessoa_schemas.py b/app/schemas/pessoa_schemas.py new file mode 100644 index 0000000..af5b458 --- /dev/null +++ b/app/schemas/pessoa_schemas.py @@ -0,0 +1,118 @@ +# Importações de bibliotecas padrão +from typing import Union, List, Literal, Annotated, Optional +from uuid import UUID + +# Importações de bibliotecas de terceiros +from pydantic 
import BaseModel, Field, ConfigDict, EmailStr +from app.schemas.utils import UUIDSchema +from app.schemas.endereco_schemas import EnderecoCreate, EnderecoRequest, EnderecoBaseUpdate + + +class PessoaPayload(BaseModel): + model_config = ConfigDict(from_attributes=True) + pessoa_status: bool = Field(default=True) + pessoa_telefone: str = Field(min_length=8, max_length=20) + pessoa_celular: str = Field(min_length=9, max_length=20) + pessoa_email: EmailStr = Field(min_length=8, max_length=50) + pessoa_local_evento: bool = Field(default=False) + + +class FisicaCreate(PessoaPayload): + pessoa_tipo: Literal["1"] + fisica_cpf: str = Field(min_length=11, max_length=11) # Em produção usar validadores prontos + fisica_rg: str = Field(min_length=5, max_length=20) + fisica_genero: Literal["M", "F", "HT", "MT", "T", "NB", "O"] + fisica_nome: str = Field(min_length=3, max_length=100) + enderecos: List[EnderecoCreate] | None = None + rc: List[UUIDSchema] | None = None + + +class JuridicaCreate(PessoaPayload): + pessoa_tipo: Literal["0"] + juridica_cnpj: str = Field(min_length=14, max_length=14) # Em produção usar validadores prontos + juridica_email_fiscal: EmailStr = Field(min_length=8, max_length=50) + juridica_insc_est: Optional[Annotated[str, Field(min_length=5, max_length=50)]] = None + juridica_ins_mun: Optional[Annotated[str, Field(min_length=5, max_length=50)]] = None + juridica_razao_social: str = Field(min_length=5, max_length=200) + juridica_representante: str = Field(min_length=3, max_length=100) + enderecos: List[EnderecoCreate] | None = None + rc: List[UUIDSchema] | None = None + + +class PessoaBaseResponse(BaseModel): + model_config = ConfigDict(from_attributes=True) + uuid: UUID + pessoa_status: bool + pessoa_telefone: str | None = None + pessoa_celular: str | None = None + pessoa_email: str | None = None + pessoa_local_evento: bool + + +class RequestPJ(PessoaBaseResponse): + pessoa_tipo: Literal["0"] + juridica_cnpj: str | None = None + juridica_email_fiscal: str | None = None + juridica_insc_est: str | None = None + juridica_ins_mun: str | None = None + juridica_razao_social: str | None = None + juridica_representante: str | None = None + enderecos: List[EnderecoRequest] = [] + + + +class RequestPF(PessoaBaseResponse): + pessoa_tipo: Literal["1"] + fisica_cpf: str | None = None + fisica_rg: str | None = None + fisica_genero: str | None = None + fisica_nome: str + enderecos: List[EnderecoRequest] = [] + + + +RequestPessoa = Annotated[ + Union[RequestPF, RequestPJ], + Field(discriminator='pessoa_tipo') +] + + +class IdRequestPessoa(BaseModel): + uuid: UUID = None + + +class IdsRequestPessoas(BaseModel): + uuids: List[UUID] = None + + +class PessoaBaseUpdate(IdRequestPessoa): + pessoa_status: bool + pessoa_telefone: Optional[Annotated[str, Field(min_length=8, max_length=20)]] = None + pessoa_celular: Optional[Annotated[str, Field(min_length=8, max_length=20)]] = None + pessoa_email: Optional[Annotated[EmailStr, Field(min_length=8, max_length=50)]] = None + pessoa_local_evento: bool + pessoa_tipo: Optional[Literal["0", "1"]] = None + + +class PessoaFisicaUpdate(PessoaBaseUpdate): + fisica_cpf: str = Field(min_length=11, max_length=11) + fisica_rg: Optional[Annotated[str, Field(min_length=5, max_length=20)]] = None + fisica_genero: Optional[Literal["M", "F", "HT", "MT", "T", "NB", "O"]] = None + fisica_nome: Optional[Annotated[str, Field(min_length=3, max_length=100)]] = None + enderecos: List[EnderecoBaseUpdate] | None = None + rc: List[UUIDSchema] | None = None + + +class 
PessoaJuridicaUpdate(PessoaBaseUpdate): + juridica_cnpj: str = Field(min_length=14, max_length=14) + juridica_email_fiscal: Optional[Annotated[EmailStr, Field(min_length=8, max_length=50)]] = None + juridica_insc_est: Optional[Annotated[str, Field(min_length=5, max_length=50)]] = None + juridica_ins_mun: Optional[Annotated[str, Field(min_length=5, max_length=50)]] = None + juridica_razao_social: Optional[Annotated[str, Field(min_length=5, max_length=200)]] = None + juridica_representante: Optional[Annotated[str, Field(min_length=3, max_length=100)]] = None + enderecos: List[EnderecoBaseUpdate] | None = None + rc: List[UUIDSchema] | None = None + + +class ValidaGetAll(BaseModel): + validador: Literal["0", "1"] | None = None diff --git a/app/schemas/tipo_endereco_schemas.py b/app/schemas/tipo_endereco_schemas.py new file mode 100644 index 0000000..75b4ebb --- /dev/null +++ b/app/schemas/tipo_endereco_schemas.py @@ -0,0 +1,44 @@ +# Importações de bibliotecas padrão +from typing import Optional + +# Importações de bibliotecas de terceiros +from pydantic import BaseModel, ConfigDict, Field + +# Importações do seu próprio projeto +from .utils import UuidMixinSchema, TimestampMixinSchema, UuidsMixinSchema +from uuid import UUID + + +class Create(BaseModel): + model_config = ConfigDict(from_attributes=True) + tipo_endereco_descricao: str = Field(min_length=3, max_length=30) + + +class Request(TimestampMixinSchema, Create, UuidMixinSchema): + pass + + +class Consulta(BaseModel): + model_config = ConfigDict(from_attributes=True) + uuid: UUID + tipo_endereco_descricao: str | None = None + + +class UpdateSchema(BaseModel): + tipo_endereco_descricao: Optional[str] = Field(min_length=3, max_length=30, default=None) + + +class IdRequest(UuidMixinSchema): + pass + + +class IdsRequest(UuidsMixinSchema): + pass + + +class UpdateRequest(UpdateSchema, IdRequest): + pass + + +class UpdateManyRequest(UpdateSchema, IdRequest): + pass diff --git a/app/schemas/usuario_schemas.py b/app/schemas/usuario_schemas.py new file mode 100644 index 0000000..aaf3302 --- /dev/null +++ b/app/schemas/usuario_schemas.py @@ -0,0 +1,68 @@ +# Importações de bibliotecas padrão +import re +from typing import List +from uuid import UUID + +# Importações de bibliotecas de terceiros +from pydantic import BaseModel, field_validator, EmailStr, ConfigDict + + +class Papeis(BaseModel): + model_config = ConfigDict(from_attributes=True) + uuid: UUID + + +class PapeisResponse(Papeis): + nome: str + + +class UsuarioBase(BaseModel): + model_config = ConfigDict(from_attributes=True) + username: str + full_name: str + email: EmailStr + papeis: List[Papeis] + + # @field_validator('username') + # def validate_username(cls, value): + # if not re.match('^([a-z]|[0-9]|@)+$', value): + # raise ValueError('Username format invalid') + # return value + + +class UsuarioCreate(UsuarioBase): + password: str + + +class UsuarioResponse(BaseModel): + model_config = ConfigDict(from_attributes=True) + username: str + full_name: str + email: EmailStr + papeis: List[PapeisResponse] + + # @field_validator('username') + # def validate_username(cls, value): + # if not re.match('^([a-z]|[0-9]|@)+$', value): + # raise ValueError('Username format invalid') + # return value + + +class UsuarioRequest(UsuarioBase): + pass + + +class UsuarioLogin(BaseModel): + username: str + password: str + + # @field_validator('username') + # def validate_username(cls, value): + # if not re.match('^([a-z]|[0-9]|@)+$', value): + # raise ValueError('Username format invalid') + # return value + 
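+
+# Editor's sketch: a minimal, self-contained illustration of how the
+# validator commented out above behaves once re-enabled (the class name is
+# hypothetical and unused elsewhere). `re` and `field_validator` are already
+# imported at the top of this module.
+class _DemoUsernameCheck(BaseModel):
+    username: str
+
+    @field_validator('username')
+    def validate_username(cls, value):
+        # Same rule as the commented-out blocks: lowercase letters, digits, '@'
+        if not re.match('^([a-z]|[0-9]|@)+$', value):
+            raise ValueError('Username format invalid')
+        return value
+
+# _DemoUsernameCheck(username="admin@sonora") is accepted, while
+# _DemoUsernameCheck(username="Admin") raises a ValidationError (uppercase).
+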
+ +class Token(BaseModel): + access_token: str + token_type: str diff --git a/app/schemas/utils.py b/app/schemas/utils.py new file mode 100644 index 0000000..499806e --- /dev/null +++ b/app/schemas/utils.py @@ -0,0 +1,29 @@ +# Importações de bibliotecas padrão +from datetime import datetime +from typing import List, Optional +from uuid import UUID as UuidType + + +# Importações de bibliotecas de terceiros +from pydantic import BaseModel, ConfigDict + + +class UuidMixinSchema(BaseModel): + uuid: UuidType = None + + +class UuidsMixinSchema(BaseModel): + uuids: List[UuidType] = None + + +class TimestampMixinSchema(BaseModel): + created_at: datetime | None = None + updated_at: datetime | None = None + + +class UUIDSchema(BaseModel): + model_config = ConfigDict(from_attributes=True) + uuid: UuidType + +class RegistroAtivo(UuidMixinSchema): + ativo: Optional[bool] = None \ No newline at end of file diff --git a/app/schemas/validacoes.py b/app/schemas/validacoes.py new file mode 100644 index 0000000..b573be5 --- /dev/null +++ b/app/schemas/validacoes.py @@ -0,0 +1,139 @@ +from datetime import date +from decimal import Decimal + +from fastapi import HTTPException + + +class ValidacoesUtil: + @classmethod + def validar_valor_monetario(cls, value: float, precision: int = 10, scale: int = 2): + decimal_value = Decimal(value).quantize(Decimal(f'1.{"0" * scale}')) + if len(decimal_value.as_tuple().digits) > precision: + raise ValueError(f"O valor excede {precision} dígitos, incluindo, {scale} casas decimais.") + return float(decimal_value) + + # Você pode adicionar outras validações aqui + @classmethod + def validar_se_positivo(cls, value: float): + if value <= 0: + raise HTTPException(status_code=400, detail="O valor deve ser positivo.") + return value + + @classmethod + def data_no_passado(cls, data: date): + """ + Valida se a data é no passado (antes de hoje). + """ + if data >= date.today(): + raise HTTPException(status_code=400, detail="A data deve estar no passado.") + return data + + @classmethod + def data_no_passado_ou_hoje(cls, data: date): + """ + Valida se a data é no passado ou hoje (igual ou antes de hoje). + """ + if data > date.today(): + raise HTTPException(status_code=400, detail="A data não pode ser no futuro.") + return data + + @classmethod + def data_no_futuro(cls, data: date): + """ + Valida se a data é no futuro (depois de hoje). + """ + if data <= date.today(): + raise HTTPException(status_code=400, detail="A data deve ser no futuro.") + return data + + @classmethod + def data_no_futuro_ou_hoje(cls, data: date): + """ + Valida se a data é hoje ou no futuro (igual ou depois de hoje). + """ + if data < date.today(): + raise HTTPException(status_code=400, detail="A data não pode estar no passado.") + return data + + @classmethod + def data_final_maior(cls, data_final: date, data_inicial: date): + """ + Valida se a data final é posterior à data inicial. + """ + if data_final <= data_inicial: + raise HTTPException(status_code=400, detail="A data final deve ser posterior à data inicial.") + return data_final + + @classmethod + def data_final_maior_ou_igual(cls, data_final: date, data_inicial: date): + """ + Valida se a data final é igual ou posterior à data inicial. + """ + if data_final < data_inicial: + raise HTTPException(status_code=400, detail="A data final não pode ser anterior à data inicial.") + return data_final + + @classmethod + def data_final_menor(cls, data_final: date, data_inicial: date): + """ + Valida se a data final é anterior à data inicial. 
+ """ + if data_final >= data_inicial: + raise HTTPException(status_code=400, detail="A data final deve ser anterior à data inicial.") + return data_final + + @classmethod + def data_final_menor_ou_igual(cls, data_final: date, data_inicial: date): + """ + Valida se a data final é igual ou anterior à data inicial. + """ + if data_final > data_inicial: + raise HTTPException(status_code=400, detail="A data final não pode ser posterior à data inicial.") + return data_final + + @classmethod + def validate_intervalo_minimo(cls, data_inicial: date, data_final: date, dias_minimos: int): + """ + Valida se há um intervalo mínimo de dias entre as duas datas. + """ + if (data_final - data_inicial).days < dias_minimos: + raise HTTPException(status_code=400, + detail=f"O intervalo entre as datas deve ser de pelo menos {dias_minimos} dias.") + return data_final + + @classmethod + def validate_intervalo_maximo(cls, data_inicial: date, data_final: date, dias_maximos: int): + """ + Valida se o intervalo entre duas datas não excede um número máximo de dias. + """ + if (data_final - data_inicial).days > dias_maximos: + raise HTTPException(status_code=400, + detail=f"O intervalo entre as datas não pode ser maior que {dias_maximos} dias.") + return data_final + + @classmethod + def validate_data_intervalo_anos(cls, data: date, ano_inicial: int, ano_final: int): + """ + Valida se a data está dentro de um intervalo de anos permitido. + """ + if not (ano_inicial <= data.year <= ano_final): + raise HTTPException(status_code=400, detail=f"A data deve estar entre os anos {ano_inicial} e {ano_final}.") + return data + + @classmethod + def validate_data_expirada(cls, data_expiracao: date): + """ + Verifica se a data de expiração já passou. + """ + if data_expiracao < date.today(): + raise HTTPException(status_code=400, detail="A data de expiração já passou.") + return data_expiracao + + @classmethod + def validate_dia_util(cls, data: date): + """ + Valida se a data cai em um dia útil (segunda a sexta-feira). 
+ """ + if data.weekday() > 4: # Dias 5 e 6 são sábado e domingo + raise HTTPException(status_code=400, detail="A data deve ser em um dia útil.") + return data diff --git a/app/scripts/__init__.py b/app/scripts/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/scripts/bkp_create_initial_users.py b/app/scripts/bkp_create_initial_users.py new file mode 100644 index 0000000..b43baae --- /dev/null +++ b/app/scripts/bkp_create_initial_users.py @@ -0,0 +1,41 @@ +import contextlib +from app.rbac.auth import get_user_db, get_user_manager +from app.rbac.schemas import UserCreate +from app.database.session import get_db +from sqlalchemy import select +from fastapi_users.exceptions import UserAlreadyExists +from app.database.models import RbacUser + +get_async_session_context = contextlib.asynccontextmanager(get_db) +get_user_db_context = contextlib.asynccontextmanager(get_user_db) +get_user_manager_context = contextlib.asynccontextmanager(get_user_manager) + + +async def create_user(email: str, password: str, full_name: str, username: str, is_superuser: bool = False): + async with get_async_session_context() as session: + async with get_user_db_context(session) as user_db: + async with get_user_manager_context(user_db) as user_manager: + try: + user = await user_manager.create( + UserCreate( + email=email, + password=password, + username=username, + full_name=full_name, + is_superuser=is_superuser, + is_active=True + ) + ) + + return user.id + except UserAlreadyExists: + print(f"Usuário {email} já existe") + result_user = await session.execute(select(RbacUser).filter_by(email=email)) + existing_user = result_user.scalars().first() + return existing_user.id + + +async def create_initial_users(): + user_id = await create_user(email="admin@sonora.com", password="admin", is_superuser=True, + username="Admin", full_name="Admin") + return user_id diff --git a/app/scripts/create_initial_user.py b/app/scripts/create_initial_user.py new file mode 100644 index 0000000..8aae438 --- /dev/null +++ b/app/scripts/create_initial_user.py @@ -0,0 +1,37 @@ +import contextlib +from fastapi_users.exceptions import UserAlreadyExists +from sqlalchemy import select + +from app.database.session import get_db +from app.rbac.auth import get_user_db, get_user_manager +from app.rbac.schemas import UserCreate +from app.database.models import RbacUser + +get_async_session_context = contextlib.asynccontextmanager(get_db) +get_user_db_context = contextlib.asynccontextmanager(get_user_db) +get_user_manager_context = contextlib.asynccontextmanager(get_user_manager) + + +async def create_user(email: str, password: str, full_name: str, username: str, is_superuser: bool = False): + try: + async with get_async_session_context() as session: + async with get_user_db_context(session) as user_db: + async with get_user_manager_context(user_db) as user_manager: + user = await user_manager.create( + UserCreate( + email=email, + password=password, + username=username, + full_name=full_name, + is_superuser=is_superuser, + is_active=True + ) + ) + print(f"User created: {user}") + return user.id + except UserAlreadyExists: + print(f"User {email} already exists") + async with session.begin(): + result_user = await session.execute(select(RbacUser).filter_by(email=email)) + existing_user = result_user.scalars().first() + return existing_user.id diff --git a/app/scripts/create_initial_users.py b/app/scripts/create_initial_users.py new file mode 100644 index 0000000..1d69683 --- /dev/null +++ b/app/scripts/create_initial_users.py 
@@ -0,0 +1,76 @@
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import schema as sa_schema
+from app.database.models import Inquilino  # Modelos `Inquilino`
+from app.database.session import sessionmanager
+from app.multi_tenant.tenant import get_tenant_specific_metadata
+from app.rbac.auth import get_user_db, get_user_manager
+from app.rbac.schemas import UserCreate
+from fastapi_users.exceptions import UserAlreadyExists
+from sqlalchemy import select
+from app.database.models import RbacUser
+import contextlib
+
+
+# Funções auxiliares para criação de usuários
+get_async_session_context = contextlib.asynccontextmanager(sessionmanager.session)
+get_user_db_context = contextlib.asynccontextmanager(get_user_db)
+get_user_manager_context = contextlib.asynccontextmanager(get_user_manager)
+
+
+async def create_user(email: str, password: str, is_superuser: bool = False, pessoa_uuid: str = None):
+    """
+    Cria um usuário no sistema utilizando o gerenciador de usuários do FastAPI Users.
+    """
+    async with get_async_session_context() as session:
+        async with get_user_db_context(session) as user_db:
+            async with get_user_manager_context(user_db) as user_manager:
+                try:
+                    user = await user_manager.create(
+                        UserCreate(
+                            email=email,
+                            password=password,
+                            is_superuser=is_superuser,
+                            is_active=True,
+                            fk_pessoa_uuid=pessoa_uuid,
+                        )
+                    )
+                    return user.id
+                except UserAlreadyExists:
+                    print(f"Usuário {email} já existe")
+                    result_user = await session.execute(select(RbacUser).filter_by(email=email))
+                    existing_user = result_user.scalars().first()
+                    return existing_user.id
+
+
+async def tenant_create(nome: str, host: str, email: str, password: str) -> None:
+    """
+    Cria um novo tenant (inquilino) no sistema, configura o schema específico
+    e registra um usuário inicial relacionado ao inquilino.
+ """ + async with sessionmanager.session() as db: # Obtendo uma sessão do gerenciador + # Criar o inquilino na tabela `inquilinos` + tenant = Inquilino(nome=nome) + db.add(tenant) + await db.commit() # Commit necessário para o UUID ser gerado pelo banco + + # Obter o UUID gerado + await db.refresh(tenant) + schema_name = str(tenant.uuid) + + # Criar o schema do inquilino + await db.execute(sa_schema.CreateSchema(schema_name)) + + # Criar as tabelas específicas do tenant no novo schema + tenant_metadata = get_tenant_specific_metadata() + await db.run_sync(tenant_metadata.create_all) + + # Criar o usuário inicial para o tenant + user_id = await create_user( + email=email, + password=password, + is_superuser=True, + pessoa_uuid=None, # Ajustar caso seja necessário vincular com uma pessoa + ) + print(f"Usuário inicial {email} criado com ID {user_id}") + + print(f"Tenant '{nome}' criado com sucesso no schema '{schema_name}'!") diff --git a/app/scripts/initialize_permissions.py b/app/scripts/initialize_permissions.py new file mode 100644 index 0000000..bae1423 --- /dev/null +++ b/app/scripts/initialize_permissions.py @@ -0,0 +1,214 @@ +from app.database.models import RbacPermissao, RbacPapel +from app.database.session import sessionmanager +from sqlalchemy import select + + +async def add_or_update_entity(session, model, filters, data): + result = await session.execute(select(model).filter_by(**filters)) + entity = result.scalars().first() + if not entity: + entity = model(**data) + session.add(entity) + else: + for key, value in data.items(): + setattr(entity, key, value) + return entity + + +async def add_permissions_to_role(session, papel_data, permissoes_ids): + for permissao_id in permissoes_ids: + result_permissao = await session.execute(select(RbacPermissao).filter_by(id=permissao_id)) + permissao_data = result_permissao.scalars().first() + if permissao_data and permissao_data not in papel_data.permissoes: + papel_data.permissoes.append(permissao_data) + + for permissao in papel_data.permissoes: + if permissao.id not in permissoes_ids: + papel_data.permissoes.remove(permissao) + + +async def remove_unused_entities(session, model, valid_ids, id_field="id"): + result_all = await session.execute(select(model)) + all_entities = result_all.scalars().all() + for entity in all_entities: + entity_id = str(getattr(entity, id_field)) + if entity_id not in valid_ids: + await session.delete(entity) + await session.flush() + + +async def process_permissions(session, endpoint_permissao1): + ignored_permissions = [] + for permissao in endpoint_permissao1: + if permissao["id"] is None or permissao["nome"] is None: + ignored_permissions.append(permissao) + continue + + await add_or_update_entity(session, RbacPermissao, {"id": permissao["id"]}, permissao) + permissoes_ids = [str(permissao["id"]) for permissao in endpoint_permissao1] + await remove_unused_entities(session, RbacPermissao, permissoes_ids, id_field="id") + return ignored_permissions + + +async def process_roles(session, endpoint_papel): + ignored_roles = [] + for papel in endpoint_papel: + if papel.get("nome") is None: + ignored_roles.append(papel) + continue + + papel_data = await add_or_update_entity(session, RbacPapel, {"nome": papel["nome"]}, {"nome": papel["nome"]}) + await session.flush() + await session.refresh(papel_data) + + permissoes_ids = papel["permissoes"] if papel["permissoes"] is not None else [] + await add_permissions_to_role(session, papel_data, permissoes_ids) + + papeis_nomes = [papel["nome"] for papel in endpoint_papel] 
+    await remove_unused_entities(session, RbacPapel, papeis_nomes, id_field="nome")
+    return ignored_roles
+
+
+async def initialize_permissions():
+    async with sessionmanager.session() as session:
+        # Definindo as permissões
+        endpoint_permissao = [
+            {"id": 1, "nome": "Permissão Total"},
+            {"id": 2, "nome": "Permissão Setor Comercial"},
+            {"id": 3, "nome": "Permissão RBAC"},
+            {"id": 4, "nome": "Permissão Setor Estoque"},
+            {"id": 5, "nome": "Permissão Setor Financeiro"},
+            {"id": 30, "nome": "Permissão Relação Comercial"},
+            {"id": 31, "nome": "Permissão Tipo Endereço"},
+            {"id": 32, "nome": "Permissão Endereço"},
+            {"id": 33, "nome": "Permissão Pessoa"},
+            {"id": 34, "nome": "Permissão Usuários"},
+            {"id": 35, "nome": "Permissão Papel"},
+            {"id": 36, "nome": "Permissão Setor"},
+            {"id": 37, "nome": "Permissão Tipo Equipamento"},
+            {"id": 38, "nome": "Permissão Equipamento"},
+            {"id": 39, "nome": "Permissão Itens Equipamento"},
+            {"id": 40, "nome": "Permissão Manutenção Equipamento"},
+            {"id": 41, "nome": "Permissão Papeis"},
+            {"id": 101, "nome": "Relação Comercial Criar"},
+            {"id": 102, "nome": "Relação Comercial Criar Muitos"},
+            {"id": 103, "nome": "Relação Comercial Buscar Todos"},
+            {"id": 104, "nome": "Relação Comercial Buscar Vários"},
+            {"id": 105, "nome": "Relação Comercial Buscar"},
+            {"id": 106, "nome": "Relação Comercial Atualizar"},
+            {"id": 107, "nome": "Relação Comercial Atualizar Vários"},
+            {"id": 108, "nome": "Relação Comercial Desativar"},
+            {"id": 201, "nome": "Tipo Endereço Criar"},
+            {"id": 202, "nome": "Tipo Endereço Criar Muitos"},
+            {"id": 203, "nome": "Tipo Endereço Buscar Todos"},
+            {"id": 204, "nome": "Tipo Endereço Buscar Vários"},
+            {"id": 205, "nome": "Tipo Endereço Buscar"},
+            {"id": 206, "nome": "Tipo Endereço Atualizar"},
+            {"id": 207, "nome": "Tipo Endereço Atualizar Vários"},
+            {"id": 208, "nome": "Tipo Endereço Desativar"},
+            {"id": 301, "nome": "Endereço Criar"},
+            {"id": 302, "nome": "Endereço Criar Muitos"},
+            {"id": 303, "nome": "Endereço Buscar Todos"},
+            {"id": 304, "nome": "Endereço Buscar Vários"},
+            {"id": 305, "nome": "Endereço Buscar"},
+            {"id": 306, "nome": "Endereço Atualizar"},
+            {"id": 307, "nome": "Endereço Atualizar Vários"},
+            {"id": 308, "nome": "Endereço Desativar"},
+            {"id": 401, "nome": "Pessoa Criar"},
+            {"id": 402, "nome": "Pessoa Criar Muitos"},
+            {"id": 403, "nome": "Pessoa Buscar Todos"},
+            {"id": 404, "nome": "Pessoa Buscar Vários"},
+            {"id": 405, "nome": "Pessoa Buscar"},
+            {"id": 406, "nome": "Pessoa Atualizar"},
+            {"id": 408, "nome": "Pessoa Desativar"},
+            {"id": 501, "nome": "Usuário Criar"},
+            {"id": 502, "nome": "Usuário Criar Muitos"},
+            {"id": 503, "nome": "Usuário Buscar Todos"},
+            {"id": 504, "nome": "Usuário Buscar Vários"},
+            {"id": 505, "nome": "Usuário Buscar"},
+            {"id": 506, "nome": "Usuário Atualizar"},
+            {"id": 507, "nome": "Usuário Atualizar Vários"},
+            {"id": 508, "nome": "Usuário Desativar"},
+            {"id": 601, "nome": "Papel Criar"},
+            {"id": 602, "nome": "Papel Criar Muitos"},
+            {"id": 603, "nome": "Papel Buscar Todos"},
+            {"id": 604, "nome": "Papel Buscar Vários"},
+            {"id": 605, "nome": "Papel Buscar"},
+            {"id": 606, "nome": "Papel Atualizar"},
+            {"id": 607, "nome": "Papel Atualizar Vários"},
+            {"id": 608, "nome": "Papel Desativar"},
+            {"id": 701, "nome": "Setor Criar"},
+            {"id": 702, "nome": "Setor Criar Muitos"},
+            {"id": 703, "nome": "Setor Buscar Todos"},
+            {"id": 704, "nome": "Setor Buscar Vários"},
+            {"id": 705, "nome": "Setor Buscar"},
+            {"id": 706, "nome": "Setor Atualizar"},
+            {"id": 707, "nome":
"Setor Atualizar Vários"}, + {"id": 708, "nome": "Setor Desativar"}, + {"id": 801, "nome": "Tipo Equipamento Criar"}, + {"id": 802, "nome": "Tipo Equipamento Criar Muitos"}, + {"id": 803, "nome": "Tipo Equipamento Buscar Todos"}, + {"id": 804, "nome": "Tipo Equipamento Buscar Vários"}, + {"id": 805, "nome": "Tipo Equipamento Buscar"}, + {"id": 806, "nome": "Tipo Equipamento Atualizar"}, + {"id": 807, "nome": "Tipo Equipamento Atualizar Vários"}, + {"id": 808, "nome": "Tipo Equipamento Desativar"}, + {"id": 901, "nome": "Equipamento Criar"}, + {"id": 902, "nome": "Equipamento Criar Muitos"}, + {"id": 903, "nome": "Equipamento Buscar Todos"}, + {"id": 904, "nome": "Equipamento Buscar Vários"}, + {"id": 905, "nome": "Equipamento Buscar"}, + {"id": 906, "nome": "Equipamento Atualizar"}, + {"id": 907, "nome": "Equipamento Atualizar Vários"}, + {"id": 908, "nome": "Equipamento Desativar"}, + {"id": 1001, "nome": "Itens Equipamento Criar"}, + {"id": 1002, "nome": "Itens Equipamento Criar Muitos"}, + {"id": 1003, "nome": "Itens Equipamento Buscar Todos"}, + {"id": 1004, "nome": "Itens Equipamento Buscar Vários"}, + {"id": 1005, "nome": "Itens Equipamento Buscar"}, + {"id": 1006, "nome": "Itens Equipamento Atualizar"}, + {"id": 1007, "nome": "Itens Equipamento Atualizar Vários"}, + {"id": 1008, "nome": "Itens Equipamento Desativar"}, + {"id": 1101, "nome": "Manutenção Equipamento Criar"}, + {"id": 1102, "nome": "Manutenção Equipamento Criar Muitos"}, + {"id": 1103, "nome": "Manutenção Equipamento Buscar Todos"}, + {"id": 1104, "nome": "Manutenção Equipamento Buscar Vários"}, + {"id": 1105, "nome": "Manutenção Equipamento Buscar"}, + {"id": 1106, "nome": "Manutenção Equipamento Atualizar"}, + {"id": 1107, "nome": "Manutenção Equipamento Atualizar Vários"}, + {"id": 1108, "nome": "Manutenção Equipamento Desativar"}, + {"id": 1201, "nome": "Papeis Criar"}, + {"id": 1202, "nome": "Papeis Criar Muitos"}, + {"id": 1203, "nome": "Papeis Buscar Todos"}, + {"id": 1204, "nome": "Papeis Buscar Vários"}, + {"id": 1205, "nome": "Papeis Buscar"}, + {"id": 1206, "nome": "Papeis Atualizar"}, + {"id": 1207, "nome": "Papeis Atualizar Vários"}, + {"id": 1208, "nome": "Papeis Desativar"}, + + ] + + # Definindo os papéis + endpoint_papel = [ + {"nome": "Super Administrador", "permissoes": [1]}, + # Outros papéis aqui + ] + + # Processamento das permissões e papéis + ignored_permissions = await process_permissions(session, endpoint_permissao) + ignored_roles = await process_roles(session, endpoint_papel) + + await session.commit() + + if not ignored_permissions and not ignored_roles: + print("Permissões e papéis inicializados com sucesso") + else: + if ignored_permissions: + print("Aviso: As seguintes permissões foram ignoradas devido a campos None:") + for permissao in ignored_permissions: + print(permissao) + + if ignored_roles: + print("Aviso: Os seguintes papéis foram ignorados devido a campos None:") + for papel in ignored_roles: + print(papel) diff --git a/app/scripts/initialize_permissions_roles.py b/app/scripts/initialize_permissions_roles.py new file mode 100644 index 0000000..5d96059 --- /dev/null +++ b/app/scripts/initialize_permissions_roles.py @@ -0,0 +1,231 @@ +from app.database.models import RbacPermissao, RbacPapel, RbacUser +from app.database.session import sessionmanager +from sqlalchemy import select + + +async def add_or_update_entity(session, model, filters, data): + result = await session.execute(select(model).filter_by(**filters)) + entity = result.scalars().first() + if not entity: + entity = 
model(**data) + session.add(entity) + else: + for key, value in data.items(): + setattr(entity, key, value) + return entity + + +async def add_permissions_to_role(session, papel_data, permissoes_ids): + for permissao_id in permissoes_ids: + result_permissao = await session.execute(select(RbacPermissao).filter_by(id=permissao_id)) + permissao_data = result_permissao.scalars().first() + if permissao_data and permissao_data not in papel_data.permissoes: + papel_data.permissoes.append(permissao_data) + + for permissao in papel_data.permissoes: + if permissao.id not in permissoes_ids: + papel_data.permissoes.remove(permissao) + + +async def remove_unused_entities(session, model, valid_ids, id_field="id"): + result_all = await session.execute(select(model)) + all_entities = result_all.scalars().all() + for entity in all_entities: + entity_id = str(getattr(entity, id_field)) + if entity_id not in valid_ids: + await session.delete(entity) + await session.flush() + + +async def process_permissions(session, endpoint_permissao1): + ignored_permissions = [] + for permissao in endpoint_permissao1: + if permissao["id"] is None or permissao["nome"] is None: + ignored_permissions.append(permissao) + continue + + await add_or_update_entity(session, RbacPermissao, {"id": permissao["id"]}, permissao) + permissoes_ids = [str(permissao["id"]) for permissao in endpoint_permissao1] + await remove_unused_entities(session, RbacPermissao, permissoes_ids, id_field="id") + return ignored_permissions + + +async def process_roles(session, endpoint_papel, user_id): + ignored_roles = [] + for papel in endpoint_papel: + if papel.get("nome") is None: + ignored_roles.append(papel) + continue + + papel_data = await add_or_update_entity(session, RbacPapel, {"nome": papel["nome"]}, {"nome": papel["nome"]}) + await session.flush() + await session.refresh(papel_data) + + permissoes_ids = papel["permissoes"] if papel["permissoes"] is not None else [] + await add_permissions_to_role(session, papel_data, permissoes_ids) + + # Relacionando o papel ao usuário + result_user = await session.execute(select(RbacUser).filter_by(id=user_id)) + user = result_user.scalars().first() + if papel_data not in user.papeis: + user.papeis.append(papel_data) + + papeis_nomes = [papel["nome"] for papel in endpoint_papel] + await remove_unused_entities(session, RbacPapel, papeis_nomes, id_field="nome") + return ignored_roles + + +async def initialize_permissions_roles(user_id): + async with sessionmanager.session() as session: + # Definindo as permissões + endpoint_permissao1 = [ + {"id": 1, "nome": "Permissão Total"}, + {"id": 2, "nome": "Permissão Setor Comercial"}, + {"id": 3, "nome": "Permissão RBAC"}, + {"id": 4, "nome": "Permissão Setor Estoque"}, + {"id": 5, "nome": "Permissão Setor Financeiro"}, + {"id": 30, "nome": "Permissão Relação Comercial"}, + {"id": 31, "nome": "Permissão Tipo Endereço"}, + {"id": 32, "nome": "Permissão Endereço"}, + {"id": 33, "nome": "Permissão Pessoa"}, + {"id": 34, "nome": "Permissão Usuários"}, + {"id": 35, "nome": "Permissão Papel"}, + {"id": 36, "nome": "Permissão Setor"}, + {"id": 37, "nome": "Permissão Tipo Equipamento"}, + {"id": 38, "nome": "Permissão Equipamento"}, + {"id": 39, "nome": "Permissão Itens Equipamento"}, + {"id": 40, "nome": "Permissão Manutenção Equipamento"}, + {"id": 41, "nome": "Permissão Papeis"}, + {"id": 101, "nome": "Relação Comercial Criar"}, + {"id": 102, "nome": "Relação Comercial Criar Muitos"}, + {"id": 103, "nome": "Relação Comercial Buscar Todos"}, + {"id": 104, "nome": "Relação 
Comercial Buscar Vários"}, + {"id": 105, "nome": "Relação Comercial Buscar"}, + {"id": 106, "nome": "Relação Comercial Atualizar"}, + {"id": 107, "nome": "Relação Comercial Atualizar Vários"}, + {"id": 108, "nome": "Relação Comercial Apagar"}, + {"id": 109, "nome": "Relação Comercial Apagar Vários"}, + {"id": 201, "nome": "Tipo Endereço Criar"}, + {"id": 202, "nome": "Tipo Endereço Criar Muitos"}, + {"id": 203, "nome": "Tipo Endereço Buscar Todos"}, + {"id": 204, "nome": "Tipo Endereço Buscar Vários"}, + {"id": 205, "nome": "Tipo Endereço Buscar"}, + {"id": 206, "nome": "Tipo Endereço Atualizar"}, + {"id": 207, "nome": "Tipo Endereço Atualizar Vários"}, + {"id": 208, "nome": "Tipo Endereço Apagar"}, + {"id": 209, "nome": "Tipo Endereço Apagar Vários"}, + {"id": 301, "nome": "Endereço Criar"}, + {"id": 302, "nome": "Endereço Criar Muitos"}, + {"id": 303, "nome": "Endereço Buscar Todos"}, + {"id": 304, "nome": "Endereço Buscar Vários"}, + {"id": 305, "nome": "Endereço Buscar"}, + {"id": 306, "nome": "Endereço Atualizar"}, + {"id": 307, "nome": "Endereço Atualizar Vários"}, + {"id": 308, "nome": "Endereço Apagar"}, + {"id": 309, "nome": "Endereço Apagar Vários"}, + {"id": 401, "nome": "Pessoa Criar"}, + {"id": 402, "nome": "Pessoa Criar Muitos"}, + {"id": 403, "nome": "Pessoa Buscar Todos"}, + {"id": 404, "nome": "Pessoa Buscar Vários"}, + {"id": 405, "nome": "Pessoa Buscar"}, + {"id": 406, "nome": "Pessoa Atualizar"}, + {"id": 408, "nome": "Pessoa Apagar"}, + {"id": 409, "nome": "Pessoa Apagar Vários"}, + {"id": 501, "nome": "Usuário Criar"}, + {"id": 502, "nome": "Usuário Criar Muitos"}, + {"id": 503, "nome": "Usuário Buscar Todos"}, + {"id": 504, "nome": "Usuário Buscar Vários"}, + {"id": 505, "nome": "Usuário Buscar"}, + {"id": 506, "nome": "Usuário Atualizar"}, + {"id": 507, "nome": "Usuário Atualizar Vários"}, + {"id": 508, "nome": "Usuário Apagar"}, + {"id": 509, "nome": "Usuário Apagar Vários"}, + {"id": 601, "nome": "Papel Criar"}, + {"id": 602, "nome": "Papel Criar Muitos"}, + {"id": 603, "nome": "Papel Buscar Todos"}, + {"id": 604, "nome": "Papel Buscar Vários"}, + {"id": 605, "nome": "Papel Buscar"}, + {"id": 606, "nome": "Papel Atualizar"}, + {"id": 607, "nome": "Papel Atualizar Vários"}, + {"id": 608, "nome": "Papel Apagar"}, + {"id": 609, "nome": "Papel Apagar Vários"}, + {"id": 701, "nome": "Setor Criar"}, + {"id": 702, "nome": "Setpr Criar Muitos"}, + {"id": 703, "nome": "Setor Buscar Todos"}, + {"id": 704, "nome": "Setor Buscar Vários"}, + {"id": 705, "nome": "Setor Buscar"}, + {"id": 706, "nome": "Setor Atualizar"}, + {"id": 707, "nome": "Setor Atualizar Vários"}, + {"id": 708, "nome": "Setor Apagar"}, + {"id": 709, "nome": "Setor Apagar Vários"}, + {"id": 801, "nome": "Tipo Equipamento Criar"}, + {"id": 802, "nome": "Tipo Equipamento Criar Muitos"}, + {"id": 803, "nome": "Tipo Equipamento Buscar Todos"}, + {"id": 804, "nome": "Tipo Equipamento Buscar Vários"}, + {"id": 805, "nome": "Tipo Equipamento Buscar"}, + {"id": 806, "nome": "Tipo Equipamento Atualizar"}, + {"id": 807, "nome": "Tipo Equipamento Atualizar Vários"}, + {"id": 808, "nome": "Tipo Equipamento Apagar"}, + {"id": 809, "nome": "Tipo Equipamento Apagar Vários"}, + {"id": 901, "nome": "Equipamento Criar"}, + {"id": 902, "nome": "Equipamento Criar Muitos"}, + {"id": 903, "nome": "Equipamento Buscar Todos"}, + {"id": 904, "nome": "Equipamento Buscar Vários"}, + {"id": 905, "nome": "Equipamento Buscar"}, + {"id": 906, "nome": "Equipamento Atualizar"}, + {"id": 907, "nome": "Equipamento Atualizar Vários"}, + {"id": 
908, "nome": "Equipamento Apagar"}, + {"id": 909, "nome": "Equipamento Apagar Vários"}, + {"id": 1001, "nome": "Itens Equipamento Criar"}, + {"id": 1002, "nome": "Itens Equipamento Criar Muitos"}, + {"id": 1003, "nome": "Itens Equipamento Buscar Todos"}, + {"id": 1004, "nome": "Itens Equipamento Buscar Vários"}, + {"id": 1005, "nome": "Itens Equipamento Buscar"}, + {"id": 1006, "nome": "Itens Equipamento Atualizar"}, + {"id": 1007, "nome": "Itens Equipamento Atualizar Vários"}, + {"id": 1008, "nome": "Itens Equipamento Apagar"}, + {"id": 1009, "nome": "Itens Equipamento Apagar Vários"}, + {"id": 1101, "nome": "Manutenção Equipamento Criar"}, + {"id": 1102, "nome": "Manutenção Equipamento Criar Muitos"}, + {"id": 1103, "nome": "Manutenção Equipamento Buscar Todos"}, + {"id": 1104, "nome": "Manutenção Equipamento Buscar Vários"}, + {"id": 1105, "nome": "Manutenção Equipamento Buscar"}, + {"id": 1106, "nome": "Manutenção Equipamento Atualizar"}, + {"id": 1107, "nome": "Manutenção Equipamento Atualizar Vários"}, + {"id": 1108, "nome": "Manutenção Equipamento Apagar"}, + {"id": 1109, "nome": "Manutenção Equipamento Apagar Vários"}, + {"id": 1201, "nome": "Papeis Criar"}, + {"id": 1202, "nome": "Papeis Criar Muitos"}, + {"id": 1203, "nome": "Papeis Buscar Todos"}, + {"id": 1204, "nome": "Papeis Buscar Vários"}, + {"id": 1205, "nome": "Papeis Buscar"}, + {"id": 1206, "nome": "Papeis Atualizar"}, + {"id": 1207, "nome": "Papeis Atualizar Vários"}, + {"id": 1208, "nome": "Papeis Apagar"}, + {"id": 1209, "nome": "Papeis Apagar Vários"}, + ] + + # Definindo os papéis + endpoint_papel = [ + {"nome": "Super Administrador", "permissoes": [1]}, + # Outros papéis aqui + ] + + # Processamento das permissões e papéis + ignored_permissions = await process_permissions(session, endpoint_permissao1) + ignored_roles = await process_roles(session, endpoint_papel, user_id) + + await session.commit() + + if not ignored_permissions and not ignored_roles: + print("Permissões e papéis inicializados com sucesso") + else: + if ignored_permissions: + print("Aviso: As seguintes permissões foram ignoradas devido a campos None:") + for permissao in ignored_permissions: + print(permissao) + + if ignored_roles: + print("Aviso: Os seguintes papéis foram ignorados devido a campos None:") + for papel in ignored_roles: + print(papel) diff --git a/app/scripts/initialize_pessoa.py b/app/scripts/initialize_pessoa.py new file mode 100644 index 0000000..7c5f7bb --- /dev/null +++ b/app/scripts/initialize_pessoa.py @@ -0,0 +1,60 @@ +from app.database.models import ComercialFisica +from app.database.session import sessionmanager +from sqlalchemy import select +from sqlalchemy.orm import selectinload + + +async def add_or_update_person(session, model, filters, data): + print("Executando add_or_update_person...") + + # Usando selectinload para evitar carregamento preguiçoso inesperado + result = await session.execute( + select(model).options(selectinload("*")).filter_by(**filters) + ) + entity = result.scalars().first() + + if not entity: + print("Nenhuma pessoa encontrada com os filtros fornecidos. Criando nova pessoa.") + entity = model(**data) + session.add(entity) + else: + print("Pessoa já existente encontrada. 
diff --git a/app/scripts/initialize_pessoa.py b/app/scripts/initialize_pessoa.py
new file mode 100644
index 0000000..7c5f7bb
--- /dev/null
+++ b/app/scripts/initialize_pessoa.py
@@ -0,0 +1,60 @@
+from app.database.models import ComercialFisica
+from app.database.session import sessionmanager
+from sqlalchemy import select
+from sqlalchemy.orm import selectinload
+
+
+async def add_or_update_person(session, model, filters, data):
+    print("Executando add_or_update_person...")
+
+    # Usando selectinload para evitar carregamento preguiçoso inesperado
+    result = await session.execute(
+        select(model).options(selectinload("*")).filter_by(**filters)
+    )
+    entity = result.scalars().first()
+
+    if not entity:
+        print("Nenhuma pessoa encontrada com os filtros fornecidos. Criando nova pessoa.")
+        entity = model(**data)
+        session.add(entity)
+    else:
+        print("Pessoa já existente encontrada. Atualizando informações.")
+        for key, value in data.items():
+            setattr(entity, key, value)
+
+    return entity
+
+
+async def initialize_person():
+    print("Iniciando processo de criação/atualização de pessoa...")
+    async with sessionmanager.session() as session:
+        # Dados da pessoa
+        pessoa_data = {
+            "pessoa_telefone": "00000000000",  # Exemplo de telefone
+            "pessoa_celular": "00000000000",  # Exemplo de celular
+            "pessoa_email": "admin@sonora.com",  # Exemplo de email
+            "pessoa_status": True,
+            "pessoa_tipo": "1",  # Tipo especificado
+            "fisica_nome": "Admin",  # Nome da pessoa
+            "fisica_cpf": "00000000000",  # Exemplo de CPF
+            "fisica_genero": "O",  # Gênero
+            "fisica_rg": "1.234.567",  # Exemplo de RG
+        }
+
+        print("Dados da pessoa para criação/atualização:", pessoa_data)
+
+        # Criando ou atualizando a pessoa
+        pessoa = await add_or_update_person(
+            session, ComercialFisica, {"pessoa_email": pessoa_data["pessoa_email"]}, pessoa_data
+        )
+
+        print("Pessoa criada ou atualizada, realizando commit...")
+        # Confirmando alterações no banco
+        await session.commit()
+
+        # Garantindo que os dados estão totalmente carregados
+        await session.refresh(pessoa)
+
+        # Retornando o UUID da pessoa
+        print(f"Pessoa criada com UUID: {pessoa.uuid}")
+        return pessoa
diff --git a/app/scripts/initizalize_financeiro.py b/app/scripts/initizalize_financeiro.py
new file mode 100644
index 0000000..6997995
--- /dev/null
+++ b/app/scripts/initizalize_financeiro.py
@@ -0,0 +1,48 @@
+"""
+FIN_TIPO_PAGAMENTO
+
+Tabela que armazena os tipos de pagamento: DINHEIRO, CARTÃO, CHEQUE, etc.
+Tipos padrões já cadastrados pelo sistema para toda empresa:
+01 = Dinheiro
+02 = Cheque
+03 = Cartão
+04 = Boleto
+05 = Transferência Bancária
+06 = PIX
+
+"""
+
+"""
+FIN_STATUS_PARCELA
+
+Tabela que armazena as possíveis situações de uma parcela. Status padrões:
+01 = Aberto
+02 = Quitado
+03 = Quitado Parcial
+04 = Vencido
+05 = Renegociado
+"""
+
+"""
+FIN_DOCUMENTO_ORIGEM
+
+Tabela para cadastro dos tipos de documentos que podem gerar contas a pagar ou receber:
+O campo SIGLA pode receber valores tais como: NF, CHQ, NFe, DP, NP, CTe, CT, CF, CFe.
+O campo DESCRICAO pode receber valores tais como: NOTA FISCAL | BOLETO | RECIBO | ETC.
+
+01 = NF - Nota Fiscal
+02 = NFS - Nota Fiscal de Serviço
+03 = FL - Fatura de Locação
+"""
+
+"""
+FIN_TIPO_RECEBIMENTO
+
+Tabela que armazena os tipos de recebimento: DINHEIRO, CARTÃO, CHEQUE, etc.
+Tipos padrões já cadastrados pelo sistema para toda empresa:
+01 = Dinheiro
+02 = Cheque
+03 = Cartão
+04 = Boleto
+05 = PIX
+"""
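The defaults documented in these docstrings are not yet backed by models anywhere in this commit; the module is documentation only. A minimal seeding sketch, assuming a hypothetical FinTipoPagamento-style model whose primary key is the two-digit codigo (both the model and its column names are assumptions, not taken from this patch):

# Hypothetical seeding sketch for the defaults documented above; the model
# and its fields (codigo, descricao) are assumed, not defined in this commit.
TIPOS_PAGAMENTO_PADRAO = {
    "01": "Dinheiro",
    "02": "Cheque",
    "03": "Cartão",
    "04": "Boleto",
    "05": "Transferência Bancária",
    "06": "PIX",
}


async def seed_tipos_pagamento(session, model):
    """Insere os tipos de pagamento padrão que ainda não existem (idempotente)."""
    for codigo, descricao in TIPOS_PAGAMENTO_PADRAO.items():
        existente = await session.get(model, codigo)  # assume que codigo é a PK
        if existente is None:
            session.add(model(codigo=codigo, descricao=descricao))
    await session.commit()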
diff --git a/assets/style.css b/assets/style.css
new file mode 100644
index 0000000..561524c
--- /dev/null
+++ b/assets/style.css
@@ -0,0 +1,319 @@
+body {
+  font-family: Helvetica, Arial, sans-serif;
+  font-size: 12px;
+  /* do not increase min-width as some may use split screens */
+  min-width: 800px;
+  color: #999;
+}
+
+h1 {
+  font-size: 24px;
+  color: black;
+}
+
+h2 {
+  font-size: 16px;
+  color: black;
+}
+
+p {
+  color: black;
+}
+
+a {
+  color: #999;
+}
+
+table {
+  border-collapse: collapse;
+}
+
+/******************************
+ * SUMMARY INFORMATION
+ ******************************/
+#environment td {
+  padding: 5px;
+  border: 1px solid #e6e6e6;
+  vertical-align: top;
+}
+#environment tr:nth-child(odd) {
+  background-color: #f6f6f6;
+}
+#environment ul {
+  margin: 0;
+  padding: 0 20px;
+}
+
+/******************************
+ * TEST RESULT COLORS
+ ******************************/
+span.passed,
+.passed .col-result {
+  color: green;
+}
+
+span.skipped,
+span.xfailed,
+span.rerun,
+.skipped .col-result,
+.xfailed .col-result,
+.rerun .col-result {
+  color: orange;
+}
+
+span.error,
+span.failed,
+span.xpassed,
+.error .col-result,
+.failed .col-result,
+.xpassed .col-result {
+  color: red;
+}
+
+.col-links__extra {
+  margin-right: 3px;
+}
+
+/******************************
+ * RESULTS TABLE
+ *
+ * 1. Table Layout
+ * 2. Extra
+ * 3. Sorting items
+ *
+ ******************************/
+/*------------------
+ * 1. Table Layout
+ *------------------*/
+#results-table {
+  border: 1px solid #e6e6e6;
+  color: #999;
+  font-size: 12px;
+  width: 100%;
+}
+#results-table th,
+#results-table td {
+  padding: 5px;
+  border: 1px solid #e6e6e6;
+  text-align: left;
+}
+#results-table th {
+  font-weight: bold;
+}
+
+/*------------------
+ * 2. Extra
+ *------------------*/
+.logwrapper {
+  max-height: 230px;
+  overflow-y: scroll;
+  background-color: #e6e6e6;
+}
+.logwrapper.expanded {
+  max-height: none;
+}
+.logwrapper.expanded .logexpander:after {
+  content: "collapse [-]";
+}
+.logwrapper .logexpander {
+  z-index: 1;
+  position: sticky;
+  top: 10px;
+  width: max-content;
+  border: 1px solid;
+  border-radius: 3px;
+  padding: 5px 7px;
+  margin: 10px 0 10px calc(100% - 80px);
+  cursor: pointer;
+  background-color: #e6e6e6;
+}
+.logwrapper .logexpander:after {
+  content: "expand [+]";
+}
+.logwrapper .logexpander:hover {
+  color: #000;
+  border-color: #000;
+}
+.logwrapper .log {
+  min-height: 40px;
+  position: relative;
+  top: -50px;
+  height: calc(100% + 50px);
+  border: 1px solid #e6e6e6;
+  color: black;
+  display: block;
+  font-family: "Courier New", Courier, monospace;
+  padding: 5px;
+  padding-right: 80px;
+  white-space: pre-wrap;
+}
+
+div.media {
+  border: 1px solid #e6e6e6;
+  float: right;
+  height: 240px;
+  margin: 0 5px;
+  overflow: hidden;
+  width: 320px;
+}
+
+.media-container {
+  display: grid;
+  grid-template-columns: 25px auto 25px;
+  align-items: center;
+  flex: 1 1;
+  overflow: hidden;
+  height: 200px;
+}
+
+.media-container--fullscreen {
+  grid-template-columns: 0px auto 0px;
+}
+
+.media-container__nav--right,
+.media-container__nav--left {
+  text-align: center;
+  cursor: pointer;
+}
+
+.media-container__viewport {
+  cursor: pointer;
+  text-align: center;
+  height: inherit;
+}
+.media-container__viewport img,
+.media-container__viewport video {
+  object-fit: cover;
+  width: 100%;
+  max-height: 100%;
+}
+
+.media__name,
+.media__counter {
+  display: flex;
+  flex-direction: row;
+  justify-content: space-around;
+  flex: 0 0 25px;
+  align-items: center;
+}
+
+.collapsible td:not(.col-links) {
+  cursor: pointer;
+}
+.collapsible td:not(.col-links):hover::after {
+  color: #bbb;
+  font-style: italic;
+  cursor: pointer;
+}
+
+.col-result {
+  width: 130px;
+}
+.col-result:hover::after {
+  content: " (hide details)";
+}
+
+.col-result.collapsed:hover::after {
+  content: " (show details)";
+}
+
+#environment-header h2:hover::after {
+  content: " (hide details)";
+  color: #bbb;
+  font-style: italic;
+  cursor: pointer;
+  font-size: 12px;
+}
+
+#environment-header.collapsed h2:hover::after {
+  content: " (show details)";
+  color: #bbb;
+  font-style: italic;
+  cursor: pointer;
+  font-size: 12px;
+}
+
+/*------------------
+ * 3. Sorting items
+ *------------------*/
+.sortable {
+  cursor: pointer;
+}
+.sortable.desc:after {
+  content: " ";
+  position: relative;
+  left: 5px;
+  bottom: -12.5px;
+  border: 10px solid #4caf50;
+  border-bottom: 0;
+  border-left-color: transparent;
+  border-right-color: transparent;
+}
+.sortable.asc:after {
+  content: " ";
+  position: relative;
+  left: 5px;
+  bottom: 12.5px;
+  border: 10px solid #4caf50;
+  border-top: 0;
+  border-left-color: transparent;
+  border-right-color: transparent;
+}
+
+.hidden, .summary__reload__button.hidden {
+  display: none;
+}
+
+.summary__data {
+  flex: 0 0 550px;
+}
+.summary__reload {
+  flex: 1 1;
+  display: flex;
+  justify-content: center;
+}
+.summary__reload__button {
+  flex: 0 0 300px;
+  display: flex;
+  color: white;
+  font-weight: bold;
+  background-color: #4caf50;
+  text-align: center;
+  justify-content: center;
+  align-items: center;
+  border-radius: 3px;
+  cursor: pointer;
+}
+.summary__reload__button:hover {
+  background-color: #46a049;
+}
+.summary__spacer {
+  flex: 0 0 550px;
+}
+
+.controls {
+  display: flex;
+  justify-content: space-between;
+}
+
+.filters,
+.collapse {
+  display: flex;
+  align-items: center;
+}
+.filters button,
+.collapse button {
+  color: #999;
+  border: none;
+  background: none;
+  cursor: pointer;
+  text-decoration: underline;
+}
+.filters button:hover,
+.collapse button:hover {
+  color: #ccc;
+}
+
+.filter__label {
+  margin-right: 10px;
+}
diff --git a/atualizar_tabelas_inquilinos.py b/atualizar_tabelas_inquilinos.py
new file mode 100644
index 0000000..890a2d7
--- /dev/null
+++ b/atualizar_tabelas_inquilinos.py
@@ -0,0 +1,43 @@
+import asyncio
+import subprocess
+from sqlalchemy import text
+from app.database.session import sessionmanager
+from app.config import URL_BD
+
+
+async def atualizar_todos_inquilinos():
+    # Inicializar o gerenciador de sessão
+    sessionmanager.init(URL_BD)
+    try:
+        # Buscar a lista de esquemas (nomes dos inquilinos); o SQL textual
+        # precisa ser declarado com text() para o execute() da AsyncSession
+        async with sessionmanager.session() as session:
+            result = await session.execute(
+                text(
+                    """
+                    SELECT schema_name
+                    FROM information_schema.schemata
+                    WHERE schema_name NOT IN ('public', 'information_schema', 'pg_catalog', 'shared', 'pg_toast_temp_1',
+                                              'pg_temp_1', 'pg_toast');
+                    """
+                )
+            )
+            tenants = [row[0] for row in result.fetchall()]
+
+        print(f"Encontrados {len(tenants)} inquilinos para atualizar: {tenants}")
+
+        for tenant in tenants:
+            print(f"Iniciando migração para o inquilino: {tenant}")
+            try:
+                subprocess.run(
+                    ["alembic", "-x", f"tenant={tenant}", "upgrade", "head"],
+                    check=True, capture_output=True, text=True
+                )
+                print(f"Migração para o inquilino '{tenant}' concluída com sucesso.")
+            except subprocess.CalledProcessError as e:
+                print(f"Erro durante migração para o inquilino '{tenant}':")
+                print("Saída padrão (stdout):", e.stdout)
+                print("Erro padrão (stderr):", e.stderr)
+                print("Código de saída:", e.returncode)
+    finally:
+        await sessionmanager.close()
+
+
+if __name__ == "__main__":
+    asyncio.run(atualizar_todos_inquilinos())
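The scripts above hand the target schema to Alembic via `-x tenant=...`; what interprets that flag is alembic/env.py, which is part of this commit but not shown in this excerpt. A minimal sketch of the usual mechanism, under the assumption of a schema-per-tenant setup — the actual env.py may be organized quite differently:

# Hypothetical sketch of how an env.py can read "-x tenant=..."; only
# illustrative, and only meaningful when executed by Alembic itself.
from alembic import context


def get_target_schema() -> str:
    # "alembic -x tenant=acme upgrade head" arrives here as {"tenant": "acme"}
    x_args = context.get_x_argument(as_dictionary=True)
    return x_args.get("tenant", "default_tenant")


def run_migrations_for_schema(connection) -> None:
    schema = get_target_schema()
    context.configure(
        # route unqualified table names into the tenant schema
        connection=connection.execution_options(schema_translate_map={None: schema}),
        version_table_schema=schema,  # keep alembic_version inside the tenant schema
    )
    with context.begin_transaction():
        context.run_migrations()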
diff --git a/check_db.py b/check_db.py
new file mode 100644
index 0000000..966f216
--- /dev/null
+++ b/check_db.py
@@ -0,0 +1,55 @@
+# check_db.py
+import os
+import sys
+import asyncio
+from urllib.parse import urlparse
+
+import asyncpg
+
+async def check_database_connection():
+    DATABASE_URL = os.getenv("URL_BD")
+
+    if not DATABASE_URL:
+        print("Erro: Variável de ambiente URL_BD não definida.", file=sys.stderr)
+        sys.exit(1)
+
+    # Analisar a URL para obter os componentes
+    parsed_url = urlparse(DATABASE_URL)
+    DB_HOST = parsed_url.hostname
+    DB_PORT = parsed_url.port or 5432  # Default PostgreSQL port
+    DB_USER = parsed_url.username
+    DB_PASSWORD = parsed_url.password
+    DB_NAME = parsed_url.path.lstrip('/')
+
+    print(f"Tentando conectar ao banco de dados em {DB_HOST}:{DB_PORT}/{DB_NAME} como {DB_USER}...")
+
+    max_retries = 30  # Tentar por até 30 segundos (30 * 1s sleep)
+    retry_interval = 1
+
+    for i in range(max_retries):
+        try:
+            # Tentar conectar usando asyncpg
+            conn = await asyncpg.connect(
+                host=DB_HOST,
+                port=DB_PORT,
+                user=DB_USER,
+                password=DB_PASSWORD,
+                database=DB_NAME,
+                timeout=5  # Timeout para a tentativa de conexão
+            )
+            await conn.close()
+            print("Conexão com o banco de dados estabelecida com sucesso!")
+            sys.exit(0)  # Sucesso!
+        except asyncpg.exceptions.PostgresError as e:  # Captura erros específicos do asyncpg
+            print(f"Erro de conexão com o banco de dados (tentativa {i+1}/{max_retries}): {e}", file=sys.stderr)
+            await asyncio.sleep(retry_interval)  # Usar await asyncio.sleep para sleep assíncrono
+        except Exception as e:  # Captura outros erros inesperados
+            print(f"Erro inesperado durante a conexão (tentativa {i+1}/{max_retries}): {e}", file=sys.stderr)
+            await asyncio.sleep(retry_interval)
+
+    print("Falha ao conectar ao banco de dados após várias tentativas.", file=sys.stderr)
+    sys.exit(1)  # Falha
+
+if __name__ == "__main__":
+    asyncio.run(check_database_connection())  # Executa a função assíncrona
\ No newline at end of file
diff --git a/docker.txt b/docker.txt
new file mode 100644
index 0000000..6752dc7
--- /dev/null
+++ b/docker.txt
@@ -0,0 +1,20 @@
+Ir para a raiz do projeto e fazer login no registry: docker login registry.sonoraav.com.br
+
+comando 1 : docker build -t registry.sonoraav.com.br/back-end-evento-app:latest .
+comando 2 : $VERSION = '0.0.6'
+comando 3 : docker tag `
+    registry.sonoraav.com.br/back-end-evento-app:latest `
+    "registry.sonoraav.com.br/back-end-evento-app:$VERSION"
+comando 4 : docker push "registry.sonoraav.com.br/back-end-evento-app:$VERSION"
+comando 5 : docker push registry.sonoraav.com.br/back-end-evento-app:latest
+comando 6 : curl.exe -k -u registry:Sonora@2015 `
+    https://registry.sonoraav.com.br/v2/_catalog `
+    -UseBasicParsing
+comando 7 : curl.exe -k -u registry:Sonora@2015 `
+    https://registry.sonoraav.com.br/v2/back-end-evento-app/tags/list `
+    -UseBasicParsing
+
+
+Acessar o código dentro do contêiner:
+docker ps --filter "name=fastapi"
+docker exec -it NAME sh
\ No newline at end of file
diff --git a/iniciar.txt b/iniciar.txt
new file mode 100644
index 0000000..b42c61b
--- /dev/null
+++ b/iniciar.txt
@@ -0,0 +1,6 @@
+Para criar o Banco de Dados:
+  - Primeiro Comando
+    - alembic init -t async alembic
+  - Com o models configurado
+    - alembic revision --autogenerate -m "Adding user model"
+    - alembic upgrade head
\ No newline at end of file
diff --git a/iniciar_multi_tenant.txt b/iniciar_multi_tenant.txt
new file mode 100644
index 0000000..f061887
--- /dev/null
+++ b/iniciar_multi_tenant.txt
@@ -0,0 +1,14 @@
+Executar o arquivo iniciar_permissoes_e_papeis.py
+  - Ele vai criar o esquema Shared
+  - Ele vai criar o esquema modelo de inquilinos
+  - Por fim, vai cadastrar as permissões e o papel de Super Usuário
+
+Criação de um Novo Inquilino
+  - Executar o script de configuração passando os parâmetros para cadastro do novo cliente:
+    python novo_inquilino.py --nome 'nome do cliente' --email "email" --password "Senha" --doc "CPF/CNPJ"
+
+Atualizar Banco de Dados
+  - Gerar a Migração
+    alembic -x tenant=default_tenant revision -m "nome migracao" --autogenerate
+Atualizar Shared
+    alembic -x special_schema=shared revision -m "Initial Migration Shared" --autogenerate
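The setup notes above rely on one PostgreSQL schema per tenant plus a shared schema. A minimal sketch of what pinning a session to a tenant schema at runtime can look like; every name here (URL, session factory, helper) is an assumption for illustration, since the real logic lives in app/multi_tenant/ and app/database/session.py, which are not shown in this excerpt:

# Hypothetical sketch of schema-per-tenant session scoping.
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine

engine = create_async_engine("postgresql+asyncpg://user:pass@localhost/db")  # URL assumida
SessionLocal = async_sessionmaker(engine, expire_on_commit=False)


async def tenant_session(schema: str) -> AsyncSession:
    """Abre uma sessão cujas tabelas não qualificadas resolvem para o esquema do inquilino."""
    session = SessionLocal()
    # 'shared' permanece no search_path para que as tabelas compartilhadas continuem visíveis
    await session.execute(text(f'SET search_path TO "{schema}", shared'))
    return session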
"Initial Migration Shared" --autogenerate diff --git a/iniciar_permissoes_e_papeis.py b/iniciar_permissoes_e_papeis.py new file mode 100644 index 0000000..0794a12 --- /dev/null +++ b/iniciar_permissoes_e_papeis.py @@ -0,0 +1,50 @@ +import asyncio +import subprocess +from app.scripts.initialize_permissions import initialize_permissions # Importe a função correta +from app.database.session import sessionmanager +from app.config import URL_BD + + +def alembic_upgrade(): + try: + print("Iniciando Migrações Alembic das Tabelas Compartilhadas") + shared = subprocess.run( + ["alembic", "-x", "special_schema=shared", "upgrade", "head"], + check=True, capture_output=True, text=True) + print("Migração Alembic das Tabelas Compartilhadas finalizado com sucesso.") + print(shared.stdout) + print("Iniciando Migrações Alembic das Tabelas Modelo dos Inquilinos") + default_tenant = subprocess.run(["alembic", "-x", "tenant=default_tenant", + "upgrade", "head"], check=True, capture_output=True, text=True) + print("Migração Alembic das Tabelas Modelo dos Inquilinos finalizado com sucesso.") + print(default_tenant.stdout) + except subprocess.CalledProcessError as e: + print("Erro durante Migrações Alembic.") + print("Erro na Migração Alembic:") + print("Saída padrão (stdout):", e.stdout) + print("Erro padrão (stderr):", e.stderr) + print("Código de saída:", e.returncode) + raise + + +async def main(): + try: + alembic_upgrade() + except Exception as e: + print(f"Erro na Migração Alembic: {e}") + return + + # Inicializar o gerenciador de sessão + sessionmanager.init(URL_BD) + + try: + # Executar a função para inicializar permissões e papéis + await initialize_permissions() + finally: + # Fechar o gerenciador de sessão + await sessionmanager.close() + + +if __name__ == "__main__": + # Rodar o script assíncrono + asyncio.run(main()) diff --git a/novo_inquilino.py b/novo_inquilino.py new file mode 100644 index 0000000..c59dd0a --- /dev/null +++ b/novo_inquilino.py @@ -0,0 +1,45 @@ +import asyncio +import subprocess +from app.multi_tenant.criar_tenant import tenant_create +from app.database.session import sessionmanager +from app.config import URL_BD + + +async def main(nome: str, email: str, password: str, cpf_cnpj: str): + # Inicializar o gerenciador de sessão + sessionmanager.init(URL_BD) + cliente = None + try: + print(f"Iniciando a configuração de um novo Cliente '{nome}'...") + cliente = await tenant_create(nome=nome, email=email, password=password, cpf_cnpj=cpf_cnpj) + print(f"Cliente '{nome}' criado com sucesso!") + except Exception as e: + print(f"Erro ao criar o tenant: {e}") + finally: + await sessionmanager.close() + try: + print("Iniciando Migrações Alembic das Tabelas do Cliente") + + default_tenant = subprocess.run(["alembic", "-x", f"tenant={cliente}", "upgrade", "head"], + check=True, capture_output=True, text=True) + print("Migração Alembic das Tabelas Modelo dos Inquilinos finalizado com sucesso.") + print(default_tenant.stdout) + except subprocess.CalledProcessError as e: + print("Erro durante Migrações Alembic.") + print("Erro na Migração Alembic:") + print("Saída padrão (stdout):", e.stdout) + print("Erro padrão (stderr):", e.stderr) + print("Código de saída:", e.returncode) + raise + +if __name__ == "__main__": + import argparse + + parser = argparse.ArgumentParser(description="Criar novo tenant") + parser.add_argument("--nome", required=True, help="Nome do Cliente") + parser.add_argument("--email", required=True, help="Email do usuário inicial") + parser.add_argument("--password", 
required=True, help="Senha do usuário inicial") + parser.add_argument("--doc", required=True, help="CPF ou CNPJ") + args = parser.parse_args() + + asyncio.run(main(nome=args.nome, email=args.email, password=args.password, cpf_cnpj=args.doc,)) diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..8d27d5d --- /dev/null +++ b/poetry.lock @@ -0,0 +1,2014 @@ +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. + +[[package]] +name = "alembic" +version = "1.13.3" +description = "A database migration tool for SQLAlchemy." +optional = false +python-versions = ">=3.8" +files = [ + {file = "alembic-1.13.3-py3-none-any.whl", hash = "sha256:908e905976d15235fae59c9ac42c4c5b75cfcefe3d27c0fbf7ae15a37715d80e"}, + {file = "alembic-1.13.3.tar.gz", hash = "sha256:203503117415561e203aa14541740643a611f641517f0209fcae63e9fa09f1a2"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["backports.zoneinfo"] + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.6.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.9" +files = [ + {file = "anyio-4.6.0-py3-none-any.whl", hash = "sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a"}, + {file = "anyio-4.6.0.tar.gz", hash = "sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] + +[[package]] +name = "argon2-cffi" +version = "23.1.0" +description = "Argon2 for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, + {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, +] + +[package.dependencies] +argon2-cffi-bindings = "*" + +[package.extras] +dev = ["argon2-cffi[tests,typing]", "tox (>4)"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] +tests = ["hypothesis", "pytest"] +typing = ["mypy"] + +[[package]] +name = "argon2-cffi-bindings" +version = "21.2.0" +description = "Low-level CFFI bindings for Argon2" +optional = false +python-versions = ">=3.6" +files = [ + {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = 
"argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +] + +[package.dependencies] +cffi = ">=1.0.1" + +[package.extras] +dev = ["cogapp", "pre-commit", "pytest", "wheel"] +tests = ["pytest"] + +[[package]] +name = "asyncio" +version = "3.4.3" +description = "reference 
implementation of PEP 3156" +optional = false +python-versions = "*" +files = [ + {file = "asyncio-3.4.3-cp33-none-win32.whl", hash = "sha256:b62c9157d36187eca799c378e572c969f0da87cd5fc42ca372d92cdb06e7e1de"}, + {file = "asyncio-3.4.3-cp33-none-win_amd64.whl", hash = "sha256:c46a87b48213d7464f22d9a497b9eef8c1928b68320a2fa94240f969f6fec08c"}, + {file = "asyncio-3.4.3-py3-none-any.whl", hash = "sha256:c4d18b22701821de07bd6aea8b53d21449ec0ec5680645e5317062ea21817d2d"}, + {file = "asyncio-3.4.3.tar.gz", hash = "sha256:83360ff8bc97980e4ff25c964c7bd3923d333d177aa4f7fb736b019f26c7cb41"}, +] + +[[package]] +name = "asyncpg" +version = "0.29.0" +description = "An asyncio PostgreSQL driver" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "asyncpg-0.29.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72fd0ef9f00aeed37179c62282a3d14262dbbafb74ec0ba16e1b1864d8a12169"}, + {file = "asyncpg-0.29.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52e8f8f9ff6e21f9b39ca9f8e3e33a5fcdceaf5667a8c5c32bee158e313be385"}, + {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e6823a7012be8b68301342ba33b4740e5a166f6bbda0aee32bc01638491a22"}, + {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:746e80d83ad5d5464cfbf94315eb6744222ab00aa4e522b704322fb182b83610"}, + {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ff8e8109cd6a46ff852a5e6bab8b0a047d7ea42fcb7ca5ae6eaae97d8eacf397"}, + {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97eb024685b1d7e72b1972863de527c11ff87960837919dac6e34754768098eb"}, + {file = "asyncpg-0.29.0-cp310-cp310-win32.whl", hash = "sha256:5bbb7f2cafd8d1fa3e65431833de2642f4b2124be61a449fa064e1a08d27e449"}, + {file = "asyncpg-0.29.0-cp310-cp310-win_amd64.whl", hash = "sha256:76c3ac6530904838a4b650b2880f8e7af938ee049e769ec2fba7cd66469d7772"}, + {file = "asyncpg-0.29.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4900ee08e85af01adb207519bb4e14b1cae8fd21e0ccf80fac6aa60b6da37b4"}, + {file = "asyncpg-0.29.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a65c1dcd820d5aea7c7d82a3fdcb70e096f8f70d1a8bf93eb458e49bfad036ac"}, + {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b52e46f165585fd6af4863f268566668407c76b2c72d366bb8b522fa66f1870"}, + {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc600ee8ef3dd38b8d67421359779f8ccec30b463e7aec7ed481c8346decf99f"}, + {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:039a261af4f38f949095e1e780bae84a25ffe3e370175193174eb08d3cecab23"}, + {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6feaf2d8f9138d190e5ec4390c1715c3e87b37715cd69b2c3dfca616134efd2b"}, + {file = "asyncpg-0.29.0-cp311-cp311-win32.whl", hash = "sha256:1e186427c88225ef730555f5fdda6c1812daa884064bfe6bc462fd3a71c4b675"}, + {file = "asyncpg-0.29.0-cp311-cp311-win_amd64.whl", hash = "sha256:cfe73ffae35f518cfd6e4e5f5abb2618ceb5ef02a2365ce64f132601000587d3"}, + {file = "asyncpg-0.29.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6011b0dc29886ab424dc042bf9eeb507670a3b40aece3439944006aafe023178"}, + {file = "asyncpg-0.29.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b544ffc66b039d5ec5a7454667f855f7fec08e0dfaf5a5490dfafbb7abbd2cfb"}, + {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d84156d5fb530b06c493f9e7635aa18f518fa1d1395ef240d211cb563c4e2364"}, + {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54858bc25b49d1114178d65a88e48ad50cb2b6f3e475caa0f0c092d5f527c106"}, + {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bde17a1861cf10d5afce80a36fca736a86769ab3579532c03e45f83ba8a09c59"}, + {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:37a2ec1b9ff88d8773d3eb6d3784dc7e3fee7756a5317b67f923172a4748a175"}, + {file = "asyncpg-0.29.0-cp312-cp312-win32.whl", hash = "sha256:bb1292d9fad43112a85e98ecdc2e051602bce97c199920586be83254d9dafc02"}, + {file = "asyncpg-0.29.0-cp312-cp312-win_amd64.whl", hash = "sha256:2245be8ec5047a605e0b454c894e54bf2ec787ac04b1cb7e0d3c67aa1e32f0fe"}, + {file = "asyncpg-0.29.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0009a300cae37b8c525e5b449233d59cd9868fd35431abc470a3e364d2b85cb9"}, + {file = "asyncpg-0.29.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cad1324dbb33f3ca0cd2074d5114354ed3be2b94d48ddfd88af75ebda7c43cc"}, + {file = "asyncpg-0.29.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:012d01df61e009015944ac7543d6ee30c2dc1eb2f6b10b62a3f598beb6531548"}, + {file = "asyncpg-0.29.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000c996c53c04770798053e1730d34e30cb645ad95a63265aec82da9093d88e7"}, + {file = "asyncpg-0.29.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e0bfe9c4d3429706cf70d3249089de14d6a01192d617e9093a8e941fea8ee775"}, + {file = "asyncpg-0.29.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:642a36eb41b6313ffa328e8a5c5c2b5bea6ee138546c9c3cf1bffaad8ee36dd9"}, + {file = "asyncpg-0.29.0-cp38-cp38-win32.whl", hash = "sha256:a921372bbd0aa3a5822dd0409da61b4cd50df89ae85150149f8c119f23e8c408"}, + {file = "asyncpg-0.29.0-cp38-cp38-win_amd64.whl", hash = "sha256:103aad2b92d1506700cbf51cd8bb5441e7e72e87a7b3a2ca4e32c840f051a6a3"}, + {file = "asyncpg-0.29.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5340dd515d7e52f4c11ada32171d87c05570479dc01dc66d03ee3e150fb695da"}, + {file = "asyncpg-0.29.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e17b52c6cf83e170d3d865571ba574577ab8e533e7361a2b8ce6157d02c665d3"}, + {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f100d23f273555f4b19b74a96840aa27b85e99ba4b1f18d4ebff0734e78dc090"}, + {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48e7c58b516057126b363cec8ca02b804644fd012ef8e6c7e23386b7d5e6ce83"}, + {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f9ea3f24eb4c49a615573724d88a48bd1b7821c890c2effe04f05382ed9e8810"}, + {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8d36c7f14a22ec9e928f15f92a48207546ffe68bc412f3be718eedccdf10dc5c"}, + {file = "asyncpg-0.29.0-cp39-cp39-win32.whl", hash = "sha256:797ab8123ebaed304a1fad4d7576d5376c3a006a4100380fb9d517f0b59c1ab2"}, + {file = "asyncpg-0.29.0-cp39-cp39-win_amd64.whl", hash = "sha256:cce08a178858b426ae1aa8409b5cc171def45d4293626e7aa6510696d46decd8"}, + {file = "asyncpg-0.29.0.tar.gz", hash = "sha256:d1c49e1f44fffafd9a55e1a9b101590859d881d639ea2922516f5d9c512d354e"}, +] + +[package.extras] +docs = ["Sphinx (>=5.3.0,<5.4.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["flake8 (>=6.1,<7.0)", "uvloop (>=0.15.3)"] + +[[package]] +name = "attrs" +version = "24.2.0" +description = 
"Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, +] + +[package.extras] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] + +[[package]] +name = "bcrypt" +version = "4.1.2" +description = "Modern password hashing for your software and your servers" +optional = false +python-versions = ">=3.7" +files = [ + {file = "bcrypt-4.1.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ac621c093edb28200728a9cca214d7e838529e557027ef0581685909acd28b5e"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea505c97a5c465ab8c3ba75c0805a102ce526695cd6818c6de3b1a38f6f60da1"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57fa9442758da926ed33a91644649d3e340a71e2d0a5a8de064fb621fd5a3326"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eb3bd3321517916696233b5e0c67fd7d6281f0ef48e66812db35fc963a422a1c"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6cad43d8c63f34b26aef462b6f5e44fdcf9860b723d2453b5d391258c4c8e966"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:44290ccc827d3a24604f2c8bcd00d0da349e336e6503656cb8192133e27335e2"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:732b3920a08eacf12f93e6b04ea276c489f1c8fb49344f564cca2adb663b3e4c"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1c28973decf4e0e69cee78c68e30a523be441972c826703bb93099868a8ff5b5"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b8df79979c5bae07f1db22dcc49cc5bccf08a0380ca5c6f391cbb5790355c0b0"}, + {file = "bcrypt-4.1.2-cp37-abi3-win32.whl", hash = "sha256:fbe188b878313d01b7718390f31528be4010fed1faa798c5a1d0469c9c48c369"}, + {file = "bcrypt-4.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:9800ae5bd5077b13725e2e3934aa3c9c37e49d3ea3d06318010aa40f54c63551"}, + {file = "bcrypt-4.1.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:71b8be82bc46cedd61a9f4ccb6c1a493211d031415a34adde3669ee1b0afbb63"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e3c6642077b0c8092580c819c1684161262b2e30c4f45deb000c38947bf483"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:387e7e1af9a4dd636b9505a465032f2f5cb8e61ba1120e79a0e1cd0b512f3dfc"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = 
"sha256:f70d9c61f9c4ca7d57f3bfe88a5ccf62546ffbadf3681bb1e268d9d2e41c91a7"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2a298db2a8ab20056120b45e86c00a0a5eb50ec4075b6142db35f593b97cb3fb"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ba55e40de38a24e2d78d34c2d36d6e864f93e0d79d0b6ce915e4335aa81d01b1"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3566a88234e8de2ccae31968127b0ecccbb4cddb629da744165db72b58d88ca4"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b90e216dc36864ae7132cb151ffe95155a37a14e0de3a8f64b49655dd959ff9c"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:69057b9fc5093ea1ab00dd24ede891f3e5e65bee040395fb1e66ee196f9c9b4a"}, + {file = "bcrypt-4.1.2-cp39-abi3-win32.whl", hash = "sha256:02d9ef8915f72dd6daaef40e0baeef8a017ce624369f09754baf32bb32dba25f"}, + {file = "bcrypt-4.1.2-cp39-abi3-win_amd64.whl", hash = "sha256:be3ab1071662f6065899fe08428e45c16aa36e28bc42921c4901a191fda6ee42"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d75fc8cd0ba23f97bae88a6ec04e9e5351ff3c6ad06f38fe32ba50cbd0d11946"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a97e07e83e3262599434816f631cc4c7ca2aa8e9c072c1b1a7fec2ae809a1d2d"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e51c42750b7585cee7892c2614be0d14107fad9581d1738d954a262556dd1aab"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba4e4cc26610581a6329b3937e02d319f5ad4b85b074846bf4fef8a8cf51e7bb"}, + {file = "bcrypt-4.1.2.tar.gz", hash = "sha256:33313a1200a3ae90b75587ceac502b048b840fc69e7f7a0905b5f87fac7a1258"}, +] + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + +[[package]] +name = "boto3" +version = "1.35.90" +description = "The AWS SDK for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "boto3-1.35.90-py3-none-any.whl", hash = "sha256:b0874233057995a8f0c813f5b45a36c09630e74c43d7a7c64db2feef2915d493"}, + {file = "boto3-1.35.90.tar.gz", hash = "sha256:dc56caaaab2157a4bfc109c88b50cd032f3ac66c06d17f8ee335b798eaf53e5c"}, +] + +[package.dependencies] +botocore = ">=1.35.90,<1.36.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.10.0,<0.11.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.35.90" +description = "Low-level, data-driven core of boto 3." +optional = false +python-versions = ">=3.8" +files = [ + {file = "botocore-1.35.90-py3-none-any.whl", hash = "sha256:51dcbe1b32e2ac43dac17091f401a00ce5939f76afe999081802009cce1e92e4"}, + {file = "botocore-1.35.90.tar.gz", hash = "sha256:f007f58e8e3c1ad0412a6ddfae40ed92a7bca571c068cb959902bcf107f2ae48"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} + +[package.extras] +crt = ["awscrt (==0.22.0)"] + +[[package]] +name = "certifi" +version = "2024.8.30" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, +] + +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = 
"cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = 
"sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.6.1" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = 
"coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = 
"coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, +] + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "cryptography" +version = "43.0.1" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, + {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, + {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, + {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, + {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, + {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, + {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "dnspython" +version = "2.6.1" +description = "DNS toolkit" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, + {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, +] + +[package.extras] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=41)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=0.9.25)"] +idna = ["idna (>=3.6)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] + +[[package]] +name = "email-validator" +version = "2.1.2" +description = "A robust email address syntax and deliverability validation library." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "email_validator-2.1.2-py3-none-any.whl", hash = "sha256:d89f6324e13b1e39889eab7f9ca2f91dc9aebb6fa50a6d8bd4329ab50f251115"}, + {file = "email_validator-2.1.2.tar.gz", hash = "sha256:14c0f3d343c4beda37400421b39fa411bbe33a75df20825df73ad53e06a9f04c"}, +] + +[package.dependencies] +dnspython = ">=2.0.0" +idna = ">=2.0.0" + +[[package]] +name = "fastapi" +version = "0.112.4" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi-0.112.4-py3-none-any.whl", hash = "sha256:6d4f9c3301825d4620665cace8e2bc34e303f61c05a5382d1d61a048ea7f2f37"}, + {file = "fastapi-0.112.4.tar.gz", hash = "sha256:b1f72e1f72afe7902ccd639ba320abb5d57a309804f45c10ab0ce3693cadeb33"}, +] + +[package.dependencies] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +starlette = ">=0.37.2,<0.39.0" +typing-extensions = ">=4.8.0" + +[package.extras] +all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=2.11.2)", "python-multipart (>=0.0.7)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = "fastapi-cli" +version = "0.0.5" +description = "Run and manage FastAPI apps from the command line with FastAPI CLI. 🚀" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi_cli-0.0.5-py3-none-any.whl", hash = "sha256:e94d847524648c748a5350673546bbf9bcaeb086b33c24f2e82e021436866a46"}, + {file = "fastapi_cli-0.0.5.tar.gz", hash = "sha256:d30e1239c6f46fcb95e606f02cdda59a1e2fa778a54b64686b3ff27f6211ff9f"}, +] + +[package.dependencies] +typer = ">=0.12.3" +uvicorn = {version = ">=0.15.0", extras = ["standard"]} + +[package.extras] +standard = ["uvicorn[standard] (>=0.15.0)"] + +[[package]] +name = "fastapi-users" +version = "13.0.0" +description = "Ready-to-use and customizable users management for FastAPI" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi_users-13.0.0-py3-none-any.whl", hash = "sha256:e6246529e3080a5b50e5afeed1e996663b661f1dc791a1ac478925cb5bfc0fa0"}, + {file = "fastapi_users-13.0.0.tar.gz", hash = "sha256:b397c815b7051c8fd4b560fbeee707acd28e00bd3e8f25c292ad158a1e47e884"}, +] + +[package.dependencies] +email-validator = ">=1.1.0,<2.2" +fastapi = ">=0.65.2" +makefun = ">=1.11.2,<2.0.0" +pwdlib = {version = "0.2.0", extras = ["argon2", "bcrypt"]} +pyjwt = {version = "2.8.0", extras = ["crypto"]} +python-multipart = "0.0.9" + +[package.extras] +beanie = ["fastapi-users-db-beanie (>=3.0.0)"] +oauth = ["httpx-oauth (>=0.13)"] +redis = ["redis (>=4.3.3,<6.0.0)"] +sqlalchemy = ["fastapi-users-db-sqlalchemy (>=6.0.0)"] + +[[package]] +name = "fastapi-users-db-sqlalchemy" +version = "6.0.1" +description = "FastAPI Users database adapter for SQLAlchemy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi_users_db_sqlalchemy-6.0.1-py3-none-any.whl", hash = "sha256:d1050ec31eb75e8c4fa9abafa4addaf0baf5c97afeea2f0f910ea55e2451fcad"}, + {file = "fastapi_users_db_sqlalchemy-6.0.1.tar.gz", hash = 
"sha256:f0ef9fe3250453712d25c13170700c80fa205867ce7add7ef391c384ec27cbe1"}, +] + +[package.dependencies] +fastapi-users = ">=10.0.0" +sqlalchemy = {version = ">=2.0.0,<2.1.0", extras = ["asyncio"]} + +[[package]] +name = "greenlet" +version = "3.1.1" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", 
hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = 
"greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "h2" +version = "4.1.0" +description = "HTTP/2 State-Machine based protocol implementation" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, + {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, +] + +[package.dependencies] +hpack = ">=4.0,<5" +hyperframe = ">=6.0,<7" + +[[package]] +name = "hpack" +version = "4.0.0" +description = "Pure-Python HPACK header compression" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, + {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, +] + +[[package]] +name = "httpcore" +version = "1.0.6" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, + {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httptools" +version = "0.6.1" +description = "A collection of framework independent HTTP protocol utils." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, + {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, + {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, + {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, + {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, + {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, + {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, + {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, + {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, + {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, + {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, + {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, + {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, + {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, + {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, + {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, + {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, + {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, + {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, + {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, + {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, + {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, + {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, + {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, + {file = "httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, + {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, + {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, + {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, + {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, + {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, + {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, + {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, + {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, + {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, + {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, + {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, +] + +[package.extras] +test = ["Cython (>=0.29.24,<0.30.0)"] + +[[package]] +name = "httpx" +version = "0.27.2" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""} +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "hyperframe" +version = "6.0.1" +description = "HTTP/2 framing layer for Python" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, + {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, +] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "makefun" +version = "1.15.6" +description = "Small library to dynamically create python functions." +optional = false +python-versions = "*" +files = [ + {file = "makefun-1.15.6-py2.py3-none-any.whl", hash = "sha256:e69b870f0bb60304765b1e3db576aaecf2f9b3e5105afe8cfeff8f2afe6ad067"}, + {file = "makefun-1.15.6.tar.gz", hash = "sha256:26bc63442a6182fb75efed8b51741dd2d1db2f176bec8c64e20a586256b8f149"}, +] + +[[package]] +name = "mako" +version = "1.3.5" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, + {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = 
"MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = 
"MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "outcome" +version = "1.3.0.post0" +description = "Capture the outcome of Python function calls." +optional = false +python-versions = ">=3.7" +files = [ + {file = "outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b"}, + {file = "outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8"}, +] + +[package.dependencies] +attrs = ">=19.2.0" + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pwdlib" +version = "0.2.0" +description = "Modern password hashing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pwdlib-0.2.0-py3-none-any.whl", hash = "sha256:be53812012ab66795a57ac9393a59716ae7c2b60841ed453eb1262017fdec144"}, + {file = "pwdlib-0.2.0.tar.gz", hash = "sha256:b1bdafc064310eb6d3d07144a210267063ab4f45ac73a97be948e6589f74e861"}, +] + +[package.dependencies] +argon2-cffi = {version = "23.1.0", optional = true, markers = "extra == \"argon2\""} +bcrypt = {version = "4.1.2", optional = true, markers = "extra == \"bcrypt\""} + +[package.extras] +argon2 = ["argon2-cffi (==23.1.0)"] +bcrypt = ["bcrypt (==4.1.2)"] + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pydantic" +version = "2.9.2" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file 
= "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.23.4" +typing-extensions = [ + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, +] + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] + +[[package]] +name = "pydantic-core" +version = "2.23.4" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, + {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, + {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, + {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, + {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, + {file = 
"pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, + {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyjwt" +version = "2.8.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, + {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, +] + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "pytest" +version = "8.3.3" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.24.0" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"}, + {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"}, +] + 
+[package.dependencies] +pytest = ">=8.2,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pytest-cov" +version = "5.0.0" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, + {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + +[[package]] +name = "pytest-html" +version = "4.1.1" +description = "pytest plugin for generating HTML reports" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_html-4.1.1-py3-none-any.whl", hash = "sha256:c8152cea03bd4e9bee6d525573b67bbc6622967b72b9628dda0ea3e2a0b5dd71"}, + {file = "pytest_html-4.1.1.tar.gz", hash = "sha256:70a01e8ae5800f4a074b56a4cb1025c8f4f9b038bba5fe31e3c98eb996686f07"}, +] + +[package.dependencies] +jinja2 = ">=3.0.0" +pytest = ">=7.0.0" +pytest-metadata = ">=2.0.0" + +[package.extras] +docs = ["pip-tools (>=6.13.0)"] +test = ["assertpy (>=1.1)", "beautifulsoup4 (>=4.11.1)", "black (>=22.1.0)", "flake8 (>=4.0.1)", "pre-commit (>=2.17.0)", "pytest-mock (>=3.7.0)", "pytest-rerunfailures (>=11.1.2)", "pytest-xdist (>=2.4.0)", "selenium (>=4.3.0)", "tox (>=3.24.5)"] + +[[package]] +name = "pytest-metadata" +version = "3.1.1" +description = "pytest plugin for test session metadata" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_metadata-3.1.1-py3-none-any.whl", hash = "sha256:c8e0844db684ee1c798cfa38908d20d67d0463ecb6137c72e91f418558dd5f4b"}, + {file = "pytest_metadata-3.1.1.tar.gz", hash = "sha256:d2a29b0355fbc03f168aa96d41ff88b1a3b44a3b02acbe491801c98a048017c8"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[package.extras] +test = ["black (>=22.1.0)", "flake8 (>=4.0.1)", "pre-commit (>=2.17.0)", "tox (>=3.24.5)"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-multipart" +version = "0.0.9" +description = "A streaming multipart parser for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python_multipart-0.0.9-py3-none-any.whl", hash = 
"sha256:97ca7b8ea7b05f977dc3849c3ba99d51689822fab725c3703af7c866a0c2b215"}, + {file = "python_multipart-0.0.9.tar.gz", hash = "sha256:03f54688c663f1b7977105f021043b0793151e4cb1c1a9d4a11fc13d622c4026"}, +] + +[package.extras] +dev = ["atomicwrites (==1.4.1)", "attrs (==23.2.0)", "coverage (==7.4.1)", "hatch", "invoke (==2.2.0)", "more-itertools (==10.2.0)", "pbr (==6.0.0)", "pluggy (==1.4.0)", "py (==1.11.0)", "pytest (==8.0.0)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.2.0)", "pyyaml (==6.0.1)", "ruff (==0.2.1)"] + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = 
"PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = 
"sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "rich" +version = "13.9.4" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, + {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "s3transfer" +version = "0.10.4" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">=3.8" +files = [ + {file = "s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:244a76a24355363a68164241438de1b72f8781664920260c48465896b712a41e"}, + {file = "s3transfer-0.10.4.tar.gz", hash = "sha256:29edc09801743c21eb5ecbc617a152df41d3c287f67b615f73e5f750583666a7"}, +] + +[package.dependencies] +botocore = ">=1.33.2,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] + +[[package]] +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "sniffio" 
+version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "sortedcontainers" +version = "2.4.0" +description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +optional = false +python-versions = "*" +files = [ + {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, + {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.35" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67219632be22f14750f0d1c70e62f204ba69d28f62fd6432ba05ab295853de9b"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4668bd8faf7e5b71c0319407b608f278f279668f358857dbfd10ef1954ac9f90"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8bea573863762bbf45d1e13f87c2d2fd32cee2dbd50d050f83f87429c9e1ea"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f552023710d4b93d8fb29a91fadf97de89c5926c6bd758897875435f2a939f33"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:016b2e665f778f13d3c438651dd4de244214b527a275e0acf1d44c05bc6026a9"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7befc148de64b6060937231cbff8d01ccf0bfd75aa26383ffdf8d82b12ec04ff"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-win32.whl", hash = "sha256:22b83aed390e3099584b839b93f80a0f4a95ee7f48270c97c90acd40ee646f0b"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-win_amd64.whl", hash = "sha256:a29762cd3d116585278ffb2e5b8cc311fb095ea278b96feef28d0b423154858e"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e21f66748ab725ade40fa7af8ec8b5019c68ab00b929f6643e1b1af461eddb60"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a6219108a15fc6d24de499d0d515c7235c617b2540d97116b663dade1a54d62"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:042622a5306c23b972192283f4e22372da3b8ddf5f7aac1cc5d9c9b222ab3ff6"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:627dee0c280eea91aed87b20a1f849e9ae2fe719d52cbf847c0e0ea34464b3f7"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4fdcd72a789c1c31ed242fd8c1bcd9ea186a98ee8e5408a50e610edfef980d71"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:89b64cd8898a3a6f642db4eb7b26d1b28a497d4022eccd7717ca066823e9fb01"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-win32.whl", hash = "sha256:6a93c5a0dfe8d34951e8a6f499a9479ffb9258123551fa007fc708ae2ac2bc5e"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-win_amd64.whl", hash = "sha256:c68fe3fcde03920c46697585620135b4ecfdfc1ed23e75cc2c2ae9f8502c10b8"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:eb60b026d8ad0c97917cb81d3662d0b39b8ff1335e3fabb24984c6acd0c900a2"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6921ee01caf375363be5e9ae70d08ce7ca9d7e0e8983183080211a062d299468"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cdf1a0dbe5ced887a9b127da4ffd7354e9c1a3b9bb330dce84df6b70ccb3a8d"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93a71c8601e823236ac0e5d087e4f397874a421017b3318fd92c0b14acf2b6db"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e04b622bb8a88f10e439084486f2f6349bf4d50605ac3e445869c7ea5cf0fa8c"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1b56961e2d31389aaadf4906d453859f35302b4eb818d34a26fab72596076bb8"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-win32.whl", hash = "sha256:0f9f3f9a3763b9c4deb8c5d09c4cc52ffe49f9876af41cc1b2ad0138878453cf"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-win_amd64.whl", hash = "sha256:25b0f63e7fcc2a6290cb5f7f5b4fc4047843504983a28856ce9b35d8f7de03cc"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f021d334f2ca692523aaf7bbf7592ceff70c8594fad853416a81d66b35e3abf9"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05c3f58cf91683102f2f0265c0db3bd3892e9eedabe059720492dbaa4f922da1"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:032d979ce77a6c2432653322ba4cbeabf5a6837f704d16fa38b5a05d8e21fa00"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:2e795c2f7d7249b75bb5f479b432a51b59041580d20599d4e112b5f2046437a3"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:cc32b2990fc34380ec2f6195f33a76b6cdaa9eecf09f0c9404b74fc120aef36f"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-win32.whl", hash = "sha256:9509c4123491d0e63fb5e16199e09f8e262066e58903e84615c301dde8fa2e87"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-win_amd64.whl", hash = "sha256:3655af10ebcc0f1e4e06c5900bb33e080d6a1fa4228f502121f28a3b1753cde5"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4c31943b61ed8fdd63dfd12ccc919f2bf95eefca133767db6fbbd15da62078ec"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a62dd5d7cc8626a3634208df458c5fe4f21200d96a74d122c83bc2015b333bc1"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0630774b0977804fba4b6bbea6852ab56c14965a2b0c7fc7282c5f7d90a1ae72"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d625eddf7efeba2abfd9c014a22c0f6b3796e0ffb48f5d5ab106568ef01ff5a"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ada603db10bb865bbe591939de854faf2c60f43c9b763e90f653224138f910d9"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c41411e192f8d3ea39ea70e0fae48762cd11a2244e03751a98bd3c0ca9a4e936"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-win32.whl", hash = "sha256:d299797d75cd747e7797b1b41817111406b8b10a4f88b6e8fe5b5e59598b43b0"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-win_amd64.whl", hash = "sha256:0375a141e1c0878103eb3d719eb6d5aa444b490c96f3fedab8471c7f6ffe70ee"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:ccae5de2a0140d8be6838c331604f91d6fafd0735dbdcee1ac78fc8fbaba76b4"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2a275a806f73e849e1c309ac11108ea1a14cd7058577aba962cd7190e27c9e3c"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:732e026240cdd1c1b2e3ac515c7a23820430ed94292ce33806a95869c46bd139"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:890da8cd1941fa3dab28c5bac3b9da8502e7e366f895b3b8e500896f12f94d11"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0d8326269dbf944b9201911b0d9f3dc524d64779a07518199a58384c3d37a44"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b76d63495b0508ab9fc23f8152bac63205d2a704cd009a2b0722f4c8e0cba8e0"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-win32.whl", hash = "sha256:69683e02e8a9de37f17985905a5eca18ad651bf592314b4d3d799029797d0eb3"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-win_amd64.whl", hash = "sha256:aee110e4ef3c528f3abbc3c2018c121e708938adeeff9006428dd7c8555e9b3f"}, + {file = "SQLAlchemy-2.0.35-py3-none-any.whl", hash = "sha256:2ab3f0336c0387662ce6221ad30ab3a5e6499aab01b9790879b6578fd9b8faa1"}, + {file = "sqlalchemy-2.0.35.tar.gz", hash = "sha256:e11d7ea4d24f0a262bccf9a7cd6284c976c5369dac21db237cff59586045ab9f"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", optional = true, markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "starlette" +version = "0.38.6" +description = "The little ASGI library that shines." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "starlette-0.38.6-py3-none-any.whl", hash = "sha256:4517a1409e2e73ee4951214ba012052b9e16f60e90d73cfb06192c19203bbb05"}, + {file = "starlette-0.38.6.tar.gz", hash = "sha256:863a1588f5574e70a821dadefb41e4881ea451a47a3cd1b4df359d4ffefe5ead"}, +] + +[package.dependencies] +anyio = ">=3.4.0,<5" + +[package.extras] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] + +[[package]] +name = "trio" +version = "0.26.2" +description = "A friendly Python library for async concurrency and I/O" +optional = false +python-versions = ">=3.8" +files = [ + {file = "trio-0.26.2-py3-none-any.whl", hash = "sha256:c5237e8133eb0a1d72f09a971a55c28ebe69e351c783fc64bc37db8db8bbe1d0"}, + {file = "trio-0.26.2.tar.gz", hash = "sha256:0346c3852c15e5c7d40ea15972c4805689ef2cb8b5206f794c9c19450119f3a4"}, +] + +[package.dependencies] +attrs = ">=23.2.0" +cffi = {version = ">=1.14", markers = "os_name == \"nt\" and implementation_name != \"pypy\""} +idna = "*" +outcome = "*" +sniffio = ">=1.3.0" +sortedcontainers = "*" + +[[package]] +name = "typeguard" +version = "4.4.1" +description = "Run-time type checker for Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "typeguard-4.4.1-py3-none-any.whl", hash = "sha256:9324ec07a27ec67fc54a9c063020ca4c0ae6abad5e9f0f9804ca59aee68c6e21"}, + {file = "typeguard-4.4.1.tar.gz", hash = "sha256:0d22a89d00b453b47c49875f42b6601b961757541a2e1e0ef517b6e24213c21b"}, +] + +[package.dependencies] +typing-extensions = ">=4.10.0" + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.3.0)"] +test = ["coverage[toml] (>=7)", "mypy (>=1.2.0)", "pytest (>=7)"] + +[[package]] +name = "typer" +version = "0.14.0" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." +optional = false +python-versions = ">=3.7" +files = [ + {file = "typer-0.14.0-py3-none-any.whl", hash = "sha256:f476233a25770ab3e7b2eebf7c68f3bc702031681a008b20167573a4b7018f09"}, + {file = "typer-0.14.0.tar.gz", hash = "sha256:af58f737f8d0c0c37b9f955a6d39000b9ff97813afcbeef56af5e37cf743b45a"}, +] + +[package.dependencies] +click = ">=8.0.0" +rich = ">=10.11.0" +shellingham = ">=1.3.0" +typing-extensions = ">=3.7.4.3" + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "urllib3" +version = "2.3.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, + {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "uuid6" +version = "2024.7.10" +description = "New time-based UUID formats which are suited for use as a database key" +optional = false +python-versions = ">=3.8" +files = [ + {file = "uuid6-2024.7.10-py3-none-any.whl", hash = "sha256:93432c00ba403751f722829ad21759ff9db051dea140bf81493271e8e4dd18b7"}, + {file = "uuid6-2024.7.10.tar.gz", hash = "sha256:2d29d7f63f593caaeea0e0d0dd0ad8129c9c663b29e19bdf882e864bedf18fb0"}, +] + +[[package]] +name = "uvicorn" +version = "0.30.6" +description = "The lightning-fast ASGI server." +optional = false +python-versions = ">=3.8" +files = [ + {file = "uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5"}, + {file = "uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} +h11 = ">=0.8" +httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} +python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} + +[package.extras] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "uvloop" +version = "0.20.0" +description = "Fast implementation of asyncio event loop on top of libuv" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "uvloop-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9ebafa0b96c62881d5cafa02d9da2e44c23f9f0cd829f3a32a6aff771449c996"}, + {file = "uvloop-0.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:35968fc697b0527a06e134999eef859b4034b37aebca537daeb598b9d45a137b"}, + {file = "uvloop-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b16696f10e59d7580979b420eedf6650010a4a9c3bd8113f24a103dfdb770b10"}, + {file = "uvloop-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b04d96188d365151d1af41fa2d23257b674e7ead68cfd61c725a422764062ae"}, + {file = "uvloop-0.20.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94707205efbe809dfa3a0d09c08bef1352f5d3d6612a506f10a319933757c006"}, + {file = "uvloop-0.20.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89e8d33bb88d7263f74dc57d69f0063e06b5a5ce50bb9a6b32f5fcbe655f9e73"}, + {file = 
"uvloop-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e50289c101495e0d1bb0bfcb4a60adde56e32f4449a67216a1ab2750aa84f037"}, + {file = "uvloop-0.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e237f9c1e8a00e7d9ddaa288e535dc337a39bcbf679f290aee9d26df9e72bce9"}, + {file = "uvloop-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:746242cd703dc2b37f9d8b9f173749c15e9a918ddb021575a0205ec29a38d31e"}, + {file = "uvloop-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82edbfd3df39fb3d108fc079ebc461330f7c2e33dbd002d146bf7c445ba6e756"}, + {file = "uvloop-0.20.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:80dc1b139516be2077b3e57ce1cb65bfed09149e1d175e0478e7a987863b68f0"}, + {file = "uvloop-0.20.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f44af67bf39af25db4c1ac27e82e9665717f9c26af2369c404be865c8818dcf"}, + {file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4b75f2950ddb6feed85336412b9a0c310a2edbcf4cf931aa5cfe29034829676d"}, + {file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:77fbc69c287596880ecec2d4c7a62346bef08b6209749bf6ce8c22bbaca0239e"}, + {file = "uvloop-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6462c95f48e2d8d4c993a2950cd3d31ab061864d1c226bbf0ee2f1a8f36674b9"}, + {file = "uvloop-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:649c33034979273fa71aa25d0fe120ad1777c551d8c4cd2c0c9851d88fcb13ab"}, + {file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a609780e942d43a275a617c0839d85f95c334bad29c4c0918252085113285b5"}, + {file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aea15c78e0d9ad6555ed201344ae36db5c63d428818b4b2a42842b3870127c00"}, + {file = "uvloop-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0e94b221295b5e69de57a1bd4aeb0b3a29f61be6e1b478bb8a69a73377db7ba"}, + {file = "uvloop-0.20.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fee6044b64c965c425b65a4e17719953b96e065c5b7e09b599ff332bb2744bdf"}, + {file = "uvloop-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:265a99a2ff41a0fd56c19c3838b29bf54d1d177964c300dad388b27e84fd7847"}, + {file = "uvloop-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10c2956efcecb981bf9cfb8184d27d5d64b9033f917115a960b83f11bfa0d6b"}, + {file = "uvloop-0.20.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e7d61fe8e8d9335fac1bf8d5d82820b4808dd7a43020c149b63a1ada953d48a6"}, + {file = "uvloop-0.20.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2beee18efd33fa6fdb0976e18475a4042cd31c7433c866e8a09ab604c7c22ff2"}, + {file = "uvloop-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d8c36fdf3e02cec92aed2d44f63565ad1522a499c654f07935c8f9d04db69e95"}, + {file = "uvloop-0.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0fac7be202596c7126146660725157d4813aa29a4cc990fe51346f75ff8fde7"}, + {file = "uvloop-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d0fba61846f294bce41eb44d60d58136090ea2b5b99efd21cbdf4e21927c56a"}, + {file = "uvloop-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95720bae002ac357202e0d866128eb1ac82545bcf0b549b9abe91b5178d9b541"}, + {file = "uvloop-0.20.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:36c530d8fa03bfa7085af54a48f2ca16ab74df3ec7108a46ba82fd8b411a2315"}, + {file = 
"uvloop-0.20.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e97152983442b499d7a71e44f29baa75b3b02e65d9c44ba53b10338e98dedb66"}, + {file = "uvloop-0.20.0.tar.gz", hash = "sha256:4603ca714a754fc8d9b197e325db25b2ea045385e8a3ad05d3463de725fdf469"}, +] + +[package.extras] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] + +[[package]] +name = "watchfiles" +version = "0.24.0" +description = "Simple, modern and high performance file watching and code reload in python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "watchfiles-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:083dc77dbdeef09fa44bb0f4d1df571d2e12d8a8f985dccde71ac3ac9ac067a0"}, + {file = "watchfiles-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e94e98c7cb94cfa6e071d401ea3342767f28eb5a06a58fafdc0d2a4974f4f35c"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82ae557a8c037c42a6ef26c494d0631cacca040934b101d001100ed93d43f361"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:acbfa31e315a8f14fe33e3542cbcafc55703b8f5dcbb7c1eecd30f141df50db3"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b74fdffce9dfcf2dc296dec8743e5b0332d15df19ae464f0e249aa871fc1c571"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:449f43f49c8ddca87c6b3980c9284cab6bd1f5c9d9a2b00012adaaccd5e7decd"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4abf4ad269856618f82dee296ac66b0cd1d71450fc3c98532d93798e73399b7a"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f895d785eb6164678ff4bb5cc60c5996b3ee6df3edb28dcdeba86a13ea0465e"}, + {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ae3e208b31be8ce7f4c2c0034f33406dd24fbce3467f77223d10cd86778471c"}, + {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2efec17819b0046dde35d13fb8ac7a3ad877af41ae4640f4109d9154ed30a188"}, + {file = "watchfiles-0.24.0-cp310-none-win32.whl", hash = "sha256:6bdcfa3cd6fdbdd1a068a52820f46a815401cbc2cb187dd006cb076675e7b735"}, + {file = "watchfiles-0.24.0-cp310-none-win_amd64.whl", hash = "sha256:54ca90a9ae6597ae6dc00e7ed0a040ef723f84ec517d3e7ce13e63e4bc82fa04"}, + {file = "watchfiles-0.24.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:bdcd5538e27f188dd3c804b4a8d5f52a7fc7f87e7fd6b374b8e36a4ca03db428"}, + {file = "watchfiles-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2dadf8a8014fde6addfd3c379e6ed1a981c8f0a48292d662e27cabfe4239c83c"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6509ed3f467b79d95fc62a98229f79b1a60d1b93f101e1c61d10c95a46a84f43"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8360f7314a070c30e4c976b183d1d8d1585a4a50c5cb603f431cebcbb4f66327"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:316449aefacf40147a9efaf3bd7c9bdd35aaba9ac5d708bd1eb5763c9a02bef5"}, + {file = 
"watchfiles-0.24.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73bde715f940bea845a95247ea3e5eb17769ba1010efdc938ffcb967c634fa61"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3770e260b18e7f4e576edca4c0a639f704088602e0bc921c5c2e721e3acb8d15"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa0fd7248cf533c259e59dc593a60973a73e881162b1a2f73360547132742823"}, + {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d7a2e3b7f5703ffbd500dabdefcbc9eafeff4b9444bbdd5d83d79eedf8428fab"}, + {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d831ee0a50946d24a53821819b2327d5751b0c938b12c0653ea5be7dea9c82ec"}, + {file = "watchfiles-0.24.0-cp311-none-win32.whl", hash = "sha256:49d617df841a63b4445790a254013aea2120357ccacbed00253f9c2b5dc24e2d"}, + {file = "watchfiles-0.24.0-cp311-none-win_amd64.whl", hash = "sha256:d3dcb774e3568477275cc76554b5a565024b8ba3a0322f77c246bc7111c5bb9c"}, + {file = "watchfiles-0.24.0-cp311-none-win_arm64.whl", hash = "sha256:9301c689051a4857d5b10777da23fafb8e8e921bcf3abe6448a058d27fb67633"}, + {file = "watchfiles-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7211b463695d1e995ca3feb38b69227e46dbd03947172585ecb0588f19b0d87a"}, + {file = "watchfiles-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4b8693502d1967b00f2fb82fc1e744df128ba22f530e15b763c8d82baee15370"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdab9555053399318b953a1fe1f586e945bc8d635ce9d05e617fd9fe3a4687d6"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:34e19e56d68b0dad5cff62273107cf5d9fbaf9d75c46277aa5d803b3ef8a9e9b"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41face41f036fee09eba33a5b53a73e9a43d5cb2c53dad8e61fa6c9f91b5a51e"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5148c2f1ea043db13ce9b0c28456e18ecc8f14f41325aa624314095b6aa2e9ea"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e4bd963a935aaf40b625c2499f3f4f6bbd0c3776f6d3bc7c853d04824ff1c9f"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c79d7719d027b7a42817c5d96461a99b6a49979c143839fc37aa5748c322f234"}, + {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:32aa53a9a63b7f01ed32e316e354e81e9da0e6267435c7243bf8ae0f10b428ef"}, + {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce72dba6a20e39a0c628258b5c308779b8697f7676c254a845715e2a1039b968"}, + {file = "watchfiles-0.24.0-cp312-none-win32.whl", hash = "sha256:d9018153cf57fc302a2a34cb7564870b859ed9a732d16b41a9b5cb2ebed2d444"}, + {file = "watchfiles-0.24.0-cp312-none-win_amd64.whl", hash = "sha256:551ec3ee2a3ac9cbcf48a4ec76e42c2ef938a7e905a35b42a1267fa4b1645896"}, + {file = "watchfiles-0.24.0-cp312-none-win_arm64.whl", hash = "sha256:b52a65e4ea43c6d149c5f8ddb0bef8d4a1e779b77591a458a893eb416624a418"}, + {file = "watchfiles-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3d2e3ab79a1771c530233cadfd277fcc762656d50836c77abb2e5e72b88e3a48"}, + {file = "watchfiles-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:327763da824817b38ad125dcd97595f942d720d32d879f6c4ddf843e3da3fe90"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd82010f8ab451dabe36054a1622870166a67cf3fce894f68895db6f74bbdc94"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d64ba08db72e5dfd5c33be1e1e687d5e4fcce09219e8aee893a4862034081d4e"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1cf1f6dd7825053f3d98f6d33f6464ebdd9ee95acd74ba2c34e183086900a827"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43e3e37c15a8b6fe00c1bce2473cfa8eb3484bbeecf3aefbf259227e487a03df"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88bcd4d0fe1d8ff43675360a72def210ebad3f3f72cabfeac08d825d2639b4ab"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:999928c6434372fde16c8f27143d3e97201160b48a614071261701615a2a156f"}, + {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:30bbd525c3262fd9f4b1865cb8d88e21161366561cd7c9e1194819e0a33ea86b"}, + {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:edf71b01dec9f766fb285b73930f95f730bb0943500ba0566ae234b5c1618c18"}, + {file = "watchfiles-0.24.0-cp313-none-win32.whl", hash = "sha256:f4c96283fca3ee09fb044f02156d9570d156698bc3734252175a38f0e8975f07"}, + {file = "watchfiles-0.24.0-cp313-none-win_amd64.whl", hash = "sha256:a974231b4fdd1bb7f62064a0565a6b107d27d21d9acb50c484d2cdba515b9366"}, + {file = "watchfiles-0.24.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ee82c98bed9d97cd2f53bdb035e619309a098ea53ce525833e26b93f673bc318"}, + {file = "watchfiles-0.24.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fd92bbaa2ecdb7864b7600dcdb6f2f1db6e0346ed425fbd01085be04c63f0b05"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f83df90191d67af5a831da3a33dd7628b02a95450e168785586ed51e6d28943c"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fca9433a45f18b7c779d2bae7beeec4f740d28b788b117a48368d95a3233ed83"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b995bfa6bf01a9e09b884077a6d37070464b529d8682d7691c2d3b540d357a0c"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed9aba6e01ff6f2e8285e5aa4154e2970068fe0fc0998c4380d0e6278222269b"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5171ef898299c657685306d8e1478a45e9303ddcd8ac5fed5bd52ad4ae0b69b"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4933a508d2f78099162da473841c652ad0de892719043d3f07cc83b33dfd9d91"}, + {file = "watchfiles-0.24.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95cf3b95ea665ab03f5a54765fa41abf0529dbaf372c3b83d91ad2cfa695779b"}, + {file = "watchfiles-0.24.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:01def80eb62bd5db99a798d5e1f5f940ca0a05986dcfae21d833af7a46f7ee22"}, + {file = "watchfiles-0.24.0-cp38-none-win32.whl", hash = "sha256:4d28cea3c976499475f5b7a2fec6b3a36208656963c1a856d328aeae056fc5c1"}, + {file = "watchfiles-0.24.0-cp38-none-win_amd64.whl", hash = 
"sha256:21ab23fdc1208086d99ad3f69c231ba265628014d4aed31d4e8746bd59e88cd1"}, + {file = "watchfiles-0.24.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b665caeeda58625c3946ad7308fbd88a086ee51ccb706307e5b1fa91556ac886"}, + {file = "watchfiles-0.24.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5c51749f3e4e269231510da426ce4a44beb98db2dce9097225c338f815b05d4f"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b2509f08761f29a0fdad35f7e1638b8ab1adfa2666d41b794090361fb8b855"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a60e2bf9dc6afe7f743e7c9b149d1fdd6dbf35153c78fe3a14ae1a9aee3d98b"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7d9b87c4c55e3ea8881dfcbf6d61ea6775fffed1fedffaa60bd047d3c08c430"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:78470906a6be5199524641f538bd2c56bb809cd4bf29a566a75051610bc982c3"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07cdef0c84c03375f4e24642ef8d8178e533596b229d32d2bbd69e5128ede02a"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d337193bbf3e45171c8025e291530fb7548a93c45253897cd764a6a71c937ed9"}, + {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ec39698c45b11d9694a1b635a70946a5bad066b593af863460a8e600f0dff1ca"}, + {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e28d91ef48eab0afb939fa446d8ebe77e2f7593f5f463fd2bb2b14132f95b6e"}, + {file = "watchfiles-0.24.0-cp39-none-win32.whl", hash = "sha256:7138eff8baa883aeaa074359daabb8b6c1e73ffe69d5accdc907d62e50b1c0da"}, + {file = "watchfiles-0.24.0-cp39-none-win_amd64.whl", hash = "sha256:b3ef2c69c655db63deb96b3c3e587084612f9b1fa983df5e0c3379d41307467f"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:632676574429bee8c26be8af52af20e0c718cc7f5f67f3fb658c71928ccd4f7f"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a2a9891723a735d3e2540651184be6fd5b96880c08ffe1a98bae5017e65b544b"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7fa2bc0efef3e209a8199fd111b8969fe9db9c711acc46636686331eda7dd4"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01550ccf1d0aed6ea375ef259706af76ad009ef5b0203a3a4cce0f6024f9b68a"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:96619302d4374de5e2345b2b622dc481257a99431277662c30f606f3e22f42be"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:85d5f0c7771dcc7a26c7a27145059b6bb0ce06e4e751ed76cdf123d7039b60b5"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951088d12d339690a92cef2ec5d3cfd957692834c72ffd570ea76a6790222777"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49fb58bcaa343fedc6a9e91f90195b20ccb3135447dc9e4e2570c3a39565853e"}, + {file = "watchfiles-0.24.0.tar.gz", hash = "sha256:afb72325b74fa7a428c009c1b8be4b4d7c2afedafb2982827ef2156646df2fe1"}, +] + +[package.dependencies] +anyio = ">=3.0.0" + +[[package]] +name = "websockets" +version = "13.1" +description = "An 
implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websockets-13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f48c749857f8fb598fb890a75f540e3221d0976ed0bf879cf3c7eef34151acee"}, + {file = "websockets-13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7e72ce6bda6fb9409cc1e8164dd41d7c91466fb599eb047cfda72fe758a34a7"}, + {file = "websockets-13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f779498eeec470295a2b1a5d97aa1bc9814ecd25e1eb637bd9d1c73a327387f6"}, + {file = "websockets-13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676df3fe46956fbb0437d8800cd5f2b6d41143b6e7e842e60554398432cf29b"}, + {file = "websockets-13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7affedeb43a70351bb811dadf49493c9cfd1ed94c9c70095fd177e9cc1541fa"}, + {file = "websockets-13.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1971e62d2caa443e57588e1d82d15f663b29ff9dfe7446d9964a4b6f12c1e700"}, + {file = "websockets-13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5f2e75431f8dc4a47f31565a6e1355fb4f2ecaa99d6b89737527ea917066e26c"}, + {file = "websockets-13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58cf7e75dbf7e566088b07e36ea2e3e2bd5676e22216e4cad108d4df4a7402a0"}, + {file = "websockets-13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c90d6dec6be2c7d03378a574de87af9b1efea77d0c52a8301dd831ece938452f"}, + {file = "websockets-13.1-cp310-cp310-win32.whl", hash = "sha256:730f42125ccb14602f455155084f978bd9e8e57e89b569b4d7f0f0c17a448ffe"}, + {file = "websockets-13.1-cp310-cp310-win_amd64.whl", hash = "sha256:5993260f483d05a9737073be197371940c01b257cc45ae3f1d5d7adb371b266a"}, + {file = "websockets-13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:61fc0dfcda609cda0fc9fe7977694c0c59cf9d749fbb17f4e9483929e3c48a19"}, + {file = "websockets-13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ceec59f59d092c5007e815def4ebb80c2de330e9588e101cf8bd94c143ec78a5"}, + {file = "websockets-13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1dca61c6db1166c48b95198c0b7d9c990b30c756fc2923cc66f68d17dc558fd"}, + {file = "websockets-13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:308e20f22c2c77f3f39caca508e765f8725020b84aa963474e18c59accbf4c02"}, + {file = "websockets-13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d516c325e6540e8a57b94abefc3459d7dab8ce52ac75c96cad5549e187e3a7"}, + {file = "websockets-13.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c6e35319b46b99e168eb98472d6c7d8634ee37750d7693656dc766395df096"}, + {file = "websockets-13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f9fee94ebafbc3117c30be1844ed01a3b177bb6e39088bc6b2fa1dc15572084"}, + {file = "websockets-13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7c1e90228c2f5cdde263253fa5db63e6653f1c00e7ec64108065a0b9713fa1b3"}, + {file = "websockets-13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6548f29b0e401eea2b967b2fdc1c7c7b5ebb3eeb470ed23a54cd45ef078a0db9"}, + {file = "websockets-13.1-cp311-cp311-win32.whl", hash = "sha256:c11d4d16e133f6df8916cc5b7e3e96ee4c44c936717d684a94f48f82edb7c92f"}, + {file = "websockets-13.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:d04f13a1d75cb2b8382bdc16ae6fa58c97337253826dfe136195b7f89f661557"}, + {file = "websockets-13.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9d75baf00138f80b48f1eac72ad1535aac0b6461265a0bcad391fc5aba875cfc"}, + {file = "websockets-13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9b6f347deb3dcfbfde1c20baa21c2ac0751afaa73e64e5b693bb2b848efeaa49"}, + {file = "websockets-13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de58647e3f9c42f13f90ac7e5f58900c80a39019848c5547bc691693098ae1bd"}, + {file = "websockets-13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1b54689e38d1279a51d11e3467dd2f3a50f5f2e879012ce8f2d6943f00e83f0"}, + {file = "websockets-13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf1781ef73c073e6b0f90af841aaf98501f975d306bbf6221683dd594ccc52b6"}, + {file = "websockets-13.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d23b88b9388ed85c6faf0e74d8dec4f4d3baf3ecf20a65a47b836d56260d4b9"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3c78383585f47ccb0fcf186dcb8a43f5438bd7d8f47d69e0b56f71bf431a0a68"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d6d300f8ec35c24025ceb9b9019ae9040c1ab2f01cddc2bcc0b518af31c75c14"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9dcaf8b0cc72a392760bb8755922c03e17a5a54e08cca58e8b74f6902b433cf"}, + {file = "websockets-13.1-cp312-cp312-win32.whl", hash = "sha256:2f85cf4f2a1ba8f602298a853cec8526c2ca42a9a4b947ec236eaedb8f2dc80c"}, + {file = "websockets-13.1-cp312-cp312-win_amd64.whl", hash = "sha256:38377f8b0cdeee97c552d20cf1865695fcd56aba155ad1b4ca8779a5b6ef4ac3"}, + {file = "websockets-13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a9ab1e71d3d2e54a0aa646ab6d4eebfaa5f416fe78dfe4da2839525dc5d765c6"}, + {file = "websockets-13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b9d7439d7fab4dce00570bb906875734df13d9faa4b48e261c440a5fec6d9708"}, + {file = "websockets-13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327b74e915cf13c5931334c61e1a41040e365d380f812513a255aa804b183418"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:325b1ccdbf5e5725fdcb1b0e9ad4d2545056479d0eee392c291c1bf76206435a"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:346bee67a65f189e0e33f520f253d5147ab76ae42493804319b5716e46dddf0f"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91a0fa841646320ec0d3accdff5b757b06e2e5c86ba32af2e0815c96c7a603c5"}, + {file = "websockets-13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:18503d2c5f3943e93819238bf20df71982d193f73dcecd26c94514f417f6b135"}, + {file = "websockets-13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9cd1af7e18e5221d2878378fbc287a14cd527fdd5939ed56a18df8a31136bb2"}, + {file = "websockets-13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:70c5be9f416aa72aab7a2a76c90ae0a4fe2755c1816c153c1a2bcc3333ce4ce6"}, + {file = "websockets-13.1-cp313-cp313-win32.whl", hash = "sha256:624459daabeb310d3815b276c1adef475b3e6804abaf2d9d2c061c319f7f187d"}, + {file = "websockets-13.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:c518e84bb59c2baae725accd355c8dc517b4a3ed8db88b4bc93c78dae2974bf2"}, + {file = "websockets-13.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c7934fd0e920e70468e676fe7f1b7261c1efa0d6c037c6722278ca0228ad9d0d"}, + {file = "websockets-13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:149e622dc48c10ccc3d2760e5f36753db9cacf3ad7bc7bbbfd7d9c819e286f23"}, + {file = "websockets-13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a569eb1b05d72f9bce2ebd28a1ce2054311b66677fcd46cf36204ad23acead8c"}, + {file = "websockets-13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95df24ca1e1bd93bbca51d94dd049a984609687cb2fb08a7f2c56ac84e9816ea"}, + {file = "websockets-13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8dbb1bf0c0a4ae8b40bdc9be7f644e2f3fb4e8a9aca7145bfa510d4a374eeb7"}, + {file = "websockets-13.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:035233b7531fb92a76beefcbf479504db8c72eb3bff41da55aecce3a0f729e54"}, + {file = "websockets-13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e4450fc83a3df53dec45922b576e91e94f5578d06436871dce3a6be38e40f5db"}, + {file = "websockets-13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:463e1c6ec853202dd3657f156123d6b4dad0c546ea2e2e38be2b3f7c5b8e7295"}, + {file = "websockets-13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6d6855bbe70119872c05107e38fbc7f96b1d8cb047d95c2c50869a46c65a8e96"}, + {file = "websockets-13.1-cp38-cp38-win32.whl", hash = "sha256:204e5107f43095012b00f1451374693267adbb832d29966a01ecc4ce1db26faf"}, + {file = "websockets-13.1-cp38-cp38-win_amd64.whl", hash = "sha256:485307243237328c022bc908b90e4457d0daa8b5cf4b3723fd3c4a8012fce4c6"}, + {file = "websockets-13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9b37c184f8b976f0c0a231a5f3d6efe10807d41ccbe4488df8c74174805eea7d"}, + {file = "websockets-13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:163e7277e1a0bd9fb3c8842a71661ad19c6aa7bb3d6678dc7f89b17fbcc4aeb7"}, + {file = "websockets-13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4b889dbd1342820cc210ba44307cf75ae5f2f96226c0038094455a96e64fb07a"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:586a356928692c1fed0eca68b4d1c2cbbd1ca2acf2ac7e7ebd3b9052582deefa"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7bd6abf1e070a6b72bfeb71049d6ad286852e285f146682bf30d0296f5fbadfa"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2aad13a200e5934f5a6767492fb07151e1de1d6079c003ab31e1823733ae79"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:df01aea34b6e9e33572c35cd16bae5a47785e7d5c8cb2b54b2acdb9678315a17"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e54affdeb21026329fb0744ad187cf812f7d3c2aa702a5edb562b325191fcab6"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ef8aa8bdbac47f4968a5d66462a2a0935d044bf35c0e5a8af152d58516dbeb5"}, + {file = "websockets-13.1-cp39-cp39-win32.whl", hash = "sha256:deeb929efe52bed518f6eb2ddc00cc496366a14c726005726ad62c2dd9017a3c"}, + {file = "websockets-13.1-cp39-cp39-win_amd64.whl", hash = "sha256:7c65ffa900e7cc958cd088b9a9157a8141c991f8c53d11087e6fb7277a03f81d"}, + {file = 
"websockets-13.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5dd6da9bec02735931fccec99d97c29f47cc61f644264eb995ad6c0c27667238"}, + {file = "websockets-13.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:2510c09d8e8df777177ee3d40cd35450dc169a81e747455cc4197e63f7e7bfe5"}, + {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1c3cf67185543730888b20682fb186fc8d0fa6f07ccc3ef4390831ab4b388d9"}, + {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcc03c8b72267e97b49149e4863d57c2d77f13fae12066622dc78fe322490fe6"}, + {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:004280a140f220c812e65f36944a9ca92d766b6cc4560be652a0a3883a79ed8a"}, + {file = "websockets-13.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e2620453c075abeb0daa949a292e19f56de518988e079c36478bacf9546ced23"}, + {file = "websockets-13.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9156c45750b37337f7b0b00e6248991a047be4aa44554c9886fe6bdd605aab3b"}, + {file = "websockets-13.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:80c421e07973a89fbdd93e6f2003c17d20b69010458d3a8e37fb47874bd67d51"}, + {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82d0ba76371769d6a4e56f7e83bb8e81846d17a6190971e38b5de108bde9b0d7"}, + {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9875a0143f07d74dc5e1ded1c4581f0d9f7ab86c78994e2ed9e95050073c94d"}, + {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a11e38ad8922c7961447f35c7b17bffa15de4d17c70abd07bfbe12d6faa3e027"}, + {file = "websockets-13.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4059f790b6ae8768471cddb65d3c4fe4792b0ab48e154c9f0a04cefaabcd5978"}, + {file = "websockets-13.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:25c35bf84bf7c7369d247f0b8cfa157f989862c49104c5cf85cb5436a641d93e"}, + {file = "websockets-13.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:83f91d8a9bb404b8c2c41a707ac7f7f75b9442a0a876df295de27251a856ad09"}, + {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a43cfdcddd07f4ca2b1afb459824dd3c6d53a51410636a2c7fc97b9a8cf4842"}, + {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48a2ef1381632a2f0cb4efeff34efa97901c9fbc118e01951ad7cfc10601a9bb"}, + {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459bf774c754c35dbb487360b12c5727adab887f1622b8aed5755880a21c4a20"}, + {file = "websockets-13.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:95858ca14a9f6fa8413d29e0a585b31b278388aa775b8a81fa24830123874678"}, + {file = "websockets-13.1-py3-none-any.whl", hash = "sha256:a9a396a6ad26130cdae92ae10c36af09d9bfe6cafe69670fd3b6da9b07b4044f"}, + {file = "websockets-13.1.tar.gz", hash = "sha256:a3b3366087c1bc0a2795111edcadddb8b3b59509d5db5d7ea3fdd69f954a8878"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.12" +content-hash = "c4e44ce2f4992f6ff0b485de8931fdc5005fa7dd872dff94ec66ac35b342bab4" diff --git 
a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..2199c38
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,36 @@
+[tool.poetry]
+name = "adminuuidpostgresql"
+version = "0.1.0"
+description = ""
+authors = ["RicardoJDaleprane "]
+readme = "README.md"
+
+[tool.poetry.dependencies]
+python = "^3.12"
+fastapi = "^0.112.2"
+uvicorn = {extras = ["standard"], version = "^0.30.6"}
+sqlalchemy = "^2.0.32"
+alembic = "^1.13.2"
+fastapi-users = "^13.0.0"
+fastapi-users-db-sqlalchemy = "^6.0.1"
+uuid6 = "^2024.7.10"
+asyncpg = "^0.29.0"
+typeguard = "^4.4.1"
+fastapi-cli = "^0.0.5"
+boto3 = "^1.35.90"
+
+
+[tool.poetry.group.dev.dependencies]
+httpx = {extras = ["http2"], version = "^0.27.2"}
+pytest = "^8.3.2"
+pytest-asyncio = "^0.24.0"
+asyncio = "^3.4.3"
+pytest-cov = "^5.0.0"
+pytest-html = "^4.1.1"
+trio = "^0.26.2"
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
+
+
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 0000000..b4cfa39
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,5 @@
+[pytest]
+env = PYTHONPATH=.
+addopts = -p no:warnings
+asyncio_default_fixture_loop_scope = function
+markers = usuarios_permitidos(users): Marker that defines which users are allowed to run a test
diff --git a/start.sh b/start.sh
new file mode 100644
index 0000000..e8b435b
--- /dev/null
+++ b/start.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+
+# --- 1. Database connectivity check ---
+# This step ensures PostgreSQL is reachable before any operation that depends on it.
+# The 'check_db.py' script is executed and, on failure, the process exits.
+echo "Running database readiness check..."
+python /code/check_db.py
+
+# Check the Python script's exit code. If it is non-zero (failure), bash exits.
+if [ $? -ne 0 ]; then
+    echo "Database readiness check failed. Exiting."
+    exit 1
+fi
+echo "Database is ready!"
+
+
+
+# --- 2. Run the initial data script ---
+# Runs the Python script that inserts the initial permissions and roles into the database.
+# This script must also use the URL_BD environment variable to connect.
+echo "Executing initial permissions and roles script..."
+python /code/iniciar_permissoes_e_papeis.py
+echo "Permissions and roles script completed!"
+
+
+
+# --- 3. Start the FastAPI application ---
+# Once the database is ready and seeded with its initial data,
+# the FastAPI application is started via the CLI with its port and worker settings.
+echo "Starting FastAPI application using 'fastapi run'..."
+fastapi run app/main.py --port 80 --workers 4
\ No newline at end of file
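For reference, a minimal sketch of the kind of readiness probe start.sh expects. The committed check_db.py is not shown in this hunk, so everything below (the wait_for_db name, the retry counts, and reading a DSN from a URL_BD environment variable) is an assumption, not the repository's implementation:

import asyncio
import os
import sys

import asyncpg


async def wait_for_db(retries: int = 10, delay: float = 3.0) -> None:
    # The app connects through SQLAlchemy with a postgresql+asyncpg:// URL;
    # asyncpg.connect() itself expects the plain postgresql:// form.
    dsn = os.environ["URL_BD"].replace("postgresql+asyncpg://", "postgresql://")
    for attempt in range(1, retries + 1):
        try:
            conn = await asyncpg.connect(dsn)
            await conn.close()
            return
        except (OSError, asyncpg.PostgresError) as exc:
            print(f"Database not ready (attempt {attempt}/{retries}): {exc}")
            await asyncio.sleep(delay)
    sys.exit(1)  # a non-zero exit code makes start.sh abort


if __name__ == "__main__":
    asyncio.run(wait_for_db())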
diff --git a/test_main.http b/test_main.http
new file mode 100644
index 0000000..a2d81a9
--- /dev/null
+++ b/test_main.http
@@ -0,0 +1,11 @@
+# Test your FastAPI endpoints
+
+GET http://127.0.0.1:8000/
+Accept: application/json
+
+###
+
+GET http://127.0.0.1:8000/hello/User
+Accept: application/json
+
+###
diff --git a/teste.txt b/teste.txt
new file mode 100644
index 0000000..82093a7
--- /dev/null
+++ b/teste.txt
@@ -0,0 +1,8 @@
+DELETE FROM public.pessoa
+	WHERE "uuid"='018ffdd5-8ff3-724f-834f-c2640acce13d'::uuid;
+DELETE FROM public.pessoa
+	WHERE "uuid"='018ffdd6-3171-7474-a121-ad2822e3649b'::uuid;
+DELETE FROM public.pessoa
+	WHERE "uuid"='018ffdd7-d216-7501-8364-17881011681a'::uuid;
+DELETE FROM public.pessoa
+	WHERE "uuid"='018ffdd8-2c96-7116-a231-9de5cea3074d'::uuid;
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..b28b04f
--- /dev/null
+++ b/tests/__init__.py
@@ -0,0 +1,3 @@
+
+
+
diff --git a/tests/_test_client.py b/tests/_test_client.py
new file mode 100644
index 0000000..27ac7e3
--- /dev/null
+++ b/tests/_test_client.py
@@ -0,0 +1,6 @@
+# _test_client.py
+import pytest
+
+@pytest.mark.asyncio
+async def test_client_fixture(client):
+    assert client is not None
diff --git a/tests/anterior.py b/tests/anterior.py
new file mode 100644
index 0000000..11ea6cc
--- /dev/null
+++ b/tests/anterior.py
@@ -0,0 +1,190 @@
+# # import pytest
+# # import asyncio  # Imports asyncio to manage event loops
+# # from fastapi.testclient import TestClient
+# # from sqlalchemy import select
+# # from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
+# # from sqlalchemy.orm import sessionmaker
+# # from app.main import init_app
+# # from app.database.session import sessionmanager
+# # from app.database.models import Permissao
+# # from app.database.session import Base  # Make sure Base contains all the models
+# # from app.config import URL_BD
+# # from app.scripts.initialize_permissions_roles import process_permissions
+# #
+# # # Test engine creation
+# # engine = create_async_engine(URL_BD, echo=True)
+# # TestSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession)
+# #
+# #
+# # async def create_test_database():
+# #     """
+# #     Drops and recreates every database table to guarantee a clean state.
+# #     """
+# #     async with engine.begin() as conn:
+# #         # Drop the tables if they already exist
+# #         await conn.run_sync(Base.metadata.drop_all)
+# #         # Create the tables again
+# #         await conn.run_sync(Base.metadata.create_all)
+# #
+# #     # Insert seed data into the Permissao table
+# #     async with AsyncSession(engine) as session:
+# #         permissoes = [
+# #             Permissao(nome="Permissão Total"),
+# #
+# #         ]
+# #
+# #         session.add_all(permissoes)
+# #         await session.commit()
+# #
+# #
+# # @pytest.fixture(scope="module")
+# # def client():
+# #     """
+# #     Fixture that initializes the FastAPI test client against the test database.
+# #     Creates the database tables before starting the client.
+# # """ +# # # Inicializa o sessionmanager com a URL do banco de testes +# # sessionmanager.init(URL_BD) +# # +# # # Cria as tabelas no banco de testes (exclui e recria) +# # asyncio.run(create_test_database()) +# # +# # # Inicializa o aplicativo FastAPI para testes +# # app = init_app(init_db=False) +# # +# # with TestClient(app) as c: +# # yield c +# # +# # +# # @pytest.fixture(scope="function") +# # async def session(): +# # """ +# # Fixture para fornecer uma sessão do SQLAlchemy para os testes. +# # """ +# # async with TestSessionLocal() as session: +# # yield session +# # +# # +# import pytest +# import asyncio +# from fastapi.testclient import TestClient +# from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession +# from sqlalchemy.orm import sessionmaker +# from app.main import init_app +# from app.database.session import sessionmanager +# from app.database.models import Permissao +# from app.database.session import Base +# from app.config import URL_BD +# +# # Criação do engine de testes +# engine = create_async_engine(URL_BD, echo=True) +# TestSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession) +# +# +# async def create_test_database(): +# """ +# Exclui e recria todas as tabelas do banco de dados para garantir um estado limpo. +# """ +# async with engine.begin() as conn: +# await conn.run_sync(Base.metadata.drop_all) +# await conn.run_sync(Base.metadata.create_all) +# +# # Insere dados iniciais na tabela Permissao +# async with AsyncSession(engine) as session: +# permissoes = [ +# Permissao(nome="Permissão Total"), +# ] +# session.add_all(permissoes) +# await session.commit() +# +# +# @pytest.fixture(scope="module") +# def client(): +# """ +# Fixture para inicializar o cliente de testes do FastAPI com o banco de dados de testes. +# Cria as tabelas do banco de dados antes de iniciar o cliente. +# """ +# # Inicializa o sessionmanager com a URL do banco de testes +# sessionmanager.init(URL_BD) +# +# # Cria as tabelas no banco de testes (exclui e recria) +# asyncio.run(create_test_database()) +# +# # Inicializa o aplicativo FastAPI para testes +# app = init_app(init_db=False) +# +# with TestClient(app) as c: +# yield c +# +# +# @pytest.fixture(scope="function") +# async def session(): +# """ +# Fixture para fornecer uma sessão do SQLAlchemy para os testes. +# """ +# async with TestSessionLocal() as session: +# yield session +# +# +# @pytest.fixture(scope="session") +# def event_loop(): +# """ +# Cria um novo loop de eventos para a sessão de testes. 
+# """ +# loop = asyncio.new_event_loop() +# yield loop +# loop.close() + +import pytest +import asyncio +from httpx import AsyncClient +from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession +from sqlalchemy.orm import sessionmaker +from app.main import app # Importa o app diretamente do main.py +from app.database.session import sessionmanager +from app.database.models import Permissao +from app.database.session import Base +from app.config import URL_BD + +# Criação do engine de testes +engine = create_async_engine(URL_BD, echo=True) +TestSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine, class_=AsyncSession) + + +async def create_test_database(): + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) + await conn.run_sync(Base.metadata.create_all) + + async with AsyncSession(engine) as session: + permissoes = [ + Permissao(nome="Permissão Total"), + ] + session.add_all(permissoes) + await session.commit() + + +@pytest.fixture(scope="session") +def event_loop(): + loop = asyncio.get_event_loop_policy().new_event_loop() + yield loop + loop.close() + + +@pytest.fixture(scope="session") +async def client(): + # Inicializa o sessionmanager com a URL do banco de testes + sessionmanager.init(URL_BD) + + # Cria as tabelas no banco de testes + await create_test_database() + + # Usa o app diretamente do main.py + async with AsyncClient(app=app, base_url="http://test") as ac: + yield ac + + +@pytest.fixture(scope="function") +async def session(): + async with TestSessionLocal() as session: + yield session diff --git a/tests/bkp_conftest.py b/tests/bkp_conftest.py new file mode 100644 index 0000000..6450d75 --- /dev/null +++ b/tests/bkp_conftest.py @@ -0,0 +1,251 @@ +import pytest +import asyncio +from app.main import app +from httpx import AsyncClient +from datetime import datetime, timedelta +from app.database.models import Base, RbacUser, RbacPapel, RbacPermissao +# from fastapi_users_db_sqlalchemy import SQLAlchemyBaseUserTableUUID +from app.config import URL_BD_TESTE, SECRET +from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession +from sqlalchemy.ext.asyncio import async_sessionmaker + +from app.rbac.classes_customizadas import CustomJWTStrategy +from app.rbac.auth import current_active_user + +# Criação do engine de testes com o URL do banco de dados +engine = create_async_engine(URL_BD_TESTE) + +# Configurando o async_sessionmaker para usar AsyncSession +TestSessionLocal = async_sessionmaker( + bind=engine, + class_=AsyncSession, + autoflush=False, + autocommit=False, +) + +print("Configuração do sessionmaker concluída") +# Definindo a permissão fictícia que o papel de teste irá possuir +test_permissao = RbacPermissao( + id=1, # "ID" fictício, certifique-se de que o valor seja único e não cause conflitos + nome="Permissao_Fake" +) + +# Definindo o papel fictício que o usuário de teste irá possuir +test_papel = RbacPapel( + # uuid=uuid.uuid4(), + nome="Teste_Papel", + permissoes=[test_permissao] # Adicione permissões conforme necessário para simular o comportamento real +) + +# Definindo o usuário de teste que será simulado, incluindo o papel +test_user = RbacUser( + # id=uuid.uuid4(), # Gere um UUID único + email="test@email.com", + hashed_password="hashed-password", # Senha fictícia, pois a verificação não será feita + username="testuser", + full_name="Test User", + is_active=True, + is_superuser=False, + papeis=[test_papel] # Incluindo o papel definido anteriormente +) + + +def fake_current_user(): + return 
diff --git a/tests/bkp_conftest.py b/tests/bkp_conftest.py
new file mode 100644
index 0000000..6450d75
--- /dev/null
+++ b/tests/bkp_conftest.py
@@ -0,0 +1,251 @@
+import pytest
+import asyncio
+from app.main import app
+from httpx import AsyncClient
+from datetime import datetime, timedelta
+from app.database.models import Base, RbacUser, RbacPapel, RbacPermissao
+# from fastapi_users_db_sqlalchemy import SQLAlchemyBaseUserTableUUID
+from app.config import URL_BD_TESTE, SECRET
+from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
+from sqlalchemy.ext.asyncio import async_sessionmaker
+
+from app.rbac.classes_customizadas import CustomJWTStrategy
+from app.rbac.auth import current_active_user
+
+# Test engine creation with the database URL
+engine = create_async_engine(URL_BD_TESTE)
+
+# Configure the async_sessionmaker to use AsyncSession
+TestSessionLocal = async_sessionmaker(
+    bind=engine,
+    class_=AsyncSession,
+    autoflush=False,
+    autocommit=False,
+)
+
+print("Sessionmaker configuration complete")
+# Define the fake permission that the test role will hold
+test_permissao = RbacPermissao(
+    id=1,  # Fake "ID"; make sure the value is unique and does not cause conflicts
+    nome="Permissao_Fake"
+)
+
+# Define the fake role that the test user will hold
+test_papel = RbacPapel(
+    # uuid=uuid.uuid4(),
+    nome="Teste_Papel",
+    permissoes=[test_permissao]  # Add permissions as needed to simulate real behavior
+)
+
+# Define the test user to be simulated, including the role
+test_user = RbacUser(
+    # id=uuid.uuid4(),  # Generate a unique UUID
+    email="test@email.com",
+    hashed_password="hashed-password",  # Fake password, since verification is never performed
+    username="testuser",
+    full_name="Test User",
+    is_active=True,
+    is_superuser=False,
+    papeis=[test_papel]  # Including the role defined above
+)
+
+
+def fake_current_user():
+    return test_user  # Variable holding the fake user
+
+
+# Suppose you have a list of permissions you want to simulate
+required_permissions_for_test = [1, 2]  # Artificial permission "IDs" for the test
+
+
+# Function to create and tear down the test database
+async def create_test_database():
+    print("Inside the database creation function")
+    async with engine.begin() as conn:
+        print("Dropping all tables...")
+        await conn.run_sync(Base.metadata.drop_all)
+        print("Creating all tables...")
+        await conn.run_sync(Base.metadata.create_all)
+
+
+@pytest.fixture(scope="session")
+def anyio_backend():
+    return "asyncio"
+
+
+@pytest.fixture(scope="session")
+async def client():
+    print("Starting client")
+    # Call the function that creates the test database
+    await create_test_database()
+    # Initialize permissions and roles after the database is created
+
+    # Set up the user override
+
+    app.dependency_overrides[current_active_user] = fake_current_user
+
+    # Instantiate the JWT strategy and generate the token
+    strategy = CustomJWTStrategy(secret=SECRET, lifetime_seconds=3600)
+    token = await strategy.write_token(test_user)
+
+    async with AsyncClient(app=app, base_url="http://test") as client:
+        client.headers.update({"Authorization": f"Bearer {token}"})
+        yield client
+
+
+# FUNCTIONS FOR ROUTES THAT REQUIRE AUTHENTICATION
+
+
+# MODULES WITH UUIDs FOR TESTS
+@pytest.fixture(scope="session")
+def datas_referencia():
+    data_atual = datetime.now().date()
+    return {
+        "data_atual": data_atual,
+        "data_anterior": data_atual - timedelta(days=1),
+        "data_posterior": data_atual + timedelta(days=1),
+    }
+
+
+@pytest.fixture(scope="session")
+def uuid_store_relacao_comercial():
+    return {
+        "uuid_1": "01915caf-2c4d-7270-a071-d928c87f8e20",  # Invalid UUID for tests
+        "uuid_2": "01915caf-2c4d-7270-a071-d928c87f8e30",  # Invalid UUID for tests
+        "uuid_3": None,
+        "uuid_4": None,
+        "uuid_5": None,
+        "uuid_6": None,  # UUID for relationships
+        "uuid_7": None,  # UUID for relationships
+        "uuid_8": None,  # UUID for relationships
+        "uuid_9": None,
+        "uuid_10": None,
+        "uuid_11": None,
+    }
+
+
+@pytest.fixture(scope="session")
+def uuid_store_tipo_endereco():
+    return {
+        "uuid_1": "01915caf-2c4d-7270-a071-d928c87f8e40",  # Invalid UUID for tests
+        "uuid_2": "01915caf-2c4d-7270-a071-d928c87f8e50",  # Invalid UUID for tests
+        "uuid_3": None,
+        "uuid_4": None,
+        "uuid_5": None,
+        "uuid_6": None,  # UUID for relationships
+        "uuid_7": None,  # UUID for relationships
+        "uuid_8": None,  # UUID for relationships
+    }
+
+
+@pytest.fixture(scope="session")
+def uuid_store_pessoa():
+    return {
+        "uuid_1": "01915caf-2c4d-7270-a071-d928c87f8e60",  # Invalid UUID for tests
+        "uuid_2": "01915caf-2c4d-7270-a071-d928c87f8e70",  # Invalid UUID for tests
+        "uuid_3": None,
+        "uuid_4": None,
+        "uuid_5": None,
+        "uuid_6": None,
+        "uuid_7": None,
+        "uuid_8": None,
+    }
+
+
+@pytest.fixture(scope="session")
+def uuid_store_endereco():
+    return {
+        "uuid_1": "01915caf-2c4d-7270-a071-d928c87f8e80",  # Invalid UUID for tests
+        "uuid_2": "01915caf-2c4d-7270-a071-d928c87f8e90",  # Invalid UUID for tests
+        "uuid_3": None,
+        "uuid_4": None,
+        "uuid_5": None,
+        "uuid_6": None,
+        "uuid_7": None,
+        "uuid_8": None,
+    }
+
+
+@pytest.fixture(scope="session")
+def uuid_store_setor():
+    return {
+        "uuid_1": "01915caf-2c4d-7270-a071-d928c87f8e21",  # Invalid UUID for tests
+        "uuid_2": "01915caf-2c4d-7270-a071-d928c87f8e32",  # Invalid UUID for tests
+        "uuid_3": None,
+        "uuid_4": None,
+ "uuid_5": None, + "uuid_6": None, # UUID para Relacionamento + "uuid_7": None, # UUID para Relacionamento + "uuid_8": None, # UUID para Relacionamento + "uuid_9": None, + "uuid_10": None, + "uuid_11": None, + } + + +@pytest.fixture(scope="session") +def uuid_store_tipo_equipamento(): + return { + "uuid_1": "01915caf-2c4d-7270-a071-d928c87f8e22", # UUID inválido para testes + "uuid_2": "01915caf-2c4d-7270-a071-d928c87f8e31", # UUID inválido para testes + "uuid_3": None, + "uuid_4": None, + "uuid_5": None, + "uuid_6": None, # UUID para Relacionamento + "uuid_7": None, # UUID para Relacionamento + "uuid_8": None, # UUID para Relacionamento + "uuid_9": None, + "uuid_10": None, + "uuid_11": None, + } + + +@pytest.fixture(scope="session") +def uuid_store_equipamento(): + return { + "uuid_1": "01915caf-2c4d-7270-a071-d928c87f8e23", # UUID inválido para testes + "uuid_2": "01915caf-2c4d-7270-a071-d928c87f8e32", # UUID inválido para testes + "uuid_3": None, + "uuid_4": None, + "uuid_5": None, + "uuid_6": None, # UUID para Relacionamento + "uuid_7": None, # UUID para Relacionamento + "uuid_8": None, # UUID para Relacionamento + "uuid_9": None, + "uuid_10": None, + "uuid_11": None, + } + + +@pytest.fixture(scope="session") +def uuid_store_itens_equipamento(): + return { + "uuid_1": "01915caf-2c4d-7270-a071-d928c87f8e24", # UUID inválido para testes + "uuid_2": "01915caf-2c4d-7270-a071-d928c87f8e33", # UUID inválido para testes + "uuid_3": None, + "uuid_4": None, + "uuid_5": None, + "uuid_6": None, # UUID para Relacionamento + "uuid_7": None, # UUID para Relacionamento + "uuid_8": None, # UUID para Relacionamento + "uuid_9": None, + "uuid_10": None, + "uuid_11": None, + } + + +@pytest.fixture(scope="session") +def uuid_store_manutencao_equipamento(): + return { + "uuid_1": "01915caf-2c4d-7270-a071-d928c87f8e25", # UUID inválido para testes + "uuid_2": "01915caf-2c4d-7270-a071-d928c87f8e34", # UUID inválido para testes + "uuid_3": None, + "uuid_4": None, + "uuid_5": None, + "uuid_6": None, # UUID para Relacionamento + "uuid_7": None, # UUID para Relacionamento + "uuid_8": None, # UUID para Relacionamento + "uuid_9": None, + "uuid_10": None, + "uuid_11": None, + } diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..1b3c560 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,335 @@ +import pytest +from fastapi_users.authentication import JWTStrategy + +from app.main import app +from httpx import AsyncClient +from datetime import datetime, timedelta +from app.database.models import Base, RbacUser, RbacPapel, RbacPermissao +# from fastapi_users_db_sqlalchemy import SQLAlchemyBaseUserTableUUID +from app.config import URL_BD_TESTE, SECRET +from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession +from sqlalchemy.ext.asyncio import async_sessionmaker +from sqlalchemy import text + + +from app.rbac.auth import current_active_user + +# Criação do engine de testes com o URL do banco de dados +engine = create_async_engine(URL_BD_TESTE) +selected_user = None + +# Configurando o async_sessionmaker para usar AsyncSession +TestSessionLocal = async_sessionmaker( + bind=engine, + class_=AsyncSession, + autoflush=False, + autocommit=False, +) + + +# print("Configuração do sessionmaker concluída") +# print(URL_BD_TESTE) + + +def pytest_addoption(parser): + parser.addoption( + "--usuario", action="store", default="admin", help="Nome do usuário para executar os testes" + ) + + +# Função para criar um usuário com permissão e papel +def create_user(email, papel_nome, 
+
+
+# Function to create a user with a permission and a role
+def create_user(email, papel_nome, permissoes_ids):
+    permissoes = [RbacPermissao(id=perm_id, nome=f"Permissao_{perm_id}") for perm_id in permissoes_ids]
+    papel = RbacPapel(
+        nome=papel_nome,
+        permissoes=permissoes
+    )
+    user = RbacUser(
+        email=email,
+        hashed_password="hashed-password",
+        fk_inquilino_uuid="pytest",
+        is_active=True,
+        is_superuser=False,
+        papeis=[papel]
+    )
+
+    return user
+
+
+# Fixture that creates multiple users
+@pytest.fixture(scope="session")
+def create_test_users():
+    users = {
+        "admin": create_user(
+            email="admin@example.com",
+            papel_nome="Admin_Papel",
+            permissoes_ids=[1]
+        ),
+        "comercial": create_user(
+            email="user1@example.com",
+            papel_nome="User1_Papel",
+            permissoes_ids=[2]
+        ),
+        "estoque": create_user(
+            email="user2@example.com",
+            papel_nome="User2_Papel",
+            permissoes_ids=[4]
+        ),
+        "financeiro": create_user(
+            email="user3@example.com",
+            papel_nome="User3_Papel",
+            permissoes_ids=[5]
+        ),
+        "pessoa": create_user(
+            email="user4@example.com",
+            papel_nome="User4_Papel",
+            permissoes_ids=[4, 5, 33]
+        ),
+    }
+    return users
+
+
+# Fixture that selects a user
+@pytest.fixture(scope="session")
+def selecionar_usuario(create_test_users, request):
+    usuario_nome = request.config.getoption("--usuario", default="admin")
+    usuario_selecionado = create_test_users.get(usuario_nome, create_test_users["admin"])
+    return usuario_selecionado
+
+
+def fake_current_user(selecionar_usuario):
+    return selecionar_usuario
+
+
+async def create_test_database():
+    tenant_schema = "pytest"
+
+    # Drop and recreate the "pytest" schema
+    async with engine.begin() as conn:
+        try:
+            # print("Dropping schema tenant (pytest)...")
+            await conn.execute(text(f"DROP SCHEMA IF EXISTS {tenant_schema} CASCADE"))
+            # print("Creating schema tenant (pytest)...")
+            await conn.execute(text(f"CREATE SCHEMA {tenant_schema}"))
+        except Exception as e:
+            print(f"Error dropping or recreating schema '{tenant_schema}': {e}")
+            raise  # Propagate the error to stop the tests
+
+    # Point the tables that do not belong to the "shared" schema at the tenant schema
+    for table in Base.metadata.tables.values():
+        if table.schema != "shared":
+            table.schema = tenant_schema
+
+    # Create the tables only for the tenant schema
+    tenant_tables = [table for table in Base.metadata.sorted_tables if table.schema == tenant_schema]
+    async with engine.begin() as conn:
+        try:
+            # print("Creating tables in the tenant schema (pytest)...")
+            await conn.run_sync(Base.metadata.create_all, tables=tenant_tables)
+        except Exception as e:
+            print(f"Error creating tables in schema '{tenant_schema}': {e}")
+            raise
+
+
+@pytest.fixture(scope="session")
+def anyio_backend():
+    return "asyncio"
+
+
+@pytest.fixture(scope="session")
+async def client(selecionar_usuario):
+    # print("Starting client")
+    # Call the function that creates the test database
+    await create_test_database()
+
+    app.dependency_overrides[current_active_user] = lambda: selecionar_usuario
+
+    strategy = JWTStrategy(secret=SECRET, lifetime_seconds=3600)
+    token = await strategy.write_token(selecionar_usuario)
+    # print(f"Generated token: {token}")
+
+    async with AsyncClient(app=app, base_url="http://test") as client:
+        client.headers.update({"Authorization": f"Bearer {token}"})
+        yield client
+
+
+# Modify the collected test items based on the allowed users
+def pytest_collection_modifyitems(config, items):
+    usuario = config.getoption("--usuario")
+
+    def usuario_esta_permitido(usuarios_permitidos_):
+        # print(f"Checking whether user '{usuario}' is in the allowed list: {usuarios_permitidos_}")
+        return usuario in usuarios_permitidos_
+
+    selected_items = []
+    for item in items:
+        usuarios_permitidos_marker = item.get_closest_marker("usuarios_permitidos")
+
+        if usuarios_permitidos_marker:
+            usuarios_permitidos = usuarios_permitidos_marker.args[0]
+            if not usuario_esta_permitido(usuarios_permitidos):
+                # print(f"Skipping test '{item.name}' because user '{usuario}' is not allowed.")
+                continue  # Skip the test if the user is not allowed
+
+        selected_items.append(item)
+
+    # Update the list of tests that will run
+    items[:] = selected_items
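+
+
+# Note on the uuid_store_* fixtures below (editorial summary, inferred from the
+# inline comments): each one is a session-scoped dict shared across a test
+# module. "uuid_1"/"uuid_2" hold well-formed UUIDs that are never inserted, so
+# lookups with them are expected to fail; the None slots are filled by the
+# create tests and then consumed by later get/update/delete tests, which is
+# why the test files are numbered and assumed to run in order.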
{usuarios_permitidos_}") + return usuario in usuarios_permitidos_ + + selected_items = [] + for item in items: + usuarios_permitidos_marker = item.get_closest_marker("usuarios_permitidos") + + if usuarios_permitidos_marker: + usuarios_permitidos = usuarios_permitidos_marker.args[0] + if not usuario_esta_permitido(usuarios_permitidos): + # print(f"Ignorando o teste '{item.name}' porque o usuário '{usuario}' não está permitido.") + continue # Ignorar o teste se o usuário não estiver permitido + + selected_items.append(item) + + # Atualiza a lista de testes que serão executados + items[:] = selected_items + + +# FUNÇÕES PARA ROTAS QUE REQUEREM AUTENTICAÇÃO + + +# MODULOS COM UUID PARA TESTES +@pytest.fixture(scope="session") +def datas_referencia(): + data_atual = datetime.now().date() + return { + "data_atual": data_atual, + "data_anterior": data_atual - timedelta(days=1), + "data_posterior": data_atual + timedelta(days=1), + } + + +@pytest.fixture(scope="session") +def uuid_store_relacao_comercial(): + return { + "uuid_1": "01915caf-2c4d-7270-a071-d928c87f8e20", # UUID inválido para testes + "uuid_2": "01915caf-2c4d-7270-a071-d928c87f8e30", # UUID inválido para testes + "uuid_3": None, + "uuid_4": None, + "uuid_5": None, + "uuid_6": None, # UUID para Relacionamento + "uuid_7": None, # UUID para Relacionamento + "uuid_8": None, # UUID para Relacionamento + "uuid_9": None, + "uuid_10": None, + "uuid_11": None, + } + + +@pytest.fixture(scope="session") +def uuid_store_tipo_endereco(): + return { + "uuid_1": "01915caf-2c4d-7270-a071-d928c87f8e40", # UUID inválido para testes + "uuid_2": "01915caf-2c4d-7270-a071-d928c87f8e50", # UUID inválido para testes + "uuid_3": None, + "uuid_4": None, + "uuid_5": None, + "uuid_6": None, # UUID para Relacionamento + "uuid_7": None, # UUID para Relacionamento + "uuid_8": None, # UUID para Relacionamento + } + + +@pytest.fixture(scope="session") +def uuid_store_pessoa(): + return { + "uuid_1": "01915caf-2c4d-7270-a071-d928c87f8e60", # UUID inválido para testes + "uuid_2": "01915caf-2c4d-7270-a071-d928c87f8e70", # UUID inválido para testes + "uuid_3": None, + "uuid_4": None, + "uuid_5": None, + "uuid_6": None, + "uuid_7": None, + "uuid_8": None, + } + + +@pytest.fixture(scope="session") +def uuid_store_endereco(): + return { + "uuid_1": "01915caf-2c4d-7270-a071-d928c87f8e80", # UUID inválido para testes + "uuid_2": "01915caf-2c4d-7270-a071-d928c87f8e90", # UUID inválido para testes + "uuid_3": None, + "uuid_4": None, + "uuid_5": None, + "uuid_6": None, + "uuid_7": None, + "uuid_8": None, + } + + +@pytest.fixture(scope="session") +def uuid_store_setor(): + return { + "uuid_1": "01915caf-2c4d-7270-a071-d928c87f8e21", # UUID inválido para testes + "uuid_2": "01915caf-2c4d-7270-a071-d928c87f8e32", # UUID inválido para testes + "uuid_3": None, + "uuid_4": None, + "uuid_5": None, + "uuid_6": None, # UUID para Relacionamento + "uuid_7": None, # UUID para Relacionamento + "uuid_8": None, # UUID para Relacionamento + "uuid_9": None, + "uuid_10": None, + "uuid_11": None, + } + + +@pytest.fixture(scope="session") +def uuid_store_tipo_equipamento(): + return { + "uuid_1": "01915caf-2c4d-7270-a071-d928c87f8e22", # UUID inválido para testes + "uuid_2": "01915caf-2c4d-7270-a071-d928c87f8e31", # UUID inválido para testes + "uuid_3": None, + "uuid_4": None, + "uuid_5": None, + "uuid_6": None, # UUID para Relacionamento + "uuid_7": None, # UUID para Relacionamento + "uuid_8": None, # UUID para Relacionamento + "uuid_9": None, + "uuid_10": None, + "uuid_11": None, + } + + 
+@pytest.fixture(scope="session")
+def uuid_store_equipamento():
+    return {
+        "uuid_1": "01915caf-2c4d-7270-a071-d928c87f8e23",  # Invalid UUID for tests
+        "uuid_2": "01915caf-2c4d-7270-a071-d928c87f8e32",  # Invalid UUID for tests
+        "uuid_3": None,
+        "uuid_4": None,
+        "uuid_5": None,
+        "uuid_6": None,  # UUID for relationships
+        "uuid_7": None,  # UUID for relationships
+        "uuid_8": None,  # UUID for relationships
+        "uuid_9": None,
+        "uuid_10": None,
+        "uuid_11": None,
+    }
+
+
+@pytest.fixture(scope="session")
+def uuid_store_itens_equipamento():
+    return {
+        "uuid_1": "01915caf-2c4d-7270-a071-d928c87f8e24",  # Invalid UUID for tests
+        "uuid_2": "01915caf-2c4d-7270-a071-d928c87f8e33",  # Invalid UUID for tests
+        "uuid_3": None,
+        "uuid_4": None,
+        "uuid_5": None,
+        "uuid_6": None,  # UUID for relationships
+        "uuid_7": None,  # UUID for relationships
+        "uuid_8": None,  # UUID for relationships
+        "uuid_9": None,
+        "uuid_10": None,
+        "uuid_11": None,
+    }
+
+
+@pytest.fixture(scope="session")
+def uuid_store_manutencao_equipamento():
+    return {
+        "uuid_1": "01915caf-2c4d-7270-a071-d928c87f8e25",  # Invalid UUID for tests
+        "uuid_2": "01915caf-2c4d-7270-a071-d928c87f8e34",  # Invalid UUID for tests
+        "uuid_3": None,
+        "uuid_4": None,
+        "uuid_5": None,
+        "uuid_6": None,  # UUID for relationships
+        "uuid_7": None,  # UUID for relationships
+        "uuid_8": None,  # UUID for relationships
+        "uuid_9": None,
+        "uuid_10": None,
+        "uuid_11": None,
+    }
diff --git a/tests/init_db_pytest.py b/tests/init_db_pytest.py
new file mode 100644
index 0000000..61cb446
--- /dev/null
+++ b/tests/init_db_pytest.py
@@ -0,0 +1,44 @@
+import subprocess
+import asyncio
+from app.database.session import sessionmanager
+from app.config import URL_BD
+from app.scripts.initialize_permissions_roles import initialize_permissions_roles
+from app.scripts.create_initial_user import create_user
+
+
+def alembic_upgrade():
+    try:
+        print("Starting Alembic migrations")
+        result = subprocess.run(["alembic", "upgrade", "head"], check=True, capture_output=True, text=True)
+        print("Alembic migration finished successfully.")
+        print(result.stdout)
+    except subprocess.CalledProcessError as e:
+        print("Error during Alembic migrations.")
+        print(e.stderr)
+        raise
+
+
+async def init_bd(user_email: str, user_password: str, user_username: str, user_full_name: str,
+                  user_is_superuser: bool = False):
+
+    sessionmanager.init(URL_BD)
+
+    try:
+        print("Inserting initial data into the database")
+        # Create the initial user
+        user_id = await create_user(user_email, user_password, user_username, user_full_name, user_is_superuser)
+        # Insert permissions and roles
+        await initialize_permissions_roles(user_id)
+    except Exception as e:
+        print(f"Data insertion failed: {e}")
+
+
+if __name__ == "__main__":
+    # The user data can be passed as command-line arguments or collected some other way
+    email = "admin@sonora.com"
+    password = "admin"
+    username = "UsuarioAdmin"
+    full_name = "Admin"
+    is_superuser = True
+
+    asyncio.run(init_bd(email, password, username, full_name, is_superuser))
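The alembic_upgrade helper above shells out to the Alembic CLI. For reference, an equivalent in-process sketch using Alembic's Python API — an editorial alternative that assumes alembic.ini sits at the repository root, not what this commit ships:

from alembic import command
from alembic.config import Config


def alembic_upgrade_in_process() -> None:
    # Equivalent to `alembic upgrade head`, without spawning a subprocess.
    config = Config("alembic.ini")
    command.upgrade(config, "head")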
diff --git a/tests/test_000_usuario.py b/tests/test_000_usuario.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/test_003_pessoa.py b/tests/test_003_pessoa.py
new file mode 100644
index 0000000..362f6b8
--- /dev/null
+++ b/tests/test_003_pessoa.py
@@ -0,0 +1,1414 @@
+import pytest
+from httpx import AsyncClient
+from fastapi import status
+
+BASE_URL = "/api/pessoa"
+
+
+@pytest.mark.anyio
+async def test_route_exists(client: AsyncClient):
+    response = await client.post(f"{BASE_URL}/get_all")
+    assert response.status_code != 404  # Make sure the route exists
+
+
+@pytest.mark.anyio
+@pytest.mark.usuarios_permitidos(["admin", "estoque"])
+async def test_create_one(client: AsyncClient, uuid_store_pessoa: dict, uuid_store_tipo_endereco,
+                          uuid_store_relacao_comercial, uuid_store_endereco):
+    response = await client.post(f"{BASE_URL}/add_one",
+                                 json={"pessoa_status": True, "fisica_cpf": "00000000001",
+                                       "pessoa_telefone": "telteste1",
+                                       "pessoa_celular": "celteste1",
+                                       "pessoa_email": "email1@email.com", "pessoa_local_evento": True,
+                                       "pessoa_tipo": "1", "fisica_rg": "rg teste 1",
+                                       "fisica_genero": "M", "fisica_nome": "nome teste 1"}
+                                 )
+    if response.status_code in [200, 201, 204, 404, 422]:
+        assert response.status_code == 201
+        data = response.json()
+        assert data["pessoa_status"] is True
+        assert data["fisica_cpf"] == "00000000001"
+        assert data["pessoa_telefone"] == "telteste1"
+        assert data["pessoa_celular"] == "celteste1"
+        assert data["pessoa_email"] == "email1@email.com"
+        assert data["pessoa_local_evento"] is True
+        assert data["pessoa_tipo"] == "1"
+        assert data["fisica_rg"] == "rg teste 1"
+        assert data["fisica_genero"] == "M"
+        assert data["fisica_nome"] == "nome teste 1"
+        assert data["enderecos"] == []
+        assert data["rc"] == []
+        uuid_store_pessoa["uuid_3"] = response.json()["uuid"]
+
+    elif response.status_code == 403:
+        assert response.status_code == 403, "Esperado status 403 para usuário sem permissões."
+    else:
+        assert False, f"Status inesperado: {response.status_code}, detalhe: {response.json()}"
+
+
+@pytest.mark.anyio
+@pytest.mark.usuarios_permitidos(["admin", "estoque"])
+async def test_create_many(client: AsyncClient, uuid_store_pessoa: dict, uuid_store_tipo_endereco: dict,
+                           uuid_store_relacao_comercial: dict, uuid_store_endereco: dict):
+    response = await client.post(f"{BASE_URL}/add_many", json=[
+        # 0
+        {"pessoa_status": True, "fisica_cpf": "00000000002", "pessoa_telefone": "telteste2",
+         "pessoa_celular": "celteste2", "pessoa_email": "email2@email.com",
+         "pessoa_local_evento": True, "pessoa_tipo": "1", "fisica_rg": "rg teste 2", "fisica_genero": "M",
+         "fisica_nome": "nome teste 2",
+         "enderecos": [{"endereco_pessoa_status": True, "endereco_pessoa_descricao": "descricao 2",
+                        "endereco_pessoa_numero": "numero 2", "endereco_pessoa_complemento": "complemento 2",
+                        "endereco_pessoa_cep": "00000000",
+                        "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"]}],
+         "rc": [{"uuid": uuid_store_relacao_comercial["uuid_6"]}
+                ]},
+        # 1
+        {"pessoa_status": True, "fisica_cpf": "00000000003", "pessoa_telefone": "telteste3",
+         "pessoa_celular": "celteste3", "pessoa_email": "email3@email.com",
+         "pessoa_local_evento": False, "pessoa_tipo": "1", "fisica_rg": "rg teste 3", "fisica_genero": "F",
+         "fisica_nome": "nome teste 3",
+         "rc": [{"uuid": uuid_store_relacao_comercial["uuid_6"]}]},
+        # 2
+        {"pessoa_status": False, "fisica_cpf": "00000000004", "pessoa_telefone": "telteste4",
+         "pessoa_celular": "celteste4", "pessoa_email": "email4@email.com",
+         "pessoa_local_evento": True,
+         "pessoa_tipo": "1", "fisica_rg": "rg teste 4", "fisica_genero": "F", "fisica_nome": "nome teste 4",
+         "enderecos": [{"endereco_pessoa_status": False, "endereco_pessoa_descricao": "descricao 4",
+                        "endereco_pessoa_numero": "numero 4", "endereco_pessoa_complemento": "complemento 4",
+                        "endereco_pessoa_cep": "00000000",
+                        "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"]}]},
+        # 3
+
{"pessoa_status": True, "juridica_cnpj": "00000000000005", "pessoa_telefone": "telteste5", + "pessoa_celular": "celteste5", + "pessoa_email": "email5@email.com", + "pessoa_local_evento": True, + "pessoa_tipo": "0", "juridica_email_fiscal": "email_fiscal5@email.com", "juridica_insc_est": "ins est 5", + "juridica_ins_mun": "insc mun 5", + "juridica_razao_social": "razão social 5", + "juridica_representante": "representante 5", + "rc": [{"uuid": uuid_store_relacao_comercial["uuid_8"]} + ]}, + # 4 + {"pessoa_status": True, "juridica_cnpj": "00000000000006", "pessoa_telefone": "telteste6", + "pessoa_celular": "celteste6", + "pessoa_email": "email6@email.com", + "pessoa_local_evento": True, + "pessoa_tipo": "0", "juridica_email_fiscal": "email_fiscal6@email.com", "juridica_insc_est": "ins est 6", + "juridica_ins_mun": "insc mun 6", + "juridica_razao_social": "razão social 5", "juridica_representante": "representante 6", + "enderecos": [{"endereco_pessoa_status": True, "endereco_pessoa_descricao": "descricao 6", + "endereco_pessoa_numero": "numero 6", "endereco_pessoa_complemento": "complemento 6", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"]}], + "rc": [{"uuid": uuid_store_relacao_comercial["uuid_7"]} + ]}, + # 5 + {"pessoa_status": False, "juridica_cnpj": "00000000000007", "pessoa_telefone": "telteste7", + "pessoa_celular": "celteste7", + "pessoa_email": "email7@email.com", + "pessoa_local_evento": False, "pessoa_tipo": "0", "juridica_email_fiscal": "email_fiscal7@email.com", + "juridica_insc_est": "ins est 7", "juridica_ins_mun": "insc mun 7", + "juridica_razao_social": "razão social 7", "juridica_representante": "representante 7", + "enderecos": [{"endereco_pessoa_status": True, "endereco_pessoa_descricao": "descricao 7", + "endereco_pessoa_numero": "numero 7", "endereco_pessoa_complemento": "complemento 7", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"]}], + "rc": [{"uuid": uuid_store_relacao_comercial["uuid_7"]} + ]}, + + # 6 + {"pessoa_status": True, "juridica_cnpj": "00000000000008", "pessoa_telefone": "telteste8", + "pessoa_celular": "celteste8", + "pessoa_email": "email8@email.com", + "pessoa_local_evento": True, + "pessoa_tipo": "0", "juridica_email_fiscal": "email_fiscal8@email.com", "juridica_insc_est": "ins est 8", + "juridica_ins_mun": "insc mun 8", + "juridica_razao_social": "razão social 8", "juridica_representante": "representante 8"} + + ]) + if response.status_code in [200, 201, 204, 404, 422]: + assert response.status_code == 201 + data = response.json() + assert len(data) == 7 + assert data[0]["fisica_cpf"] == "00000000002" + assert data[1]["fisica_cpf"] == "00000000003" + assert data[1]["enderecos"] == [] + assert data[2]["fisica_cpf"] == "00000000004" + assert data[2]["rc"] == [] + assert data[3]["juridica_cnpj"] == "00000000000005" + assert data[3]["enderecos"] == [] + assert data[4]["juridica_cnpj"] == "00000000000006" + assert data[5]["juridica_cnpj"] == "00000000000007" + assert data[6]["juridica_cnpj"] == "00000000000008" + assert data[6]["enderecos"] == [] + assert data[6]["rc"] == [] + uuid_store_pessoa["uuid_4"] = data[0]["uuid"] + uuid_store_pessoa["uuid_5"] = data[1]["uuid"] + uuid_store_pessoa["uuid_6"] = data[2]["uuid"] + uuid_store_pessoa["uuid_7"] = data[3]["uuid"] + uuid_store_pessoa["uuid_8"] = data[4]["uuid"] + uuid_store_pessoa["uuid_9"] = data[5]["uuid"] + uuid_store_pessoa["uuid_10"] = data[6]["uuid"] + uuid_store_endereco["uuid_3"] = 
data[0]["enderecos"][0]["uuid"] + uuid_store_endereco["uuid_4"] = data[2]["enderecos"][0]["uuid"] + uuid_store_endereco["uuid_5"] = data[4]["enderecos"][0]["uuid"] + uuid_store_endereco["uuid_6"] = data[5]["enderecos"][0]["uuid"] + elif response.status_code == 403: + assert response.status_code == 403, "Esperado status 403 para usuário sem permissões." + else: + assert False, f"Status inesperado: {response.status_code}, detalhe: {response.json()}" + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque", "pessoa"]) +async def test_get_all(client: AsyncClient): + response = await client.post(f"{BASE_URL}/get_all") + if response.status_code in [200, 201, 204, 404, 422]: + assert response.status_code == 200 + data = response.json() + assert isinstance(data, list) + elif response.status_code == 403: + assert response.status_code == 403, "Esperado status 403 para usuário sem permissões." + else: + assert False, f"Status inesperado: {response.status_code}, detalhe: {response.json()}" + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_get_many(client: AsyncClient, uuid_store_pessoa: dict): + uuids = [uuid_store_pessoa["uuid_3"], uuid_store_pessoa["uuid_4"]] + response = await client.post(f"{BASE_URL}/get_many", json={"uuids": uuids}) + assert response.status_code == 200 + data = response.json() + assert isinstance(data, list) + assert len(data) == 2 + assert data[0]["pessoa_celular"] == "celteste1" + assert data[0]["pessoa_email"] == "email1@email.com" + assert data[1]["pessoa_celular"] == "celteste2" + assert data[1]["pessoa_email"] == "email2@email.com" + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_get_one(client: AsyncClient, uuid_store_pessoa: dict): + response = await client.post(f"{BASE_URL}/get_one", json={"uuid": uuid_store_pessoa["uuid_3"]}) + assert response.status_code == 200 + data = response.json() + assert "uuid" in data + assert data["pessoa_local_evento"] is True + assert data["pessoa_tipo"] == "1" + assert data["uuid"] == uuid_store_pessoa["uuid_3"] + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_update_one_existing_item_fisica(client: AsyncClient, uuid_store_pessoa: dict): + response = await client.put(f"{BASE_URL}/update_one", + json={"uuid": uuid_store_pessoa["uuid_3"], "pessoa_status": True, + "fisica_cpf": "u0000000001", + "pessoa_telefone": "up telteste1", "pessoa_celular": "012345678", + "pessoa_email": "email_update1@email.com", "pessoa_local_evento": True, + "pessoa_tipo": "1", + "fisica_rg": "up rg teste 1", "fisica_genero": "M", + "fisica_nome": "up nome teste 1"}) + assert response.status_code == 201 + data = response.json() + assert data["uuid"] == uuid_store_pessoa["uuid_3"] + assert data["pessoa_status"] is True + assert data["fisica_cpf"] == "u0000000001" + assert data["pessoa_telefone"] == "up telteste1" + assert data["pessoa_celular"] == "012345678" + assert data["pessoa_email"] == "email_update1@email.com" + assert data["pessoa_local_evento"] is True + assert data["pessoa_tipo"] == "1" + assert data["fisica_rg"] == "up rg teste 1" + assert data["fisica_genero"] == "M" + assert data["fisica_nome"] == "up nome teste 1" + assert data["enderecos"] == [] + assert data["rc"] == [] + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_update_one_existing_item_fisica_rc(client: AsyncClient, uuid_store_pessoa: dict, + uuid_store_relacao_comercial: dict): + response = 
await client.put(f"{BASE_URL}/update_one", + json={"uuid": uuid_store_pessoa["uuid_5"], "pessoa_status": True, + "fisica_cpf": "u0000000003", + "pessoa_telefone": "up telteste3", "pessoa_celular": "012345678", + "pessoa_email": "email_update3@email.com", "pessoa_local_evento": True, + "pessoa_tipo": "1", + "fisica_rg": "up rg teste 3", + "fisica_genero": "M", "fisica_nome": "up nome teste 3", + "rc": [{"uuid": uuid_store_relacao_comercial["uuid_7"]}]}) + assert response.status_code == 201 + data = response.json() + assert data["uuid"] == uuid_store_pessoa["uuid_5"] + assert data["pessoa_status"] is True + assert data["fisica_cpf"] == "u0000000003" + assert data["pessoa_telefone"] == "up telteste3" + assert data["pessoa_celular"] == "012345678" + assert data["pessoa_email"] == "email_update3@email.com" + assert data["pessoa_local_evento"] is True + assert data["pessoa_tipo"] == "1" + assert data["fisica_rg"] == "up rg teste 3" + assert data["fisica_genero"] == "M" + assert data["fisica_nome"] == "up nome teste 3" + assert data["enderecos"] == [] + assert data["rc"][0]["uuid"] == uuid_store_relacao_comercial["uuid_7"] + assert len(data["rc"]) == 1 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_update_one_existing_item_fisica_2rc(client: AsyncClient, uuid_store_pessoa: dict, + uuid_store_relacao_comercial: dict): + response = await client.put(f"{BASE_URL}/update_one", + json={"uuid": uuid_store_pessoa["uuid_5"], "pessoa_status": True, + "fisica_cpf": "u0000000003", + "pessoa_telefone": "up telteste3", "pessoa_celular": "012345678", + "pessoa_email": "email_update3@email.com", "pessoa_local_evento": True, + "pessoa_tipo": "1", + "fisica_rg": "up rg teste 3", + "fisica_genero": "M", "fisica_nome": "up nome teste 3", + "rc": [{"uuid": uuid_store_relacao_comercial["uuid_7"]}, + {"uuid": uuid_store_relacao_comercial["uuid_8"]}]}) + assert response.status_code == 201 + data = response.json() + assert data["uuid"] == uuid_store_pessoa["uuid_5"] + assert data["pessoa_status"] is True + assert data["fisica_cpf"] == "u0000000003" + assert data["pessoa_telefone"] == "up telteste3" + assert data["pessoa_celular"] == "012345678" + assert data["pessoa_email"] == "email_update3@email.com" + assert data["pessoa_local_evento"] is True + assert data["pessoa_tipo"] == "1" + assert data["fisica_rg"] == "up rg teste 3" + assert data["fisica_genero"] == "M" + assert data["fisica_nome"] == "up nome teste 3" + assert data["enderecos"] == [] + assert data["rc"][1]["uuid"] == uuid_store_relacao_comercial["uuid_7"] + assert data["rc"][0]["uuid"] == uuid_store_relacao_comercial["uuid_8"] + assert len(data["rc"]) == 2 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_update_one_existing_item_fisica_end(client: AsyncClient, uuid_store_pessoa: dict, + uuid_store_tipo_endereco: dict, uuid_store_endereco: dict): + response = await client.put(f"{BASE_URL}/update_one", + json={"uuid": uuid_store_pessoa["uuid_6"], "pessoa_status": False, + "fisica_cpf": "u0000000004", + "pessoa_telefone": "uptelteste4", "pessoa_celular": "upteste4", + "pessoa_email": "email_update4@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "1", "fisica_rg": "up rg teste 4", + "fisica_genero": "F", + "fisica_nome": "Ricardo Junio Daleprane", + "enderecos": [ + {"uuid": uuid_store_endereco["uuid_4"], "endereco_pessoa_status": False, + "endereco_pessoa_descricao": "up descricao 4", + "endereco_pessoa_numero": "up num 4", + "endereco_pessoa_complemento": 
"up complemento 4", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_7"]}]}) + assert response.status_code == 201 + data = response.json() + assert data["uuid"] == uuid_store_pessoa["uuid_6"] + assert data["pessoa_status"] is False + assert data["fisica_cpf"] == "u0000000004" + assert data["pessoa_telefone"] == "uptelteste4" + assert data["pessoa_celular"] == "upteste4" + assert data["pessoa_email"] == "email_update4@email.com" + assert data["pessoa_local_evento"] is True + assert data["pessoa_tipo"] == "1" + assert data["fisica_rg"] == "up rg teste 4" + assert data["fisica_genero"] == "F" + assert data["fisica_nome"] == "Ricardo Junio Daleprane" + assert data["enderecos"][0]["uuid"] == uuid_store_endereco["uuid_4"] + assert data["enderecos"][0]["endereco_pessoa_status"] is False + assert data["enderecos"][0]["endereco_pessoa_descricao"] == "up descricao 4" + assert data["enderecos"][0]["endereco_pessoa_numero"] == "up num 4" + assert data["enderecos"][0]["endereco_pessoa_complemento"] == "up complemento 4" + assert data["enderecos"][0]["endereco_pessoa_cep"] == "00000000" + assert data["enderecos"][0]["fk_tipo_endereco_uuid"] == uuid_store_tipo_endereco["uuid_7"] + assert data["rc"] == [] + assert len(data["enderecos"]) == 1 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_update_one_existing_item_fisica_2end(client: AsyncClient, uuid_store_pessoa: dict, + uuid_store_tipo_endereco: dict, uuid_store_endereco: dict): + response = await client.put(f"{BASE_URL}/update_one", + json={"uuid": uuid_store_pessoa["uuid_6"], "pessoa_status": False, + "fisica_cpf": "u0000000004", + "pessoa_telefone": "uptelteste4", "pessoa_celular": "upteste4", + "pessoa_email": "email_update4@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "1", "fisica_rg": "up rg teste 4", + "fisica_genero": "F", + "fisica_nome": "Ricardo Junio Daleprane", + "enderecos": [ + {"uuid": uuid_store_endereco["uuid_4"], "endereco_pessoa_status": False, + "endereco_pessoa_descricao": "up descricao 4", + "endereco_pessoa_numero": "up num 4", + "endereco_pessoa_complemento": "up complemento 4", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_7"]}, + {"endereco_pessoa_status": True, + "endereco_pessoa_descricao": "novo descricao 4", + "endereco_pessoa_numero": "n num 4", + "endereco_pessoa_complemento": "novo complemento 4", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"]} + ]}) + assert response.status_code == 201 + data = response.json() + assert data["uuid"] == uuid_store_pessoa["uuid_6"] + assert data["pessoa_status"] is False + assert data["fisica_cpf"] == "u0000000004" + assert data["pessoa_telefone"] == "uptelteste4" + assert data["pessoa_celular"] == "upteste4" + assert data["pessoa_email"] == "email_update4@email.com" + assert data["pessoa_local_evento"] is True + assert data["pessoa_tipo"] == "1" + assert data["fisica_rg"] == "up rg teste 4" + assert data["fisica_genero"] == "F" + assert data["fisica_nome"] == "Ricardo Junio Daleprane" + assert data["enderecos"][0]["uuid"] == uuid_store_endereco["uuid_4"] + assert data["enderecos"][0]["endereco_pessoa_status"] is False + assert data["enderecos"][0]["endereco_pessoa_descricao"] == "up descricao 4" + assert data["enderecos"][0]["endereco_pessoa_numero"] == "up num 4" + assert data["enderecos"][0]["endereco_pessoa_complemento"] == "up complemento 4" + assert 
data["enderecos"][0]["endereco_pessoa_cep"] == "00000000" + assert data["enderecos"][0]["fk_tipo_endereco_uuid"] == uuid_store_tipo_endereco["uuid_7"] + assert data["enderecos"][1]["endereco_pessoa_status"] is True + assert data["enderecos"][1]["endereco_pessoa_descricao"] == "novo descricao 4" + assert data["enderecos"][1]["endereco_pessoa_numero"] == "n num 4" + assert data["enderecos"][1]["endereco_pessoa_complemento"] == "novo complemento 4" + assert data["enderecos"][1]["endereco_pessoa_cep"] == "00000000" + assert data["enderecos"][1]["fk_tipo_endereco_uuid"] == uuid_store_tipo_endereco["uuid_6"] + assert data["rc"] == [] + assert len(data["enderecos"]) == 2 + uuid_store_endereco["uuid_7"] = data["enderecos"][1]["uuid"] + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_update_one_existing_item_fisica_rc_and_end(client: AsyncClient, uuid_store_pessoa: dict, + uuid_store_tipo_endereco: dict, uuid_store_endereco: dict, + uuid_store_relacao_comercial: dict, ): + response = await client.put(f"{BASE_URL}/update_one", + json={"uuid": uuid_store_pessoa["uuid_6"], "pessoa_status": False, + "fisica_cpf": "u0000000004", + "pessoa_telefone": "uptelteste4", "pessoa_celular": "upteste4", + "pessoa_email": "email_update4@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "1", "fisica_rg": "up rg teste 4", + "fisica_genero": "F", + "fisica_nome": "Ricardo Junio Daleprane", + "enderecos": [ + {"uuid": uuid_store_endereco["uuid_4"], "endereco_pessoa_status": False, + "endereco_pessoa_descricao": "up descricao 4", + "endereco_pessoa_numero": "up num 4", + "endereco_pessoa_complemento": "up complemento 4", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_7"]}], + "rc": [{"uuid": uuid_store_relacao_comercial["uuid_7"]}] + }) + assert response.status_code == 201 + data = response.json() + assert data["uuid"] == uuid_store_pessoa["uuid_6"] + assert data["pessoa_status"] is False + assert data["fisica_cpf"] == "u0000000004" + assert data["pessoa_telefone"] == "uptelteste4" + assert data["pessoa_celular"] == "upteste4" + assert data["pessoa_email"] == "email_update4@email.com" + assert data["pessoa_local_evento"] is True + assert data["pessoa_tipo"] == "1" + assert data["fisica_rg"] == "up rg teste 4" + assert data["fisica_genero"] == "F" + assert data["fisica_nome"] == "Ricardo Junio Daleprane" + assert data["enderecos"][0]["uuid"] == uuid_store_endereco["uuid_4"] + assert data["enderecos"][0]["endereco_pessoa_status"] is False + assert data["enderecos"][0]["endereco_pessoa_descricao"] == "up descricao 4" + assert data["enderecos"][0]["endereco_pessoa_numero"] == "up num 4" + assert data["enderecos"][0]["endereco_pessoa_complemento"] == "up complemento 4" + assert data["enderecos"][0]["endereco_pessoa_cep"] == "00000000" + assert data["enderecos"][0]["fk_tipo_endereco_uuid"] == uuid_store_tipo_endereco["uuid_7"] + assert data["rc"][0]["uuid"] == uuid_store_relacao_comercial["uuid_7"] + assert len(data["enderecos"]) == 1 + assert len(data["rc"]) == 1 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_update_one_existing_item_fisica_2rc_and_2end(client: AsyncClient, uuid_store_pessoa: dict, + uuid_store_tipo_endereco: dict, uuid_store_endereco: dict, + uuid_store_relacao_comercial: dict): + response = await client.put(f"{BASE_URL}/update_one", + json={"uuid": uuid_store_pessoa["uuid_6"], "pessoa_status": False, + "fisica_cpf": "u0000000004", + "pessoa_telefone": 
"uptelteste4", "pessoa_celular": "upteste4", + "pessoa_email": "email_update4@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "1", "fisica_rg": "up rg teste 4", + "fisica_genero": "F", + "fisica_nome": "Ricardo Junio Daleprane", + "enderecos": [ + {"uuid": uuid_store_endereco["uuid_4"], "endereco_pessoa_status": False, + "endereco_pessoa_descricao": "up descricao 4", + "endereco_pessoa_numero": "up num 4", + "endereco_pessoa_complemento": "up complemento 4", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_7"]}, + {"endereco_pessoa_status": True, + "endereco_pessoa_descricao": "novo descricao 4", + "endereco_pessoa_numero": "n num 4", + "endereco_pessoa_complemento": "novo complemento 4", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"]} + ], + "rc": [{"uuid": uuid_store_relacao_comercial["uuid_7"]}, + {"uuid": uuid_store_relacao_comercial["uuid_8"]}] + }) + assert response.status_code == 201 + data = response.json() + assert data["uuid"] == uuid_store_pessoa["uuid_6"] + assert data["pessoa_status"] is False + assert data["fisica_cpf"] == "u0000000004" + assert data["pessoa_telefone"] == "uptelteste4" + assert data["pessoa_celular"] == "upteste4" + assert data["pessoa_email"] == "email_update4@email.com" + assert data["pessoa_local_evento"] is True + assert data["pessoa_tipo"] == "1" + assert data["fisica_rg"] == "up rg teste 4" + assert data["fisica_genero"] == "F" + assert data["fisica_nome"] == "Ricardo Junio Daleprane" + assert data["enderecos"][0]["uuid"] == uuid_store_endereco["uuid_4"] + assert data["enderecos"][0]["endereco_pessoa_status"] is False + assert data["enderecos"][0]["endereco_pessoa_descricao"] == "up descricao 4" + assert data["enderecos"][0]["endereco_pessoa_numero"] == "up num 4" + assert data["enderecos"][0]["endereco_pessoa_complemento"] == "up complemento 4" + assert data["enderecos"][0]["endereco_pessoa_cep"] == "00000000" + assert data["enderecos"][0]["fk_tipo_endereco_uuid"] == uuid_store_tipo_endereco["uuid_7"] + assert data["enderecos"][1]["endereco_pessoa_status"] is True + assert data["enderecos"][1]["endereco_pessoa_descricao"] == "novo descricao 4" + assert data["enderecos"][1]["endereco_pessoa_numero"] == "n num 4" + assert data["enderecos"][1]["endereco_pessoa_complemento"] == "novo complemento 4" + assert data["enderecos"][1]["endereco_pessoa_cep"] == "00000000" + assert data["enderecos"][1]["fk_tipo_endereco_uuid"] == uuid_store_tipo_endereco["uuid_6"] + assert data["rc"][0]["uuid"] == uuid_store_relacao_comercial["uuid_8"] + assert data["rc"][1]["uuid"] == uuid_store_relacao_comercial["uuid_7"] + assert len(data["enderecos"]) == 2 + assert len(data["rc"]) == 2 + uuid_store_endereco["uuid_8"] = data["enderecos"][1]["uuid"] + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_update_one_existing_item_fisica_2rc_to_1rc(client: AsyncClient, uuid_store_pessoa: dict, + uuid_store_relacao_comercial: dict): + response = await client.put(f"{BASE_URL}/update_one", + json={"uuid": uuid_store_pessoa["uuid_5"], "pessoa_status": True, + "fisica_cpf": "u0000000003", + "pessoa_telefone": "up telteste3", "pessoa_celular": "012345678", + "pessoa_email": "email_update3@email.com", "pessoa_local_evento": True, + "pessoa_tipo": "1", + "fisica_rg": "up rg teste 3", + "fisica_genero": "M", "fisica_nome": "up nome teste 3", + "rc": [{"uuid": uuid_store_relacao_comercial["uuid_7"]}, + ]}) + assert response.status_code == 201 + 
data = response.json() + assert data["uuid"] == uuid_store_pessoa["uuid_5"] + assert data["pessoa_status"] is True + assert data["fisica_cpf"] == "u0000000003" + assert data["pessoa_telefone"] == "up telteste3" + assert data["pessoa_celular"] == "012345678" + assert data["pessoa_email"] == "email_update3@email.com" + assert data["pessoa_local_evento"] is True + assert data["pessoa_tipo"] == "1" + assert data["fisica_rg"] == "up rg teste 3" + assert data["fisica_genero"] == "M" + assert data["fisica_nome"] == "up nome teste 3" + assert data["rc"][0]["uuid"] == uuid_store_relacao_comercial["uuid_7"] + assert data["enderecos"] == [] + assert len(data["rc"]) == 1 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_update_one_existing_item_fisica_2end_to_1end_and_0rc(client: AsyncClient, uuid_store_pessoa: dict, + uuid_store_tipo_endereco: dict, + uuid_store_endereco: dict): + response = await client.put(f"{BASE_URL}/update_one", + json={"uuid": uuid_store_pessoa["uuid_6"], "pessoa_status": False, + "fisica_cpf": "u0000000004", + "pessoa_telefone": "uptelteste4", "pessoa_celular": "upteste4", + "pessoa_email": "email_update4@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "1", "fisica_rg": "up rg teste 4", + "fisica_genero": "F", + "fisica_nome": "Ricardo Junio Daleprane", + "enderecos": [ + {"uuid": uuid_store_endereco["uuid_4"], "endereco_pessoa_status": False, + "endereco_pessoa_descricao": "up descricao 4", + "endereco_pessoa_numero": "up num 4", + "endereco_pessoa_complemento": "up complemento 4", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_7"]}, + ]}) + assert response.status_code == 201 + data = response.json() + assert data["uuid"] == uuid_store_pessoa["uuid_6"] + assert data["pessoa_status"] is False + assert data["fisica_cpf"] == "u0000000004" + assert data["pessoa_telefone"] == "uptelteste4" + assert data["pessoa_celular"] == "upteste4" + assert data["pessoa_email"] == "email_update4@email.com" + assert data["pessoa_local_evento"] is True + assert data["pessoa_tipo"] == "1" + assert data["fisica_rg"] == "up rg teste 4" + assert data["fisica_genero"] == "F" + assert data["fisica_nome"] == "Ricardo Junio Daleprane" + assert data["enderecos"][0]["uuid"] == uuid_store_endereco["uuid_4"] + assert data["enderecos"][0]["endereco_pessoa_status"] is False + assert data["enderecos"][0]["endereco_pessoa_descricao"] == "up descricao 4" + assert data["enderecos"][0]["endereco_pessoa_numero"] == "up num 4" + assert data["enderecos"][0]["endereco_pessoa_complemento"] == "up complemento 4" + assert data["enderecos"][0]["endereco_pessoa_cep"] == "00000000" + assert data["enderecos"][0]["fk_tipo_endereco_uuid"] == uuid_store_tipo_endereco["uuid_7"] + assert data["rc"] == [] + assert len(data["enderecos"]) == 1 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_update_one_existing_item_juridica(client: AsyncClient, uuid_store_pessoa: dict): + response = await client.put(f"{BASE_URL}/update_one", + json={"uuid": uuid_store_pessoa["uuid_10"], "pessoa_status": False, + "juridica_cnpj": "u0000000000008", + "pessoa_telefone": "uptelteste8", "pessoa_celular": "upteste8", + "pessoa_email": "email_update8@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "0", + "juridica_email_fiscal": "email_fiscal_update8@email.com", + "juridica_insc_est": "up ins est 8", "juridica_ins_mun": "up insc mun 8", + "juridica_razao_social": "up razão social 8", + 
"juridica_representante": "up representante 8"}) + assert response.status_code == 201 + data = response.json() + assert data["uuid"] == uuid_store_pessoa["uuid_10"] + assert data["pessoa_status"] is False + assert data["juridica_cnpj"] == "u0000000000008" + assert data["pessoa_telefone"] == "uptelteste8" + assert data["pessoa_celular"] == "upteste8" + assert data["pessoa_email"] == "email_update8@email.com" + assert data["pessoa_local_evento"] is True + assert data["pessoa_tipo"] == "0" + assert data["juridica_email_fiscal"] == "email_fiscal_update8@email.com" + assert data["juridica_insc_est"] == "up ins est 8" + assert data["juridica_ins_mun"] == "up insc mun 8" + assert data["juridica_razao_social"] == "up razão social 8" + assert data["juridica_representante"] == "up representante 8" + assert data["enderecos"] == [] + assert data["rc"] == [] + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_update_one_existing_item_juridica_rc(client: AsyncClient, uuid_store_pessoa: dict, + uuid_store_relacao_comercial: dict): + response = await client.put(f"{BASE_URL}/update_one", + json={"uuid": uuid_store_pessoa["uuid_10"], "pessoa_status": False, + "juridica_cnpj": "u0000000000008", + "pessoa_telefone": "uptelteste8", "pessoa_celular": "upteste8", + "pessoa_email": "email_update8@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "0", + "juridica_email_fiscal": "email_fiscal_update8@email.com", + "juridica_insc_est": "up ins est 8", "juridica_ins_mun": "up insc mun 8", + "juridica_razao_social": "up razão social 8", + "juridica_representante": "up representante 8", + "rc": [{"uuid": uuid_store_relacao_comercial["uuid_7"]}] + }) + assert response.status_code == 201 + data = response.json() + assert data["uuid"] == uuid_store_pessoa["uuid_10"] + assert data["pessoa_status"] is False + assert data["juridica_cnpj"] == "u0000000000008" + assert data["pessoa_telefone"] == "uptelteste8" + assert data["pessoa_celular"] == "upteste8" + assert data["pessoa_email"] == "email_update8@email.com" + assert data["pessoa_local_evento"] is True + assert data["pessoa_tipo"] == "0" + assert data["juridica_email_fiscal"] == "email_fiscal_update8@email.com" + assert data["juridica_insc_est"] == "up ins est 8" + assert data["juridica_ins_mun"] == "up insc mun 8" + assert data["juridica_razao_social"] == "up razão social 8" + assert data["juridica_representante"] == "up representante 8" + assert data["rc"][0]["uuid"] == uuid_store_relacao_comercial["uuid_7"] + assert len(data["rc"]) == 1 + assert data["enderecos"] == [] + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_update_one_existing_item_juridica_2rc(client: AsyncClient, uuid_store_pessoa: dict, + uuid_store_relacao_comercial: dict): + response = await client.put(f"{BASE_URL}/update_one", + json={"uuid": uuid_store_pessoa["uuid_10"], "pessoa_status": False, + "juridica_cnpj": "u0000000000008", + "pessoa_telefone": "uptelteste8", "pessoa_celular": "upteste8", + "pessoa_email": "email_update8@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "0", + "juridica_email_fiscal": "email_fiscal_update8@email.com", + "juridica_insc_est": "up ins est 8", "juridica_ins_mun": "up insc mun 8", + "juridica_razao_social": "up razão social 8", + "juridica_representante": "up representante 8", + "rc": [{"uuid": uuid_store_relacao_comercial["uuid_7"]}, + {"uuid": uuid_store_relacao_comercial["uuid_6"]}] + }) + assert response.status_code == 201 + data = response.json() + assert 
data["uuid"] == uuid_store_pessoa["uuid_10"] + assert data["pessoa_status"] is False + assert data["juridica_cnpj"] == "u0000000000008" + assert data["pessoa_telefone"] == "uptelteste8" + assert data["pessoa_celular"] == "upteste8" + assert data["pessoa_email"] == "email_update8@email.com" + assert data["pessoa_local_evento"] is True + assert data["pessoa_tipo"] == "0" + assert data["juridica_email_fiscal"] == "email_fiscal_update8@email.com" + assert data["juridica_insc_est"] == "up ins est 8" + assert data["juridica_ins_mun"] == "up insc mun 8" + assert data["juridica_razao_social"] == "up razão social 8" + assert data["juridica_representante"] == "up representante 8" + assert data["rc"][0]["uuid"] == uuid_store_relacao_comercial["uuid_6"] + assert data["rc"][1]["uuid"] == uuid_store_relacao_comercial["uuid_7"] + assert len(data["rc"]) == 2 + assert data["enderecos"] == [] + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_update_one_existing_item_juridica_2rc_to_0rc(client: AsyncClient, uuid_store_pessoa: dict, + uuid_store_relacao_comercial: dict): + response = await client.put(f"{BASE_URL}/update_one", + json={"uuid": uuid_store_pessoa["uuid_10"], "pessoa_status": False, + "juridica_cnpj": "u0000000000008", + "pessoa_telefone": "uptelteste8", "pessoa_celular": "upteste8", + "pessoa_email": "email_update8@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "0", + "juridica_email_fiscal": "email_fiscal_update8@email.com", + "juridica_insc_est": "up ins est 8", "juridica_ins_mun": "up insc mun 8", + "juridica_razao_social": "up razão social 8", + "juridica_representante": "up representante 8"}) + assert response.status_code == 201 + data = response.json() + assert data["uuid"] == uuid_store_pessoa["uuid_10"] + assert data["pessoa_status"] is False + assert data["juridica_cnpj"] == "u0000000000008" + assert data["pessoa_telefone"] == "uptelteste8" + assert data["pessoa_celular"] == "upteste8" + assert data["pessoa_email"] == "email_update8@email.com" + assert data["pessoa_local_evento"] is True + assert data["pessoa_tipo"] == "0" + assert data["juridica_email_fiscal"] == "email_fiscal_update8@email.com" + assert data["juridica_insc_est"] == "up ins est 8" + assert data["juridica_ins_mun"] == "up insc mun 8" + assert data["juridica_razao_social"] == "up razão social 8" + assert data["juridica_representante"] == "up representante 8" + assert data["rc"] == [] + assert data["enderecos"] == [] + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_update_one_existing_item_juridica_end(client: AsyncClient, uuid_store_pessoa: dict, + uuid_store_relacao_comercial: dict, + uuid_store_tipo_endereco: dict): + response = await client.put(f"{BASE_URL}/update_one", + json={"uuid": uuid_store_pessoa["uuid_10"], "pessoa_status": False, + "juridica_cnpj": "u0000000000008", + "pessoa_telefone": "uptelteste8", "pessoa_celular": "upteste8", + "pessoa_email": "email_update8@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "0", + "juridica_email_fiscal": "email_fiscal_update8@email.com", + "juridica_insc_est": "up ins est 8", "juridica_ins_mun": "up insc mun 8", + "juridica_razao_social": "up razão social 8", + "juridica_representante": "up representante 8", + "enderecos": [ + {"endereco_pessoa_status": True, "endereco_pessoa_descricao": "descricao 7", + "endereco_pessoa_numero": "numero 7", + "endereco_pessoa_complemento": "complemento 7", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": 
uuid_store_tipo_endereco["uuid_6"]}] + }) + assert response.status_code == 201 + data = response.json() + assert data["uuid"] == uuid_store_pessoa["uuid_10"] + assert data["pessoa_status"] is False + assert data["juridica_cnpj"] == "u0000000000008" + assert data["pessoa_telefone"] == "uptelteste8" + assert data["pessoa_celular"] == "upteste8" + assert data["pessoa_email"] == "email_update8@email.com" + assert data["pessoa_local_evento"] is True + assert data["pessoa_tipo"] == "0" + assert data["juridica_email_fiscal"] == "email_fiscal_update8@email.com" + assert data["juridica_insc_est"] == "up ins est 8" + assert data["juridica_ins_mun"] == "up insc mun 8" + assert data["juridica_razao_social"] == "up razão social 8" + assert data["juridica_representante"] == "up representante 8" + assert data["rc"] == [] + assert data["enderecos"][0]["endereco_pessoa_status"] is True + assert data["enderecos"][0]["endereco_pessoa_descricao"] == "descricao 7" + assert data["enderecos"][0]["endereco_pessoa_numero"] == "numero 7" + assert data["enderecos"][0]["endereco_pessoa_complemento"] == "complemento 7" + assert data["enderecos"][0]["endereco_pessoa_cep"] == "00000000" + assert data["enderecos"][0]["fk_tipo_endereco_uuid"] == uuid_store_tipo_endereco["uuid_6"] + assert len(data["enderecos"]) == 1 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_update_one_existing_item_juridica_end_to_0end(client: AsyncClient, uuid_store_pessoa: dict, + uuid_store_relacao_comercial: dict, + uuid_store_tipo_endereco: dict): + response = await client.put(f"{BASE_URL}/update_one", + json={"uuid": uuid_store_pessoa["uuid_10"], "pessoa_status": False, + "juridica_cnpj": "u0000000000008", + "pessoa_telefone": "uptelteste8", "pessoa_celular": "upteste8", + "pessoa_email": "email_update8@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "0", + "juridica_email_fiscal": "email_fiscal_update8@email.com", + "juridica_insc_est": "up ins est 8", "juridica_ins_mun": "up insc mun 8", + "juridica_razao_social": "up razão social 8", + "juridica_representante": "up representante 8"}) + # Voltando o uuid_10 para ficar sem Endereço para o próximo teste + assert response.status_code == 201 + data = response.json() + assert data["uuid"] == uuid_store_pessoa["uuid_10"] + assert data["pessoa_status"] is False + assert data["juridica_cnpj"] == "u0000000000008" + assert data["pessoa_telefone"] == "uptelteste8" + assert data["pessoa_celular"] == "upteste8" + assert data["pessoa_email"] == "email_update8@email.com" + assert data["pessoa_local_evento"] is True + assert data["pessoa_tipo"] == "0" + assert data["juridica_email_fiscal"] == "email_fiscal_update8@email.com" + assert data["juridica_insc_est"] == "up ins est 8" + assert data["juridica_ins_mun"] == "up insc mun 8" + assert data["juridica_razao_social"] == "up razão social 8" + assert data["juridica_representante"] == "up representante 8" + assert data["rc"] == [] + assert data["enderecos"] == [] + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_update_one_existing_item_juridica_2end(client: AsyncClient, uuid_store_pessoa: dict, + uuid_store_relacao_comercial: dict, + uuid_store_tipo_endereco: dict): + response = await client.put(f"{BASE_URL}/update_one", + json={"uuid": uuid_store_pessoa["uuid_10"], "pessoa_status": False, + "juridica_cnpj": "u0000000000008", + "pessoa_telefone": "uptelteste8", "pessoa_celular": "upteste8", + "pessoa_email": "email_update8@email.com", + 
"pessoa_local_evento": True, "pessoa_tipo": "0", + "juridica_email_fiscal": "email_fiscal_update8@email.com", + "juridica_insc_est": "up ins est 8", "juridica_ins_mun": "up insc mun 8", + "juridica_razao_social": "up razão social 8", + "juridica_representante": "up representante 8", + "enderecos": [ + {"endereco_pessoa_status": True, "endereco_pessoa_descricao": "descricao 7", + "endereco_pessoa_numero": "numero 7", + "endereco_pessoa_complemento": "complemento 7", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"]}, + {"endereco_pessoa_status": False, "endereco_pessoa_descricao": "descricao 8", + "endereco_pessoa_numero": "numero 8", + "endereco_pessoa_complemento": "complemento 8", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_7"]} + ] + }) + assert response.status_code == 201 + data = response.json() + assert data["uuid"] == uuid_store_pessoa["uuid_10"] + assert data["pessoa_status"] is False + assert data["juridica_cnpj"] == "u0000000000008" + assert data["pessoa_telefone"] == "uptelteste8" + assert data["pessoa_celular"] == "upteste8" + assert data["pessoa_email"] == "email_update8@email.com" + assert data["pessoa_local_evento"] is True + assert data["pessoa_tipo"] == "0" + assert data["juridica_email_fiscal"] == "email_fiscal_update8@email.com" + assert data["juridica_insc_est"] == "up ins est 8" + assert data["juridica_ins_mun"] == "up insc mun 8" + assert data["juridica_razao_social"] == "up razão social 8" + assert data["juridica_representante"] == "up representante 8" + assert data["rc"] == [] + assert data["enderecos"][0]["endereco_pessoa_status"] is True + assert data["enderecos"][0]["endereco_pessoa_descricao"] == "descricao 7" + assert data["enderecos"][0]["endereco_pessoa_numero"] == "numero 7" + assert data["enderecos"][0]["endereco_pessoa_complemento"] == "complemento 7" + assert data["enderecos"][0]["endereco_pessoa_cep"] == "00000000" + assert data["enderecos"][0]["fk_tipo_endereco_uuid"] == uuid_store_tipo_endereco["uuid_6"] + assert data["enderecos"][1]["endereco_pessoa_status"] is False + assert data["enderecos"][1]["endereco_pessoa_descricao"] == "descricao 8" + assert data["enderecos"][1]["endereco_pessoa_numero"] == "numero 8" + assert data["enderecos"][1]["endereco_pessoa_complemento"] == "complemento 8" + assert data["enderecos"][1]["endereco_pessoa_cep"] == "00000000" + assert data["enderecos"][1]["fk_tipo_endereco_uuid"] == uuid_store_tipo_endereco["uuid_7"] + assert len(data["enderecos"]) == 2 + assert data["rc"] == [] + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_delete_one_item(client: AsyncClient, uuid_store_pessoa: dict): + response = await client.request( + method="DELETE", + url=f"{BASE_URL}/delete_one", + json={"uuid": uuid_store_pessoa["uuid_3"]} + ) + assert response.status_code == 204 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_delete_many_items(client: AsyncClient, uuid_store_pessoa: dict): + uuids = [uuid_store_pessoa["uuid_4"], uuid_store_pessoa["uuid_5"]] + response = await client.request( + method="DELETE", + url=f"{BASE_URL}/delete_many", + json={"uuids": uuids} # Envia o corpo da solicitação como JSON + ) + assert response.status_code == 204 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque", "pessoa"]) +async def test_delete_one_non_existent_item(client: AsyncClient, uuid_store_pessoa: dict): + # Tentando deletar 
novamente o primeiro item já deletado + response = await client.request( + method="DELETE", + url=f"{BASE_URL}/delete_one", + json={"uuid": uuid_store_pessoa["uuid_1"]} + ) + + assert response.status_code == status.HTTP_404_NOT_FOUND + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_get_one_non_existent_item(client: AsyncClient, uuid_store_pessoa: dict): + # Tentando buscar um item deletado + response = await client.request( + method="POST", + url=f"{BASE_URL}/get_one", + json={"uuid": uuid_store_pessoa["uuid_3"]} + ) + assert response.status_code == status.HTTP_404_NOT_FOUND + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque", "pessoa"]) +async def test_delete_many_non_existent_item(client: AsyncClient, uuid_store_pessoa: dict): + uuids = [uuid_store_pessoa["uuid_1"], uuid_store_pessoa["uuid_2"], + ] + response = await client.request( + method="DELETE", + url=f"{BASE_URL}/delete_many", + json={"uuids": uuids} # Envia o corpo da solicitação como JSON + ) + assert response.status_code == status.HTTP_404_NOT_FOUND + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque", "pessoa"]) +async def test_update_one_non_existing_item(client: AsyncClient, uuid_store_pessoa: dict): + # Atualizando o segundo item + response = await client.request( + method="PUT", + url=f"{BASE_URL}/update_one", + json={"uuid": uuid_store_pessoa["uuid_1"], "pessoa_status": True, "fisica_cpf": "01234567890", + "pessoa_telefone": "telteste1", + "pessoa_celular": "celteste1", "pessoa_email": "email1@email.com", "pessoa_local_evento": True, + "pessoa_tipo": "1", "fisica_rg": "rg teste 1", + "fisica_genero": "M", "fisica_nome": "nome teste 1"} + ) + assert response.status_code == status.HTTP_404_NOT_FOUND + + +# Teste com dados fora dos limites de tamanho + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_create_one_min_length_telefone(client: AsyncClient): + # Telefone + response = await client.post(f"{BASE_URL}/add_one", + json={"pessoa_status": True, "fisica_cpf": "cpft1", "pessoa_telefone": "123456", + "pessoa_celular": "celteste1", + "pessoa_email": "email1@email.com", "pessoa_local_evento": True, + "pessoa_tipo": "1", "fisica_rg": "rg teste 1", + "fisica_genero": "M", "fisica_nome": "nome teste 1"}) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_create_one_min_length_celular(client: AsyncClient): + # Celular + response = await client.post(f"{BASE_URL}/add_one", + json={"pessoa_status": True, "fisica_cpf": "cpft1", "pessoa_telefone": "telteste1", + "pessoa_celular": "1234567", + "pessoa_email": "email1@email.com", "pessoa_local_evento": True, + "pessoa_tipo": "1", "fisica_rg": "rg teste 1", + "fisica_genero": "M", "fisica_nome": "nome teste 1"}) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_create_one_min_length_email(client: AsyncClient): + # Email + response = await client.post(f"{BASE_URL}/add_one", + json={"pessoa_status": True, "fisica_cpf": "cpft1", "pessoa_telefone": "telteste1", + "pessoa_celular": "celteste1", + "pessoa_email": "a@a.com", "pessoa_local_evento": True, "pessoa_tipo": "1", + "fisica_rg": "rg teste 1", + "fisica_genero": "M", "fisica_nome": "nome teste 1"}) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def 
test_create_one_min_length_tipo_pessoa(client: AsyncClient):
+    # pessoa_tipo: invalid value
+    response = await client.post(f"{BASE_URL}/add_one",
+                                 json={"pessoa_status": True, "fisica_cpf": "cpft1", "pessoa_telefone": "telteste1",
+                                       "pessoa_celular": "celteste1",
+                                       "pessoa_email": "email1@email.com", "pessoa_local_evento": True,
+                                       "pessoa_tipo": "2", "fisica_rg": "rg teste 1",
+                                       "fisica_genero": "M", "fisica_nome": "nome teste 1"})
+    assert response.status_code == 422
+
+
+@pytest.mark.anyio
+@pytest.mark.usuarios_permitidos(["admin", "estoque"])
+async def test_create_one_min_length_rg(client: AsyncClient):
+    # RG
+    response = await client.post(f"{BASE_URL}/add_one",
+                                 json={"pessoa_status": True, "fisica_cpf": "cpft1", "pessoa_telefone": "telteste1",
+                                       "pessoa_celular": "celteste1",
+                                       "pessoa_email": "email1@email.com", "pessoa_local_evento": True,
+                                       "pessoa_tipo": "1", "fisica_rg": "1234",
+                                       "fisica_genero": "M", "fisica_nome": "nome teste 1"})
+    assert response.status_code == 422
+
+
+@pytest.mark.anyio
+@pytest.mark.usuarios_permitidos(["admin", "estoque"])
+async def test_create_one_min_length_genero(client: AsyncClient):
+    # Gender
+    response = await client.post(f"{BASE_URL}/add_one",
+                                 json={"pessoa_status": True, "fisica_cpf": "cpft1", "pessoa_telefone": "telteste1",
+                                       "pessoa_celular": "celteste1",
+                                       "pessoa_email": "email1@email.com", "pessoa_local_evento": True,
+                                       "pessoa_tipo": "1", "fisica_rg": "rg teste 1",
+                                       "fisica_genero": "A", "fisica_nome": "nome teste 1"})
+    assert response.status_code == 422
+
+
+@pytest.mark.anyio
+@pytest.mark.usuarios_permitidos(["admin", "estoque"])
+async def test_create_one_min_length_nome(client: AsyncClient):
+    # Name
+    response = await client.post(f"{BASE_URL}/add_one",
+                                 json={"pessoa_status": True, "fisica_cpf": "cpft1", "pessoa_telefone": "telteste1",
+                                       "pessoa_celular": "celteste1",
+                                       "pessoa_email": "email1@email.com", "pessoa_local_evento": True,
+                                       "pessoa_tipo": "1", "fisica_rg": "rg teste 1",
+                                       "fisica_genero": "M", "fisica_nome": "aa"})
+    assert response.status_code == 422
+
+
+@pytest.mark.anyio
+@pytest.mark.usuarios_permitidos(["admin", "estoque"])
+async def test_create_one_min_length_cpf(client: AsyncClient):
+    # CPF
+    response = await client.post(f"{BASE_URL}/add_one",
+                                 json={"pessoa_status": True, "fisica_cpf": "0123456789",
+                                       "pessoa_telefone": "telteste1", "pessoa_celular": "celteste1",
+                                       "pessoa_email": "email1@email.com", "pessoa_local_evento": True,
+                                       "pessoa_tipo": "1", "fisica_rg": "rg teste 1",
+                                       "fisica_genero": "M", "fisica_nome": "nome teste 1"})
+    assert response.status_code == 422
+
+
+@pytest.mark.anyio
+@pytest.mark.usuarios_permitidos(["admin", "estoque"])
+async def test_create_one_min_length_cnpj(client: AsyncClient):
+    # CNPJ
+    response = await client.post(f"{BASE_URL}/add_one",
+                                 json={"pessoa_status": True, "juridica_cnpj": "0000000000000",
+                                       "pessoa_telefone": "telteste5",
+                                       "pessoa_celular": "celteste5", "pessoa_email": "email5@email.com",
+                                       "pessoa_local_evento": True, "pessoa_tipo": "0",
+                                       "juridica_email_fiscal": "email_fiscal5@email.com",
+                                       "juridica_insc_est": "ins est 5", "juridica_ins_mun": "insc mun 5",
+                                       "juridica_razao_social": "razão social 5",
+                                       "juridica_representante": "representante 5"})
+    assert response.status_code == 422
+
+
+@pytest.mark.anyio
+@pytest.mark.usuarios_permitidos(["admin", "estoque"])
+async def test_create_one_min_length_email_fiscal(client: AsyncClient):
+    # Fiscal email
+    response = await client.post(f"{BASE_URL}/add_one",
+                                 json={"pessoa_status": True,
"juridica_cnpj": "00000000000005", + "pessoa_telefone": "telteste5", + "pessoa_celular": "celteste5", "pessoa_email": "email5@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "0", + "juridica_email_fiscal": "a@a.com", + "juridica_insc_est": "ins est 5", "juridica_ins_mun": "insc mun 5", + "juridica_razao_social": "razão social 5", + "juridica_representante": "representante 5"}) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_create_one_min_length_insc_estadual(client: AsyncClient): + # Inscrição Estadual + response = await client.post(f"{BASE_URL}/add_one", + json={"pessoa_status": True, "juridica_cnpj": "00000000000005", + "pessoa_telefone": "telteste5", + "pessoa_celular": "celteste5", "pessoa_email": "email5@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "0", + "juridica_email_fiscal": "email_fiscal5@email.com", "juridica_insc_est": "1234", + "juridica_ins_mun": "insc mun 5", + "juridica_razao_social": "razão social 5", + "juridica_representante": "representante 5"}) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_create_one_min_length_insc_municipal(client: AsyncClient): + # Inscrição Municipal + response = await client.post(f"{BASE_URL}/add_one", + json={"pessoa_status": True, "juridica_cnpj": "00000000000005", + "pessoa_telefone": "telteste5", + "pessoa_celular": "celteste5", "pessoa_email": "email5@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "0", + "juridica_email_fiscal": "email_fiscal5@email.com", + "juridica_insc_est": "ins est 5", "juridica_ins_mun": "1234", + "juridica_razao_social": "razão social 5", + "juridica_representante": "representante 5"}) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_create_one_min_length_razao_social(client: AsyncClient): + # Razão Social + response = await client.post(f"{BASE_URL}/add_one", + json={"pessoa_status": True, "juridica_cnpj": "00000000000005", + "pessoa_telefone": "telteste5", + "pessoa_celular": "celteste5", "pessoa_email": "email5@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "0", + "juridica_email_fiscal": "email_fiscal5@email.com", + "juridica_insc_est": "ins est 5", "juridica_ins_mun": "insc mun 5", + "juridica_razao_social": "1234", "juridica_representante": "representante 5"}) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_create_one_min_length_representante(client: AsyncClient): + # Representante + response = await client.post(f"{BASE_URL}/add_one", + json={"pessoa_status": True, "juridica_cnpj": "00000000000005", + "pessoa_telefone": "telteste5", + "pessoa_celular": "celteste5", "pessoa_email": "email5@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "0", + "juridica_email_fiscal": "email_fiscal5@email.com", + "juridica_insc_est": "ins est 5", "juridica_ins_mun": "insc mun 5", + "juridica_razao_social": "razão social 5", "juridica_representante": "12"}) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_create_one_max_length_telefone(client: AsyncClient): + # Telefone + response = await client.post(f"{BASE_URL}/add_one", + json={"pessoa_status": True, "fisica_cpf": "cpft1", + "pessoa_telefone": "012345678901234567890", "pessoa_celular": "celteste1", + 
"pessoa_email": "email1@email.com", "pessoa_local_evento": True, + "pessoa_tipo": "1", "fisica_rg": "rg teste 1", + "fisica_genero": "M", "fisica_nome": "nome teste 1"}) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_create_one_max_length_celular(client: AsyncClient): + # Celular + response = await client.post(f"{BASE_URL}/add_one", + json={"pessoa_status": True, "fisica_cpf": "cpft1", "pessoa_telefone": "telteste1", + "pessoa_celular": "012345678901234567890", + "pessoa_email": "email1@email.com", "pessoa_local_evento": True, + "pessoa_tipo": "1", "fisica_rg": "rg teste 1", + "fisica_genero": "M", "fisica_nome": "nome teste 1"}) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_create_one_max_length_email(client: AsyncClient): + # Email - + response = await client.post(f"{BASE_URL}/add_one", + json={"pessoa_status": True, "fisica_cpf": "cpft1", "pessoa_telefone": "telteste1", + "pessoa_celular": "celteste1", + "pessoa_email": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa@a.com", + "pessoa_local_evento": True, "pessoa_tipo": "1", + "fisica_rg": "rg teste 1", + "fisica_genero": "M", "fisica_nome": "nome teste 1"}) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_create_one_max_length_rg(client: AsyncClient): + # RG + response = await client.post(f"{BASE_URL}/add_one", + json={"pessoa_status": True, "fisica_cpf": "cpft1", "pessoa_telefone": "telteste1", + "pessoa_celular": "celteste1", + "pessoa_email": "email1@email.com", "pessoa_local_evento": True, + "pessoa_tipo": "1", "fisica_rg": "012345678901234567890", + "fisica_genero": "M", "fisica_nome": "nome teste 1"}) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_create_one_max_length_nome(client: AsyncClient): + # Nome + response = await client.post(f"{BASE_URL}/add_one", + json={"pessoa_status": True, "fisica_cpf": "cpft1", "pessoa_telefone": "telteste1", + "pessoa_celular": "celteste1", + "pessoa_email": "email1@email.com", "pessoa_local_evento": True, + "pessoa_tipo": "1", "fisica_rg": "rg teste 1", + "fisica_genero": "M", + "fisica_nome": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"}) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_create_one_max_length_cpf(client: AsyncClient): + # CPF + response = await client.post(f"{BASE_URL}/add_one", + json={"pessoa_status": True, "fisica_cpf": "012345678901", + "pessoa_telefone": "telteste1", "pessoa_celular": "celteste1", + "pessoa_email": "email1@email.com", "pessoa_local_evento": True, + "pessoa_tipo": "1", "fisica_rg": "rg teste 1", + "fisica_genero": "M", "fisica_nome": "nome teste 1"}) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_create_one_max_length_cnpj(client: AsyncClient): + # CNPJ + response = await client.post(f"{BASE_URL}/add_one", + json={"pessoa_status": True, "juridica_cnpj": "012345678901234", + "pessoa_telefone": "telteste5", + "pessoa_celular": "celteste5", "pessoa_email": "email5@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "0", + "juridica_email_fiscal": "email_fiscal5@email.com", + 
"juridica_insc_est": "ins est 5", "juridica_ins_mun": "insc mun 5", + "juridica_razao_social": "razão social 5", + "juridica_representante": "representante 5"}) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_create_one_max_length_emial_fiscal(client: AsyncClient): + # Email Fiscal + response = await client.post(f"{BASE_URL}/add_one", + json={"pessoa_status": True, "juridica_cnpj": "00000000000005", + "pessoa_telefone": "telteste5", + "pessoa_celular": "celteste5", "pessoa_email": "123456", + "pessoa_local_evento": True, "pessoa_tipo": "0", + "juridica_email_fiscal": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa@email.com", + "juridica_insc_est": "ins est 5", "juridica_ins_mun": "insc mun 5", + "juridica_razao_social": "razão social 5", + "juridica_representante": "representante 5"}) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_create_one_max_length_insc_estadual(client: AsyncClient): + # Inscrição Estadual + response = await client.post(f"{BASE_URL}/add_one", + json={"pessoa_status": True, "juridica_cnpj": "00000000000005", + "pessoa_telefone": "telteste5", + "pessoa_celular": "celteste5", "pessoa_email": "email5@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "0", + "juridica_email_fiscal": "email_fiscal5@email.com", + "juridica_insc_est": "012345678901234567890123456789012345678901234567890", + "juridica_ins_mun": "insc mun 5", "juridica_razao_social": "razão social 5", + "juridica_representante": "representante 5"}) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_create_one_max_length_insc_municipal(client: AsyncClient): + # Inscrição Municipal + response = await client.post(f"{BASE_URL}/add_one", + json={"pessoa_status": True, "juridica_cnpj": "00000000000005", + "pessoa_telefone": "telteste5", + "pessoa_celular": "celteste5", "pessoa_email": "email5@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "0", + "juridica_email_fiscal": "email_fiscal5@email.com", + "juridica_insc_est": "ins est 5", + "juridica_ins_mun": "012345678901234567890123456789012345678901234567890", + "juridica_razao_social": "razão social 5", + "juridica_representante": "representante 5"}) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_create_one_max_length_razao_social(client: AsyncClient): + # Razão Social + response = await client.post(f"{BASE_URL}/add_one", + json={"pessoa_status": True, "juridica_cnpj": "00000000000005", + "pessoa_telefone": "telteste5", + "pessoa_celular": "celteste5", "pessoa_email": "email5@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "0", + "juridica_email_fiscal": "email_fiscal5@email.com", + "juridica_insc_est": "ins est 5", "juridica_ins_mun": "insc mun 5", + "juridica_razao_social": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "juridica_representante": "representante 5"}) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_create_one_min_length_representante(client: AsyncClient): + # Representante + response = await client.post(f"{BASE_URL}/add_one", + 
json={"pessoa_status": True, "juridica_cnpj": "00000000000005", + "pessoa_telefone": "telteste5", + "pessoa_celular": "celteste5", "pessoa_email": "email5@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "0", + "juridica_email_fiscal": "email_fiscal5@email.com", + "juridica_insc_est": "ins est 5", "juridica_ins_mun": "insc mun 5", + "juridica_razao_social": "razão social 5", + "juridica_representante": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"}) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_update_one_existing_item_min_lenght(client: AsyncClient, uuid_store_pessoa: dict, + uuid_store_relacao_comercial: dict, + uuid_store_tipo_endereco: dict): + response = await client.put(f"{BASE_URL}/update_one", + json={"uuid": uuid_store_pessoa["uuid_10"], "pessoa_status": False, + "juridica_cnpj": "0123456789012", + "pessoa_telefone": "uptelteste8", "pessoa_celular": "upteste8", + "pessoa_email": "email_update8@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "0", + "juridica_email_fiscal": "email_fiscal_update8@email.com", + "juridica_insc_est": "up ins est 8", "juridica_ins_mun": "up insc mun 8", + "juridica_razao_social": "up razão social 8", + "juridica_representante": "up representante 8", + "enderecos": [ + {"endereco_pessoa_status": True, "endereco_pessoa_descricao": "descricao 7", + "endereco_pessoa_numero": "numero 7", + "endereco_pessoa_complemento": "complemento 7", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"]}, + {"endereco_pessoa_status": False, "endereco_pessoa_descricao": "descricao 8", + "endereco_pessoa_numero": "numero 8", + "endereco_pessoa_complemento": "complemento 8", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_7"]} + ] + }) + assert response.status_code == 422 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_update_one_existing_item_max_lenght(client: AsyncClient, uuid_store_pessoa: dict, + uuid_store_relacao_comercial: dict, + uuid_store_tipo_endereco: dict): + response = await client.put(f"{BASE_URL}/update_one", + json={"uuid": uuid_store_pessoa["uuid_10"], "pessoa_status": False, + "juridica_cnpj": "012345678901234", + "pessoa_telefone": "uptelteste8", "pessoa_celular": "upteste8", + "pessoa_email": "email_update8@email.com", + "pessoa_local_evento": True, "pessoa_tipo": "0", + "juridica_email_fiscal": "email_fiscal_update8@email.com", + "juridica_insc_est": "up ins est 8", "juridica_ins_mun": "up insc mun 8", + "juridica_razao_social": "up razão social 8", + "juridica_representante": "up representante 8", + "enderecos": [ + {"endereco_pessoa_status": True, "endereco_pessoa_descricao": "descricao 7", + "endereco_pessoa_numero": "numero 7", + "endereco_pessoa_complemento": "complemento 7", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"]}, + {"endereco_pessoa_status": False, "endereco_pessoa_descricao": "descricao 8", + "endereco_pessoa_numero": "numero 8", + "endereco_pessoa_complemento": "complemento 8", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_7"]} + ] + }) + assert response.status_code == 422 + + +@pytest.mark.anyio 
+@pytest.mark.usuarios_permitidos(["admin", "estoque"])
+async def test_update_one_existing_item_max_and_min_length(client: AsyncClient, uuid_store_pessoa: dict,
+                                                           uuid_store_relacao_comercial: dict,
+                                                           uuid_store_tipo_endereco: dict):
+    response = await client.put(f"{BASE_URL}/update_one",
+                                json={"uuid": uuid_store_pessoa["uuid_10"], "pessoa_status": False,
+                                      "juridica_cnpj": "012345678901234",
+                                      "pessoa_telefone": "uptelteste8", "pessoa_celular": "upteste8",
+                                      "pessoa_email": "8@e.com",
+                                      "pessoa_local_evento": True, "pessoa_tipo": "0",
+                                      "juridica_email_fiscal": "email_fiscal_update8@email.com",
+                                      "juridica_insc_est": "up ins est 8", "juridica_ins_mun": "up insc mun 8",
+                                      "juridica_razao_social": "up razão social 8",
+                                      "juridica_representante": "up representante 8",
+                                      "enderecos": [
+                                          {"endereco_pessoa_status": True, "endereco_pessoa_descricao": "descricao 7",
+                                           "endereco_pessoa_numero": "numero 7",
+                                           "endereco_pessoa_complemento": "complemento 7",
+                                           "endereco_pessoa_cep": "00000000",
+                                           "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"]},
+                                          {"endereco_pessoa_status": False, "endereco_pessoa_descricao": "descricao 8",
+                                           "endereco_pessoa_numero": "numero 8",
+                                           "endereco_pessoa_complemento": "complemento 8",
+                                           "endereco_pessoa_cep": "00000000",
+                                           "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_7"]}
+                                      ]
+                                      })
+    assert response.status_code == 422
diff --git a/tests/test_004_endereco.py b/tests/test_004_endereco.py
new file mode 100644
index 0000000..fb54cb4
--- /dev/null
+++ b/tests/test_004_endereco.py
@@ -0,0 +1,334 @@
+import pytest
+from httpx import AsyncClient
+from fastapi import status
+
+BASE_URL = "/api/endereco"
+
+
+@pytest.mark.anyio
+async def test_route_exists(client: AsyncClient):
+    response = await client.post(f"{BASE_URL}/get_all")
+    assert response.status_code != 404  # Make sure the route exists
+
+
+@pytest.mark.anyio
+@pytest.mark.usuarios_permitidos(["admin", "estoque"])
+async def test_create_one(client: AsyncClient, uuid_store_tipo_endereco: dict, uuid_store_pessoa: dict,
+                          uuid_store_endereco: dict):
+    response = await client.post(f"{BASE_URL}/add_one",
+                                 json={"endereco_pessoa_status": True,
+                                       "endereco_pessoa_descricao": "Endereço Descrição 1",
+                                       "endereco_pessoa_numero": "123",
+                                       "endereco_pessoa_complemento": "Complemento 1",
+                                       "endereco_pessoa_cep": "00000000",
+                                       "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"],
+                                       "fk_pessoa_uuid": uuid_store_pessoa["uuid_6"]})
+    assert response.status_code == 201
+    uuid_store_endereco["uuid_3"] = response.json()["uuid"]
+
+
+@pytest.mark.anyio
+@pytest.mark.usuarios_permitidos(["admin", "estoque"])
+async def test_create_many(client: AsyncClient, uuid_store_tipo_endereco: dict, uuid_store_pessoa: dict,
+                           uuid_store_endereco: dict):
+    response = await client.post(f"{BASE_URL}/add_many", json=[
+        {"endereco_pessoa_status": True,
+         "endereco_pessoa_descricao": "Endereço Descrição 2",
+         "endereco_pessoa_numero": "123",
+         "endereco_pessoa_complemento": "Complemento 2",
+         "endereco_pessoa_cep": "00000000",
+         "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"],
+         "fk_pessoa_uuid": uuid_store_pessoa["uuid_6"]},
+        {"endereco_pessoa_status": True,
+         "endereco_pessoa_descricao": "Endereço Descrição 3",
+         "endereco_pessoa_numero": "123",
+         "endereco_pessoa_complemento": "Complemento 3",
+         "endereco_pessoa_cep": "00000000",
+         "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"],
+         "fk_pessoa_uuid": uuid_store_pessoa["uuid_6"]},
+        {"endereco_pessoa_status": True,
"endereco_pessoa_descricao": "Endereço Descrição 4", + "endereco_pessoa_numero": "123", + "endereco_pessoa_complemento": "Complemento 4", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"], + "fk_pessoa_uuid": uuid_store_pessoa["uuid_6"]}, + {"endereco_pessoa_status": True, + "endereco_pessoa_descricao": "Endereço Descrição 5", + "endereco_pessoa_numero": "123", + "endereco_pessoa_complemento": "Complemento 5", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"], + "fk_pessoa_uuid": uuid_store_pessoa["uuid_6"]}, + {"endereco_pessoa_status": True, + "endereco_pessoa_descricao": "Endereço Descrição 6", + "endereco_pessoa_numero": "123", + "endereco_pessoa_complemento": "Complemento 6", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"], + "fk_pessoa_uuid": uuid_store_pessoa["uuid_6"]} + ]) + assert response.status_code == 201 + data = response.json() + assert len(data) == 5 + uuid_store_endereco["uuid_4"] = data[0]["uuid"] + uuid_store_endereco["uuid_5"] = data[1]["uuid"] + uuid_store_endereco["uuid_6"] = data[2]["uuid"] + uuid_store_endereco["uuid_7"] = data[3]["uuid"] + uuid_store_endereco["uuid_8"] = data[4]["uuid"] + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_get_all(client: AsyncClient): + response = await client.post(f"{BASE_URL}/get_all") + assert response.status_code == 200 + data = response.json() + assert isinstance(data, list) + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_get_many(client: AsyncClient, uuid_store_endereco: dict): + uuids = [uuid_store_endereco["uuid_3"], uuid_store_endereco["uuid_4"]] + response = await client.post(f"{BASE_URL}/get_many", json={"uuids": uuids}) + assert response.status_code == 200 + data = response.json() + assert isinstance(data, list) + assert len(data) == 2 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_get_one(client: AsyncClient, uuid_store_endereco: dict): + response = await client.post(f"{BASE_URL}/get_one", json={"uuid": uuid_store_endereco["uuid_3"]}) + assert response.status_code == 200 + data = response.json() + assert "uuid" in data + assert data["uuid"] == uuid_store_endereco["uuid_3"] + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_update_one_existing_item(client: AsyncClient, uuid_store_endereco: dict, uuid_store_tipo_endereco: dict, + uuid_store_pessoa: dict): + response = await client.put(f"{BASE_URL}/update_one", + json={"uuid": uuid_store_endereco["uuid_8"], + "endereco_pessoa_status": True, + "endereco_pessoa_descricao": "Update Endereço Descrição 6", + "endereco_pessoa_numero": "123", + "endereco_pessoa_complemento": "Update Complemento 6", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"], + "fk_pessoa_uuid": uuid_store_pessoa["uuid_6"]}) + assert response.status_code == 201 + data = response.json() + assert data["uuid"] == uuid_store_endereco["uuid_8"] + assert data["endereco_pessoa_descricao"] == "Update Endereço Descrição 6" + assert data["endereco_pessoa_complemento"] == "Update Complemento 6" + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_update_many_existing_item(client: AsyncClient, uuid_store_endereco: dict, uuid_store_tipo_endereco: dict, + uuid_store_pessoa: dict): + response = await 
client.put(f"{BASE_URL}/update_many", json=[ + {"uuid": uuid_store_endereco["uuid_7"], "endereco_pessoa_status": True, + "endereco_pessoa_descricao": "Update Endereço Descrição 5", + "endereco_pessoa_numero": "123", + "endereco_pessoa_complemento": "Update Complemento 5", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"], + "fk_pessoa_uuid": uuid_store_pessoa["uuid_6"]}, + {"uuid": uuid_store_endereco["uuid_6"], "endereco_pessoa_status": True, + "endereco_pessoa_descricao": "Update Endereço Descrição 4", + "endereco_pessoa_numero": "123", + "endereco_pessoa_complemento": "Update Complemento 4", + "endereco_pessoa_cep": "00000000", + "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"], + "fk_pessoa_uuid": uuid_store_pessoa["uuid_6"]}, + ]) + assert response.status_code == 201 + data = response.json() + assert len(data) == 2 + # Verificando se os valores atualizados são os corretos + assert data[0]["uuid"] == uuid_store_endereco["uuid_6"] + assert data[0]["endereco_pessoa_descricao"] == "Update Endereço Descrição 4" + assert data[0]["endereco_pessoa_complemento"] == "Update Complemento 4" + assert data[1]["uuid"] == uuid_store_endereco["uuid_7"] + assert data[1]["endereco_pessoa_descricao"] == "Update Endereço Descrição 5" + assert data[1]["endereco_pessoa_complemento"] == "Update Complemento 5" + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_delete_one_item(client: AsyncClient, uuid_store_endereco): + response = await client.request( + method="DELETE", + url=f"{BASE_URL}/delete_one", + json={"uuid": uuid_store_endereco["uuid_3"]} + ) + assert response.status_code == 204 + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_delete_many_items(client: AsyncClient, uuid_store_endereco): + uuids = [uuid_store_endereco["uuid_4"], uuid_store_endereco["uuid_5"]] + response = await client.request( + method="DELETE", + url=f"{BASE_URL}/delete_many", + json={"uuids": uuids} # Envia o corpo da solicitação como JSON + ) + assert response.status_code == 204 + + +# Testes com dados inválidos + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_delete_one_non_existent_item(client: AsyncClient, uuid_store_relacao_comercial): + # Tentando deletar novamente o primeiro item já deletado + response = await client.request( + method="DELETE", + url=f"{BASE_URL}/delete_one", + json={"uuid": uuid_store_relacao_comercial["uuid_1"]} + ) + assert response.status_code == status.HTTP_404_NOT_FOUND + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_get_one_non_existent_item(client: AsyncClient, uuid_store_relacao_comercial): + # Tentando buscar um item deletado + response = await client.request( + method="POST", + url=f"{BASE_URL}/get_one", + json={"uuid": uuid_store_relacao_comercial["uuid_3"]} + ) + assert response.status_code == status.HTTP_404_NOT_FOUND + + +@pytest.mark.anyio +@pytest.mark.usuarios_permitidos(["admin", "estoque"]) +async def test_delete_many_non_existent_item(client: AsyncClient, uuid_store_relacao_comercial): + uuids = [uuid_store_relacao_comercial["uuid_1"], uuid_store_relacao_comercial["uuid_2"], + uuid_store_relacao_comercial["uuid_5"]] + response = await client.request( + method="DELETE", + url=f"{BASE_URL}/delete_many", + json={"uuids": uuids} # Envia o corpo da solicitação como JSON + ) + assert response.status_code == status.HTTP_404_NOT_FOUND + + 
+@pytest.mark.anyio
+@pytest.mark.usuarios_permitidos(["admin", "estoque"])
+async def test_update_one_non_existing_item(client: AsyncClient, uuid_store_tipo_endereco: dict,
+                                            uuid_store_endereco: dict, uuid_store_pessoa: dict):
+    # uuid_1 no longer exists, so the update must return 404
+    response = await client.request(
+        method="PUT",
+        url=f"{BASE_URL}/update_one",
+        json={"uuid": uuid_store_endereco["uuid_1"],
+              "endereco_pessoa_status": True,
+              "endereco_pessoa_descricao": "Update Endereço Descrição 4",
+              "endereco_pessoa_numero": "123",
+              "endereco_pessoa_complemento": "Update Complemento 4",
+              "endereco_pessoa_cep": "00000000",
+              "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"],
+              "fk_pessoa_uuid": uuid_store_pessoa["uuid_6"]}
+    )
+    assert response.status_code == status.HTTP_404_NOT_FOUND
+
+
+@pytest.mark.anyio
+@pytest.mark.usuarios_permitidos(["admin", "estoque"])
+async def test_update_many_non_existing_item(client: AsyncClient, uuid_store_endereco: dict, uuid_store_pessoa: dict,
+                                             uuid_store_tipo_endereco: dict):
+    # uuid_1 and uuid_2 no longer exist, so the batch update must return 404
+    response = await client.request(
+        method="PUT",
+        url=f"{BASE_URL}/update_many",
+        json=[
+            {"uuid": uuid_store_endereco["uuid_1"],
+             "endereco_pessoa_status": True,
+             "endereco_pessoa_descricao": "Update Endereço Descrição 4",
+             "endereco_pessoa_numero": "123",
+             "endereco_pessoa_complemento": "Update Complemento 4",
+             "endereco_pessoa_cep": "00000000",
+             "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"],
+             "fk_pessoa_uuid": uuid_store_pessoa["uuid_6"]},
+            {"uuid": uuid_store_endereco["uuid_2"],
+             "endereco_pessoa_status": True,
+             "endereco_pessoa_descricao": "Update Endereço Descrição 4",
+             "endereco_pessoa_numero": "123",
+             "endereco_pessoa_complemento": "Update Complemento 4",
+             "endereco_pessoa_cep": "00000000",
+             "fk_tipo_endereco_uuid": uuid_store_tipo_endereco["uuid_6"],
+             "fk_pessoa_uuid": uuid_store_pessoa["uuid_6"]}
+        ]
+    )
+    assert response.status_code == status.HTTP_404_NOT_FOUND
+
+
+# Tests with data outside the allowed length limits
+
+@pytest.mark.anyio
+@pytest.mark.usuarios_permitidos(["admin", "estoque"])
+async def test_create_one_min_length(client: AsyncClient, uuid_store_relacao_comercial: dict):
+    response = await client.post(f"{BASE_URL}/add_one",
+                                 json={"endereco_pessoa_descricao": "a"})
+    assert response.status_code == 422
+
+
+@pytest.mark.anyio
+@pytest.mark.usuarios_permitidos(["admin", "estoque"])
+async def test_create_one_max_length(client: AsyncClient, uuid_store_relacao_comercial: dict):
+    response = await client.post(f"{BASE_URL}/add_one",
+                                 json={"endereco_pessoa_descricao": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+                                                                    "aaaaaaaaaaa"})
+    assert response.status_code == 422
+
+
+@pytest.mark.anyio
+@pytest.mark.usuarios_permitidos(["admin", "estoque"])
+async def test_create_many_max_and_min_length(client: AsyncClient):
+    response = await client.post(f"{BASE_URL}/add_many", json=[
+        {"endereco_pessoa_descricao": "aa"},
+        {"endereco_pessoa_descricao": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"},
+        {"endereco_pessoa_descricao": "aa"},
+        {"endereco_pessoa_descricao": "aa"}
+    ])
+    assert response.status_code == 422
+
+
+@pytest.mark.anyio
+@pytest.mark.usuarios_permitidos(["admin", "estoque"])
+async def test_update_one_existing_item_min_length(client: AsyncClient, uuid_store_endereco: dict):
+    response = await client.put(f"{BASE_URL}/update_one",
+                                json={"uuid": uuid_store_endereco["uuid_8"],
+                                      "endereco_pessoa_descricao": "a"})
+    assert response.status_code == 422
+
+
+@pytest.mark.anyio
+@pytest.mark.usuarios_permitidos(["admin", "estoque"])
+async def test_update_one_existing_item_max_length(client: AsyncClient, uuid_store_endereco: dict):
+    response = await client.put(f"{BASE_URL}/update_one",
+                                json={"uuid": uuid_store_endereco["uuid_8"],
+                                      "endereco_pessoa_descricao": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+                                                                   "aaaaaa"})
+    assert response.status_code == 422
+
+
+@pytest.mark.anyio
+@pytest.mark.usuarios_permitidos(["admin", "estoque"])
+async def test_update_many_existing_item_max_and_min_length(client: AsyncClient, uuid_store_endereco: dict):
+    response = await client.put(f"{BASE_URL}/update_many", json=[
+        {"uuid": uuid_store_endereco["uuid_7"], "endereco_pessoa_descricao": "aa"},
+        {"uuid": uuid_store_endereco["uuid_6"],
+         "endereco_pessoa_descricao": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"},
+    ])
+    assert response.status_code == 422
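+
+
+# --- Editor's note: illustrative sketch, not part of the original suite. ----
+# The single-field length checks above could also be collapsed into one
+# parametrized test. The payloads reuse the values already shown to fail in
+# test_create_one_min_length and test_create_one_max_length ("a" and a
+# 51-character string); no new schema limits are assumed here.
+@pytest.mark.anyio
+@pytest.mark.usuarios_permitidos(["admin", "estoque"])
+@pytest.mark.parametrize("descricao", ["a", "a" * 51])
+async def test_create_one_descricao_out_of_bounds(client: AsyncClient, descricao: str):
+    # Either bound violation should be rejected by schema validation with 422
+    response = await client.post(f"{BASE_URL}/add_one",
+                                 json={"endereco_pessoa_descricao": descricao})
+    assert response.status_code == 422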