diff --git a/app/config/__init__.py b/app/config/__init__.py
index 6c2d783..43b52c5 100644
--- a/app/config/__init__.py
+++ b/app/config/__init__.py
@@ -81,7 +81,7 @@ def inject_environment_variables(environment: str):
         f"Injecting {len(secrets)} environment variables from Infisical:")
     for secret in secrets:
         logger.info(
-            f" - {secret.secret_name}: {'*' * len(secret.secret_value)}")
+            f" - {secret.secret_name}: {len(secret.secret_value)} chars")
 
 
 environment = getenv_or_action("ENVIRONMENT", action="warn", default="dev")
diff --git a/app/config/base.py b/app/config/base.py
index 7729784..7b41b71 100644
--- a/app/config/base.py
+++ b/app/config/base.py
@@ -6,8 +6,17 @@
 # Logging
 LOG_LEVEL = getenv_or_action("LOG_LEVEL", default="INFO")
 
-# BigQuery Project
+# BigQuery Integration
 BIGQUERY_PROJECT = getenv_or_action("BIGQUERY_PROJECT", action="raise")
+BIGQUERY_PATIENT_HEADER_TABLE_ID = getenv_or_action(
+    "BIGQUERY_PATIENT_HEADER_TABLE_ID", action="raise"
+)
+BIGQUERY_PATIENT_SUMMARY_TABLE_ID = getenv_or_action(
+    "BIGQUERY_PATIENT_SUMMARY_TABLE_ID", action="raise"
+)
+BIGQUERY_PATIENT_ENCOUNTERS_TABLE_ID = getenv_or_action(
+    "BIGQUERY_PATIENT_ENCOUNTERS_TABLE_ID", action="raise"
+)
 
 # JWT configuration
 JWT_SECRET_KEY = getenv_or_action("JWT_SECRET_KEY", default=token_bytes(32).hex())
diff --git a/app/routers/frontend.py b/app/routers/frontend.py
index f255d01..c780cc8 100644
--- a/app/routers/frontend.py
+++ b/app/routers/frontend.py
@@ -1,9 +1,7 @@
 # -*- coding: utf-8 -*-
-import json
-
 from typing import Annotated, List
 from fastapi import APIRouter, Depends, HTTPException
-from basedosdados import read_sql
+from tortoise.exceptions import ValidationError
 
 from app.dependencies import (
     get_current_frontend_user
@@ -15,8 +13,14 @@
     Encounter,
     UserInfo,
 )
 
-from app.config import BIGQUERY_PROJECT
-from app.utils import read_timestamp, normalize_case
+from app.utils import read_bq
+from app.validators import CPFValidator
+from app.config import (
+    BIGQUERY_PROJECT,
+    BIGQUERY_PATIENT_HEADER_TABLE_ID,
+    BIGQUERY_PATIENT_SUMMARY_TABLE_ID,
+    BIGQUERY_PATIENT_ENCOUNTERS_TABLE_ID
+)
 
 router = APIRouter(prefix="/frontend", tags=["Frontend Application"])
@@ -45,80 +49,32 @@ async def get_patient_header(
     _: Annotated[User, Depends(get_current_frontend_user)],
     cpf: str,
 ) -> PatientHeader:
-    results_json = read_sql(
+    validator = CPFValidator()
+    try:
+        validator(cpf)
+    except ValidationError:
+        raise HTTPException(status_code=400, detail="Invalid CPF")
+
+    results = await read_bq(
         f"""
         SELECT *
-        FROM `{BIGQUERY_PROJECT}`.`saude_dados_mestres`.`paciente`
+        FROM `{BIGQUERY_PROJECT}`.{BIGQUERY_PATIENT_HEADER_TABLE_ID}
         WHERE cpf = '{cpf}'
         """,
         from_file="/tmp/credentials.json",
-    ).to_json(orient="records")
+    )
 
-    results = json.loads(results_json)
-
-    if len(results) > 0:
-        patient_record = results[0]
-    else:
+    if len(results) == 0:
         raise HTTPException(status_code=404, detail="Patient not found")
 
-    data = patient_record["dados"]
-
-    cns_principal = None
-    if len(patient_record["cns"]) > 0:
-        cns_principal = patient_record["cns"][0]
-
-    telefone_principal = None
-    if len(patient_record["contato"]["telefone"]) > 0:
-        telefone_principal = patient_record["contato"]["telefone"][0]["valor"]
-
-    clinica_principal, equipe_principal = {}, {}
-    medicos, enfermeiros = [], []
-    if len(patient_record["equipe_saude_familia"]) > 0:
-        equipe_principal = patient_record["equipe_saude_familia"][0]
-
-        # Pega Clínica da Família
-        if equipe_principal["clinica_familia"]:
-            clinica_principal = equipe_principal["clinica_familia"]
-
-        for equipe in patient_record["equipe_saude_familia"]:
-            medicos.extend(equipe["medicos"])
-            enfermeiros.extend(equipe["enfermeiros"])
-
-        for medico in medicos:
-            medico['registry'] = medico.pop('id_profissional_sus')
-            medico['name'] = medico.pop('nome')
-
-        for enfermeiro in enfermeiros:
-            enfermeiro['registry'] = enfermeiro.pop('id_profissional_sus')
-            enfermeiro['name'] = enfermeiro.pop('nome')
+    dados = results[0]
+    configuracao_exibicao = dados.get('exibicao', {})
 
-    data_nascimento = None
-    if data.get("data_nascimento") is not None:
-        data_nascimento = read_timestamp(data.get("data_nascimento"), output_format='date')
+    if configuracao_exibicao.get('indicador', False) is False:
+        message = ",".join(configuracao_exibicao.get('motivos', []))
+        raise HTTPException(status_code=403, detail=message)
 
-    return {
-        "registration_name": data.get("nome"),
-        "social_name": data.get("nome_social"),
-        "cpf": f"{cpf[:3]}.{cpf[3:6]}.{cpf[6:9]}-{cpf[9:]}",
-        "cns": cns_principal,
-        "birth_date": data_nascimento,
-        "gender": data.get("genero"),
-        "race": data.get("raca"),
-        "phone": telefone_principal,
-        "family_clinic": {
-            "cnes": clinica_principal.get("id_cnes"),
-            "name": clinica_principal.get("nome"),
-            "phone": clinica_principal.get("telefone"),
-        },
-        "family_health_team": {
-            "ine_code": equipe_principal.get("id_ine"),
-            "name": equipe_principal.get("nome"),
-            "phone": equipe_principal.get("telefone"),
-        },
-        "medical_responsible": medicos,
-        "nursing_responsible": enfermeiros,
-        "validated": data.get("identidade_validada_indicador"),
-    }
+    return dados
@@ -128,51 +84,18 @@ async def get_patient_summary(
     cpf: str,
 ) -> PatientSummary:
-    query = f"""
-        with
-            base as (select '{cpf}' as cpf),
-            alergias_grouped as (
-                select
-                    cpf,
-                    alergias as allergies
-                from `saude_historico_clinico.alergia`
-                where cpf = '{cpf}'
-            ),
-            medicamentos_cronicos_single as (
-                select
-                    cpf,
-                    med.nome as nome_medicamento
-                from `saude_historico_clinico.medicamentos_cronicos`,
-                    unnest(medicamentos) as med
-                where cpf = '{cpf}'
-            ),
-            medicamentos_cronicos_grouped as (
-                select
-                    cpf,
-                    array_agg(nome_medicamento) as continuous_use_medications
-                from medicamentos_cronicos_single
-                group by cpf
-            )
-        select
-            alergias_grouped.allergies,
-            medicamentos_cronicos_grouped.continuous_use_medications
-        from base
-            left join alergias_grouped on alergias_grouped.cpf = base.cpf
-            left join medicamentos_cronicos_grouped on medicamentos_cronicos_grouped.cpf = base.cpf
-    """
-    results_json = read_sql(
-        query,
-        from_file="/tmp/credentials.json"
-    ).to_json(orient="records")
-
-    result = json.loads(results_json)
-    if len(result) > 0:
-        return result[0]
-
-    return {
-        "allergies": [],
-        "continuous_use_medications": []
-    }
+    results = await read_bq(
+        f"""
+        SELECT *
+        FROM `{BIGQUERY_PROJECT}`.{BIGQUERY_PATIENT_SUMMARY_TABLE_ID}
+        WHERE cpf = '{cpf}'
+        """,
+        from_file="/tmp/credentials.json",
+    )
+    if len(results) == 0:
+        raise HTTPException(status_code=404, detail="Patient not found")
+    else:
+        return results[0]
 
 
 @router.get("/patient/filter_tags")
 async def get_filter_tags(
@@ -196,54 +119,12 @@ async def get_patient_encounters(
     cpf: str,
 ) -> List[Encounter]:
-    results_json = read_sql(
+    results = await read_bq(
         f"""
         SELECT *
-        FROM `{BIGQUERY_PROJECT}`.`saude_historico_clinico`.`episodio_assistencial`
-        WHERE paciente.cpf = '{cpf}'
+        FROM `{BIGQUERY_PROJECT}`.{BIGQUERY_PATIENT_ENCOUNTERS_TABLE_ID}
+        WHERE cpf = '{cpf}' and exibicao.indicador = true
         """,
         from_file="/tmp/credentials.json",
-    ).to_json(orient="records")
-
-    encounters = []
-    for result in json.loads(results_json):
-        # Responsible professional
-        professional = result.get('profissional_saude_responsavel')
-        if professional:
-            if isinstance(professional, list):
-                professional = professional[0] if len(professional) > 0 else {}
-
-            if not professional['nome'] and not professional['especialidade']:
-                professional = None
-            else:
-                professional = {
-                    "name": professional.get('nome'),
-                    "role": professional.get('especialidade')
-                }
-
-        # Filter Tags
-        unit_type = result['estabelecimento']['estabelecimento_tipo']
-        if unit_type in [
-            'CLINICA DA FAMILIA',
-            'CENTRO MUNICIPAL DE SAUDE'
-        ]:
-            unit_type = 'CF/CMS'
-
-        encounter = {
-            "entry_datetime": read_timestamp(result['entrada_datahora'], output_format='datetime'),
-            "exit_datetime": read_timestamp(result['saida_datahora'], output_format='datetime'),
-            "location": result['estabelecimento']['nome'],
-            "type": result['tipo'],
-            "subtype": result['subtipo'],
-            "active_cids": [cid['descricao'] for cid in result['condicoes'] if cid['descricao']],
-            "responsible": professional,
-            "clinical_motivation": normalize_case(result['motivo_atendimento']),
-            "clinical_outcome": normalize_case(result['desfecho_atendimento']),
-            "filter_tags": [unit_type],
-        }
-        encounters.append(encounter)
-
-    # Sort Encounters by entry_datetime
-    encounters = sorted(encounters, key=lambda x: x['entry_datetime'], reverse=True)
-
-    return encounters
+    )
+    return results
diff --git a/app/types/frontend.py b/app/types/frontend.py
index 24b6bc5..60743cc 100644
--- a/app/types/frontend.py
+++ b/app/types/frontend.py
@@ -16,6 +16,10 @@ class FamilyHealthTeam(BaseModel):
     name: Optional[str]
     phone: Optional[str]
 
+# Clinical Exam Model
+class ClinicalExam(BaseModel):
+    type: str
+    description: Optional[str]
 
 # Medical Conditions model
 class PatientSummary(BaseModel):
@@ -32,14 +36,16 @@ class Responsible(BaseModel):
 
 
 # Medical Visit model
 class Encounter(BaseModel):
     entry_datetime: str
-    exit_datetime: str
+    exit_datetime: Optional[str]
     location: str
     type: str
     subtype: Optional[str]
+    exhibition_type: str = 'default'
     active_cids: List[str]
     responsible: Optional[Responsible]
     clinical_motivation: Optional[str]
     clinical_outcome: Optional[str]
+    clinical_exams: List[ClinicalExam]
     filter_tags: List[str]
diff --git a/app/utils.py b/app/utils.py
index 5dc8003..67be48f 100644
--- a/app/utils.py
+++ b/app/utils.py
@@ -3,7 +3,11 @@
 import jwt
 import hashlib
 import json
-from typing import Literal
+import os
+
+from google.cloud import bigquery
+from google.oauth2 import service_account
+from asyncer import asyncify
 
 from loguru import logger
 from passlib.context import CryptContext
@@ -124,24 +128,19 @@ async def get_instance(Model, table, slug=None, code=None):
     return table[slug]
 
 
-def read_timestamp(timestamp: int, output_format=Literal['date','datetime']) -> str:
-    if output_format == 'date':
-        denominator = 1000
-        str_format = "%Y-%m-%d"
-    elif output_format == 'datetime':
-        denominator = 1
-        str_format = "%Y-%m-%d %H:%M:%S"
-    else:
-        raise ValueError("Invalid format")
+async def read_bq(query, from_file="/tmp/credentials.json"):
+    logger.debug(f"""Reading BigQuery with query (QUERY_PREVIEW_ENABLED={
+        os.environ['QUERY_PREVIEW_ENABLED']
+    }): {query}""")
 
-    try:
-        value = datetime(1970, 1, 1) + timedelta(seconds=timestamp/denominator)
-    except Exception as exc:
-        logger.error(f"Invalid timestamp: {timestamp} from {exc}")
-        return None
+    def execute_job():
+        credentials = service_account.Credentials.from_service_account_file(
+            from_file,
+        )
+        client = bigquery.Client(credentials=credentials)
+        row_iterator = client.query_and_wait(query)
+        return [dict(row) for row in row_iterator]
 
-    return value.strftime(str_format)
+    rows = await asyncify(execute_job)()
 
-def normalize_case(text):
-    # TODO
-    return text
+    return rows
\ No newline at end of file
diff --git a/poetry.lock b/poetry.lock
index e0ad362..bed4472 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -85,6 +85,20 @@ files = [
     {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
 ]
 
+[[package]]
+name = "asyncer"
+version = "0.0.8"
+description = "Asyncer, async and await, focused on developer experience."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "asyncer-0.0.8-py3-none-any.whl", hash = "sha256:5920d48fc99c8f8f0f1576e1882f5022885589c5fcbc46ce4224ec3e53776eeb"},
+    {file = "asyncer-0.0.8.tar.gz", hash = "sha256:a589d980f57e20efb07ed91d0dbe67f1d2fd343e7142c66d3a099f05c620739c"},
+]
+
+[package.dependencies]
+anyio = ">=3.4.0,<5.0"
+
 [[package]]
 name = "asyncpg"
 version = "0.29.0"
@@ -2793,4 +2807,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.11"
-content-hash = "af2cee69c2de80a6861a61b21a5ac4dacbcbdf1a79ee275beab6ea8bc72cba7c"
+content-hash = "1ec4944b80ec680487b4e0ffc0144b035cc4fc7efeb12e81f5b585127b8c0271"
diff --git a/pyproject.toml b/pyproject.toml
index 92157cb..37f0d01 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -30,6 +30,7 @@ urllib3 = "2.0.7"
 idna = "3.7"
 basedosdados = "^2.0.0b16"
 nltk = "^3.9.1"
+asyncer = "^0.0.8"
 
 [tool.poetry.group.dev.dependencies]
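For reference, a minimal usage sketch (not part of the patch) of the new read_bq helper added in app/utils.py, assuming the BIGQUERY_* settings from app/config/base.py and QUERY_PREVIEW_ENABLED are present in the environment and that a service-account key exists at /tmp/credentials.json; the main() wrapper and the sample CPF are illustrative only.

# Illustrative caller for the read_bq helper introduced above; not part of the diff.
# Assumes the app package from this repository is importable and configured.
import asyncio

from app.config import BIGQUERY_PROJECT, BIGQUERY_PATIENT_SUMMARY_TABLE_ID
from app.utils import read_bq


async def main() -> None:
    cpf = "00000000000"  # sample value, illustrative only
    rows = await read_bq(
        f"""
        SELECT *
        FROM `{BIGQUERY_PROJECT}`.{BIGQUERY_PATIENT_SUMMARY_TABLE_ID}
        WHERE cpf = '{cpf}'
        """,
        from_file="/tmp/credentials.json",
    )
    # read_bq returns a list of dicts (one per row); asyncify runs the blocking
    # BigQuery client call in a worker thread so the event loop is not blocked.
    print(rows[0] if rows else "no rows")


if __name__ == "__main__":
    asyncio.run(main())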
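Likewise, a small sketch of how the updated Encounter model in app/types/frontend.py accepts the new fields, assuming the module is importable as app.types.frontend; all field values below are invented for illustration.

# Illustrative only: exercising the updated Encounter model (optional exit_datetime,
# exhibition_type defaulting to 'default', and the new clinical_exams list).
from app.types.frontend import ClinicalExam, Encounter

encounter = Encounter(
    entry_datetime="2024-01-01 10:00:00",
    exit_datetime=None,  # now optional
    location="Clínica da Família Exemplo",
    type="Consulta",
    subtype=None,
    active_cids=["A00"],
    responsible=None,
    clinical_motivation=None,
    clinical_outcome=None,
    clinical_exams=[ClinicalExam(type="Imagem", description=None)],
    filter_tags=["CF/CMS"],
)  # exhibition_type is omitted and falls back to 'default'
print(encounter)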