Skip to content

Commit

Permalink
feat: support for new graphene-federation (#237)
Browse files Browse the repository at this point in the history
* bump: v0.4.4

Added support for the new graphene-federation
Dropped support for Python 3.8

* fix: reference and cached-reference field resolvers refetching data even when the field is already pre-fetched
  • Loading branch information
mak626 committed Jun 17, 2024
1 parent 8fd26f2 commit 0abaa56
Show file tree
Hide file tree
Showing 7 changed files with 244 additions and 215 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
python: ["3.8", "3.9", "3.10", "3.11","3.12"]
python: ["3.9", "3.10", "3.11","3.12"]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/publish.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
python: ["3.8", "3.9", "3.10", "3.11", "3.12"]
python: ["3.9", "3.10", "3.11", "3.12"]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
Expand Down
2 changes: 1 addition & 1 deletion examples/django_mongoengine/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
Django==3.2.24
Django==3.2.25
pytest==4.6.3
pytest-django==3.5.1
mongoengine==0.27.0
Expand Down
9 changes: 3 additions & 6 deletions graphene_mongo/advanced_types.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,17 @@
import base64

import graphene
from graphene_federation import shareable


@shareable # Support Graphene Federation v2
class FileFieldType(graphene.ObjectType):
content_type = graphene.String()
md5 = graphene.String()
chunk_size = graphene.Int()
length = graphene.Int()
data = graphene.String()

# Support Graphene Federation v2
_shareable = True

@classmethod
def _resolve_fs_field(cls, field, name, default_value=None):
v = getattr(field.instance, field.key)
Expand All @@ -38,12 +37,10 @@ def resolve_data(self, info):
return None


@shareable # Support Graphene Federation v2
class _CoordinatesTypeField(graphene.ObjectType):
type = graphene.String()

# Support Graphene Federation v2
_shareable = True

def resolve_type(self, info):
return self["type"]

Expand Down
48 changes: 34 additions & 14 deletions graphene_mongo/converter.py
Original file line number Diff line number Diff line change
Expand Up @@ -545,7 +545,7 @@ def convert_field_to_dynamic(field, registry=None, executor: ExecutorEnum = Exec
model = field.document_type

def reference_resolver(root, *args, **kwargs):
document = getattr(root, field.name or field.db_name)
document = root._data.get(field.name or field.db_name, None)
if document:
queried_fields = list()
_type = registry.get_type_for_model(field.document_type, executor=executor)
Expand All @@ -558,16 +558,23 @@ def reference_resolver(root, *args, **kwargs):
item = to_snake_case(each)
if item in field.document_type._fields_ordered + tuple(filter_args):
queried_fields.append(item)

fields_to_fetch = set(list(_type._meta.required_fields) + queried_fields)
if isinstance(document, field.document_type) and all(
document._data[_field] is not None for _field in fields_to_fetch
):
return document # Data is already fetched
return (
field.document_type.objects()
.no_dereference()
.only(*(set(list(_type._meta.required_fields) + queried_fields)))
.only(*fields_to_fetch)
.get(pk=document.id)
)
return None

def cached_reference_resolver(root, *args, **kwargs):
if field:
document = root._data.get(field.name or field.db_name, None)
if document:
queried_fields = list()
_type = registry.get_type_for_model(field.document_type, executor=executor)
filter_args = list()
Expand All @@ -579,16 +586,22 @@ def cached_reference_resolver(root, *args, **kwargs):
item = to_snake_case(each)
if item in field.document_type._fields_ordered + tuple(filter_args):
queried_fields.append(item)

fields_to_fetch = set(list(_type._meta.required_fields) + queried_fields)
if isinstance(document, field.document_type) and all(
document._data[_field] is not None for _field in fields_to_fetch
):
return document # Data is already fetched
return (
field.document_type.objects()
.no_dereference()
.only(*(set(list(_type._meta.required_fields) + queried_fields)))
.only(*fields_to_fetch)
.get(pk=getattr(root, field.name or field.db_name))
)
return None

async def reference_resolver_async(root, *args, **kwargs):
document = getattr(root, field.name or field.db_name)
document = root._data.get(field.name or field.db_name, None)
if document:
queried_fields = list()
_type = registry.get_type_for_model(field.document_type, executor=executor)
Expand All @@ -601,16 +614,20 @@ async def reference_resolver_async(root, *args, **kwargs):
item = to_snake_case(each)
if item in field.document_type._fields_ordered + tuple(filter_args):
queried_fields.append(item)

fields_to_fetch = set(list(_type._meta.required_fields) + queried_fields)
if isinstance(document, field.document_type) and all(
document._data[_field] is not None for _field in fields_to_fetch
):
return document # Data is already fetched
return await sync_to_async(
field.document_type.objects()
.no_dereference()
.only(*(set(list(_type._meta.required_fields) + queried_fields)))
.get
field.document_type.objects().no_dereference().only(*fields_to_fetch).get
)(pk=document.id)
return None

async def cached_reference_resolver_async(root, *args, **kwargs):
if field:
document = root._data.get(field.name or field.db_name, None)
if document:
queried_fields = list()
_type = registry.get_type_for_model(field.document_type, executor=executor)
filter_args = list()
Expand All @@ -622,11 +639,14 @@ async def cached_reference_resolver_async(root, *args, **kwargs):
item = to_snake_case(each)
if item in field.document_type._fields_ordered + tuple(filter_args):
queried_fields.append(item)

fields_to_fetch = set(list(_type._meta.required_fields) + queried_fields)
if isinstance(document, field.document_type) and all(
document._data[_field] is not None for _field in fields_to_fetch
):
return document # Data is already fetched
return await sync_to_async(
field.document_type.objects()
.no_dereference()
.only(*(set(list(_type._meta.required_fields) + queried_fields)))
.get
field.document_type.objects().no_dereference().only(*fields_to_fetch).get
)(pk=getattr(root, field.name or field.db_name))
return None

Expand Down
Loading

0 comments on commit 0abaa56

Please sign in to comment.