Skip to content

Commit

Permalink
exchange: add timeout in ticks
Browse files Browse the repository at this point in the history
  • Loading branch information
sgliner-ledger committed Feb 15, 2024
1 parent e408b8c commit 6b27747
Show file tree
Hide file tree
Showing 3 changed files with 33 additions and 11 deletions.
19 changes: 15 additions & 4 deletions speculos/api/apdu.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,17 +18,23 @@ def __init__(self, seph: SeProxyHal):
self._seph.apdu_callbacks.append(self.seph_apdu_callback)
self.response: Optional[bytes]

def exchange(self, data: bytes, tick_timeout: int = 5 * 60 * 10) -> Generator[bytes, None, None]:
    """Forward an APDU to the app and stream back its response.

    :param data: raw APDU bytes to deliver to the app.
    :param tick_timeout: maximum number of SE ticks to wait for the app's
        response; 0 disables the timeout. Default is 5 * 60 * 10 ticks
        (presumably ~5 minutes at 10 ticks/s — TODO confirm tick rate).
    :raises TimeoutError: when the tick budget is exhausted before the app
        answers. NOTE(review): this is raised after the first chunk has been
        yielded, so an HTTP client sees a truncated chunked response.
    """
    # Force the response headers to be flushed to the client immediately.
    yield b""

    ticks_before_exchange = self._seph.get_tick_count()

    with self.endpoint_lock:  # Lock for a command/response for one client
        with self.response_condition:
            self.response = None
        self._seph.to_app(data)
        with self.response_condition:
            while self.response is None:
                # Wake up every 100 ms so the tick budget can be re-checked
                # even when no notification arrives.
                self.response_condition.wait(0.1)
                elapsed_ticks = self._seph.get_tick_count() - ticks_before_exchange
                # tick_timeout == 0 means "wait forever".
                if tick_timeout != 0 and elapsed_ticks > tick_timeout:
                    raise TimeoutError(
                        f"No response from app after {elapsed_ticks} ticks "
                        f"(timeout: {tick_timeout} ticks)")
        yield json.dumps({"data": self.response.hex()}).encode()

def seph_apdu_callback(self, data: bytes) -> None:
Expand All @@ -55,5 +61,10 @@ def post(self):
except jsonschema.exceptions.ValidationError as e:
return {"error": f"{e}"}, 400

data = bytes.fromhex(args.get("data"))
return Response(stream_with_context(self._bridge.exchange(data)), content_type="application/json")
if 'apdu' in args['data']:
apdu = bytes.fromhex(args['data']['apdu'])
tick_timeout = args["data"]['tick_timeout']
return Response(stream_with_context(self._bridge.exchange(apdu, tick_timeout)), content_type="application/json")
else:
data = bytes.fromhex(args.get("data"))
return Response(stream_with_context(self._bridge.exchange(data)), content_type="application/json")
5 changes: 4 additions & 1 deletion speculos/api/resources/apdu.schema
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,10 @@

"type": "object",
"properties": {
"data": {
    "oneOf": [
        { "type": "string", "pattern": "^([a-fA-F0-9]{2})+$" },
        {
            "type": "object",
            "properties": {
                "apdu": { "type": "string", "pattern": "^([a-fA-F0-9]{2})+$" },
                "tick_timeout": { "type": "integer" }
            },
            "required": [ "apdu" ],
            "additionalProperties": false
        }
    ]
}
},
"required": [ "data" ],
"additionalProperties": false
Expand Down
20 changes: 14 additions & 6 deletions speculos/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,10 +141,16 @@ def get_screenshot(self) -> bytes:
check_status_code(response, "/screenshot")
return response.content

def _apdu_exchange(self, data: bytes, tick_timeout: int = 5 * 60 * 10) -> bytes:
    """POST an APDU to the speculos API and return the app's response bytes.

    :param data: raw APDU bytes.
    :param tick_timeout: tick budget forwarded to the server; 0 presumably
        disables the timeout server-side — TODO confirm against the API.
    :raises TimeoutError: when the server aborts the chunked response
        because its tick budget was exhausted.
    """
    payload = {"apdu": data.hex(), "tick_timeout": tick_timeout}
    try:
        with self.session.post(f"{self.api_url}/apdu", json={"data": payload}) as response:
            return ApduResponse(response).receive()
    except requests.exceptions.ChunkedEncodingError as exc:
        # The server raises TimeoutError in the middle of the chunked
        # stream, which the client library surfaces as a truncated body;
        # translate it back, keeping the original exception chained for
        # debuggability.
        raise TimeoutError(
            f"APDU exchange timed out after {tick_timeout} ticks") from exc

def _apdu_exchange_nowait(self, data: bytes) -> requests.Response:
    """Send an APDU without waiting for the response; returns the streamed
    HTTP response so the caller can collect the result later."""
    payload = {"data": data.hex()}
    return self.session.post(f"{self.api_url}/apdu", json=payload, stream=True)
Expand Down Expand Up @@ -239,9 +245,11 @@ def __exit__(
) -> None:
self.stop()

def apdu_exchange(
        self, cla: int, ins: int, data: bytes = b"", p1: int = 0, p2: int = 0,
        tick_timeout: int = 5 * 60 * 10) -> bytes:
    """Build a short ISO 7816-4 APDU (CLA INS P1 P2 Lc | data) and exchange
    it with the app, waiting at most *tick_timeout* ticks for the answer."""
    header = bytes([cla, ins, p1, p2, len(data)])
    return Api._apdu_exchange(self, header + data, tick_timeout)

@contextmanager
def apdu_exchange_nowait(
Expand Down

0 comments on commit 6b27747

Please sign in to comment.