Skip to content
41 changes: 30 additions & 11 deletions src/fastapi_cloud_cli/commands/deploy.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from itertools import cycle
from pathlib import Path, PurePosixPath
from textwrap import dedent
from typing import Annotated, Any
from typing import Annotated, Any, BinaryIO, cast

import fastar
import rignore
Expand All @@ -17,6 +17,7 @@
from rich.text import Text
from rich_toolkit import RichToolkit
from rich_toolkit.menu import Option
from rich_toolkit.progress import Progress

from fastapi_cloud_cli.commands.login import login
from fastapi_cloud_cli.utils.api import (
Expand All @@ -29,6 +30,7 @@
from fastapi_cloud_cli.utils.apps import AppConfig, get_app_config, write_app_config
from fastapi_cloud_cli.utils.auth import Identity
from fastapi_cloud_cli.utils.cli import get_rich_toolkit, handle_http_errors
from fastapi_cloud_cli.utils.progress_file import ProgressFile

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -201,16 +203,32 @@ class RequestUploadResponse(BaseModel):
fields: dict[str, str]


def _upload_deployment(deployment_id: str, archive_path: Path) -> None:
def _format_size(size_in_bytes: int) -> str:
if size_in_bytes >= 1024 * 1024:
return f"{size_in_bytes / (1024 * 1024):.2f} MB"
elif size_in_bytes >= 1024:
return f"{size_in_bytes / 1024:.2f} KB"
else:
return f"{size_in_bytes} bytes"


def _upload_deployment(
deployment_id: str, archive_path: Path, progress: Progress
) -> None:
archive_size = archive_path.stat().st_size
archive_size_str = _format_size(archive_size)

progress.log(f"Uploading deployment ({archive_size_str})...")
logger.debug(
"Starting deployment upload for deployment: %s",
deployment_id,
)
logger.debug(
"Archive path: %s, size: %s bytes",
archive_path,
archive_path.stat().st_size,
)
logger.debug("Archive path: %s, size: %s bytes", archive_path, archive_size)

def progress_callback(bytes_read: int) -> None:
progress.log(
f"Uploading deployment ({_format_size(bytes_read)} of {archive_size_str})..."
)

with APIClient() as fastapi_client, Client() as client:
# Get the upload URL
Expand All @@ -223,10 +241,13 @@ def _upload_deployment(deployment_id: str, archive_path: Path) -> None:

logger.debug("Starting file upload to S3")
with open(archive_path, "rb") as archive_file:
archive_file_with_progress = ProgressFile(
archive_file, progress_callback=progress_callback
)
upload_response = client.post(
upload_data.url,
data=upload_data.fields,
files={"file": archive_file},
files={"file": cast(BinaryIO, archive_file_with_progress)},
)

upload_response.raise_for_status()
Expand Down Expand Up @@ -769,9 +790,7 @@ def deploy(
f"Deployment created successfully! Deployment slug: {deployment.slug}"
)

progress.log("Uploading deployment...")

_upload_deployment(deployment.id, archive_path)
_upload_deployment(deployment.id, archive_path, progress=progress)

progress.log("Deployment uploaded successfully!")
except KeyboardInterrupt:
Expand Down
30 changes: 30 additions & 0 deletions src/fastapi_cloud_cli/utils/progress_file.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
from collections.abc import Callable
from datetime import datetime
from typing import Any, BinaryIO


class ProgressFile:
    """File-object wrapper that reports read progress via a callback.

    Each ``read`` forwards to the wrapped file and, at most once per
    ``update_interval`` seconds (and always at end-of-file), invokes
    ``progress_callback`` with the current file position, i.e. the total
    number of bytes consumed so far. All other attribute access is
    delegated to the wrapped file.
    """

    def __init__(
        self,
        file: BinaryIO,
        progress_callback: Callable[[int], None],
        update_interval: float = 0.5,
    ) -> None:
        self._file = file
        self._progress_callback = progress_callback
        self._update_interval = update_interval
        # 0.0 guarantees the very first read always reports progress.
        self._last_update_time = 0.0

    def read(self, n: int = -1) -> bytes:
        chunk = self._file.read(n)
        moment = datetime.now().timestamp()

        # An empty result, or fewer bytes than requested, means EOF —
        # always report the final position so the last update isn't lost.
        hit_eof = not chunk or (n > 0 and len(chunk) < n)
        interval_elapsed = moment - self._last_update_time >= self._update_interval

        if interval_elapsed or hit_eof:
            # tell() reflects seeks too, so retries report true progress.
            self._progress_callback(self._file.tell())
            self._last_update_time = moment

        return chunk

    def __getattr__(self, name: str) -> Any:
        # Delegate everything else (name, seek, tell, ...) to the wrapped file.
        return getattr(self._file, name)
97 changes: 97 additions & 0 deletions tests/test_cli_deploy.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import random
import re
import string
from datetime import timedelta
from pathlib import Path
Expand All @@ -10,6 +11,7 @@
import respx
from click.testing import Result
from httpx import Response
from rich_toolkit.progress import Progress
from time_machine import TimeMachineFixture
from typer.testing import CliRunner

Expand Down Expand Up @@ -1656,6 +1658,101 @@ def test_deploy_with_token_fails(
)


@pytest.mark.parametrize(
    ("size", "expected_msgs"),
    [
        (
            100,
            [
                r"\(\d+ bytes\)", # e.g. "(123 bytes)"
                r"\(\d+ bytes of \d+ bytes\)", # e.g. "(123 bytes of 456 bytes)"
            ],
        ),
        (
            10 * 1024,
            [
                r"\(\d+\.\d+ KB\)", # e.g. "(1.23 KB)"
                r"\(\d+\.\d+ KB of \d+\.\d+ KB\)", # e.g. "(1.23 KB of 4.56 KB)"
            ],
        ),
        (
            10 * 1024 * 1024,
            [
                r"\(\d+\.\d+ MB\)", # e.g. "(1.23 MB)"
                r"\(\d+\.\d+ KB of \d+\.\d+ MB\)", # e.g. "(1.23 KB of 4.56 MB)"
                r"\(\d+\.\d+ MB of \d+\.\d+ MB\)", # e.g. "(1.23 MB of 4.56 MB)"
            ],
        ),
    ],
)
@pytest.mark.respx
def test_upload_deployment_progress(
    logged_in_cli: None,
    tmp_path: Path,
    respx_mock: respx.MockRouter,
    size: int,
    expected_msgs: list[str],
) -> None:
    """Run a full ``deploy`` with a payload of ``size`` bytes and check that
    the upload progress messages logged via ``Progress.log`` match the
    expected human-readable size patterns (initial total, intermediate
    "X of Y", and final update).
    """
    app_data = _get_random_app()
    team_data = _get_random_team()
    app_id = app_data["id"]
    team_id = team_data["id"]
    deployment_data = _get_random_deployment(app_id=app_id)
    deployment_id = deployment_data["id"]

    # Pre-write the cloud config so `deploy` skips the interactive app setup.
    config_path = tmp_path / ".fastapicloud" / "cloud.json"
    config_path.parent.mkdir(parents=True, exist_ok=True)
    config_path.write_text(f'{{"app_id": "{app_id}", "team_id": "{team_id}"}}')

    # Payload of the requested size; this drives which size unit is reported.
    (tmp_path / "file.bin").write_bytes(random.randbytes(size))

    # Mock the whole deploy pipeline: app lookup, deployment creation,
    # presigned-upload request, S3 upload, completion, build logs, and status.
    respx_mock.get(f"/apps/{app_id}").mock(return_value=Response(200, json=app_data))
    respx_mock.post(f"/apps/{app_id}/deployments/").mock(
        return_value=Response(201, json=deployment_data)
    )
    respx_mock.post(f"/deployments/{deployment_id}/upload").mock(
        return_value=Response(
            200,
            json={"url": "http://test.com", "fields": {"key": "value"}},
        )
    )
    respx_mock.post("http://test.com", data={"key": "value"}).mock(
        return_value=Response(200)
    )
    respx_mock.post(f"/deployments/{deployment_id}/upload-complete").mock(
        return_value=Response(200)
    )
    respx_mock.get(f"/deployments/{deployment_id}/build-logs").mock(
        return_value=Response(
            200,
            content=build_logs_response(
                {"type": "message", "message": "Building...", "id": "1"},
                {"type": "complete"},
            ),
        )
    )
    respx_mock.get(f"/apps/{app_id}/deployments/{deployment_id}").mock(
        return_value=Response(200, json={**deployment_data, "status": "success"})
    )

    # Capture every Progress.log call made during the deploy run.
    with (
        changing_dir(tmp_path),
        patch.object(Progress, "log") as mock_progress,
    ):
        result = runner.invoke(app, ["deploy"])
        assert result.exit_code == 0

    # Keep only the string messages (some log calls may pass renderables).
    call_args = [
        c.args[0] for c in mock_progress.call_args_list if isinstance(c.args[0], str)
    ]

    # Each expected size pattern must appear in at least one logged message.
    for expected_msg in expected_msgs:
        pattern = re.compile(f"Uploading deployment {expected_msg}\\.\\.\\.")
        assert any(pattern.match(arg) for arg in call_args), (
            f"Expected message '{pattern.pattern}' not found in {call_args}"
        )


@pytest.mark.respx
def test_deploy_with_app_id_arg(
logged_in_cli: None, tmp_path: Path, respx_mock: respx.MockRouter
Expand Down
104 changes: 104 additions & 0 deletions tests/test_progress_file.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
import io
from datetime import datetime, timezone
from unittest.mock import Mock, call

import time_machine

from fastapi_cloud_cli.utils.progress_file import ProgressFile


def _make_file(
content: bytes = b"hello world", name: str = "test.tar.gz"
) -> io.BytesIO:
f = io.BytesIO(content)
f.name = name
return f


def test_read_with_size() -> None:
    """Reads of an explicit size pass straight through to the wrapped file."""
    wrapped = ProgressFile(_make_file(b"abcdef"), progress_callback=lambda _: None)

    assert wrapped.read(3) == b"abc"
    assert wrapped.read(3) == b"def"


def test_callback_not_called_within_interval() -> None:
    """A second read inside the update interval must not report progress."""
    callback = Mock()
    wrapped = ProgressFile(_make_file(b"abcdef"), progress_callback=callback)

    wrapped.read(3)  # first read always fires the callback
    wrapped.read(3)  # still within the interval: suppressed

    callback.assert_called_once_with(3)


def test_callback_called_after_interval_elapses() -> None:
    """Once the update interval has passed, the next read reports again."""
    callback = Mock()
    source = _make_file(b"abcdef")

    with time_machine.travel(
        datetime(2026, 1, 1, tzinfo=timezone.utc), tick=False
    ) as traveller:
        wrapped = ProgressFile(source, progress_callback=callback)

        wrapped.read(3)
        traveller.shift(0.6)  # jump past the 0.5s default interval
        wrapped.read(3)

    callback.assert_has_calls([call(3), call(6)])


def test_callback_tracks_cumulative_bytes() -> None:
    """Progress reports carry the cumulative byte position, not chunk sizes."""
    callback = Mock()
    source = _make_file(b"a" * 100)

    with time_machine.travel(
        datetime(2026, 1, 1, tzinfo=timezone.utc), tick=False
    ) as traveller:
        wrapped = ProgressFile(source, progress_callback=callback)

        wrapped.read(10)  # fires: first read always reports (10 bytes)
        traveller.shift(0.1)
        wrapped.read(10)  # suppressed: inside the interval
        traveller.shift(0.5)
        wrapped.read(10)  # fires: interval elapsed, 30 bytes read so far
        traveller.shift(0.6)
        wrapped.read(10)  # fires: interval elapsed, 40 bytes read so far

    callback.assert_has_calls([call(10), call(30), call(40)])


def test_callback_called_on_eof() -> None:
    """Hitting end-of-file forces a final report even within the interval."""
    callback = Mock()
    wrapped = ProgressFile(_make_file(b"abcd"), progress_callback=callback)

    wrapped.read(3)  # fires with position 3
    wrapped.read(3)  # short read -> EOF, fires with final position 4

    callback.assert_has_calls([call(3), call(4)])


def test_name_property() -> None:
    """Attribute access falls through to the wrapped file (e.g. ``name``)."""
    wrapped = ProgressFile(
        _make_file(name="test.tar.gz"), progress_callback=lambda _: None
    )

    assert wrapped.name == "test.tar.gz"


def test_callback_uses_current_file_position_after_seek() -> None:
    """After a seek (e.g. an upload retry) progress follows the real position."""
    callback = Mock()
    wrapped = ProgressFile(_make_file(b"abcde"), progress_callback=callback)

    wrapped.read(3)  # fires with position 3

    # Imitate retrying: rewind and read again.
    wrapped.seek(0)
    wrapped.read(3)  # suppressed: inside the interval

    wrapped.read(3)  # short read -> EOF, fires with final position 5

    callback.assert_has_calls([call(3), call(5)])
Loading