Merge remote-tracking branch 'origin/main' into pr-2703

This commit is contained in:
Xubin Ren 2026-04-02 04:28:23 +00:00
commit 4ce0a8a68e
12 changed files with 436 additions and 26 deletions

View File

@ -86,7 +86,15 @@ class MessageTool(Tool):
) -> str:
channel = channel or self._default_channel
chat_id = chat_id or self._default_chat_id
message_id = message_id or self._default_message_id
# Only inherit default message_id when targeting the same channel+chat.
# Cross-chat sends must not carry the original message_id, because
# some channels (e.g. Feishu) use it to determine the target
# conversation via their Reply API, which would route the message
# to the wrong chat entirely.
if channel == self._default_channel and chat_id == self._default_chat_id:
message_id = message_id or self._default_message_id
else:
message_id = None
if not channel or not chat_id:
return "Error: No target channel/chat specified"
@ -101,7 +109,7 @@ class MessageTool(Tool):
media=media or [],
metadata={
"message_id": message_id,
},
} if message_id else {},
)
try:

View File

@ -186,7 +186,9 @@ class ExecTool(Tool):
@staticmethod
def _extract_absolute_paths(command: str) -> list[str]:
win_paths = re.findall(r"[A-Za-z]:\\[^\s\"'|><;]+", command) # Windows: C:\...
# Windows: match drive-root paths like `C:\` as well as `C:\path\to\file`
# NOTE: `*` is required so `C:\` (nothing after the slash) is still extracted.
win_paths = re.findall(r"[A-Za-z]:\\[^\s\"'|><;]*", command)
posix_paths = re.findall(r"(?:^|[\s|>'\"])(/[^\s\"'>;|<]+)", command) # POSIX: /absolute only
home_paths = re.findall(r"(?:^|[\s|>'\"])(~[^\s\"'>;|<]*)", command) # POSIX/Windows home shortcut: ~
return win_paths + posix_paths + home_paths

View File

@ -415,6 +415,9 @@ def _make_provider(config: Config):
api_base=p.api_base,
default_model=model,
)
elif backend == "github_copilot":
from nanobot.providers.github_copilot_provider import GitHubCopilotProvider
provider = GitHubCopilotProvider(default_model=model)
elif backend == "anthropic":
from nanobot.providers.anthropic_provider import AnthropicProvider
provider = AnthropicProvider(
@ -1289,26 +1292,16 @@ def _login_openai_codex() -> None:
@_register_login("github_copilot")
def _login_github_copilot() -> None:
import asyncio
from openai import AsyncOpenAI
console.print("[cyan]Starting GitHub Copilot device flow...[/cyan]\n")
async def _trigger():
client = AsyncOpenAI(
api_key="dummy",
base_url="https://api.githubcopilot.com",
)
await client.chat.completions.create(
model="gpt-4o",
messages=[{"role": "user", "content": "hi"}],
max_tokens=1,
)
try:
asyncio.run(_trigger())
console.print("[green]✓ Authenticated with GitHub Copilot[/green]")
from nanobot.providers.github_copilot_provider import login_github_copilot
console.print("[cyan]Starting GitHub Copilot device flow...[/cyan]\n")
token = login_github_copilot(
print_fn=lambda s: console.print(s),
prompt_fn=lambda s: typer.prompt(s),
)
account = token.account_id or "GitHub"
console.print(f"[green]✓ Authenticated with GitHub Copilot[/green] [dim]{account}[/dim]")
except Exception as e:
console.print(f"[red]Authentication error: {e}[/red]")
raise typer.Exit(1)

View File

@ -135,6 +135,10 @@ def _make_provider(config: Any) -> Any:
from nanobot.providers.openai_codex_provider import OpenAICodexProvider
provider = OpenAICodexProvider(default_model=model)
elif backend == "github_copilot":
from nanobot.providers.github_copilot_provider import GitHubCopilotProvider
provider = GitHubCopilotProvider(default_model=model)
elif backend == "azure_openai":
from nanobot.providers.azure_openai_provider import AzureOpenAIProvider

View File

@ -13,6 +13,7 @@ __all__ = [
"AnthropicProvider",
"OpenAICompatProvider",
"OpenAICodexProvider",
"GitHubCopilotProvider",
"AzureOpenAIProvider",
]
@ -20,12 +21,14 @@ _LAZY_IMPORTS = {
"AnthropicProvider": ".anthropic_provider",
"OpenAICompatProvider": ".openai_compat_provider",
"OpenAICodexProvider": ".openai_codex_provider",
"GitHubCopilotProvider": ".github_copilot_provider",
"AzureOpenAIProvider": ".azure_openai_provider",
}
if TYPE_CHECKING:
from nanobot.providers.anthropic_provider import AnthropicProvider
from nanobot.providers.azure_openai_provider import AzureOpenAIProvider
from nanobot.providers.github_copilot_provider import GitHubCopilotProvider
from nanobot.providers.openai_compat_provider import OpenAICompatProvider
from nanobot.providers.openai_codex_provider import OpenAICodexProvider

View File

@ -0,0 +1,257 @@
"""GitHub Copilot OAuth-backed provider."""
from __future__ import annotations
import time
import webbrowser
from collections.abc import Callable
import httpx
from oauth_cli_kit.models import OAuthToken
from oauth_cli_kit.storage import FileTokenStorage
from nanobot.providers.openai_compat_provider import OpenAICompatProvider
DEFAULT_GITHUB_DEVICE_CODE_URL = "https://github.com/login/device/code"
DEFAULT_GITHUB_ACCESS_TOKEN_URL = "https://github.com/login/oauth/access_token"
DEFAULT_GITHUB_USER_URL = "https://api.github.com/user"
DEFAULT_COPILOT_TOKEN_URL = "https://api.github.com/copilot_internal/v2/token"
DEFAULT_COPILOT_BASE_URL = "https://api.githubcopilot.com"
GITHUB_COPILOT_CLIENT_ID = "Iv1.b507a08c87ecfe98"
GITHUB_COPILOT_SCOPE = "read:user"
TOKEN_FILENAME = "github-copilot.json"
TOKEN_APP_NAME = "nanobot"
USER_AGENT = "nanobot/0.1"
EDITOR_VERSION = "vscode/1.99.0"
EDITOR_PLUGIN_VERSION = "copilot-chat/0.26.0"
_EXPIRY_SKEW_SECONDS = 60
_LONG_LIVED_TOKEN_SECONDS = 315360000
def _storage() -> FileTokenStorage:
    """Return the file-backed storage used to persist the GitHub OAuth token.

    The token lives in ``github-copilot.json`` under the ``nanobot`` app
    directory (see TOKEN_FILENAME / TOKEN_APP_NAME). Codex-CLI token import
    is explicitly disabled: it does not apply to Copilot credentials.
    """
    return FileTokenStorage(
        token_filename=TOKEN_FILENAME,
        app_name=TOKEN_APP_NAME,
        import_codex_cli=False,
    )
def _copilot_headers(token: str) -> dict[str, str]:
    """Build the headers GitHub's Copilot token-exchange endpoint expects.

    Note the ``token`` (not ``Bearer``) authorization scheme, plus the
    editor identification headers Copilot requires.
    """
    base = {
        "Authorization": f"token {token}",
        "Accept": "application/json",
    }
    base.update(
        {
            "User-Agent": USER_AGENT,
            "Editor-Version": EDITOR_VERSION,
            "Editor-Plugin-Version": EDITOR_PLUGIN_VERSION,
        }
    )
    return base
def _load_github_token() -> OAuthToken | None:
    """Load the persisted GitHub OAuth token.

    Returns None when nothing is stored or the stored entry has no
    access token.
    """
    stored = _storage().load()
    return stored if stored and stored.access else None
def get_github_copilot_login_status() -> OAuthToken | None:
    """Return the persisted GitHub OAuth token if available.

    A ``None`` result means no usable token is stored, i.e. the user has
    not completed ``login_github_copilot`` (or the stored entry is blank).
    """
    return _load_github_token()
def login_github_copilot(
    print_fn: Callable[[str], None] | None = None,
    prompt_fn: Callable[[str], str] | None = None,
) -> OAuthToken:
    """Run GitHub device flow and persist the GitHub OAuth token used for Copilot.

    Prints the verification URL and user code via ``print_fn`` (defaults to
    built-in ``print``), polls GitHub until the user approves the device,
    then saves the resulting token via ``_storage()`` and returns it.

    Raises:
        RuntimeError: when the device code expires, access is denied, GitHub
            reports any other error, or the flow times out.
    """
    # Accepted only for signature parity with other login helpers;
    # the device flow needs no interactive prompt.
    del prompt_fn
    printer = print_fn or print
    timeout = httpx.Timeout(20.0, connect=20.0)
    with httpx.Client(timeout=timeout, follow_redirects=True, trust_env=True) as client:
        # Step 1: obtain a device_code / user_code pair from GitHub.
        response = client.post(
            DEFAULT_GITHUB_DEVICE_CODE_URL,
            headers={"Accept": "application/json", "User-Agent": USER_AGENT},
            data={"client_id": GITHUB_COPILOT_CLIENT_ID, "scope": GITHUB_COPILOT_SCOPE},
        )
        response.raise_for_status()
        payload = response.json()
        device_code = str(payload["device_code"])
        user_code = str(payload["user_code"])
        verify_url = str(payload.get("verification_uri") or payload.get("verification_uri_complete") or "")
        verify_complete = str(payload.get("verification_uri_complete") or verify_url)
        # Respect GitHub's suggested polling interval, but never poll faster
        # than once per second.
        interval = max(1, int(payload.get("interval") or 5))
        expires_in = int(payload.get("expires_in") or 900)
        printer(f"Open: {verify_url}")
        printer(f"Code: {user_code}")
        if verify_complete:
            try:
                # Best effort: open a browser for the user; ignore failures
                # (e.g. headless environments).
                webbrowser.open(verify_complete)
            except Exception:
                pass
        deadline = time.time() + expires_in
        current_interval = interval
        access_token = None
        token_expires_in = _LONG_LIVED_TOKEN_SECONDS
        # Step 2: poll the access-token endpoint until authorized, denied,
        # or the device code expires.
        while time.time() < deadline:
            poll = client.post(
                DEFAULT_GITHUB_ACCESS_TOKEN_URL,
                headers={"Accept": "application/json", "User-Agent": USER_AGENT},
                data={
                    "client_id": GITHUB_COPILOT_CLIENT_ID,
                    "device_code": device_code,
                    "grant_type": "urn:ietf:params:oauth:grant-type:device_code",
                },
            )
            poll.raise_for_status()
            poll_payload = poll.json()
            access_token = poll_payload.get("access_token")
            if access_token:
                token_expires_in = int(poll_payload.get("expires_in") or _LONG_LIVED_TOKEN_SECONDS)
                break
            error = poll_payload.get("error")
            if error == "authorization_pending":
                # User has not approved yet; wait and retry.
                time.sleep(current_interval)
                continue
            if error == "slow_down":
                # Back off by 5 seconds, as the device-flow spec requires.
                current_interval += 5
                time.sleep(current_interval)
                continue
            if error == "expired_token":
                raise RuntimeError("GitHub device code expired. Please run login again.")
            if error == "access_denied":
                raise RuntimeError("GitHub device flow was denied.")
            if error:
                desc = poll_payload.get("error_description") or error
                raise RuntimeError(str(desc))
            time.sleep(current_interval)
        else:
            # while-else: the deadline passed without ever hitting `break`.
            raise RuntimeError("GitHub device flow timed out.")
        # Step 3: look up the user's login for account bookkeeping/display.
        user = client.get(
            DEFAULT_GITHUB_USER_URL,
            headers={
                "Authorization": f"Bearer {access_token}",
                "Accept": "application/vnd.github+json",
                "User-Agent": USER_AGENT,
            },
        )
        user.raise_for_status()
        user_payload = user.json()
        account_id = user_payload.get("login") or str(user_payload.get("id") or "") or None
    # OAuthToken stores its expiry in epoch milliseconds.
    expires_ms = int((time.time() + token_expires_in) * 1000)
    token = OAuthToken(
        access=str(access_token),
        refresh="",
        expires=expires_ms,
        account_id=str(account_id) if account_id else None,
    )
    _storage().save(token)
    return token
class GitHubCopilotProvider(OpenAICompatProvider):
    """Provider that exchanges a stored GitHub OAuth token for Copilot access tokens.

    The long-lived GitHub OAuth token persisted by ``login_github_copilot``
    is traded for a short-lived Copilot API token via the internal token
    endpoint. The Copilot token is cached on the instance and re-injected
    into the underlying OpenAI-compatible client before every chat call.
    """

    def __init__(self, default_model: str = "github-copilot/gpt-4.1"):
        # NOTE(review): local import — presumably avoids a circular import
        # with the provider registry; confirm against module layout.
        from nanobot.providers.registry import find_by_name
        # Cached short-lived Copilot token and its absolute expiry
        # (epoch seconds); 0.0 forces a fetch on first use.
        self._copilot_access_token: str | None = None
        self._copilot_expires_at: float = 0.0
        super().__init__(
            # Placeholder key: the real token is injected per call by
            # _refresh_client_api_key().
            api_key="no-key",
            api_base=DEFAULT_COPILOT_BASE_URL,
            default_model=default_model,
            extra_headers={
                "Editor-Version": EDITOR_VERSION,
                "Editor-Plugin-Version": EDITOR_PLUGIN_VERSION,
                "User-Agent": USER_AGENT,
            },
            spec=find_by_name("github_copilot"),
        )

    async def _get_copilot_access_token(self) -> str:
        """Return a valid Copilot API token, exchanging the GitHub token when needed.

        Raises:
            RuntimeError: when no GitHub token is stored, or the exchange
                response contains no token.
        """
        now = time.time()
        # Reuse the cached token while it is at least _EXPIRY_SKEW_SECONDS
        # away from expiry.
        if self._copilot_access_token and now < self._copilot_expires_at - _EXPIRY_SKEW_SECONDS:
            return self._copilot_access_token
        github_token = _load_github_token()
        if not github_token or not github_token.access:
            raise RuntimeError("GitHub Copilot is not logged in. Run: nanobot provider login github-copilot")
        timeout = httpx.Timeout(20.0, connect=20.0)
        async with httpx.AsyncClient(timeout=timeout, follow_redirects=True, trust_env=True) as client:
            response = await client.get(
                DEFAULT_COPILOT_TOKEN_URL,
                headers=_copilot_headers(github_token.access),
            )
            response.raise_for_status()
            payload = response.json()
        token = payload.get("token")
        if not token:
            raise RuntimeError("GitHub Copilot token exchange returned no token.")
        expires_at = payload.get("expires_at")
        if isinstance(expires_at, (int, float)):
            # Prefer the absolute expiry when the endpoint provides one.
            self._copilot_expires_at = float(expires_at)
        else:
            # Otherwise fall back to a relative refresh window (default 1500s).
            refresh_in = payload.get("refresh_in") or 1500
            self._copilot_expires_at = time.time() + int(refresh_in)
        self._copilot_access_token = str(token)
        return self._copilot_access_token

    async def _refresh_client_api_key(self) -> str:
        """Fetch a valid Copilot token and sync it onto self and the client."""
        token = await self._get_copilot_access_token()
        self.api_key = token
        # The wrapped client holds its own copy of the key; update it in place.
        self._client.api_key = token
        return token

    async def chat(
        self,
        messages: list[dict[str, object]],
        tools: list[dict[str, object]] | None = None,
        model: str | None = None,
        max_tokens: int = 4096,
        temperature: float = 0.7,
        reasoning_effort: str | None = None,
        tool_choice: str | dict[str, object] | None = None,
    ):
        """Refresh the Copilot token, then delegate to the OpenAI-compatible chat."""
        await self._refresh_client_api_key()
        return await super().chat(
            messages=messages,
            tools=tools,
            model=model,
            max_tokens=max_tokens,
            temperature=temperature,
            reasoning_effort=reasoning_effort,
            tool_choice=tool_choice,
        )

    async def chat_stream(
        self,
        messages: list[dict[str, object]],
        tools: list[dict[str, object]] | None = None,
        model: str | None = None,
        max_tokens: int = 4096,
        temperature: float = 0.7,
        reasoning_effort: str | None = None,
        tool_choice: str | dict[str, object] | None = None,
        on_content_delta: Callable[[str], None] | None = None,
    ):
        """Refresh the Copilot token, then delegate to the streaming chat."""
        await self._refresh_client_api_key()
        return await super().chat_stream(
            messages=messages,
            tools=tools,
            model=model,
            max_tokens=max_tokens,
            temperature=temperature,
            reasoning_effort=reasoning_effort,
            tool_choice=tool_choice,
            on_content_delta=on_content_delta,
        )

View File

@ -235,7 +235,9 @@ class OpenAICompatProvider(LLMProvider):
spec = self._spec
if spec and spec.supports_prompt_caching:
messages, tools = self._apply_cache_control(messages, tools)
model_name = model or self.default_model
if any(model_name.lower().startswith(k) for k in ("anthropic/", "claude")):
messages, tools = self._apply_cache_control(messages, tools)
if spec and spec.strip_model_prefix:
model_name = model_name.split("/")[-1]
@ -627,4 +629,4 @@ class OpenAICompatProvider(LLMProvider):
return self._handle_error(e)
def get_default_model(self) -> str:
return self.default_model
return self.default_model

View File

@ -34,7 +34,7 @@ class ProviderSpec:
display_name: str = "" # shown in `nanobot status`
# which provider implementation to use
# "openai_compat" | "anthropic" | "azure_openai" | "openai_codex"
# "openai_compat" | "anthropic" | "azure_openai" | "openai_codex" | "github_copilot"
backend: str = "openai_compat"
# extra env vars, e.g. (("ZHIPUAI_API_KEY", "{api_key}"),)
@ -218,8 +218,9 @@ PROVIDERS: tuple[ProviderSpec, ...] = (
keywords=("github_copilot", "copilot"),
env_key="",
display_name="Github Copilot",
backend="openai_compat",
backend="github_copilot",
default_api_base="https://api.githubcopilot.com",
strip_model_prefix=True,
is_oauth=True,
),
# DeepSeek: OpenAI-compatible at api.deepseek.com

View File

@ -317,6 +317,75 @@ def test_openai_compat_provider_passes_model_through():
assert provider.get_default_model() == "github-copilot/gpt-5.3-codex"
def test_make_provider_uses_github_copilot_backend():
    """_make_provider wires the GitHubCopilotProvider for the github-copilot backend."""
    from nanobot.cli.commands import _make_provider
    from nanobot.config.schema import Config

    raw_config = {
        "agents": {
            "defaults": {
                "provider": "github-copilot",
                "model": "github-copilot/gpt-4.1",
            }
        }
    }
    config = Config.model_validate(raw_config)
    # Patch out the OpenAI client so no real network client is constructed.
    with patch("nanobot.providers.openai_compat_provider.AsyncOpenAI"):
        provider = _make_provider(config)
    assert type(provider).__name__ == "GitHubCopilotProvider"
def test_github_copilot_provider_strips_prefixed_model_name():
    """Model ids like `github-copilot/gpt-5.1` reach the API without the prefix."""
    from nanobot.providers.github_copilot_provider import GitHubCopilotProvider

    prefixed_model = "github-copilot/gpt-5.1"
    with patch("nanobot.providers.openai_compat_provider.AsyncOpenAI"):
        provider = GitHubCopilotProvider(default_model=prefixed_model)
    request_kwargs = provider._build_kwargs(
        messages=[{"role": "user", "content": "hi"}],
        tools=None,
        model=prefixed_model,
        max_tokens=16,
        temperature=0.1,
        reasoning_effort=None,
        tool_choice=None,
    )
    assert request_kwargs["model"] == "gpt-5.1"
@pytest.mark.asyncio
async def test_github_copilot_provider_refreshes_client_api_key_before_chat():
    """chat() must inject a fresh Copilot token into the client before calling the API."""
    from nanobot.providers.github_copilot_provider import GitHubCopilotProvider

    mock_client = MagicMock()
    mock_client.api_key = "no-key"
    # Minimal OpenAI-style chat completion payload for the mocked client.
    mock_client.chat.completions.create = AsyncMock(return_value={
        "choices": [{"message": {"content": "ok"}, "finish_reason": "stop"}],
        "usage": {"prompt_tokens": 1, "completion_tokens": 1, "total_tokens": 2},
    })
    with patch("nanobot.providers.openai_compat_provider.AsyncOpenAI", return_value=mock_client):
        provider = GitHubCopilotProvider(default_model="github-copilot/gpt-5.1")
        # Stub the token exchange so the test touches no network.
        provider._get_copilot_access_token = AsyncMock(return_value="copilot-access-token")
        response = await provider.chat(
            messages=[{"role": "user", "content": "hi"}],
            model="github-copilot/gpt-5.1",
            max_tokens=16,
            temperature=0.1,
        )
    assert response.content == "ok"
    # The client's key must have been swapped from the "no-key" placeholder.
    assert provider._client.api_key == "copilot-access-token"
    provider._get_copilot_access_token.assert_awaited_once()
    mock_client.chat.completions.create.assert_awaited_once()
def test_openai_codex_strip_prefix_supports_hyphen_and_underscore():
    """Both `openai-codex/` and `openai_codex/` prefixes resolve to the bare model id."""
    for prefixed in ("openai-codex/gpt-5.1-codex", "openai_codex/gpt-5.1-codex"):
        assert _strip_model_prefix(prefixed) == "gpt-5.1-codex"

View File

@ -11,6 +11,7 @@ def test_importing_providers_package_is_lazy(monkeypatch) -> None:
monkeypatch.delitem(sys.modules, "nanobot.providers.anthropic_provider", raising=False)
monkeypatch.delitem(sys.modules, "nanobot.providers.openai_compat_provider", raising=False)
monkeypatch.delitem(sys.modules, "nanobot.providers.openai_codex_provider", raising=False)
monkeypatch.delitem(sys.modules, "nanobot.providers.github_copilot_provider", raising=False)
monkeypatch.delitem(sys.modules, "nanobot.providers.azure_openai_provider", raising=False)
providers = importlib.import_module("nanobot.providers")
@ -18,6 +19,7 @@ def test_importing_providers_package_is_lazy(monkeypatch) -> None:
assert "nanobot.providers.anthropic_provider" not in sys.modules
assert "nanobot.providers.openai_compat_provider" not in sys.modules
assert "nanobot.providers.openai_codex_provider" not in sys.modules
assert "nanobot.providers.github_copilot_provider" not in sys.modules
assert "nanobot.providers.azure_openai_provider" not in sys.modules
assert providers.__all__ == [
"LLMProvider",
@ -25,6 +27,7 @@ def test_importing_providers_package_is_lazy(monkeypatch) -> None:
"AnthropicProvider",
"OpenAICompatProvider",
"OpenAICodexProvider",
"GitHubCopilotProvider",
"AzureOpenAIProvider",
]

View File

@ -125,6 +125,27 @@ def test_workspace_override(tmp_path):
assert bot._loop.workspace == custom_ws
def test_sdk_make_provider_uses_github_copilot_backend():
from nanobot.config.schema import Config
from nanobot.nanobot import _make_provider
config = Config.model_validate(
{
"agents": {
"defaults": {
"provider": "github-copilot",
"model": "github-copilot/gpt-4.1",
}
}
}
)
with patch("nanobot.providers.openai_compat_provider.AsyncOpenAI"):
provider = _make_provider(config)
assert provider.__class__.__name__ == "GitHubCopilotProvider"
@pytest.mark.asyncio
async def test_run_custom_session_key(tmp_path):
from nanobot.bus.events import OutboundMessage

View File

@ -95,6 +95,14 @@ def test_exec_extract_absolute_paths_keeps_full_windows_path() -> None:
assert paths == [r"C:\user\workspace\txt"]
def test_exec_extract_absolute_paths_captures_windows_drive_root_path() -> None:
    """Windows drive root paths like `E:\\` must be extracted for workspace guarding."""
    # Note: raw strings cannot end with a single backslash, hence the escapes.
    extracted = ExecTool._extract_absolute_paths("dir E:\\")
    assert extracted == ["E:\\"]
def test_exec_extract_absolute_paths_ignores_relative_posix_segments() -> None:
cmd = ".venv/bin/python script.py"
paths = ExecTool._extract_absolute_paths(cmd)
@ -134,6 +142,45 @@ def test_exec_guard_blocks_quoted_home_path_outside_workspace(tmp_path) -> None:
assert error == "Error: Command blocked by safety guard (path outside working dir)"
def test_exec_guard_blocks_windows_drive_root_outside_workspace(monkeypatch) -> None:
    """The guard must block a bare drive root (`E:\\`) outside the workspace.

    A minimal fake Path with Windows drive-letter semantics is substituted
    into the shell module — NOTE(review): presumably because the host's real
    ``pathlib.Path`` would not parse `E:\\`-style paths on POSIX; confirm.
    """
    import nanobot.agent.tools.shell as shell_mod

    class FakeWindowsPath:
        def __init__(self, raw: str) -> None:
            # Normalize trailing backslashes to at most one, preserving
            # whether the original path ended with a separator.
            self.raw = raw.rstrip("\\") + ("\\" if raw.endswith("\\") else "")

        def resolve(self) -> "FakeWindowsPath":
            # No symlinks/.. handling needed for this test.
            return self

        def expanduser(self) -> "FakeWindowsPath":
            return self

        def is_absolute(self) -> bool:
            # Absolute == drive-letter root, e.g. `C:\...`.
            return len(self.raw) >= 3 and self.raw[1:3] == ":\\"

        @property
        def parents(self) -> list["FakeWindowsPath"]:
            # Ancestor chain from immediate parent up to the drive root.
            if not self.is_absolute():
                return []
            trimmed = self.raw.rstrip("\\")
            if len(trimmed) <= 2:
                # Already at the drive root (`E:`): no parents.
                return []
            idx = trimmed.rfind("\\")
            if idx <= 2:
                # Direct child of the drive root.
                return [FakeWindowsPath(trimmed[:2] + "\\")]
            parent = FakeWindowsPath(trimmed[:idx])
            return [parent, *parent.parents]

        def __eq__(self, other: object) -> bool:
            # Windows paths compare case-insensitively.
            return isinstance(other, FakeWindowsPath) and self.raw.lower() == other.raw.lower()

    monkeypatch.setattr(shell_mod, "Path", FakeWindowsPath)
    tool = ExecTool(restrict_to_workspace=True)
    # `E:\` is not inside `E:\workspace`, so the command must be rejected.
    error = tool._guard_command("dir E:\\", "E:\\workspace")
    assert error == "Error: Command blocked by safety guard (path outside working dir)"
# --- cast_params tests ---