mirror of
https://gitea.toothfairyai.com/ToothFairyAI/tf_code.git
synced 2026-04-09 02:09:12 +00:00
feat: tfcode
This commit is contained in:
2
bun.lock
2
bun.lock
@@ -381,7 +381,7 @@
|
||||
},
|
||||
"packages/tfcode": {
|
||||
"name": "@toothfairyai/tfcode",
|
||||
"version": "1.0.23",
|
||||
"version": "1.0.26",
|
||||
"bin": {
|
||||
"tfcode": "./bin/tfcode",
|
||||
},
|
||||
|
||||
@@ -68,9 +68,14 @@ function runPythonSync(method, config = null) {
|
||||
const apiKey = config?.api_key || process.env.TF_API_KEY || ""
|
||||
const region = config?.region || process.env.TF_REGION || "au"
|
||||
|
||||
// Add embedded python path to sys.path
|
||||
const embeddedPythonPath = join(__dirname, "..", "python")
|
||||
|
||||
const pythonCode = `
|
||||
import json, sys, os
|
||||
try:
|
||||
# Add embedded tf_sync module path
|
||||
sys.path.insert(0, "${embeddedPythonPath}")
|
||||
os.environ["TF_WORKSPACE_ID"] = "${wsId}"
|
||||
os.environ["TF_API_KEY"] = "${apiKey}"
|
||||
os.environ["TF_REGION"] = "${region}"
|
||||
|
||||
@@ -1,9 +1,15 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"version": "1.0.23",
|
||||
"version": "1.0.26",
|
||||
"name": "@toothfairyai/tfcode",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"files": [
|
||||
"bin",
|
||||
"python",
|
||||
"postinstall.mjs",
|
||||
"LICENSE"
|
||||
],
|
||||
"scripts": {
|
||||
"prepare": "effect-language-service patch || true",
|
||||
"postinstall": "node scripts/postinstall.cjs",
|
||||
|
||||
21
packages/tfcode/python/tf_sync/__init__.py
Normal file
21
packages/tfcode/python/tf_sync/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""
|
||||
tf-sync: ToothFairyAI workspace sync layer for tfcode
|
||||
"""
|
||||
|
||||
from tf_sync.agents import sync_agents
|
||||
from tf_sync.mcp import sync_mcp_servers
|
||||
from tf_sync.tools import sync_tools, ToolType
|
||||
from tf_sync.config import TFConfig, load_config, validate_credentials, get_region_urls
|
||||
|
||||
__all__ = [
|
||||
"sync_agents",
|
||||
"sync_mcp_servers",
|
||||
"sync_tools",
|
||||
"ToolType",
|
||||
"TFConfig",
|
||||
"load_config",
|
||||
"validate_credentials",
|
||||
"get_region_urls",
|
||||
]
|
||||
|
||||
__version__ = "0.1.0"
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
packages/tfcode/python/tf_sync/__pycache__/mcp.cpython-313.pyc
Normal file
BIN
packages/tfcode/python/tf_sync/__pycache__/mcp.cpython-313.pyc
Normal file
Binary file not shown.
BIN
packages/tfcode/python/tf_sync/__pycache__/tools.cpython-313.pyc
Normal file
BIN
packages/tfcode/python/tf_sync/__pycache__/tools.cpython-313.pyc
Normal file
Binary file not shown.
39
packages/tfcode/python/tf_sync/agents.py
Normal file
39
packages/tfcode/python/tf_sync/agents.py
Normal file
@@ -0,0 +1,39 @@
|
||||
"""
|
||||
Agent sync module for tfcode.
|
||||
|
||||
NOTE: This module is reserved for future implementation.
|
||||
Currently, tfcode only syncs tools (MCP, Skills, Database, Functions).
|
||||
Agent sync will be added in a later phase.
|
||||
"""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from tf_sync.config import TFConfig
|
||||
|
||||
|
||||
class AgentSyncResult(BaseModel):
    """Result of agent sync operation."""

    # True when the sync completed; currently always False (not implemented).
    success: bool
    # Raw agent payloads; empty until agent sync is implemented.
    agents: list[dict[str, Any]] = []
    # Human-readable failure reason when success is False.
    error: str | None = None
|
||||
|
||||
|
||||
def sync_agents(config: TFConfig) -> AgentSyncResult:
    """
    Sync agents from the ToothFairyAI workspace.

    Reserved for a future phase; at present this always reports failure
    and callers should rely on the tools sync instead.

    Args:
        config: TFConfig instance

    Returns:
        AgentSyncResult flagged as not implemented
    """
    # Placeholder until agent sync lands in a later phase.
    not_implemented = "Agent sync not yet implemented. Use tools sync for now."
    return AgentSyncResult(success=False, error=not_implemented)
|
||||
221
packages/tfcode/python/tf_sync/config.py
Normal file
221
packages/tfcode/python/tf_sync/config.py
Normal file
@@ -0,0 +1,221 @@
|
||||
"""
|
||||
Configuration management for tfcode ToothFairyAI integration.
|
||||
Uses the official ToothFairyAI Python SDK for multi-region support.
|
||||
"""
|
||||
|
||||
import os
|
||||
from enum import Enum
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel, Field, SecretStr
|
||||
from toothfairyai import ToothFairyClient
|
||||
from toothfairyai.errors import ToothFairyError
|
||||
|
||||
|
||||
class Region(str, Enum):
    # Deployment regions; values match the TF_REGION environment variable
    # and key into REGION_URLS. Member order is part of the public API.
    DEV = "dev"  # toothfairylab.link development environment
    AU = "au"    # default region (see TFConfig.region and get_region_urls fallback)
    EU = "eu"
    US = "us"
|
||||
|
||||
|
||||
class ToolType(str, Enum):
    # Kinds of workspace tools tfcode can sync; .value strings appear as
    # keys in ToolSyncResult.by_type.
    MCP_SERVER = "mcp_server"            # reserved: MCP discovery not exposed via SDK yet
    AGENT_SKILL = "agent_skill"          # agent functions flagged is_agent_skill=True
    CODER_AGENT = "coder_agent"          # workspace agents with mode='coder'
    DATABASE_SCRIPT = "database_script"  # reserved for database script tools
    API_FUNCTION = "api_function"        # agent functions (typically with a request_type)
    PROMPT = "prompt"                    # prompt templates
|
||||
|
||||
|
||||
class FunctionRequestType(str, Enum):
    # HTTP/GraphQL request styles for API Functions; values mirror the
    # SDK's raw request_type strings (see parse_function in tools.py).
    GET = "get"
    POST = "post"
    PUT = "put"
    DELETE = "delete"
    PATCH = "patch"
    CUSTOM = "custom"
    GRAPHQL_QUERY = "graphql_query"
    GRAPHQL_MUTATION = "graphql_mutation"
|
||||
|
||||
|
||||
# Region-specific URL configurations
# Maps each Region to its service endpoints: REST API, AI inference,
# AI streaming, MCP SSE endpoint, and MCP proxy. DEV uses the
# toothfairylab.link domain; production regions use toothfairyai.com
# (AU is the bare domain, EU/US are subdomains).
REGION_URLS = {
    Region.DEV: {
        "base_url": "https://api.toothfairylab.link",
        "ai_url": "https://ai.toothfairylab.link",
        "ai_stream_url": "https://ais.toothfairylab.link",
        "mcp_url": "https://mcp.toothfairylab.link/sse",
        "mcp_proxy_url": "https://mcp-proxy.toothfairylab.link",
    },
    Region.AU: {
        "base_url": "https://api.toothfairyai.com",
        "ai_url": "https://ai.toothfairyai.com",
        "ai_stream_url": "https://ais.toothfairyai.com",
        "mcp_url": "https://mcp.toothfairyai.com/sse",
        "mcp_proxy_url": "https://mcp-proxy.toothfairyai.com",
    },
    Region.EU: {
        "base_url": "https://api.eu.toothfairyai.com",
        "ai_url": "https://ai.eu.toothfairyai.com",
        "ai_stream_url": "https://ais.eu.toothfairyai.com",
        "mcp_url": "https://mcp.eu.toothfairyai.com/sse",
        "mcp_proxy_url": "https://mcp-proxy.eu.toothfairyai.com",
    },
    Region.US: {
        "base_url": "https://api.us.toothfairyai.com",
        "ai_url": "https://ai.us.toothfairyai.com",
        "ai_stream_url": "https://ais.us.toothfairyai.com",
        "mcp_url": "https://mcp.us.toothfairyai.com/sse",
        "mcp_proxy_url": "https://mcp-proxy.us.toothfairyai.com",
    },
}
|
||||
|
||||
|
||||
def get_region_urls(region: Region) -> dict[str, str]:
    """Return the endpoint URL map for *region*, falling back to AU."""
    try:
        return REGION_URLS[region]
    except KeyError:
        # Unknown region: default to the AU production endpoints.
        return REGION_URLS[Region.AU]
|
||||
|
||||
|
||||
class TFConfig(BaseModel):
    """ToothFairyAI workspace configuration."""

    # Workspace UUID (TF_WORKSPACE_ID).
    workspace_id: str
    # API key held as a SecretStr so it is masked in repr/logs.
    api_key: SecretStr
    region: Region = Region.AU
    enabled: bool = True

    # Minimum one minute between syncs.
    sync_interval: int = Field(default=3600, ge=60)
    # MCP proxy timeout in milliseconds, at least one second.
    mcp_proxy_timeout: int = Field(default=30000, ge=1000)

    # Cached SDK client, created lazily by get_client().
    # NOTE(review): relies on pydantic treating underscore-prefixed annotated
    # attributes as private attrs — confirm against the pydantic version in use.
    _client: Optional[ToothFairyClient] = None

    def get_client(self) -> ToothFairyClient:
        """
        Get or create a ToothFairyClient instance configured for this region.

        Returns:
            ToothFairyClient configured with region-specific URLs
        """
        # Lazy, cached construction: the client is built once per config.
        if self._client is None:
            urls = get_region_urls(self.region)
            self._client = ToothFairyClient(
                api_key=self.api_key.get_secret_value(),
                workspace_id=self.workspace_id,
                base_url=urls["base_url"],
                ai_url=urls["ai_url"],
                ai_stream_url=urls["ai_stream_url"],
            )
        return self._client

    @property
    def mcp_sse_url(self) -> str:
        """Get the MCP SSE endpoint URL for this region."""
        return get_region_urls(self.region)["mcp_url"]

    @property
    def mcp_proxy_url(self) -> str:
        """Get the MCP proxy URL for this region."""
        return get_region_urls(self.region)["mcp_proxy_url"]
|
||||
|
||||
|
||||
class CredentialValidationResult(BaseModel):
    """Result of credential validation."""

    # True when the connection test succeeded.
    success: bool
    # Echo of the validated workspace id (only set on success).
    workspace_id: Optional[str] = None
    # Display name; currently always "Connected" on success.
    workspace_name: Optional[str] = None
    # Human-readable failure reason when success is False.
    error: Optional[str] = None
|
||||
|
||||
|
||||
def load_config(
    workspace_id: Optional[str] = None,
    api_key: Optional[str] = None,
    region: Optional[Region] = None,
) -> TFConfig:
    """
    Load ToothFairyAI configuration from environment or parameters.

    Args:
        workspace_id: Workspace UUID (defaults to TF_WORKSPACE_ID env var)
        api_key: API key (defaults to TF_API_KEY env var)
        region: Region (defaults to TF_REGION env var or 'au')

    Returns:
        TFConfig instance

    Raises:
        ValueError: If required configuration is missing or TF_REGION is invalid
    """
    ws_id = workspace_id or os.environ.get("TF_WORKSPACE_ID")
    key = api_key or os.environ.get("TF_API_KEY")

    if not ws_id:
        raise ValueError("TF_WORKSPACE_ID not set. Set environment variable or pass workspace_id.")
    if not key:
        raise ValueError("TF_API_KEY not set. Set environment variable or pass api_key.")

    # Parse region from env only when not explicitly provided. Normalize
    # case/whitespace so TF_REGION=" AU " still resolves, and raise a
    # clear error listing the valid values instead of the bare enum error.
    if region is None:
        region_str = os.environ.get("TF_REGION", "au").strip().lower()
        try:
            region = Region(region_str)
        except ValueError:
            valid = ", ".join(r.value for r in Region)
            raise ValueError(
                f"Invalid TF_REGION '{region_str}'. Valid regions: {valid}"
            ) from None

    return TFConfig(
        workspace_id=ws_id,
        api_key=SecretStr(key),
        region=region,
    )
|
||||
|
||||
|
||||
def validate_credentials(config: TFConfig) -> CredentialValidationResult:
    """
    Validate ToothFairyAI credentials using the SDK.

    Args:
        config: TFConfig instance

    Returns:
        CredentialValidationResult indicating success or failure
    """
    # Ordered marker table; the first pair matching the SDK error text wins
    # (same precedence as the original 401 -> 403 -> 404 chain).
    error_hints = (
        (("401", "Unauthorized"), "Invalid API key. Check TF_API_KEY environment variable."),
        (("403", "Forbidden"), "API access not allowed. Business or Enterprise subscription required."),
        (("404", "Not Found"), "Workspace not found. Check TF_WORKSPACE_ID environment variable."),
    )

    try:
        client = config.get_client()

        # Lightweight round-trip to verify key, workspace, and region.
        if not client.test_connection():
            return CredentialValidationResult(
                success=False,
                error="Connection test failed. Check credentials and region.",
            )
        return CredentialValidationResult(
            success=True,
            workspace_id=config.workspace_id,
            workspace_name="Connected",
        )

    except ToothFairyError as e:
        error_msg = str(e)
        for markers, hint in error_hints:
            if any(marker in error_msg for marker in markers):
                return CredentialValidationResult(success=False, error=hint)
        return CredentialValidationResult(success=False, error=f"API error: {error_msg}")

    except Exception as e:
        return CredentialValidationResult(success=False, error=f"Unexpected error: {str(e)}")
|
||||
41
packages/tfcode/python/tf_sync/mcp.py
Normal file
41
packages/tfcode/python/tf_sync/mcp.py
Normal file
@@ -0,0 +1,41 @@
|
||||
"""
|
||||
MCP server sync module for tfcode.
|
||||
|
||||
NOTE: MCP servers are not currently exposed via the ToothFairyAI SDK.
|
||||
This module is reserved for future implementation when MCP server
|
||||
discovery is added to the SDK.
|
||||
|
||||
For now, MCP servers should be configured manually via tfcode.json.
|
||||
"""
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from tf_sync.config import TFConfig
|
||||
from tf_sync.tools import SyncedTool, ToolType
|
||||
|
||||
|
||||
class MCPServerSyncResult(BaseModel):
    """Result of MCP server sync operation."""

    # True when the sync completed; currently always False (SDK has no MCP discovery).
    success: bool
    # Discovered MCP servers; empty until the SDK exposes them.
    servers: list[SyncedTool] = []
    # Human-readable failure reason when success is False.
    error: str | None = None
|
||||
|
||||
|
||||
def sync_mcp_servers(config: TFConfig) -> MCPServerSyncResult:
    """
    Sync MCP servers from the ToothFairyAI workspace.

    Currently a stub: the SDK exposes no MCP server discovery, so this
    always reports failure and points the user at manual configuration
    via tfcode.json.

    Args:
        config: TFConfig instance

    Returns:
        MCPServerSyncResult with error message
    """
    # Stub until MCP discovery lands in the SDK.
    message = "MCP server sync not available via SDK. Configure MCP servers in tfcode.json."
    return MCPServerSyncResult(success=False, error=message)
|
||||
282
packages/tfcode/python/tf_sync/tools.py
Normal file
282
packages/tfcode/python/tf_sync/tools.py
Normal file
@@ -0,0 +1,282 @@
|
||||
"""
|
||||
Tool sync module for tfcode.
|
||||
Syncs tools from ToothFairyAI workspace using the official SDK.
|
||||
|
||||
SDK Structure:
|
||||
- agent_functions: API Functions (with request_type)
|
||||
- connections: Provider connections (openai, anthropic, etc.)
|
||||
- agents: TF workspace agents
|
||||
- prompts: Prompt templates (with available_to_agents mapping)
|
||||
"""
|
||||
|
||||
from typing import Any, Optional, List
|
||||
|
||||
from pydantic import BaseModel
|
||||
from toothfairyai.types import AgentFunction
|
||||
|
||||
from tf_sync.config import TFConfig, ToolType, FunctionRequestType
|
||||
|
||||
|
||||
class SyncedTool(BaseModel):
    """A tool synced from ToothFairyAI workspace."""

    id: str
    name: str
    description: Optional[str] = None
    # Classification; drives how tfcode invokes the tool (see classify_tool).
    tool_type: ToolType

    # Origin flags mirroring the SDK object's booleans.
    is_mcp_server: bool = False
    is_agent_skill: bool = False
    is_database_script: bool = False

    # API Function specifics: HTTP/GraphQL verb and target endpoint.
    request_type: Optional[FunctionRequestType] = None
    url: Optional[str] = None
    # Sub-tool names; presumably populated for MCP servers — TODO confirm.
    tools: list[str] = []

    # Raw SDK auth setting (e.g. "api_key"); see parse_function.
    authorisation_type: Optional[str] = None

    # How tfcode authenticates calls: "tf_proxy" | "user_provided" |
    # "tf_skill" | "tf_agent" (set by parse_function / parse_agent).
    auth_via: str = "tf_proxy"

    # Coder agent specific fields for prompting/model configuration
    interpolation_string: Optional[str] = None
    goals: Optional[str] = None
    temperature: Optional[float] = None
    max_tokens: Optional[int] = None
    llm_base_model: Optional[str] = None
    llm_provider: Optional[str] = None
|
||||
|
||||
|
||||
class SyncedPrompt(BaseModel):
    """A prompt template synced from ToothFairyAI workspace."""

    id: str
    label: str
    # The template text itself (with interpolation placeholders).
    interpolation_string: str
    prompt_type: Optional[str] = None
    # Agent ids this prompt is mapped to; None when unrestricted — TODO confirm.
    available_to_agents: Optional[List[str]] = None
    description: Optional[str] = None
|
||||
|
||||
|
||||
class ToolSyncResult(BaseModel):
    """Result of tool sync operation."""

    success: bool
    tools: list[SyncedTool] = []
    prompts: list[SyncedPrompt] = []
    # Count of tools per ToolType.value (plus 'prompt' for prompt count).
    by_type: dict[str, int] = {}
    error: Optional[str] = None
|
||||
|
||||
|
||||
def classify_tool(func: AgentFunction) -> ToolType:
    """
    Classify a tool based on its properties.

    Types:
    - AGENT_SKILL: is_agent_skill=True
    - API_FUNCTION: everything else from agent_functions (with or
      without a request_type)

    Args:
        func: AgentFunction from TF SDK

    Returns:
        ToolType enum value
    """
    # getattr guards against SDK versions where the flag is absent.
    if getattr(func, 'is_agent_skill', None) is True:
        return ToolType.AGENT_SKILL

    # The original branched on func.request_type but returned API_FUNCTION
    # on both paths; the redundant branch is collapsed here.
    return ToolType.API_FUNCTION
|
||||
|
||||
|
||||
def parse_function(func: AgentFunction) -> SyncedTool:
    """
    Convert an SDK ``AgentFunction`` into a ``SyncedTool``.

    Args:
        func: AgentFunction from TF SDK

    Returns:
        SyncedTool instance
    """
    kind = classify_tool(func)
    skill = kind == ToolType.AGENT_SKILL

    # Coerce the raw request_type string into the enum, tolerating
    # unknown values from newer SDK versions.
    req_type = None
    raw_request = func.request_type
    if raw_request:
        try:
            req_type = FunctionRequestType(raw_request)
        except ValueError:
            req_type = None

    # Skills run through TF's skill runner; API functions either carry
    # user-provided api_key auth or go through the TF proxy.
    if skill:
        auth_mode = "tf_skill"
    elif func.authorisation_type == "api_key":
        auth_mode = "user_provided"
    else:
        auth_mode = "tf_proxy"

    return SyncedTool(
        id=func.id,
        name=func.name,
        description=func.description,
        tool_type=kind,
        request_type=req_type,
        url=func.url,
        authorisation_type=func.authorisation_type,
        auth_via=auth_mode,
        is_agent_skill=skill,
    )
|
||||
|
||||
|
||||
def parse_agent(agent) -> SyncedTool:
    """
    Convert an SDK Agent into a ``SyncedTool`` of type CODER_AGENT.

    Coder agents (mode='coder') are CODER_AGENT type, not skills.

    Args:
        agent: Agent from TF SDK

    Returns:
        SyncedTool instance with full agent configuration
    """
    # Fall back to a short id-derived name when the agent has no label.
    display_name = agent.label or f"agent_{agent.id[:8]}"

    # Prompting/model fields may be absent on older SDK objects, so
    # read them defensively and forward as keyword arguments.
    optional_fields = {
        field: getattr(agent, field, None)
        for field in (
            "interpolation_string",
            "goals",
            "temperature",
            "max_tokens",
            "llm_base_model",
            "llm_provider",
        )
    }

    return SyncedTool(
        id=agent.id,
        name=display_name,
        description=agent.description,
        tool_type=ToolType.CODER_AGENT,
        is_agent_skill=False,
        auth_via="tf_agent",
        **optional_fields,
    )
|
||||
|
||||
|
||||
def parse_prompt(prompt) -> SyncedPrompt:
    """
    Convert an SDK Prompt into a ``SyncedPrompt``.

    Args:
        prompt: Prompt from TF SDK

    Returns:
        SyncedPrompt instance
    """
    # Optional metadata may be missing on older SDK objects; read defensively.
    optional_fields = {
        key: getattr(prompt, key, None)
        for key in ("prompt_type", "available_to_agents", "description")
    }
    return SyncedPrompt(
        id=prompt.id,
        label=prompt.label,
        interpolation_string=prompt.interpolation_string,
        **optional_fields,
    )
|
||||
|
||||
|
||||
def sync_tools(config: TFConfig) -> ToolSyncResult:
    """
    Sync all tools from ToothFairyAI workspace using SDK.

    Includes:
    - Agent Functions (API Functions with request_type)
    - Agent Skills (functions with is_agent_skill=True)
    - Coder Agents (agents with mode='coder')
    - Prompts (prompt templates with available_to_agents mapping)

    Args:
        config: TFConfig instance

    Returns:
        ToolSyncResult with synced tools and prompts
    """
    try:
        client = config.get_client()

        # Sync agent functions (API auto-paginates up to 5000)
        # This is the required step: if it fails, the whole sync fails.
        func_result = client.agent_functions.list()
        tools = [parse_function(f) for f in func_result.items]

        # Sync coder agents (API auto-paginates up to 5000)
        # Best-effort: a failure here must not abort the function sync —
        # presumably the agents endpoint may be unavailable on some plans.
        try:
            agents_result = client.agents.list()
            for agent in agents_result.items:
                if getattr(agent, 'mode', None) == 'coder':
                    tools.append(parse_agent(agent))
        except Exception:
            pass

        # Sync prompts (API auto-paginates up to 5000)
        # Best-effort for the same reason as agents above.
        prompts = []
        try:
            prompts_result = client.prompts.list()
            prompts = [parse_prompt(p) for p in prompts_result.items]
        except Exception:
            pass

        # Tally tools per ToolType.value for the summary.
        by_type = {}
        for tool in tools:
            type_name = tool.tool_type.value
            by_type[type_name] = by_type.get(type_name, 0) + 1

        # Prompts are counted separately (they are not SyncedTool objects).
        if prompts:
            by_type['prompt'] = len(prompts)

        return ToolSyncResult(
            success=True,
            tools=tools,
            prompts=prompts,
            by_type=by_type,
        )

    except Exception as e:
        # Any failure in the required step (or client setup) surfaces here.
        return ToolSyncResult(
            success=False,
            error=f"Sync failed: {str(e)}",
        )
|
||||
|
||||
|
||||
def sync_tools_by_type(
    config: TFConfig,
    tool_types: Optional[list[ToolType]] = None,
) -> ToolSyncResult:
    """
    Sync tools of specific types from ToothFairyAI workspace.

    Args:
        config: TFConfig instance
        tool_types: List of ToolType to sync (None = all)

    Returns:
        ToolSyncResult with filtered tools
    """
    # Always perform a full sync; filtering happens client-side.
    result = sync_tools(config)

    # Failed sync or no filter requested: return the full result unchanged.
    if not result.success or not tool_types:
        return result

    filtered = [t for t in result.tools if t.tool_type in tool_types]

    # Recompute per-type counts for the filtered subset only.
    by_type = {}
    for tool in filtered:
        type_name = tool.tool_type.value
        by_type[type_name] = by_type.get(type_name, 0) + 1

    # NOTE(review): prompts gathered by sync_tools are not carried into the
    # filtered result — confirm whether dropping them here is intentional.
    return ToolSyncResult(
        success=True,
        tools=filtered,
        by_type=by_type,
    )
|
||||
|
||||
|
||||
def sync_api_functions_only(config: TFConfig) -> ToolSyncResult:
    """Sync only API Functions (has requestType)."""
    wanted = [ToolType.API_FUNCTION]
    return sync_tools_by_type(config, wanted)
|
||||
@@ -110,6 +110,10 @@ async function createMainPackage() {
|
||||
await Bun.file(`./dist/tfcode/postinstall.mjs`).write(await Bun.file("./script/postinstall-tfcode.mjs").text())
|
||||
await Bun.file(`./dist/tfcode/LICENSE`).write(await Bun.file("../../LICENSE").text())
|
||||
|
||||
// Copy embedded python module (tf_sync)
|
||||
await $`cp -r ./python ./dist/tfcode/`
|
||||
console.log("Copied embedded python/tf_sync module")
|
||||
|
||||
// Copy the current platform's binary to the main package
|
||||
// This makes installation faster (no need to download or copy from optionalDependencies)
|
||||
const currentPlatform = process.platform === "darwin" ? "darwin" : process.platform === "linux" ? "linux" : "windows"
|
||||
|
||||
@@ -64,11 +64,20 @@ const TFCODE_CONFIG_DIR = ".tfcode"
|
||||
const TFCODE_TOOLS_FILE = "tools.json"
|
||||
|
||||
function getPythonSyncPath(): string {
|
||||
const possible = [
|
||||
// Check embedded python path first (for npm distribution)
|
||||
const embedded = [
|
||||
path.join(__dirname, "..", "..", "..", "..", "python"), // packages/tfcode/python
|
||||
path.join(__dirname, "..", "..", "..", "python"), // dist/python
|
||||
]
|
||||
for (const p of embedded) {
|
||||
if (existsSync(p)) return p
|
||||
}
|
||||
// Fallback to development paths
|
||||
const dev = [
|
||||
path.join(__dirname, "..", "..", "..", "..", "tf-sync", "src", "tf_sync"),
|
||||
path.join(process.cwd(), "packages", "tf-sync", "src", "tf_sync"),
|
||||
]
|
||||
for (const p of possible) {
|
||||
for (const p of dev) {
|
||||
if (existsSync(p)) return p
|
||||
}
|
||||
return "tf_sync"
|
||||
@@ -76,6 +85,7 @@ function getPythonSyncPath(): string {
|
||||
|
||||
async function runPythonSync(method: string, args: Record<string, unknown> = {}): Promise<unknown> {
|
||||
const credentials = await loadCredentials()
|
||||
const pythonPath = getPythonSyncPath()
|
||||
|
||||
const pythonCode = `
|
||||
import json
|
||||
@@ -83,6 +93,8 @@ import sys
|
||||
import os
|
||||
|
||||
try:
|
||||
# Add embedded tf_sync module path
|
||||
sys.path.insert(0, "${pythonPath.replace(/\\/g, "/")}")
|
||||
from tf_sync.config import load_config, validate_credentials
|
||||
from tf_sync.tools import sync_tools, sync_tools_by_type, ToolType
|
||||
from tf_sync.mcp import sync_mcp_servers
|
||||
|
||||
@@ -16,10 +16,17 @@ export async function upgrade() {
|
||||
|
||||
if (Installation.VERSION === latest) return
|
||||
|
||||
if (config.autoupdate === false || Flag.OPENCODE_DISABLE_AUTOUPDATE) return
|
||||
if (config.autoupdate === false) return
|
||||
|
||||
const kind = Installation.getReleaseType(Installation.VERSION, latest)
|
||||
|
||||
// When auto-upgrade is disabled (e.g. tfcode uses npm for updates),
|
||||
// still notify the user that a new version is available.
|
||||
if (Flag.OPENCODE_DISABLE_AUTOUPDATE) {
|
||||
await Bus.publish(Installation.Event.UpdateAvailable, { version: latest })
|
||||
return
|
||||
}
|
||||
|
||||
if (config.autoupdate === "notify" || kind !== "patch") {
|
||||
await Bus.publish(Installation.Event.UpdateAvailable, { version: latest })
|
||||
return
|
||||
|
||||
@@ -340,17 +340,12 @@ export namespace ProviderTransform {
|
||||
if (
|
||||
id.includes("deepseek") ||
|
||||
id.includes("minimax") ||
|
||||
id.includes("glm") ||
|
||||
id.includes("mistral") ||
|
||||
id.includes("kimi") ||
|
||||
// TODO: Remove this after models.dev data is fixed to use "kimi-k2.5" instead of "k2p5"
|
||||
id.includes("k2p5")
|
||||
)
|
||||
return {}
|
||||
|
||||
// ToothFairyAI doesn't support thinking/reasoning parameters yet
|
||||
if (model.api.npm === "@toothfairyai/sdk") return {}
|
||||
|
||||
// see: https://docs.x.ai/docs/guides/reasoning#control-how-hard-the-model-thinks
|
||||
if (id.includes("grok") && id.includes("grok-3-mini")) {
|
||||
if (model.api.npm === "@openrouter/ai-sdk-provider") {
|
||||
|
||||
Reference in New Issue
Block a user