{
  "tools": [
    {
      "name": "getkin_proxy_chat",
      "description": "Route an LLM chat completion through GetKin's proxy. Supports Google Gemini models. OpenAI-compatible format. 3.0% margin on provider cost. Includes request logging and cost tracking.",
      "inputSchema": {
        "type": "object",
        "properties": {
          "model": {
            "type": "string",
            "description": "Model to use (e.g. 'gemini-2.0-flash')"
          },
          "messages": {
            "type": "array",
            "description": "Chat messages in OpenAI format",
            "minItems": 1,
            "items": {
              "type": "object",
              "properties": {
                "role": {
                  "type": "string",
                  "description": "Message author role (e.g. 'system', 'user', 'assistant')"
                },
                "content": {
                  "type": "string",
                  "description": "Message text content"
                }
              },
              "required": ["role", "content"]
            }
          },
          "max_tokens": {
            "type": "integer",
            "description": "Max output tokens",
            "default": 1000,
            "minimum": 1
          }
        },
        "required": ["model", "messages"]
      }
    }
  ]
}