diff --git a/.gitignore b/.gitignore index befe92f..85f5ffb 100644 --- a/.gitignore +++ b/.gitignore @@ -4,4 +4,5 @@ __pycache__/ .DS_Store dist docs -whatsapp/ \ No newline at end of file +whatsapp/ +docs/ \ No newline at end of file diff --git a/APP.md b/APP.md deleted file mode 100644 index 4eeb74a..0000000 --- a/APP.md +++ /dev/null @@ -1,445 +0,0 @@ -# Building TruffleOS Apps -tell ralph i said hi - -this is without cli -sketch for what docs will be structured like and u can just feed this to whatever model with example app code to make u an app or ven an exisiting mcp and have it port it - -TruffleOS apps come in two flavors: **Focus** and **Background**. - -## App Types - -| Type | When it runs | What it does | -|------|--------------|--------------| -| **Focus** | Always on, waiting | Exposes tools the AI can call on demand | -| **Background** | On a schedule | Runs periodically, posts to user's feed | - ---- - -## Focus Apps - -Focus apps are MCP servers that expose tools to the device AI. When the user asks a question, the AI can call your tools. - -### Example: Finance App - -```python -from mcp.server.fastmcp import FastMCP - -HOST = "0.0.0.0" -PORT = 8000 - -mcp = FastMCP("finance", stateless_http=True, host=HOST, port=PORT) - -@mcp.tool("get_stock_price", description="Get current price for a stock ticker") -async def get_stock_price(symbol: str) -> str: - # fetch from API... - return f"{symbol}: $256.44" - -@mcp.tool("search_ticker", description="Search for stock ticker symbols") -async def search_ticker(keywords: str) -> str: - # search API... - return "AAPL - Apple Inc." - -def main(): - print(f"Starting MCP server on {HOST}:{PORT}") - mcp.run(transport="streamable-http") - -if __name__ == "__main__": - main() -``` - -### How Tool Calls Work - -1. User asks: "What's Apple stock at?" -2. Truffle sees your `get_stock_price` tool -3. calls `get_stock_price("AAPL")` -4. Your app returns the result -5. 
Truffle responds to you with the data - -### Focus App Requirements - -- Must run an MCP server on `0.0.0.0:8000` -- Use `transport="streamable-http"` -- Tools are defined with `@mcp.tool()` decorator -- Each tool needs a `description` for the AI to understand when to use it - ---- - -## Local Development (Focus Apps) - -You can run your MCP server locally on your machine instead of deploying to the device as well. This is great for fast iteration during development. - -### 1. Run the server locally - -```bash -cd your-app-directory -python app.py -``` - -You should see: -``` -Starting MCP server on 0.0.0.0:8000 -INFO: Uvicorn running on http://0.0.0.0:8000 (Press CTRL+C to quit) -``` - -### 2. Get your machine's IP - -```bash -# macOS -ipconfig getifaddr en0 - -# Linux -hostname -I | awk '{print $1}' -``` - -### 3. Add MCP in TruffleOS Settings - -Go to Settings β†’ Add New MCP and enter: - -| Field | Value | -|-------|-------| -| **Name** | Your App Name | -| **Server URL** | `192.168.X.X` (your IP from step 2) | -| **Port** | `8000` | -| **Path** | `mcp` | - -Now you can use your tools immediately without deploying. Changes to your code take effect as soon as you restart `python app.py`. - ---- - -## Background Apps - -Background apps run on a schedule and post content to the user's feed. - -### Example: Hedge App - -```python -import os -from datetime import datetime -from gourmet.ambient import run_ambient, AmbientContext - -TICKERS = os.getenv("HEDGE_TICKERS", "AAPL,MSFT").split(",") - -def hedge_ambient(ctx: AmbientContext): - for symbol in TICKERS: - # fetch stock data... 
- price = 256.44 - - ctx.bg.post_to_feed( - title=f"πŸ“ˆ {symbol}: ${price}", - body=f"{symbol} is currently trading at ${price}", - src_uri=f"https://finance.yahoo.com/quote/{symbol}", - media_uris=["https://example.com/chart.png"], # optional - content_timestamp=datetime.now() - ) - -if __name__ == "__main__": - run_ambient(hedge_ambient) -``` - -### post_to_feed() Parameters - -| Parameter | Type | Required | Description | -|-----------|------|----------|-------------| -| `title` | str | yes | Card title shown in feed | -| `body` | str | yes | Main content text | -| `src_uri` | str | no | Link to original source | -| `media_uris` | list[str] | no | List of image URLs to display | -| `content_timestamp` | datetime | no | When the content was created | - -### Background App Requirements - -- Import `run_ambient` and `AmbientContext` from `gourmet.ambient` -- Define a function that takes `ctx: AmbientContext` -- Call `run_ambient(your_function)` in main -- Use `ctx.bg.post_to_feed()` to post content - ---- - -## truffile.yaml - -The `truffile.yaml` defines your app's metadata and installation steps. - -### Focus App Example - -```yaml -metadata: - name: Finance - type: foreground - description: | - Financial data tools for your Truffle. - process: - cmd: - - python - - app.py - working_directory: / - environment: - PYTHONUNBUFFERED: "1" - icon_file: ./icon.png - -steps: - - name: Welcome - type: welcome - content: | - This app provides financial data tools. - - - name: Copy files - type: files - files: - - source: ./app.py - destination: ./app.py - - - name: Install dependencies - type: bash - run: | - pip install mcp requests -``` - -### Background App Example - -```yaml -metadata: - name: Hedge - type: background - description: | - Track your stock portfolio. 
- process: - cmd: - - python - - app.py - working_directory: / - environment: - PYTHONUNBUFFERED: "1" - HEDGE_TICKERS: "AAPL,MSFT,GOOGL" - icon_file: ./icon.png - default_schedule: - type: interval - interval: - duration: 5m - schedule: - daily_window: "06:00-22:00" - allowed_days: [mon, tue, wed, thu, fri] - -steps: - - name: Copy files - type: files - files: - - source: ./app.py - destination: ./app.py - - - name: Install dependencies - type: bash - run: | - pip install requests - pip install gourmet -``` - ---- - -## Metadata Fields - -| Field | Required | Description | -|-------|----------|-------------| -| `name` | yes | App name shown to user | -| `type` | yes | `foreground` or `background` | -| `description` | no | What the app does | -| `process.cmd` | yes | Command to run the app | -| `process.working_directory` | no | Working dir (default: `/`) | -| `process.environment` | no | Environment variables | -| `icon_file` | no | Path to PNG icon | -| `default_schedule` | background only | When the app runs | - ---- - -## Schedule Types (Background Apps Only) - -### 1. Interval - -Run every X time period. - -```yaml -default_schedule: - type: interval - interval: - duration: 5m # required: how often - schedule: # optional: constraints - daily_window: "09:00-17:00" - allowed_days: [mon, tue, wed, thu, fri] -``` - -### 2. Times - -Run at specific times each day. - -```yaml -default_schedule: - type: times - times: - run_times: # required: list of times - - "09:00" - - "12:00" - - "18:00" - allowed_days: [mon, wed, fri] # optional -``` - ---- - -## Duration Format - -| Format | Example | Meaning | -|--------|---------|---------| -| `Xms` | `500ms` | 500 milliseconds | -| `Xs` | `30s` | 30 seconds | -| `Xm` | `5m` | 5 minutes | -| `Xh` | `2h` | 2 hours | -| `Xd` | `1d` | 1 day | - ---- - -## Daily Window - -Restrict when the app can run during the day. 
- -```yaml -daily_window: "09:00-17:30" -``` - -Or verbose format: - -```yaml -daily_window: - start: "09:00" - end: "17:30" -``` - ---- - -## Day Restrictions - -Use ONE of these (not both): - -**allowed_days** - only run on these days: -```yaml -allowed_days: [mon, tue, wed, thu, fri] -``` - -**forbidden_days** - don't run on these days: -```yaml -forbidden_days: [sat, sun] -``` - -Valid day values: `sun`, `mon`, `tue`, `wed`, `thu`, `fri`, `sat` - ---- - -## Installation Step Types - -### files - -Copy files from your app directory to the container. - -```yaml -- name: Copy files - type: files - files: - - source: ./app.py - destination: ./app.py - - source: ./config.yaml - destination: ./config.yaml - permissions: 600 # optional -``` - -### bash - -Run shell commands. - -```yaml -- name: Install dependencies - type: bash - run: | - pip install requests - apk add --no-cache curl -``` - -### welcome - -Show a welcome message to the user. - -```yaml -- name: Welcome - type: welcome - content: | - Welcome to my app! - It does cool things. -``` - -### text - -Prompt user for text input (saved to env vars). - -```yaml -- name: Configure API Key - type: text - content: | - Enter your API key to continue. - fields: - - name: api_key - label: API Key - type: password - env: MY_API_KEY - placeholder: "sk-..." -``` - -Field types: `text`, `password`, `number` - -### vnc - -Open a VNC window for user interaction (login flows, etc). - -```yaml -- name: Sign into Twitter - type: vnc - cmd: - - python - - onboard.py - closes_on_complete: true - description: | - Sign into your account in the browser window. -``` - ---- - -## Environment Variables - -Set in `process.environment`: - -```yaml -process: - environment: - PYTHONUNBUFFERED: "1" # always use this for Python apps - MY_API_KEY: "secret" - DEBUG: "true" -``` - -Or collected from user input via `text` steps (uses `env` field). 
- ---- - -## Quick Reference - -### Minimal Focus App - -``` -my-focus-app/ -β”œβ”€β”€ app.py # MCP server with @mcp.tool() functions -β”œβ”€β”€ truffile.yaml # type: foreground -└── icon.png # optional -``` - -### Minimal Background App - -``` -my-bg-app/ -β”œβ”€β”€ app.py # Uses run_ambient() + post_to_feed() -β”œβ”€β”€ truffile.yaml # type: background + default_schedule -└── icon.png # optional -``` diff --git a/README.md b/README.md index 910d4c4..1907272 100644 --- a/README.md +++ b/README.md @@ -1,107 +1,95 @@ -# truffile +# πŸ„β€πŸŸ« Truffile -TruffleOS SDK - deploy apps to Truffle devices +Python SDK/CLI for Truffle devices. -## install +## What It Does -```bash -pip install truffile -``` +- discovers and connects to your Truffle (`scan`, `connect`, `disconnect`) +- validates and deploys apps from `truffile.yaml` (`validate`, `deploy`) +- manages installed apps (`list apps`, `delete`) +- talks to inference directly (`models`, `chat`) +- exposes an OpenAI-compatible local proxy (`proxy`) -or from source: -```bash -git clone -cd truffile -pip install -e . 
-``` +## Start making your Own Apps -## commands +- app schema and validation: `truffile/truffile/schema/app_config.py` +- schedule parsing: `truffile/truffile/schedule.py` +- deploy planning + builder flow: `truffile/truffile/deploy/builder.py` +- generated TruffleOS protos vendored in: `truffile/truffle/` +- examples: + - `truffile/example-apps/kalshi` + - `truffile/example-apps/reddit` -```bash -# find truffle devices on your network -truffile scan +`truffile.yaml` defines: +- metadata (`name`, `description`, `type`) +- process (`cmd`, `working_directory`, `environment`) +- files to upload +- optional run/build commands +- background schedule policy (for BG apps) -# connect to a device (first time requires approval on device) -truffile connect truffle-6272 +## App Types and Runtime Model -# deploy an app from current directory -truffile deploy +Apps can be: -# deploy an app from a specific path -truffile deploy ./my-app +- foreground (`fg`): exposes MCP tools that tasks/agents can call during active execution +- background (`bg`): runs on schedule and emits context for proactivity, enabling the device to trigger actions and write/update memory +- both (`fg` + `bg`): one app package can provide MCP tools and scheduled context emission -# deploy with interactive shell (for debugging) -truffile deploy -i +How to think about it: -# list installed apps on connected device -truffile list apps +- FG path is tool-serving: app process is used as a callable capability surface (MCP) +- BG path is context/proactivity: scheduled runs feed the proactive agent with fresh signals +- Proactivity can take actions and persist memory based on BG outputs -# list connected devices -truffile list devices +In practice: -# disconnect from a device -truffile disconnect truffle-6272 +- use `fg` when you need direct tool invocation from tasks +- use `bg` when you need periodic monitoring, summaries, or event-driven context +- use `both` when the same app should both expose tools and 
continuously feed proactivity/memory -# disconnect from all devices -truffile disconnect all -``` +## Core Commands -## truffile.yaml - -apps need a `truffile.yaml` in their directory: - -```yaml -metadata: - name: My App - description: does cool stuff - type: background # or foreground - icon_file: ./icon.png - process: - cmd: [python, app.py] - working_directory: / - environment: - MY_VAR: value - # schedule for background apps only: - default_schedule: - type: interval # interval | times - interval: - duration: "1h" # 15m, 2h, 1d, etc. - schedule: - daily_window: "09:00-17:30" # optional - allowed_days: [mon, tue, wed, thu, fri] # optional - -files: - - source: ./app.py - destination: ./app.py - -run: | - pip install requests +```bash +truffile scan +truffile connect +truffile validate [app_dir] +truffile deploy [app_dir] +truffile deploy --dry-run [app_dir] +truffile list apps +truffile delete +truffile models +truffile chat "hello" +truffile proxy --host 127.0.0.1 --port 8080 ``` -### schedule types - -**interval** - run every N minutes/hours: -```yaml -default_schedule: - type: interval - interval: - duration: "30m" - schedule: - daily_window: "06:00-22:00" - allowed_days: [mon, tue, wed, thu, fri] -``` +## Inference Interfaces -**times** - run at specific times: -```yaml -default_schedule: - type: times - times: - run_times: ["08:00", "12:00", "18:00"] - allowed_days: [mon, tue, wed, thu, fri] -``` +Direct IF2: +- list models: `GET /if2/v1/models` +- chat completions: `POST /if2/v1/chat/completions` + +CLI wrappers: +- `truffile models` +- `truffile chat` (streaming by default) + +## Proxy + +`truffile proxy` serves OpenAI-compatible routes locally and forwards to device IF2: + +- `GET /v1/models` +- `POST /v1/chat/completions` -## example apps +Default local base URL: +- `http://127.0.0.1:8080/v1` -see `example-apps/` for working examples: -- `example-apps/ambient/hedge` - background app -- `example-apps/focus/finance` - foreground app +Reasoning behavior: 
+- default: proxy can inject reasoning into `content` as `...` +- `--no-think-tags`: keeps reasoning separate as `reasoning_content` in stream deltas + +## Proto Sync + +Refresh vendored protos from firmware repo: + +```bash +./scripts/sync_protos.sh +``` diff --git a/example-apps/ambient/hedge/app.py b/example-apps/ambient/hedge/app.py deleted file mode 100644 index f643e14..0000000 --- a/example-apps/ambient/hedge/app.py +++ /dev/null @@ -1,120 +0,0 @@ -import os -import requests -import urllib.parse -from datetime import datetime -from gourmet.ambient import run_ambient, AmbientContext -import logging - -logger = logging.getLogger("hedge") -logger.setLevel(logging.DEBUG) - -API_KEY = os.getenv("ALPHAVANTAGE_API_KEY", "go get your key dudes") -TICKERS = os.getenv("HEDGE_TICKERS", "AAPL,MSFT,GOOGL,IREN").split(",") -BASE_URL = "https://www.alphavantage.co/query" - - -def fetch_daily_data(symbol: str) -> dict | None: - try: - resp = requests.get(BASE_URL, params={ - "function": "TIME_SERIES_DAILY", - "symbol": symbol.strip().upper(), - "outputsize": "compact", - "apikey": API_KEY, - }, timeout=30) - resp.raise_for_status() - data = resp.json() - if "Time Series (Daily)" not in data: - logger.warning(f"No daily data for {symbol}: {data}") - return None - return data - except Exception as e: - logger.error(f"Failed to fetch {symbol}: {e}") - return None - - -def generate_chart_url(symbol: str, dates: list[str], prices: list[float]) -> str: - chart_config = { - "type": "line", - "data": { - "labels": dates, - "datasets": [{ - "label": symbol, - "data": prices, - "fill": False, - "borderColor": "#4CAF50", - "tension": 0.1, - "pointRadius": 2, - }] - }, - "options": { - "plugins": { - "legend": {"display": False}, - "title": {"display": True, "text": f"{symbol} - Last 5 Days"} - }, - "scales": { - "y": {"beginAtZero": False} - } - } - } - chart_json = str(chart_config).replace("'", '"').replace("False", "false").replace("True", "true") - encoded = 
urllib.parse.quote(chart_json, safe='') - return f"https://quickchart.io/chart?c={encoded}&w=400&h=200&bkg=white" - - -def hedge_ambient(ctx: AmbientContext): - logger.info(f"Hedge running for tickers: {TICKERS}") - - for symbol in TICKERS: - symbol = symbol.strip().upper() - if not symbol: - continue - - data = fetch_daily_data(symbol) - if not data: - continue - - ts = data["Time Series (Daily)"] - sorted_dates = sorted(ts.keys(), reverse=True)[:5] - sorted_dates.reverse() - - dates = [d[5:] for d in sorted_dates] - prices = [float(ts[d]["4. close"]) for d in sorted_dates] - - today = sorted_dates[-1] - today_data = ts[today] - current_price = float(today_data["4. close"]) - open_price = float(today_data["1. open"]) - high = float(today_data["2. high"]) - low = float(today_data["3. low"]) - volume = int(today_data["5. volume"]) - - change = current_price - open_price - change_pct = (change / open_price) * 100 if open_price else 0 - arrow = "πŸ“ˆ" if change >= 0 else "πŸ“‰" - - chart_url = generate_chart_url(symbol, dates, prices) - - title = f"{arrow} {symbol}: ${current_price:.2f}" - body = f""" -{symbol} Stock Update - -Price: ${current_price:.2f} -Change: {'+' if change >= 0 else ''}{change:.2f} ({'+' if change_pct >= 0 else ''}{change_pct:.2f}%) - -Today's Range: ${low:.2f} - ${high:.2f} -Volume: {volume:,} -""".strip() - - logger.info(f"Posting {symbol} to feed: {title}") - ctx.bg.post_to_feed( - title=title, - body=body, - src_uri=f"https://finance.yahoo.com/quote/{symbol}", - media_uris=[chart_url], - content_timestamp=datetime.now() - ) - logger.info(f"Posted {symbol} to feed") - - -if __name__ == "__main__": - run_ambient(hedge_ambient) diff --git a/example-apps/ambient/hedge/icon.png b/example-apps/ambient/hedge/icon.png deleted file mode 100644 index 27bcddf..0000000 Binary files a/example-apps/ambient/hedge/icon.png and /dev/null differ diff --git a/example-apps/ambient/hedge/truffile.yaml b/example-apps/ambient/hedge/truffile.yaml deleted file mode 
100644 index d338f06..0000000 --- a/example-apps/ambient/hedge/truffile.yaml +++ /dev/null @@ -1,27 +0,0 @@ -metadata: - name: Hedge - type: background - description: | - Track your stock portfolio with live price updates and charts in your feed. - Configure your tickers via the HEDGE_TICKERS environment variable. - process: - cmd: - - python - - app.py - working_directory: / - environment: - PYTHONUNBUFFERED: "1" - HEDGE_TICKERS: "AAPL,MSFT,GOOGL" - icon_file: ./icon.png - default_schedule: - type: interval - interval: - duration: "2m" - schedule: - daily_window: "06:00-22:00" -files: - - source: ./app.py - destination: ./app.py -run: | - pip install --no-cache-dir requests - pip install --no-cache-dir -i https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/ 'gourmet[ambient]==0.1.dev45' \ No newline at end of file diff --git a/example-apps/ambient/reddit/truffile.yaml b/example-apps/ambient/reddit/truffile.yaml deleted file mode 100644 index 3162af9..0000000 --- a/example-apps/ambient/reddit/truffile.yaml +++ /dev/null @@ -1,70 +0,0 @@ -metadata: - name: Front Page of the Internet - type: background - description: | - Have your TruffleΒΉ browse Reddit and post relevant content to your feed. - process: - cmd: - - python - - /opt/reddit.py - working_directory: / - environment: - PYTHONUNBUFFERED: "1" - - icon_file: ./icon.png - default_schedule: - type: interval - interval: - duration: 15m - schedule: - daily_window: "01:00-22:30" -steps: - - name: Reddit Setup - type: welcome - content: | - The Reddit app will allow your Truffle to post relevant content from Reddit on your feed. - You can customize which subreddits to follow in the configuration step. - Please use this app in accordance with Reddit's terms of service. 
- - name: Install dependencies - type: bash - run: | - pip install --no-cache-dir --force-reinstall requests feedparser trafilatura==2.0.0 tld==0.13.1 - - name: Copy application files - type: files - files: - - source: ./reddit.py - destination: ./opt/reddit.py - - name: Configure Reddit - type: text - content: | - Please provide either a comma separated list of subreddits to follow, or leave blank to follow r/all. - Example: news, worldnews, technology, science, funny, pics, videos, gaming - Alternatively, [copy the URL of your personal frontpage RSS feed (JSON) from here](https://old.reddit.com/prefs/feeds/) - If a personal feed URL is provided, that will be used instead of the subreddit list if the URL is valid. - fields: - - name: subreddits - label: Comma Separated Subreddits - type: text - placeholder: news, worldnews, technology, LosAngeles - default: all - env: SUBREDDITS - - name: user_feed_url - label: Optional - Personal Frontpage RSS Feed URL (JSON) - type: text - placeholder: - default: - env: USER_FEED_URL - validator: - type: bash - run: | - python /opt/reddit.py --verify - timeout: 60 - error_message: | - Was unable to scrape Reddit with the provided configuration. - Please ensure the subreddit names are valid or the feed URL is correct. 
- - - - - - \ No newline at end of file diff --git a/example-apps/focus/finance/app.py b/example-apps/focus/finance/app.py deleted file mode 100644 index 8deff0e..0000000 --- a/example-apps/focus/finance/app.py +++ /dev/null @@ -1,157 +0,0 @@ -import requests -from mcp.server.fastmcp import FastMCP - -API_KEY = "nope" -BASE_URL = "https://www.alphavantage.co/query" - -HOST = "0.0.0.0" -PORT = 8000 - -mcp = FastMCP("finance", stateless_http=True, host=HOST, port=PORT) - - -def _call_av(function: str, **params) -> dict: - params["function"] = function - params["apikey"] = API_KEY - resp = requests.get(BASE_URL, params=params, timeout=30) - resp.raise_for_status() - return resp.json() - - -@mcp.tool("get_stock_price", description="Get current price and daily stats for a stock ticker (e.g. AAPL, MSFT, TSLA)") -async def get_stock_price(symbol: str) -> str: - data = _call_av("GLOBAL_QUOTE", symbol=symbol.upper()) - quote = data.get("Global Quote", {}) - if not quote: - return f"No data found for {symbol}" - return f""" -{quote.get('01. symbol', symbol)} -Price: ${quote.get('05. price', 'N/A')} -Change: {quote.get('09. change', 'N/A')} ({quote.get('10. change percent', 'N/A')}) -Open: ${quote.get('02. open', 'N/A')} -High: ${quote.get('03. high', 'N/A')} -Low: ${quote.get('04. low', 'N/A')} -Volume: {quote.get('06. volume', 'N/A')} -Previous Close: ${quote.get('08. previous close', 'N/A')} -""".strip() - - -@mcp.tool("get_stock_history", description="Get daily price history for a stock. Returns last 30 days by default.") -async def get_stock_history(symbol: str, days: int = 30) -> str: - data = _call_av("TIME_SERIES_DAILY", symbol=symbol.upper(), outputsize="compact") - ts = data.get("Time Series (Daily)", {}) - if not ts: - return f"No historical data for {symbol}" - lines = [f"{symbol.upper()} - Last {min(days, len(ts))} trading days:"] - for i, (date, vals) in enumerate(sorted(ts.items(), reverse=True)): - if i >= days: - break - lines.append(f"{date}: Open ${vals['1. 
open']} | High ${vals['2. high']} | Low ${vals['3. low']} | Close ${vals['4. close']} | Vol {vals['5. volume']}") - return "\n".join(lines) - - -@mcp.tool("get_company_overview", description="Get company profile, financials, and key metrics for a stock") -async def get_company_overview(symbol: str) -> str: - data = _call_av("OVERVIEW", symbol=symbol.upper()) - if not data or "Symbol" not in data: - return f"No company data for {symbol}" - return f""" -{data.get('Name', symbol)} ({data.get('Symbol', '')}) -Sector: {data.get('Sector', 'N/A')} | Industry: {data.get('Industry', 'N/A')} -Market Cap: ${data.get('MarketCapitalization', 'N/A')} -P/E Ratio: {data.get('PERatio', 'N/A')} | EPS: ${data.get('EPS', 'N/A')} -52-Week High: ${data.get('52WeekHigh', 'N/A')} | 52-Week Low: ${data.get('52WeekLow', 'N/A')} -Dividend Yield: {data.get('DividendYield', 'N/A')} -Description: {data.get('Description', 'N/A')[:500]}... -""".strip() - - -@mcp.tool("search_ticker", description="Search for stock ticker symbols by company name or keywords") -async def search_ticker(keywords: str) -> str: - data = _call_av("SYMBOL_SEARCH", keywords=keywords) - matches = data.get("bestMatches", []) - if not matches: - return f"No matches for '{keywords}'" - lines = [f"Search results for '{keywords}':"] - for m in matches[:10]: - lines.append(f" {m.get('1. symbol', '')} - {m.get('2. name', '')} ({m.get('4. 
region', '')})") - return "\n".join(lines) - - -@mcp.tool("get_market_news", description="Get latest market news and sentiment for a stock or topic") -async def get_market_news(tickers: str = "", topics: str = "", limit: int = 5) -> str: - params = {"limit": min(limit, 50)} - if tickers: - params["tickers"] = tickers.upper() - if topics: - params["topics"] = topics - data = _call_av("NEWS_SENTIMENT", **params) - feed = data.get("feed", []) - if not feed: - return "No news found" - lines = ["Latest Market News:"] - for article in feed[:limit]: - sentiment = article.get("overall_sentiment_label", "") - lines.append(f"[{sentiment}] {article.get('title', 'No title')}") - lines.append(f" Source: {article.get('source', 'Unknown')} | {article.get('time_published', '')[:10]}") - lines.append(f" {article.get('summary', '')[:200]}...") - lines.append("") - return "\n".join(lines) - - -@mcp.tool("get_top_movers", description="Get top gainers, losers, and most actively traded stocks today") -async def get_top_movers() -> str: - data = _call_av("TOP_GAINERS_LOSERS") - lines = [] - for category in ["top_gainers", "top_losers", "most_actively_traded"]: - items = data.get(category, [])[:5] - if items: - lines.append(f"\n{category.replace('_', ' ').title()}:") - for item in items: - lines.append(f" {item.get('ticker', '')} ${item.get('price', '')} ({item.get('change_percentage', '')})") - return "\n".join(lines).strip() or "No market data available" - - -@mcp.tool("get_crypto_price", description="Get current exchange rate for a cryptocurrency (e.g. BTC, ETH)") -async def get_crypto_price(crypto: str, currency: str = "USD") -> str: - data = _call_av("CURRENCY_EXCHANGE_RATE", from_currency=crypto.upper(), to_currency=currency.upper()) - rate = data.get("Realtime Currency Exchange Rate", {}) - if not rate: - return f"No data for {crypto}/{currency}" - return f""" -{rate.get('1. From_Currency Code', crypto)} β†’ {rate.get('3. To_Currency Code', currency)} -Rate: {rate.get('5. 
Exchange Rate', 'N/A')} -Bid: {rate.get('8. Bid Price', 'N/A')} | Ask: {rate.get('9. Ask Price', 'N/A')} -Last Updated: {rate.get('6. Last Refreshed', 'N/A')} -""".strip() - - -@mcp.tool("get_economic_indicator", description="Get economic indicators: GDP, CPI, UNEMPLOYMENT, INTEREST_RATE, INFLATION") -async def get_economic_indicator(indicator: str) -> str: - indicator = indicator.upper() - func_map = { - "GDP": "REAL_GDP", - "CPI": "CPI", - "UNEMPLOYMENT": "UNEMPLOYMENT", - "INTEREST_RATE": "FEDERAL_FUNDS_RATE", - "INFLATION": "INFLATION", - } - func = func_map.get(indicator, indicator) - data = _call_av(func) - vals = data.get("data", [])[:10] - if not vals: - return f"No data for {indicator}" - name = data.get("name", indicator) - lines = [f"{name}:"] - for v in vals: - lines.append(f" {v.get('date', '')}: {v.get('value', 'N/A')}") - return "\n".join(lines) - - -def main(): - print(f"Starting Finance MCP server on {HOST}:{PORT}") - mcp.run(transport="streamable-http") - - -if __name__ == "__main__": - main() diff --git a/example-apps/focus/finance/icon.png b/example-apps/focus/finance/icon.png deleted file mode 100644 index 90f6234..0000000 Binary files a/example-apps/focus/finance/icon.png and /dev/null differ diff --git a/example-apps/focus/finance/truffile.yaml b/example-apps/focus/finance/truffile.yaml deleted file mode 100644 index 75c1a1e..0000000 --- a/example-apps/focus/finance/truffile.yaml +++ /dev/null @@ -1,20 +0,0 @@ -metadata: - name: Finance - type: foreground - description: | - Financial data analyst tools for your Truffle. Get stock prices, company info, - market news, crypto rates, and economic indicators powered by Alpha Vantage. 
- process: - cmd: - - python - - app.py - working_directory: / - environment: - PYTHONUNBUFFERED: "1" - icon_file: ./icon.png -files: - - source: ./app.py - destination: ./app.py -run: | - pip install --no-cache-dir mcp requests - diff --git a/example-apps/focus/research/icon.png b/example-apps/focus/research/icon.png deleted file mode 100644 index fbd6440..0000000 Binary files a/example-apps/focus/research/icon.png and /dev/null differ diff --git a/example-apps/focus/research/research.py b/example-apps/focus/research/research.py deleted file mode 100644 index 2dffc44..0000000 --- a/example-apps/focus/research/research.py +++ /dev/null @@ -1,87 +0,0 @@ -import requests -from ddgs import DDGS -from ddgs.results import TextResult, NewsResult -from typing import List -from mcp.server.fastmcp import FastMCP -from abrasive.extract import extract_content_from_url - -PERPLEXITY_KEY = "you thought" -class PerplexitySearcher: - def __init__(self, key : str = PERPLEXITY_KEY): - self.system_prompt = "You provide concise and accurate answers to queries, aim for recent information. This is presented to another LLM which will use it to help a user. Cite sources where possible and format your answer in markdown." 
- self.model = "sonar" - self.url = "https://api.perplexity.ai/chat/completions" - self.key = key - def run(self, query: str) -> str: - messages = [ - {"role": "system", "content": self.system_prompt}, - {"role": "user", "content": query} - ] - response = requests.post( - self.url, - json={ - "model": self.model, - "messages": messages - }, - headers={ - "accept": "application/json", - "Content-Type": "application/json", - "Authorization": f"Bearer {self.key}" - } - ) - return response.json()["choices"][0]["message"]["content"] - -HOST = "0.0.0.0" -PORT = 8000 - -mcp = FastMCP("research", stateless_http=True, host=HOST, port=PORT) - - -@mcp.tool("search_perplexity", description="Searches Perplexity AI for an answer to the given query.") -async def search_perplexity(query: str) -> str: - searcher = PerplexitySearcher() - result = searcher.run(query) - return result - -@mcp.tool("search_web") -async def search_web(query: str, num_results: int = 5) -> str: - - results : List[dict] = DDGS().text(query, max_results=num_results, region='us-en', safesearch='off') # type: ignore - formatted_results = "\n".join([f"{i+1}. {res['title']}: {res['href']}" for i, res in enumerate(results)]) - return formatted_results - -@mcp.tool("search_news") -async def search_news(query: str, num_results: int = 5) -> str: - results : List[dict] = DDGS().news(query, max_results=num_results, region='us-en', safesearch='off') # type: ignore - formatted_results = "\n".join([f"{i+1}. [{res['source']}] '{res['title']}': {res['body']} <{res['url']}>" for i, res in enumerate(results)]) - return formatted_results - -@mcp.tool("fetch_url_content", description="Fetches and extracts the main content from a given URL. Will return text and any images found in markdown format. Some sites may block scraping.") -async def fetch_url_content(url: str) -> str: - try: - content = extract_content_from_url(url.strip()) - if content is None: - return "Error: Failed to extract content from the URL. Was it valid? 
This site may block scraping." - - content_str = f"<{url}>\n\n{content.text}\n" - if content.images: - content_str += "\n\nImages:\n" + "\n".join(content.images) - if content.source_name: - content_str = f"Source: {content.source_name}\n\n" + content_str - return content_str - except Exception as e: - return f"Error: Exception occurred while fetching URL: {str(e)}" - -def main(): - print(f"Starting MCP server on {HOST}:{PORT}") - mcp.run(transport="streamable-http") - - - - - -if __name__ == "__main__": - main() - - - diff --git a/example-apps/focus/research/truffile.yaml b/example-apps/focus/research/truffile.yaml deleted file mode 100644 index 97da18c..0000000 --- a/example-apps/focus/research/truffile.yaml +++ /dev/null @@ -1,20 +0,0 @@ -metadata: - name: Research - type: foreground - description: | - Tools to help your TruffleΒΉ research and gather information from the web. - process: - cmd: - - python - - research.py - working_directory: / - environment: - PYTHONUNBUFFERED: "1" - icon_file: ./icon.png - -files: - - source: ./research.py - destination: ./research.py - -run: | - pip install --no-cache-dir mcp requests ddgs diff --git a/example-apps/kalshi/bg_worker.py b/example-apps/kalshi/bg_worker.py new file mode 100644 index 0000000..517d982 --- /dev/null +++ b/example-apps/kalshi/bg_worker.py @@ -0,0 +1,352 @@ +from __future__ import annotations + +import logging +from dataclasses import dataclass, field +from datetime import UTC, datetime +from typing import Any + +import httpx + +from client import KalshiClient +from config import ( + CATEGORY_KEYWORDS, + DEFAULT_WATCHED_TICKERS, + KALSHI_API_KEY, + KALSHI_BASE_URL, + KALSHI_CATEGORIES, + KALSHI_FEED_URL, + KALSHI_PRIVATE_KEY, + normalize_private_key, +) + +logger = logging.getLogger("kalshi.bg_worker") + +PRICE_CHANGE_THRESHOLD = 10 +FEED_ITEMS_PER_CYCLE = 3 + + +@dataclass +class BackgroundDigest: + generated_at: str + portfolio_summary: str = "" + price_alerts: list[dict[str, Any]] = 
field(default_factory=list) + settlement_alerts: list[dict[str, Any]] = field(default_factory=list) + order_updates: list[dict[str, Any]] = field(default_factory=list) + feed_items: list[dict[str, Any]] = field(default_factory=list) + error: str = "" + + +class KalshiBackgroundWorker: + def __init__(self) -> None: + if not KALSHI_API_KEY or not KALSHI_PRIVATE_KEY: + raise ValueError("Missing KALSHI_API_KEY or KALSHI_PRIVATE_KEY") + + self.client = KalshiClient( + api_key=KALSHI_API_KEY, + private_key_pem=normalize_private_key(KALSHI_PRIVATE_KEY), + base_url=KALSHI_BASE_URL, + ) + + self._last_prices: dict[str, int] = {} + self._last_order_ids: set[str] = set() + self._settled_tickers: set[str] = set() + self._watched_tickers: set[str] = set(DEFAULT_WATCHED_TICKERS) + self._is_seeded = False + self._categories: set[str] = KALSHI_CATEGORIES + self._seen_feed_events: set[str] = set() + self._feed_url_tickers: list[str] = self._parse_feed_url(KALSHI_FEED_URL) + + async def close(self) -> None: + try: + await self.client.close() + except Exception: + pass + + async def verify(self) -> tuple[bool, str]: + try: + data = await self.client.get_balance() + balance = int(data.get("balance", 0)) + return True, f"Kalshi auth OK, balance: {balance}c" + except httpx.HTTPStatusError as error: + return False, f"Kalshi API error: {error.response.status_code}" + except Exception as error: + return False, f"Kalshi verification failed: {error}" + + async def run_cycle(self) -> BackgroundDigest: + generated_at = datetime.now(UTC).replace(microsecond=0).isoformat() + + try: + balance_data = await self.client.get_balance() + except httpx.HTTPStatusError as error: + if error.response.status_code in {401, 403}: + return BackgroundDigest(generated_at=generated_at, error="auth_failure") + return BackgroundDigest(generated_at=generated_at, error=str(error)) + except Exception as error: + return BackgroundDigest(generated_at=generated_at, error=str(error)) + + balance = 
int(balance_data.get("balance", 0)) + portfolio_value = int(balance_data.get("portfolio_value", 0)) + + try: + positions_data = await self.client.get_positions(limit=100) + except httpx.HTTPStatusError as error: + if error.response.status_code in {401, 403}: + return BackgroundDigest(generated_at=generated_at, error="auth_failure") + return BackgroundDigest(generated_at=generated_at, error=str(error)) + except Exception as error: + return BackgroundDigest(generated_at=generated_at, error=str(error)) + + positions = positions_data.get("market_positions", []) + active_tickers: set[str] = set() + for position in positions: + count = int(position.get("position", 0)) + if count != 0: + ticker = (position.get("ticker") or "").strip().upper() + if ticker: + active_tickers.add(ticker) + + all_watched = self._watched_tickers | active_tickers + + try: + price_alerts = await self._check_price_changes(all_watched) + settlement_alerts = await self._check_settlements(active_tickers) + order_updates = await self._check_order_changes() + feed_items = await self._fetch_feed_items() + except httpx.HTTPStatusError as error: + if error.response.status_code in {401, 403}: + return BackgroundDigest(generated_at=generated_at, error="auth_failure") + return BackgroundDigest(generated_at=generated_at, error=str(error)) + except Exception as error: + return BackgroundDigest(generated_at=generated_at, error=str(error)) + + has_activity = balance > 0 or portfolio_value > 0 or active_tickers or all_watched + portfolio_summary = ( + f"Portfolio: ${balance/100:.2f} cash, ${portfolio_value/100:.2f} value. " + f"{len(active_tickers)} open positions. " + f"Watching {len(all_watched)} markets." 
+ ) if has_activity else "" + + if not self._is_seeded: + self._is_seeded = True + return BackgroundDigest( + generated_at=generated_at, + portfolio_summary=portfolio_summary, + feed_items=feed_items, + ) + + return BackgroundDigest( + generated_at=generated_at, + portfolio_summary=portfolio_summary, + price_alerts=price_alerts, + settlement_alerts=settlement_alerts, + order_updates=order_updates, + feed_items=feed_items, + ) + + async def _check_price_changes(self, tickers: set[str]) -> list[dict[str, Any]]: + alerts: list[dict[str, Any]] = [] + for ticker in tickers: + if not ticker: + continue + try: + data = await self.client.get_market(ticker) + market = data.get("market", {}) + yes_bid = int(market.get("yes_bid") or 0) + title = market.get("title", ticker) + + previous = self._last_prices.get(ticker) + self._last_prices[ticker] = yes_bid + + if previous is not None: + diff = yes_bid - previous + if abs(diff) >= PRICE_CHANGE_THRESHOLD: + alerts.append( + { + "ticker": ticker, + "title": title, + "previous_price": previous, + "current_price": yes_bid, + "change": diff, + "direction": "up" if diff > 0 else "down", + } + ) + except httpx.HTTPStatusError as error: + if error.response.status_code in {401, 403}: + raise + logger.warning("Failed to check price for %s", ticker, exc_info=True) + except Exception: + logger.warning("Failed to check price for %s", ticker, exc_info=True) + return alerts + + async def _check_settlements(self, active_tickers: set[str]) -> list[dict[str, Any]]: + alerts: list[dict[str, Any]] = [] + try: + data = await self.client.get_settlements(limit=20) + for settlement in data.get("settlements", []): + ticker = (settlement.get("ticker") or "").strip().upper() + # Prefer active position settlements, but allow all as fallback context. 
+ if active_tickers and ticker and ticker not in active_tickers: + continue + key = f"{ticker}:{settlement.get('settled_time', settlement.get('settled_at', ''))}" + if key in self._settled_tickers: + continue + self._settled_tickers.add(key) + + revenue = int(settlement.get("revenue") or 0) + alerts.append( + { + "ticker": ticker, + "revenue_cents": revenue, + "revenue_dollars": f"{revenue/100:.2f}", + "result": "profit" if revenue > 0 else ("loss" if revenue < 0 else "break-even"), + } + ) + except httpx.HTTPStatusError as error: + if error.response.status_code in {401, 403}: + raise + logger.warning("Failed to check settlements", exc_info=True) + except Exception: + logger.warning("Failed to check settlements", exc_info=True) + return alerts + + async def _check_order_changes(self) -> list[dict[str, Any]]: + alerts: list[dict[str, Any]] = [] + try: + data = await self.client.get_orders(status="resting", limit=100) + current_ids = { + order.get("order_id", "") + for order in data.get("orders", []) + if order.get("order_id") + } + + if self._last_order_ids: + filled_or_canceled = self._last_order_ids - current_ids + new_orders = current_ids - self._last_order_ids + + for order_id in filled_or_canceled: + alerts.append({"order_id": order_id, "change": "filled_or_canceled"}) + for order_id in new_orders: + alerts.append({"order_id": order_id, "change": "new_resting"}) + + self._last_order_ids = current_ids + except httpx.HTTPStatusError as error: + if error.response.status_code in {401, 403}: + raise + logger.warning("Failed to check order changes", exc_info=True) + except Exception: + logger.warning("Failed to check order changes", exc_info=True) + return alerts + + async def _fetch_feed_items(self) -> list[dict[str, Any]]: + items: list[dict[str, Any]] = [] + try: + data = await self.client.get_events( + status="open", with_nested_markets=True, limit=30, + ) + events = data.get("events", []) + + candidates: list[dict[str, Any]] = [] + for event in events: + 
event_ticker = (event.get("event_ticker") or "").strip() + if not event_ticker or event_ticker in self._seen_feed_events: + continue + + title = event.get("title", "") + markets = event.get("markets") or [] + total_volume = sum(int(m.get("volume") or 0) for m in markets) + + matched = self._match_categories(title) + score = total_volume + (1_000_000 if matched else 0) + tags = sorted(matched | {"trending"}) + + candidates.append({ + "event_ticker": event_ticker, + "title": title, + "categories": tags, + "total_volume": total_volume, + "market_count": len(markets), + "top_markets": self._format_top_markets(markets), + "_score": score, + }) + + candidates.sort(key=lambda e: e["_score"], reverse=True) + + for c in candidates[:FEED_ITEMS_PER_CYCLE]: + del c["_score"] + self._seen_feed_events.add(c["event_ticker"]) + items.append(c) + except httpx.HTTPStatusError as error: + if error.response.status_code in {401, 403}: + raise + logger.warning("Failed to fetch feed items", exc_info=True) + except Exception: + logger.warning("Failed to fetch feed items", exc_info=True) + + for ticker in self._feed_url_tickers: + if ticker in self._seen_feed_events: + continue + try: + data = await self.client.get_event(ticker, with_nested_markets=True) + event = data.get("event") + if event: + markets = data.get("markets") or [] + self._seen_feed_events.add(ticker) + items.append({ + "event_ticker": ticker, + "title": event.get("title", ticker), + "categories": ["followed"], + "total_volume": sum(int(m.get("volume") or 0) for m in markets), + "market_count": len(markets), + "top_markets": self._format_top_markets(markets), + }) + except httpx.HTTPStatusError as error: + if error.response.status_code in {401, 403}: + raise + logger.debug("URL ticker %s not found as event", ticker) + except Exception: + logger.debug("URL ticker %s not found as event", ticker) + + if len(self._seen_feed_events) > 500: + self._seen_feed_events.clear() + + return items + + def _match_categories(self, text: 
str) -> set[str]: + matched: set[str] = set() + text_lower = text.lower() + for category in self._categories: + if category == "trending": + continue + keywords = CATEGORY_KEYWORDS.get(category, []) + for keyword in keywords: + if keyword in text_lower: + matched.add(category) + break + return matched + + @staticmethod + def _format_top_markets(markets: list[dict[str, Any]]) -> list[dict[str, Any]]: + sorted_markets = sorted( + markets, key=lambda m: int(m.get("volume") or 0), reverse=True, + ) + return [ + { + "ticker": m.get("ticker", ""), + "title": m.get("title", ""), + "yes_bid": m.get("yes_bid"), + "volume": m.get("volume", 0), + } + for m in sorted_markets[:3] + ] + + @staticmethod + def _parse_feed_url(url: str) -> list[str]: + if not url: + return [] + from urllib.parse import urlparse + + path = urlparse(url).path.strip("/") + parts = [p for p in path.split("/") if p] + if parts: + return [parts[-1].upper()] + return [] diff --git a/example-apps/kalshi/client.py b/example-apps/kalshi/client.py new file mode 100755 index 0000000..48afbca --- /dev/null +++ b/example-apps/kalshi/client.py @@ -0,0 +1,290 @@ +#!/usr/bin/env python3 +"""Async Kalshi API client using API key + RSA-PSS authentication.""" + +from __future__ import annotations + +import base64 +import time +from typing import Any, Dict, Optional +from urllib.parse import urlparse + +import httpx +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import padding + + +class KalshiClient: + """Minimal async client for Kalshi REST endpoints used by the MCP tools.""" + + def __init__( + self, + api_key: str, + private_key_pem: str, + base_url: str, + timeout: float = 30.0, + ) -> None: + self._api_key = api_key + self._base_url = base_url.rstrip("/") + self._private_key = serialization.load_pem_private_key( + private_key_pem.encode("utf-8"), + password=None, + ) + self._http = httpx.AsyncClient(timeout=timeout) + + def 
_build_auth_headers(self, method: str, path: str) -> Dict[str, str]: + timestamp = str(int(time.time() * 1000)) + message = f"{timestamp}{method.upper()}{path}".encode("utf-8") + + signature = self._private_key.sign( + message, + padding.PSS( + mgf=padding.MGF1(hashes.SHA256()), + salt_length=hashes.SHA256().digest_size, + ), + hashes.SHA256(), + ) + signature_b64 = base64.b64encode(signature).decode("utf-8") + + return { + "KALSHI-ACCESS-KEY": self._api_key, + "KALSHI-ACCESS-SIGNATURE": signature_b64, + "KALSHI-ACCESS-TIMESTAMP": timestamp, + } + + async def _request( + self, + method: str, + path: str, + *, + params: Optional[Dict[str, Any]] = None, + json_body: Optional[Dict[str, Any]] = None, + ) -> Dict[str, Any]: + url = f"{self._base_url}{path}" + parsed = urlparse(url) + headers = self._build_auth_headers(method, parsed.path) + clean_params = ( + {k: v for k, v in params.items() if v is not None} + if params is not None + else None + ) + clean_json = ( + {k: v for k, v in json_body.items() if v is not None} + if json_body is not None + else None + ) + + response = await self._http.request( + method=method.upper(), + url=url, + params=clean_params, + json=clean_json, + headers=headers, + ) + response.raise_for_status() + return response.json() + + async def close(self) -> None: + try: + await self._http.aclose() + except Exception: + pass + + async def get_markets( + self, + *, + limit: Optional[int] = None, + cursor: Optional[str] = None, + event_ticker: Optional[str] = None, + series_ticker: Optional[str] = None, + status: Optional[str] = None, + tickers: Optional[str] = None, + ) -> Dict[str, Any]: + return await self._request( + "GET", + "/markets", + params={ + "limit": limit, + "cursor": cursor, + "event_ticker": event_ticker, + "series_ticker": series_ticker, + "status": status, + "tickers": tickers, + }, + ) + + async def get_market(self, ticker: str) -> Dict[str, Any]: + return await self._request("GET", f"/markets/{ticker}") + + async def 
get_market_orderbook( + self, + ticker: str, + *, + depth: Optional[int] = None, + ) -> Dict[str, Any]: + return await self._request( + "GET", + f"/markets/{ticker}/orderbook", + params={"depth": depth}, + ) + + async def get_trades( + self, + *, + limit: Optional[int] = None, + cursor: Optional[str] = None, + ticker: Optional[str] = None, + min_ts: Optional[int] = None, + max_ts: Optional[int] = None, + ) -> Dict[str, Any]: + return await self._request( + "GET", + "/markets/trades", + params={ + "limit": limit, + "cursor": cursor, + "ticker": ticker, + "min_ts": min_ts, + "max_ts": max_ts, + }, + ) + + async def get_events( + self, + *, + limit: Optional[int] = None, + cursor: Optional[str] = None, + with_nested_markets: Optional[bool] = None, + status: Optional[str] = None, + series_ticker: Optional[str] = None, + ) -> Dict[str, Any]: + return await self._request( + "GET", + "/events", + params={ + "limit": limit, + "cursor": cursor, + "with_nested_markets": with_nested_markets, + "status": status, + "series_ticker": series_ticker, + }, + ) + + async def get_event( + self, + event_ticker: str, + *, + with_nested_markets: Optional[bool] = None, + ) -> Dict[str, Any]: + return await self._request( + "GET", + f"/events/{event_ticker}", + params={"with_nested_markets": with_nested_markets}, + ) + + async def get_balance(self) -> Dict[str, Any]: + return await self._request("GET", "/portfolio/balance") + + async def get_positions( + self, + *, + cursor: Optional[str] = None, + limit: Optional[int] = None, + count_filter: Optional[str] = None, + ticker: Optional[str] = None, + event_ticker: Optional[str] = None, + ) -> Dict[str, Any]: + return await self._request( + "GET", + "/portfolio/positions", + params={ + "cursor": cursor, + "limit": limit, + "count_filter": count_filter, + "ticker": ticker, + "event_ticker": event_ticker, + }, + ) + + async def get_orders( + self, + *, + ticker: Optional[str] = None, + event_ticker: Optional[str] = None, + min_ts: Optional[int] 
= None, + max_ts: Optional[int] = None, + status: Optional[str] = None, + limit: Optional[int] = None, + cursor: Optional[str] = None, + ) -> Dict[str, Any]: + return await self._request( + "GET", + "/portfolio/orders", + params={ + "ticker": ticker, + "event_ticker": event_ticker, + "min_ts": min_ts, + "max_ts": max_ts, + "status": status, + "limit": limit, + "cursor": cursor, + }, + ) + + async def create_order(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return await self._request("POST", "/portfolio/orders", json_body=payload) + + async def cancel_order(self, order_id: str) -> Dict[str, Any]: + return await self._request("DELETE", f"/portfolio/orders/{order_id}") + + async def batch_cancel_orders(self, order_ids: list[str]) -> Dict[str, Any]: + return await self._request( + "DELETE", + "/portfolio/orders/batched", + json_body={"ids": order_ids}, + ) + + async def get_fills( + self, + *, + ticker: Optional[str] = None, + order_id: Optional[str] = None, + min_ts: Optional[int] = None, + max_ts: Optional[int] = None, + limit: Optional[int] = None, + cursor: Optional[str] = None, + ) -> Dict[str, Any]: + return await self._request( + "GET", + "/portfolio/fills", + params={ + "ticker": ticker, + "order_id": order_id, + "min_ts": min_ts, + "max_ts": max_ts, + "limit": limit, + "cursor": cursor, + }, + ) + + async def get_settlements( + self, + *, + limit: Optional[int] = None, + cursor: Optional[str] = None, + ticker: Optional[str] = None, + event_ticker: Optional[str] = None, + min_ts: Optional[int] = None, + max_ts: Optional[int] = None, + ) -> Dict[str, Any]: + return await self._request( + "GET", + "/portfolio/settlements", + params={ + "limit": limit, + "cursor": cursor, + "ticker": ticker, + "event_ticker": event_ticker, + "min_ts": min_ts, + "max_ts": max_ts, + }, + ) diff --git a/example-apps/kalshi/config.py b/example-apps/kalshi/config.py new file mode 100755 index 0000000..92bf0f2 --- /dev/null +++ b/example-apps/kalshi/config.py @@ -0,0 +1,83 @@ 
+"""Configuration for the Kalshi Truffle app.""" + +from __future__ import annotations + +import os + +KALSHI_API_KEY: str = os.getenv("KALSHI_API_KEY", "") +KALSHI_PRIVATE_KEY: str = os.getenv("KALSHI_PRIVATE_KEY", "") +KALSHI_BASE_URL: str = os.getenv( + "KALSHI_BASE_PATH", + "https://api.elections.kalshi.com/trade-api/v2", +) + +DEFAULT_WATCHED_TICKERS: list[str] = [] + +KALSHI_CATEGORIES_RAW: str = os.getenv("KALSHI_CATEGORIES", "") +KALSHI_FEED_URL: str = os.getenv("KALSHI_FEED_URL", "").strip() + +CATEGORY_KEYWORDS: dict[str, list[str]] = { + "politics": [ + "president", "election", "nominee", "senate", "house", "governor", + "congress", "democrat", "republican", "vote", "party", "cabinet", + "trump", "biden", "vance", + ], + "sports": [ + "nfl", "nba", "mlb", "nhl", "super bowl", "world series", + "championship", "ufc", "boxing", "playoffs", "soccer", "fifa", + ], + "culture": [ + "oscar", "grammy", "emmy", "movie", "film", "music", + "celebrity", "award", "entertainment", "streaming", + ], + "crypto": [ + "bitcoin", "btc", "ethereum", "eth", "crypto", "solana", + "dogecoin", "blockchain", + ], + "climate": [ + "temperature", "weather", "hurricane", "climate", "wildfire", + "flood", "drought", "storm", "tornado", + ], + "economics": [ + "gdp", "inflation", "cpi", "fed", "interest rate", "unemployment", + "recession", "gas price", "oil", "spending", "treasury", + ], + "mentions": [ + "mention", "say", "speech", "briefing", "address", + "state of the union", + ], + "companies": [ + "company", "stock", "ipo", "acquisition", "merger", "earnings", + "tesla", "apple", "google", "amazon", "meta", + ], + "financials": [ + "s&p", "dow", "nasdaq", "index", "bond", "yield", "forex", + "close price", + ], + "tech & science": [ + "ai", "technology", "science", "space", "nasa", "launch", + "starship", "openai", "quantum", + ], +} + + +def parse_categories(raw: str) -> set[str]: + categories: set[str] = {"trending"} + if not raw.strip(): + return categories + for cat in 
raw.split(","): + cleaned = cat.strip().lower() + if cleaned in CATEGORY_KEYWORDS: + categories.add(cleaned) + return categories + + +KALSHI_CATEGORIES: set[str] = parse_categories(KALSHI_CATEGORIES_RAW) + + +def normalize_private_key(raw: str) -> str: + """Normalize a pasted PEM key from env text fields.""" + key = (raw or "").strip() + if "\\n" in key: + key = key.replace("\\n", "\n") + return key diff --git a/example-apps/kalshi/icon.png b/example-apps/kalshi/icon.png new file mode 100644 index 0000000..7cf1710 Binary files /dev/null and b/example-apps/kalshi/icon.png differ diff --git a/example-apps/kalshi/kalshi_background.py b/example-apps/kalshi/kalshi_background.py new file mode 100644 index 0000000..9128996 --- /dev/null +++ b/example-apps/kalshi/kalshi_background.py @@ -0,0 +1,150 @@ +from __future__ import annotations + +import asyncio +import atexit +import logging +import sys + +import httpx + +from app_runtime.background import BackgroundRunContext, run_background +from truffle.app.background_pb2 import BackgroundContext + +from bg_worker import KalshiBackgroundWorker + +logger = logging.getLogger("kalshi.background") +logger.setLevel(logging.INFO) + +_worker: KalshiBackgroundWorker | None = None +_loop: asyncio.AbstractEventLoop | None = None + + +def _is_verify_mode() -> bool: + return bool(sys.argv and len(sys.argv) > 1 and "verify" in sys.argv[1].lower()) + + +def _run(coro): + global _loop + if _loop is None or _loop.is_closed(): + _loop = asyncio.new_event_loop() + return _loop.run_until_complete(coro) + + +def _ensure_worker() -> KalshiBackgroundWorker: + global _worker + if _worker is None: + _worker = KalshiBackgroundWorker() + return _worker + + +async def _report_auth_failure(description: str) -> None: + from app_runtime import AppRuntimeErrorType, report_app_error + + await report_app_error( + error_message=f"Kalshi authentication failure: {description}", + error_type=AppRuntimeErrorType.APP_ERROR_AUTH, + needs_intervention=True, + 
is_fatal=False, + ) + + +def _submit(ctx: BackgroundRunContext, content: str, priority: int) -> None: + ctx.bg.submit_context(content=content, uris=[], priority=priority) + + +def kalshi_ambient(ctx: BackgroundRunContext) -> None: + worker = _ensure_worker() + + try: + result = _run(worker.run_cycle()) + except httpx.HTTPStatusError as error: + logger.exception("Kalshi API error in background cycle") + if error.response.status_code in {401, 403}: + try: + _run(_report_auth_failure(f"API returned {error.response.status_code}")) + except Exception: + logger.exception("Failed to report auth failure") + return + except Exception: + logger.exception("Kalshi background cycle crashed") + return + + if result.error: + logger.error("Kalshi background cycle failed", extra={"error": result.error}) + if result.error == "auth_failure": + try: + _run(_report_auth_failure("API returned 401/403")) + except Exception: + logger.exception("Failed to report auth failure") + return + + if result.portfolio_summary: + _submit(ctx, result.portfolio_summary, BackgroundContext.PRIORITY_LOW) + + for alert in result.price_alerts: + content = ( + f"Price alert: {alert['title']} ({alert['ticker']}) moved " + f"{alert['direction']} {abs(alert['change'])}c " + f"(was {alert['previous_price']}c, now {alert['current_price']}c)" + ) + _submit(ctx, content, BackgroundContext.PRIORITY_HIGH) + + for alert in result.settlement_alerts: + content = ( + f"Market settled: {alert['ticker']} β€” {alert['result']} " + f"(${alert['revenue_dollars']})" + ) + _submit(ctx, content, BackgroundContext.PRIORITY_HIGH) + + for update in result.order_updates: + content = f"Order {update['order_id']}: {update['change']}" + _submit(ctx, content, BackgroundContext.PRIORITY_DEFAULT) + + for item in result.feed_items: + top = item.get("top_markets", []) + top_str = ", ".join( + f"{m['title']} ({m.get('yes_bid', '?')}Β’)" for m in top[:2] + ) + content = ( + f"Kalshi [{', '.join(item.get('categories', []))}]: " + 
f"{item['title']} β€” " + f"{item['total_volume']:,} vol, " + f"{item['market_count']} markets" + ) + if top_str: + content += f" β€” {top_str}" + _submit(ctx, content, BackgroundContext.PRIORITY_LOW) + + +def verify() -> int: + worker = _ensure_worker() + ok, message = _run(worker.verify()) + if ok: + logger.info(message) + return 0 + logger.error(message) + return 1 + + +def _cleanup() -> None: + global _worker + global _loop + + if _worker is not None: + try: + _run(_worker.close()) + except Exception: + logger.exception("Failed to close Kalshi background worker") + finally: + _worker = None + + if _loop is not None and not _loop.is_closed(): + _loop.close() + _loop = None + + +if __name__ == "__main__": + atexit.register(_cleanup) + if _is_verify_mode(): + sys.exit(verify()) + run_background(kalshi_ambient) diff --git a/example-apps/kalshi/kalshi_foreground.py b/example-apps/kalshi/kalshi_foreground.py new file mode 100755 index 0000000..2e52681 --- /dev/null +++ b/example-apps/kalshi/kalshi_foreground.py @@ -0,0 +1,903 @@ +#!/usr/bin/env python3 +"""Kalshi MCP server (streamable HTTP) for foreground Truffle app.""" + +from __future__ import annotations + +import atexit +import asyncio +import logging +from typing import Any + +import httpx +from app_runtime.mcp import create_mcp_server, run_mcp_server + +from client import KalshiClient +from config import ( + KALSHI_API_KEY, + KALSHI_BASE_URL, + KALSHI_PRIVATE_KEY, + normalize_private_key, +) + +logger = logging.getLogger("kalshi.foreground") +logger.setLevel(logging.INFO) + +mcp = create_mcp_server("kalshi") + +_api: KalshiClient | None = None +_watched_tickers: set[str] = set() + + +def _error(message: str, **extra: Any) -> dict[str, Any]: + payload: dict[str, Any] = {"status": "error", "message": message} + payload.update(extra) + return payload + + +def _success(message: str, **extra: Any) -> dict[str, Any]: + payload: dict[str, Any] = {"status": "success", "message": message} + payload.update(extra) + 
return payload + + +async def _report_auth_failure(description: str) -> None: + from app_runtime import AppRuntimeErrorType, report_app_error + + await report_app_error( + error_message=f"Kalshi authentication failure: {description}", + error_type=AppRuntimeErrorType.APP_ERROR_AUTH, + needs_intervention=True, + is_fatal=False, + ) + + +def _validate_range(name: str, value: int | None, min_value: int, max_value: int) -> None: + if value is None: + return + if value < min_value or value > max_value: + raise ValueError(f"{name} must be between {min_value} and {max_value}") + + +def _get_api() -> KalshiClient: + global _api + if _api is not None: + return _api + + if not KALSHI_API_KEY: + raise ValueError("Missing KALSHI_API_KEY") + if not KALSHI_PRIVATE_KEY: + raise ValueError("Missing KALSHI_PRIVATE_KEY") + + _api = KalshiClient( + api_key=KALSHI_API_KEY, + private_key_pem=normalize_private_key(KALSHI_PRIVATE_KEY), + base_url=KALSHI_BASE_URL, + ) + return _api + + +async def _handle_api_error(error: httpx.HTTPStatusError) -> dict[str, Any]: + status = error.response.status_code + if status in {401, 403}: + try: + await _report_auth_failure(f"Kalshi API returned {status}") + except Exception: + logger.exception("Failed to report Kalshi auth failure") + response_text = "" + try: + response_text = error.response.text + except Exception: + response_text = "" + return _error( + f"Kalshi API error: {status}", + response=response_text[:1500], + ) + + +async def _validate_order( + *, + ticker: str, + side: str, + action: str, + count: int, + price: int | None, +) -> dict[str, Any]: + api = _get_api() + errors: list[str] = [] + warnings: list[str] = [] + estimated_cost = 0 + current_balance = 0 + + market_resp = await api.get_market(ticker) + market = market_resp.get("market") or {} + market_status = str(market.get("status", "unknown")).lower() + # Kalshi market data can surface tradable markets as "active". 
+ # Keep strict blocks for clearly non-tradable statuses, and let create_order + # API decide for ambiguous/unknown status values. + if market_status in {"closed", "settled", "paused", "unopened"}: + errors.append(f"Market {ticker} is {market.get('status')}, not open for trading") + elif market_status not in {"open", "active"}: + warnings.append( + f"Market {ticker} has unrecognized status '{market.get('status')}'. " + "Proceeding to API submission for final tradability check." + ) + + if side == "yes": + current_price = market.get("yes_ask") if action == "buy" else market.get("yes_bid") + else: + current_price = market.get("no_ask") if action == "buy" else market.get("no_bid") + + if price is not None and current_price is not None: + diff = abs(int(price) - int(current_price)) + if diff > 20: + warnings.append( + f"Provided price ({price}c) is {diff}c away from market ({current_price}c)" + ) + + balance_resp = await api.get_balance() + current_balance = int(balance_resp.get("balance") or 0) + + if action == "buy": + effective_price = int(price if price is not None else (current_price or 50)) + estimated_cost = count * effective_price + if estimated_cost > current_balance: + errors.append( + f"Insufficient balance: need {estimated_cost}c, have {current_balance}c" + ) + + if count <= 0: + errors.append("Order quantity must be positive") + if count > 1000: + warnings.append("Large order size may have poor execution") + + if price is not None and (price < 1 or price > 99): + errors.append("Price must be between 1 and 99 cents") + + return { + "valid": len(errors) == 0, + "errors": errors, + "warnings": warnings, + "estimated_cost": estimated_cost, + "current_balance": current_balance, + "market_status": market.get("status"), + "market_open_time": market.get("open_time"), + "market_close_time": market.get("close_time"), + } + + +@mcp.tool( + "get_markets", + description=( + "Search and list Kalshi prediction markets. 
" + "Parameters: limit (int, max 1000), cursor (str, pagination), event_ticker (str, filter by event), " + "series_ticker (str, filter by series), status (str: unopened|open|paused|closed|settled), " + "tickers (str, comma-separated). " + "Returns: JSON with fields: markets, count, cursor. " + "IMPORTANT: use cursor for pagination over large result sets. " + "Example: get_markets(status='open', limit=20)." + ), +) +async def get_markets( + limit: int | None = None, + cursor: str | None = None, + event_ticker: str | None = None, + series_ticker: str | None = None, + status: str | None = None, + tickers: str | None = None, +) -> dict[str, Any]: + """List and search Kalshi prediction markets.""" + try: + _validate_range("limit", limit, 1, 1000) + if status and status not in {"unopened", "open", "paused", "closed", "settled"}: + return _error("status must be one of unopened|open|paused|closed|settled") + + api = _get_api() + data = await api.get_markets( + limit=limit, + cursor=cursor, + event_ticker=event_ticker, + series_ticker=series_ticker, + status=status, + tickers=tickers, + ) + markets = data.get("markets", []) + return _success("Markets fetched", markets=markets, count=len(markets), cursor=data.get("cursor")) + except httpx.HTTPStatusError as error: + return await _handle_api_error(error) + except Exception as error: + return _error(str(error)) + + +@mcp.tool( + "get_market", + description=( + "Get full details for a single market by ticker. " + "Parameters: ticker (str, required). " + "Returns: JSON with fields: market (ticker, title, yes_bid, yes_ask, no_bid, no_ask, volume, open_interest, status). " + "IMPORTANT: pass exact market ticker. " + "Example: get_market(ticker='PRES-2028-DEM')." 
+ ), +) +async def get_market(ticker: str) -> dict[str, Any]: + """Get details for a specific market ticker.""" + try: + api = _get_api() + data = await api.get_market(ticker) + market = data.get("market") + if not market: + return _error(f"Market not found: {ticker}") + return _success("Market fetched", market=market) + except httpx.HTTPStatusError as error: + return await _handle_api_error(error) + except Exception as error: + return _error(str(error)) + + +@mcp.tool( + "get_orderbook", + description=( + "Get the orderbook (bid/ask levels) for a market. " + "Parameters: ticker (str, required), depth (int, default None, max 100). " + "Returns: JSON with fields: yes_bids, no_bids, summary. " + "IMPORTANT: use small depth for faster responses. " + "Example: get_orderbook(ticker='PRES-2028-DEM', depth=10)." + ), +) +async def get_orderbook(ticker: str, depth: int | None = None) -> dict[str, Any]: + """Get orderbook levels for a market.""" + try: + _validate_range("depth", depth, 1, 100) + api = _get_api() + data = await api.get_market_orderbook(ticker, depth=depth) + orderbook = data.get("orderbook") + if not orderbook: + return _error(f"Orderbook not found: {ticker}") + yes_bids = orderbook.get("yes_dollars") or [] + no_bids = orderbook.get("no_dollars") or [] + return _success( + "Orderbook fetched", + ticker=ticker, + yes_bids=[{"price_dollars": x[0], "quantity": x[1]} for x in yes_bids], + no_bids=[{"price_dollars": x[0], "quantity": x[1]} for x in no_bids], + summary={ + "yes_levels": len(yes_bids), + "no_levels": len(no_bids), + "best_yes_bid": yes_bids[-1][0] if yes_bids else None, + "best_no_bid": no_bids[-1][0] if no_bids else None, + }, + ) + except httpx.HTTPStatusError as error: + return await _handle_api_error(error) + except Exception as error: + return _error(str(error)) + + +@mcp.tool( + "get_trades", + description=( + "Get recent public trades for a market or globally. 
" + "Parameters: ticker (str), limit (int, max 1000), cursor (str), min_ts (int), max_ts (int). " + "Returns: JSON with fields: trades, count, cursor. " + "IMPORTANT: combine ticker + time filters for focused analysis. " + "Example: get_trades(ticker='PRES-2028-DEM', limit=50)." + ), +) +async def get_trades( + ticker: str | None = None, + limit: int | None = None, + cursor: str | None = None, + min_ts: int | None = None, + max_ts: int | None = None, +) -> dict[str, Any]: + """Get recent trades across markets or for a specific ticker.""" + try: + _validate_range("limit", limit, 1, 1000) + api = _get_api() + data = await api.get_trades( + ticker=ticker, + limit=limit, + cursor=cursor, + min_ts=min_ts, + max_ts=max_ts, + ) + trades = data.get("trades", []) + return _success("Trades fetched", trades=trades, count=len(trades), cursor=data.get("cursor")) + except httpx.HTTPStatusError as error: + return await _handle_api_error(error) + except Exception as error: + return _error(str(error)) + + +@mcp.tool( + "get_events", + description=( + "List Kalshi events (each event can contain multiple markets). " + "Parameters: limit (int, max 200), cursor (str), status (str: open|closed|settled), " + "series_ticker (str), with_nested_markets (bool). " + "Returns: JSON with fields: events, count, cursor. " + "IMPORTANT: set with_nested_markets=true to include markets inline. " + "Example: get_events(status='open', with_nested_markets=True)." 
+ ), +) +async def get_events( + limit: int | None = None, + cursor: str | None = None, + status: str | None = None, + series_ticker: str | None = None, + with_nested_markets: bool | None = None, +) -> dict[str, Any]: + """List Kalshi events.""" + try: + _validate_range("limit", limit, 1, 200) + if status and status not in {"open", "closed", "settled"}: + return _error("status must be one of open|closed|settled") + + api = _get_api() + data = await api.get_events( + limit=limit, + cursor=cursor, + status=status, + series_ticker=series_ticker, + with_nested_markets=with_nested_markets, + ) + events = data.get("events", []) + return _success("Events fetched", events=events, count=len(events), cursor=data.get("cursor")) + except httpx.HTTPStatusError as error: + return await _handle_api_error(error) + except Exception as error: + return _error(str(error)) + + +@mcp.tool( + "get_event", + description=( + "Get details for a single event with optional nested markets. " + "Parameters: event_ticker (str, required), with_nested_markets (bool, default None). " + "Returns: JSON with fields: event, markets, markets_count. " + "IMPORTANT: use event ticker, not market ticker. " + "Example: get_event(event_ticker='FED-RATE-25MAR', with_nested_markets=True)." + ), +) +async def get_event(event_ticker: str, with_nested_markets: bool | None = None) -> dict[str, Any]: + """Get detailed info for a specific event ticker.""" + try: + api = _get_api() + data = await api.get_event(event_ticker, with_nested_markets=with_nested_markets) + event = data.get("event") + if not event: + return _error(f"Event not found: {event_ticker}") + markets = data.get("markets") or [] + return _success("Event fetched", event=event, markets=markets, markets_count=len(markets)) + except httpx.HTTPStatusError as error: + return await _handle_api_error(error) + except Exception as error: + return _error(str(error)) + + +@mcp.tool( + "get_balance", + description=( + "Get account cash balance and portfolio value. 
" + "Parameters: none. " + "Returns: JSON with fields: balance_cents, balance_dollars, portfolio_value_cents, portfolio_value_dollars. " + "IMPORTANT: values are cents from API and dollars as formatted strings. " + "Example: get_balance()." + ), +) +async def get_balance() -> dict[str, Any]: + """Get account balance and portfolio value.""" + try: + api = _get_api() + data = await api.get_balance() + balance = int(data.get("balance") or 0) + portfolio_value = int(data.get("portfolio_value") or 0) + return _success( + "Balance fetched", + balance_cents=balance, + portfolio_value_cents=portfolio_value, + balance_dollars=f"{balance / 100:.2f}", + portfolio_value_dollars=f"{portfolio_value / 100:.2f}", + ) + except httpx.HTTPStatusError as error: + return await _handle_api_error(error) + except Exception as error: + return _error(str(error)) + + +@mcp.tool( + "get_positions", + description=( + "Get current open positions. " + "Parameters: limit (int, max 100), cursor (str), ticker (str), event_ticker (str), " + "count_filter (str: position|total_traded). " + "Returns: JSON with fields: positions, summary, cursor. " + "IMPORTANT: summary.total_positions counts only non-zero positions. " + "Example: get_positions(limit=50)." 
+ ), +) +async def get_positions( + limit: int | None = None, + cursor: str | None = None, + ticker: str | None = None, + event_ticker: str | None = None, + count_filter: str | None = None, +) -> dict[str, Any]: + """Get current positions.""" + try: + _validate_range("limit", limit, 1, 100) + if count_filter and count_filter not in {"position", "total_traded"}: + return _error("count_filter must be one of position|total_traded") + + api = _get_api() + data = await api.get_positions( + limit=limit, + cursor=cursor, + ticker=ticker, + event_ticker=event_ticker, + count_filter=count_filter, + ) + positions = data.get("market_positions", []) + total_positions = len([p for p in positions if int(p.get("position", 0)) != 0]) + return _success( + "Positions fetched", + positions=positions, + summary={"total_positions": total_positions, "total_returned": len(positions)}, + cursor=data.get("cursor"), + ) + except httpx.HTTPStatusError as error: + return await _handle_api_error(error) + except Exception as error: + return _error(str(error)) + + +@mcp.tool( + "get_orders", + description=( + "Get order history or active resting orders. " + "Parameters: ticker (str), event_ticker (str), status (str: resting|canceled|executed), " + "limit (int, max 200), cursor (str), min_ts/max_ts (int). " + "Returns: JSON with fields: orders, summary, cursor. " + "IMPORTANT: use status='resting' to monitor currently open orders. " + "Example: get_orders(status='resting')." 
+ ), +) +async def get_orders( + ticker: str | None = None, + event_ticker: str | None = None, + status: str | None = None, + limit: int | None = None, + cursor: str | None = None, + min_ts: int | None = None, + max_ts: int | None = None, +) -> dict[str, Any]: + """Get order history and active orders.""" + try: + _validate_range("limit", limit, 1, 200) + if status and status not in {"resting", "canceled", "executed"}: + return _error("status must be one of resting|canceled|executed") + + api = _get_api() + data = await api.get_orders( + ticker=ticker, + event_ticker=event_ticker, + status=status, + limit=limit, + cursor=cursor, + min_ts=min_ts, + max_ts=max_ts, + ) + orders = data.get("orders", []) + return _success( + "Orders fetched", + orders=orders, + summary={ + "total": len(orders), + "resting": len([o for o in orders if o.get("status") == "resting"]), + "executed": len([o for o in orders if o.get("status") == "executed"]), + }, + cursor=data.get("cursor"), + ) + except httpx.HTTPStatusError as error: + return await _handle_api_error(error) + except Exception as error: + return _error(str(error)) + + +@mcp.tool( + "create_order", + description=( + "Place a new order on Kalshi (executes a real trade). " + "Parameters: ticker (str), side (yes|no), action (buy|sell), count (int), type (limit|market, default limit), " + "yes_price/no_price (int, cents 1-99), client_order_id (str), expiration_ts (int). " + "Returns: JSON with fields: order, warnings. " + "IMPORTANT: this tool pre-validates market status and available balance before submission. " + "Example: create_order(ticker='PRES-2028-DEM', side='yes', action='buy', count=10, type='limit', yes_price=55)." 
+ ), +) +async def create_order( + ticker: str, + side: str, + action: str, + count: int, + type: str = "limit", + yes_price: int | None = None, + no_price: int | None = None, + client_order_id: str | None = None, + expiration_ts: int | None = None, +) -> dict[str, Any]: + """Place a new order on Kalshi.""" + try: + if side not in {"yes", "no"}: + return _error("side must be yes or no") + if action not in {"buy", "sell"}: + return _error("action must be buy or sell") + if type not in {"limit", "market"}: + return _error("type must be limit or market") + + # Keep price inputs aligned to side to avoid ambiguous/invalid payloads. + if side == "yes" and no_price is not None: + return _error("For side='yes', provide yes_price only") + if side == "no" and yes_price is not None: + return _error("For side='no', provide no_price only") + if type == "limit": + if side == "yes" and yes_price is None: + return _error("For limit yes orders, provide yes_price") + if side == "no" and no_price is None: + return _error("For limit no orders, provide no_price") + + selected_price = yes_price if side == "yes" else no_price + validation = await _validate_order( + ticker=ticker, + side=side, + action=action, + count=count, + price=selected_price, + ) + if not validation["valid"]: + return _error( + "Order validation failed", + errors=validation["errors"], + warnings=validation["warnings"], + estimated_cost=validation["estimated_cost"], + current_balance=validation["current_balance"], + ) + + payload = { + "ticker": ticker, + "side": side, + "action": action, + "count": count, + "type": type, + "yes_price": yes_price, + "no_price": no_price, + "client_order_id": client_order_id, + "expiration_ts": expiration_ts, + } + payload = {k: v for k, v in payload.items() if v is not None} + + api = _get_api() + data = await api.create_order(payload) + order = data.get("order") + if not order: + return _error("Order created but no order details were returned") + + return _success( + "Order created 
successfully", + order=order, + warnings=validation["warnings"], + ) + except httpx.HTTPStatusError as error: + return await _handle_api_error(error) + except Exception as error: + return _error(str(error)) + + +@mcp.tool( + "cancel_order", + description=( + "Cancel a single resting order. " + "Parameters: order_id (str, required). " + "Returns: JSON with fields: order. " + "IMPORTANT: order must still be cancelable (resting). " + "Example: cancel_order(order_id='abc-123-def')." + ), +) +async def cancel_order(order_id: str) -> dict[str, Any]: + """Cancel a resting order by order ID.""" + try: + api = _get_api() + data = await api.cancel_order(order_id) + order = data.get("order") + if not order: + return _error(f"Order {order_id} canceled but no order details were returned") + return _success("Order canceled successfully", order=order) + except httpx.HTTPStatusError as error: + return await _handle_api_error(error) + except Exception as error: + return _error(str(error)) + + +@mcp.tool( + "batch_cancel_orders", + description=( + "Cancel up to 20 orders in one request. " + "Parameters: order_ids (array of strings, required, max 20). " + "Returns: JSON with fields: cancelled_count, requested_count, cancelled_orders. " + "IMPORTANT: include only currently resting orders for best results. " + "Example: batch_cancel_orders(order_ids=['id1','id2'])." 
+ ), +) +async def batch_cancel_orders(order_ids: list[str]) -> dict[str, Any]: + """Cancel up to 20 orders in one request.""" + try: + if not order_ids: + return _error("order_ids must contain at least one order ID") + if len(order_ids) > 20: + return _error("order_ids cannot exceed 20") + + api = _get_api() + data = await api.batch_cancel_orders(order_ids) + orders = data.get("orders", []) + return _success( + "Batch cancel executed", + cancelled_count=len(orders), + requested_count=len(order_ids), + cancelled_orders=orders, + ) + except httpx.HTTPStatusError as error: + return await _handle_api_error(error) + except Exception as error: + return _error(str(error)) + + +@mcp.tool( + "get_fills", + description=( + "Get fill history (executed trades on your orders). " + "Parameters: ticker (str), order_id (str), limit (int, max 200), cursor (str), min_ts/max_ts (int). " + "Returns: JSON with fields: fills, summary, cursor. " + "IMPORTANT: summary includes total volume and buy/sell counts. " + "Example: get_fills(ticker='PRES-2028-DEM', limit=20)." 
+ ), +) +async def get_fills( + ticker: str | None = None, + order_id: str | None = None, + limit: int | None = None, + cursor: str | None = None, + min_ts: int | None = None, + max_ts: int | None = None, +) -> dict[str, Any]: + """Get fill history (executed trades).""" + try: + _validate_range("limit", limit, 1, 200) + api = _get_api() + data = await api.get_fills( + ticker=ticker, + order_id=order_id, + limit=limit, + cursor=cursor, + min_ts=min_ts, + max_ts=max_ts, + ) + fills = data.get("fills", []) + return _success( + "Fills fetched", + fills=fills, + summary={ + "total": len(fills), + "total_volume": sum(int(f.get("count") or 0) for f in fills), + "buys": len([f for f in fills if f.get("action") == "buy"]), + "sells": len([f for f in fills if f.get("action") == "sell"]), + }, + cursor=data.get("cursor"), + ) + except httpx.HTTPStatusError as error: + return await _handle_api_error(error) + except Exception as error: + return _error(str(error)) + + +@mcp.tool( + "get_settlements", + description=( + "Get settlement history for resolved markets. " + "Parameters: ticker (str), event_ticker (str), limit (int, max 200), cursor (str), min_ts/max_ts (int). " + "Returns: JSON with fields: settlements, summary, cursor. " + "IMPORTANT: summary includes aggregate realized revenue. " + "Example: get_settlements(limit=20)." 
+ ), +) +async def get_settlements( + ticker: str | None = None, + event_ticker: str | None = None, + limit: int | None = None, + cursor: str | None = None, + min_ts: int | None = None, + max_ts: int | None = None, +) -> dict[str, Any]: + """Get settlement history.""" + try: + _validate_range("limit", limit, 1, 200) + api = _get_api() + data = await api.get_settlements( + ticker=ticker, + event_ticker=event_ticker, + limit=limit, + cursor=cursor, + min_ts=min_ts, + max_ts=max_ts, + ) + settlements = data.get("settlements", []) + total_revenue = sum(int(s.get("revenue") or 0) for s in settlements) + return _success( + "Settlements fetched", + settlements=settlements, + summary={ + "total": len(settlements), + "total_revenue_cents": total_revenue, + "total_revenue_dollars": f"{total_revenue / 100:.2f}", + "profitable_settlements": len([s for s in settlements if int(s.get("revenue") or 0) > 0]), + }, + cursor=data.get("cursor"), + ) + except httpx.HTTPStatusError as error: + return await _handle_api_error(error) + except Exception as error: + return _error(str(error)) + + +@mcp.tool( + "get_portfolio_summary", + description=( + "Get a complete portfolio overview in one call. " + "Parameters: none. " + "Returns: JSON with fields: balance_cents, balance_dollars, portfolio_value_cents, portfolio_value_dollars, positions, total_positions. " + "IMPORTANT: enriches positions with current market info when available. " + "Example: get_portfolio_summary()." 
+ ), +) +async def get_portfolio_summary() -> dict[str, Any]: + try: + api = _get_api() + balance_data = await api.get_balance() + positions_data = await api.get_positions(limit=100) + positions = positions_data.get("market_positions", []) + + enriched = [] + for pos in positions: + count = int(pos.get("position", 0)) + if count == 0: + continue + ticker = pos.get("ticker", "") + market_info = {} + try: + market_resp = await api.get_market(ticker) + market_info = market_resp.get("market", {}) + except Exception: + pass + enriched.append( + { + "ticker": ticker, + "title": market_info.get("title", ""), + "side": "yes" if count > 0 else "no", + "count": abs(count), + "yes_price": market_info.get("yes_bid"), + "no_price": market_info.get("no_bid"), + "status": market_info.get("status", "unknown"), + } + ) + + balance = int(balance_data.get("balance", 0)) + portfolio_value = int(balance_data.get("portfolio_value", 0)) + return _success( + f"Portfolio: ${balance/100:.2f} cash, ${portfolio_value/100:.2f} portfolio value, {len(enriched)} open positions", + balance_cents=balance, + balance_dollars=f"{balance/100:.2f}", + portfolio_value_cents=portfolio_value, + portfolio_value_dollars=f"{portfolio_value/100:.2f}", + positions=enriched, + total_positions=len(enriched), + ) + except httpx.HTTPStatusError as error: + return await _handle_api_error(error) + except Exception as error: + return _error(str(error)) + + +@mcp.tool( + "watchlist_manage", + description=( + "Manage foreground watchlist metadata for Kalshi tickers. " + "Parameters: action (list|add|remove|clear), tickers (array for add/remove). " + "Returns: JSON with fields: watched_tickers. " + "IMPORTANT: foreground and background run in separate processes; background monitoring is based on its own state and active positions. " + "Example: watchlist_manage(action='add', tickers=['PRES-2028-DEM'])." 
+ ), +) +async def watchlist_manage(action: str, tickers: list[str] | None = None) -> dict[str, Any]: + global _watched_tickers + normalized_action = action.strip().lower() + + if normalized_action == "list": + return _success( + f"Watching {len(_watched_tickers)} tickers", + watched_tickers=sorted(_watched_tickers), + ) + + if normalized_action == "add": + if not tickers: + return _error("tickers list required for add") + for ticker in tickers: + cleaned = ticker.strip().upper() + if cleaned: + _watched_tickers.add(cleaned) + return _success( + f"Added {len(tickers)} tickers, now watching {len(_watched_tickers)}", + watched_tickers=sorted(_watched_tickers), + ) + + if normalized_action == "remove": + if not tickers: + return _error("tickers list required for remove") + for ticker in tickers: + cleaned = ticker.strip().upper() + if cleaned: + _watched_tickers.discard(cleaned) + return _success( + f"Removed tickers, now watching {len(_watched_tickers)}", + watched_tickers=sorted(_watched_tickers), + ) + + if normalized_action == "clear": + _watched_tickers.clear() + return _success("Watchlist cleared", watched_tickers=[]) + + return _error(f"Unknown action: {action}. Use list, add, remove, or clear.") + + +@mcp.tool( + "kalshi_health", + description=( + "Check health of Kalshi API connection and credentials. " + "Parameters: none. " + "Returns: JSON with fields: api_healthy, balance_cents, balance_dollars (when healthy). " + "IMPORTANT: auth failures are reported to runtime as APP_ERROR_AUTH. " + "Example: kalshi_health()." 
+ ), +) +async def kalshi_health() -> dict[str, Any]: + try: + api = _get_api() + data = await api.get_balance() + balance = int(data.get("balance", 0)) + return _success( + "Kalshi API healthy", + api_healthy=True, + balance_cents=balance, + balance_dollars=f"{balance / 100:.2f}", + ) + except httpx.HTTPStatusError as error: + return await _handle_api_error(error) + except Exception as error: + return _error(str(error), api_healthy=False) + + +def _cleanup() -> None: + global _api + if _api is None: + return + try: + asyncio.run(_api.close()) + except Exception: + pass + _api = None + + +def main() -> None: + atexit.register(_cleanup) + run_mcp_server(mcp, logger) + + +if __name__ == "__main__": + main() diff --git a/example-apps/kalshi/truffile.yaml b/example-apps/kalshi/truffile.yaml new file mode 100644 index 0000000..fb81377 --- /dev/null +++ b/example-apps/kalshi/truffile.yaml @@ -0,0 +1,68 @@ +metadata: + name: Kalshi + bundle_id: org.deepshard.kalshi + description: | + Have Truffle Trade and monitor Kalshi prediction markets for you. + icon_file: ./icon.png + background: + process: + cmd: + - python + - kalshi_background.py + working_directory: / + environment: + PYTHONUNBUFFERED: "1" + # Required secrets for Kalshi API auth. + KALSHI_API_KEY: "REPLACE_WITH_KALSHI_API_KEY" + # PEM private key. Use literal block style to preserve newlines. + KALSHI_PRIVATE_KEY: | + REPLACE_WITH_KALSHI_PRIVATE_KEY_PEM + default_schedule: + type: interval + interval: + duration: 30m + prod_duration: 60m + schedule: + daily_window: "00:00-23:59" + foreground: + process: + cmd: + - python + - kalshi_foreground.py + working_directory: / + environment: + PYTHONUNBUFFERED: "1" + # Required secrets for Kalshi API auth. + KALSHI_API_KEY: "REPLACE_WITH_KALSHI_API_KEY" + # PEM private key. Use literal block style to preserve newlines. 
+ KALSHI_PRIVATE_KEY: | + REPLACE_WITH_KALSHI_PRIVATE_KEY_PEM + +steps: + # - name: Welcome + # type: welcome + # content: | + # Welcome to Kalshi for Truffle. + + # This app lets your Truffle trade prediction markets, monitor + # positions, and get alerts on price changes and settlements. + + - name: Install dependencies + type: bash + run: | + apk add --no-cache gcc musl-dev libffi-dev openssl-dev + pip install --no-cache-dir "httpx>=0.27.0" "cryptography>=42.0.0" + + - name: Copy application files + type: files + files: + - source: ./config.py + destination: ./config.py + - source: ./client.py + destination: ./client.py + - source: ./bg_worker.py + destination: ./bg_worker.py + - source: ./kalshi_foreground.py + destination: ./kalshi_foreground.py + - source: ./kalshi_background.py + destination: ./kalshi_background.py diff --git a/example-apps/reddit/icon.png b/example-apps/reddit/icon.png new file mode 100644 index 0000000..202faab Binary files /dev/null and b/example-apps/reddit/icon.png differ diff --git a/example-apps/ambient/reddit/icon.png b/example-apps/reddit/old.png similarity index 100% rename from example-apps/ambient/reddit/icon.png rename to example-apps/reddit/old.png diff --git a/example-apps/ambient/reddit/reddit.py b/example-apps/reddit/reddit.py similarity index 88% rename from example-apps/ambient/reddit/reddit.py rename to example-apps/reddit/reddit.py index 85e4372..4e5b37b 100644 --- a/example-apps/ambient/reddit/reddit.py +++ b/example-apps/reddit/reddit.py @@ -1,7 +1,8 @@ -from gourmet.ambient import run_ambient, AmbientContext, InferenceClient +from __future__ import annotations +from app_runtime.background import BackgroundRunContext, run_background, BackgroundContext +from app_runtime.abrasive.extract import extract_content_from_url, ExtractedContent +from app_runtime.abrasive.fetch import USER_AGENT -from abrasive.extract import extract_content_from_url, ExtractedContent -from abrasive.fetch import USER_AGENT from typing import 
Deque, List, Optional, Tuple, Any from collections import deque import logging @@ -39,6 +40,8 @@ def get_listing_url(self) -> str: def load_reddit_config_from_env() -> "RedditConfig": subreddits_str = os.getenv("SUBREDDITS", "").strip() user_feed_url = os.getenv("USER_FEED_URL", "").strip() + if user_feed_url.lower() in {"", "none", "null", "undefined", "n/a"}: + user_feed_url = "" subreddits: List[str] = [] @@ -163,6 +166,7 @@ def _fetch_listing(after: Optional[str] = None) -> Tuple[List[RedditPost], Optio for img in d['preview']['images']: if 'source' in img and 'url' in img['source']: image_urls.append(img['source']['url'].replace("&", "&")) + print("got image urls from preview:", image_urls) if not image_urls: thumb = d.get("thumbnail") @@ -353,7 +357,7 @@ def test_once(): print("article images:", post.images[0] if post and len(post.images) else "None") print("-----") -TESTING_MODE = False +TESTING_MODE = os.getenv("REDDIT_TEST_MODE", "0").lower() in ("1", "true", "yes") def run_test(): global reddit_config @@ -370,7 +374,7 @@ def run_test(): print("Exiting.") break -def process_reddit_item(ctx: AmbientContext, item: RedditPost): +def process_reddit_item(ctx: BackgroundRunContext, item: RedditPost): title, url, item_id, subreddit, image_urls = ( item.title, item.article_url, @@ -380,15 +384,25 @@ def process_reddit_item(ctx: AmbientContext, item: RedditPost): ) logger.info(f"Processing Reddit item {item_id}: {title} ({url}) {subreddit if subreddit else ''} {image_urls if image_urls else ''}") link_content, comments = get_content_for_reddit_item(item) - ctx.bg.post_to_feed( - title=link_content.title if link_content and link_content.title else title, - body=link_content.text if link_content and link_content.text else f'**{subreddit}** {item.domain} {item.score}', - src_uri=url, - media_uris=link_content.images if link_content and link_content.images else (image_urls if image_urls else []), - content_timestamp = link_content.date if link_content and 
link_content.date else item.created_utc + content_str = "" + if link_content and link_content.text: + content_str = f" {link_content.title or "unknown"} \n {link_content.text} \n {str(link_content.date) or ""} {str(link_content.source_name) or ""} " + post_str = ( + f"From Reddit: **{subreddit}** {item.domain} {item.score} points, {item.num_comments} comments\n" + f"\t Title: {title}, Link: {url}\n" + f"{content_str}\n" + f"{"top comments: " if comments else ""} {[str(c) for c in comments[:10]] if comments else ''}\n" + "image urls: " + (", ".join(image_urls) if image_urls else "None Found") ) + uris = [] + if url: + uris.append(url) + if link_content and link_content.source_url and link_content.source_url != url: + uris.append(link_content.source_url) -def reddit_ambient(ctx: AmbientContext): + ctx.bg.submit_context(content=post_str, uris=uris, priority=BackgroundContext.PRIORITY_LOW) + +def reddit_ambient(ctx: BackgroundRunContext): for _ in range(3): item = _next_new_reddit_item() if not item: @@ -399,19 +413,6 @@ def reddit_ambient(ctx: AmbientContext): return - #post_body = f"{f'**{subreddit}**' if subreddit else ''} {item.score }\n" - # - - # if link_content and link_content.text: - # post_body += f"\n\n{link_content.text[:2000]}" - - # if comments: - # post_body += "\n\n**Top Comments:**\n" - # for c in comments: - # post_body += f"- @{c.author if c.author else 'unknown'}: {c.body[:300].replace('\n', ' ')}...\n" - - - if __name__ == "__main__": import sys @@ -424,9 +425,9 @@ def reddit_ambient(ctx: AmbientContext): if TESTING_MODE: run_test() else: - run_ambient(reddit_ambient) + run_background(reddit_ambient) # :cp ./apps/ambient/reddit.py /app.py # :cp /home/dylan/ds/3fw/python/dist/gourmet-0.1.dev0-py3-none-any.whl /tmp/gourmet-0.1.dev0-py3-none-any.whl -# pip install /tmp/gourmet-0.1.dev0-py3-none-any.whl[abrasive] \ No newline at end of file +# pip install /tmp/gourmet-0.1.dev0-py3-none-any.whl[abrasive] diff --git a/example-apps/reddit/truffile.yaml 
b/example-apps/reddit/truffile.yaml new file mode 100644 index 0000000..1838adc --- /dev/null +++ b/example-apps/reddit/truffile.yaml @@ -0,0 +1,41 @@ +metadata: + name: Reddit + bundle_id: org.deepshard.reddit + description: | + Have your TruffleΒΉ browse Reddit and post relevant content to your feed. + background: + process: + cmd: + - python + - /opt/reddit.py + working_directory: / + environment: + PYTHONUNBUFFERED: "1" + # Comma-separated subreddit list. Use "none" to disable and rely on USER_FEED_URL. + SUBREDDITS: "news,worldnews,technology" + # Optional personal Reddit JSON feed URL. Use "none" when not used. + USER_FEED_URL: "none" + default_schedule: + type: interval + interval: + duration: 60m + prod_duration: 60m + schedule: + daily_window: "00:00-23:59" + icon_file: ./icon.png +steps: + # - name: Reddit Setup + # type: welcome + # content: | + # The Reddit app will allow your Truffle to post relevant content from Reddit on your feed. + # You can customize which subreddits to follow in the configuration step. + # Please use this app in accordance with Reddit's terms of service. 
+ - name: Install dependencies + type: bash + run: | + pip install --no-cache-dir --force-reinstall requests feedparser trafilatura==2.0.0 tld==0.13.1 + - name: Copy application files + type: files + files: + - source: ./reddit.py + destination: ./opt/reddit.py diff --git a/example-apps/truffile-example.yaml b/example-apps/truffile-example.yaml deleted file mode 100644 index 0e38580..0000000 --- a/example-apps/truffile-example.yaml +++ /dev/null @@ -1,76 +0,0 @@ -metadata: - name: Twitter App - type: background - process: - cmd: - - python - - app.py - working_directory: / - environment: - PYTHONUNBUFFERED: "1" - icon_file: ./icon.png - default_schedule: - type: interval - interval: - duration: 1h - schedule: - daily_window: "09:00-17:30" # optional - allowed_days: [mon, tue, wed, thu, fri] # optional, default = all days -steps: - - name: Copy application files - type: files - files: - - source: ./requirements.txt - destination: ./requirements.txt - - source: ./app.py - destination: ./app.py - - source: ./config.yaml - destination: ./config.yaml - permissions: 600 # optional - - name: Install dependencies - type: bash - run: | - pip install requests - apk add --no-cache btop - - name: Sign into X - type: vnc - cmd: - - python - - app.py - - --install - closes_on_complete: true - description: | - A VNC window will open. Please sign into your X account to continue the installation. - - name: Configure Email Account - type: text - content: | - Please provide an IMAP server, email address, and password to configure your email account. 
- fields: - - name: imap_server - label: IMAP Server - type: text # one of: text, password, number - placeholder: imap.example.com #optional - default: imap.gmail.com #optional - env: IMAP_SERVER - - name: email_address - label: Email Address - type: text - env: EMAIL_ADDRESS - placeholder: dude@wheresmycar.com - - name: password - label: Password - type: password - env: EMAIL_PASSWORD - validator: - cmd: - - python - - validate_email.py - args: - - ${inputs.imap_server} - - ${inputs.email_address} - - ${inputs.password} - - - - - \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 238e887..4f4c3a0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,6 @@ dependencies = [ [project.scripts] truffile = "truffile.cli:main" -truffleinferproxy = "truffile.infer.proxy:main" [project.optional-dependencies] dev = [ diff --git a/scripts/test_oai_proxy.py b/scripts/test_oai_proxy.py deleted file mode 100644 index dcdf6a3..0000000 --- a/scripts/test_oai_proxy.py +++ /dev/null @@ -1,124 +0,0 @@ -#!/usr/bin/env python3 -"""Smoke test for the local OpenAI-compatible proxy.""" - -from __future__ import annotations - -import argparse -import os -from typing import Any, Dict, List -try: - from openai import OpenAI -except ImportError: - raise ImportError("Please install the 'openai' package to run this test script.") - -def _print_header(title: str) -> None: - print("\n" + "=" * 8 + f" {title} " + "=" * 8) - - -def test_basic(client: OpenAI, model: str) -> None: - _print_header("basic") - resp = client.chat.completions.create( - model=model, - messages=[{"role": "user", "content": "Say hello in one sentence."}], - max_tokens=2048, - temperature=0.7, - top_p=0.9, - ) - msg = resp.choices[0].message - reasoning = getattr(msg, "reasoning_content", None) - if reasoning: - print("reasoning_content:", reasoning[:200], "..." 
if len(reasoning) > 200 else "") - print("content:", msg.content) - - -def test_json_schema(client: OpenAI, model: str) -> None: - _print_header("json_schema") - schema: Dict[str, Any] = { - "type": "object", - "properties": { - "answer": {"type": "string"}, - "confidence": {"type": "number"}, - }, - "required": ["answer", "confidence"], - } - resp = client.chat.completions.create( - model=model, - messages=[{"role": "user", "content": "What is 2+2? Respond as JSON."}], - response_format={"type": "json_schema", "json_schema": schema}, - max_tokens=2048, - ) - msg = resp.choices[0].message - print("content:", msg.content) - - -def test_tools(client: OpenAI, model: str) -> None: - _print_header("tools") - tools: List[Dict[str, Any]] = [ - { - "type": "function", - "function": { - "name": "get_time", - "description": "Return the current time in ISO-8601", - "parameters": { - "type": "object", - "properties": {"tz": {"type": "string"}}, - "required": [], - }, - }, - } - ] - resp = client.chat.completions.create( - model=model, - messages=[{"role": "user", "content": "What time is it? Use the tool."}], - tools=tools, - tool_choice="auto", - max_tokens=2048, - ) - msg = resp.choices[0].message - print("tool_calls:", msg.tool_calls) - print("content:", msg.content) - - -def test_stream(client: OpenAI, model: str) -> None: - _print_header("stream") - stream = client.chat.completions.create( - model=model, - messages=[{"role": "user", "content": "Stream a short haiku."}], - max_tokens=2048, - stream=True, - ) - parts: List[str] = [] - reasoning_parts: List[str] = [] - for chunk in stream: - delta = chunk.choices[0].delta - if delta: - if delta.content: - parts.append(delta.content) - reasoning = getattr(delta, "reasoning_content", None) - if reasoning: - reasoning_parts.append(reasoning) - if reasoning_parts: - full_reasoning = "".join(reasoning_parts) - print("reasoning_content:", full_reasoning[:200], "..." 
if len(full_reasoning) > 200 else "") - print("content:", "".join(parts)) - - -def main() -> None: - parser = argparse.ArgumentParser(description="Smoke test for OpenAI proxy") - parser.add_argument("--base-url", default="http://127.0.0.1:8080/v1", help="Proxy base URL") - parser.add_argument("--model", default="auto", help="Model name or UUID") - parser.add_argument("--no-stream", action="store_true", help="Skip streaming test") - args = parser.parse_args() - - api_key = os.getenv("OPENAI_API_KEY", "test") - client = OpenAI(base_url=args.base_url, api_key=api_key) - - test_basic(client, args.model) - test_json_schema(client, args.model) - test_tools(client, args.model) - if not args.no_stream: - test_stream(client, args.model) - - -if __name__ == "__main__": - main() diff --git a/truffile/__init__.py b/truffile/__init__.py index 532ea71..0954d6e 100644 --- a/truffile/__init__.py +++ b/truffile/__init__.py @@ -5,7 +5,6 @@ from .client import TruffleClient, ExecResult, UploadResult, resolve_mdns, NewSessionStatus from .schedule import parse_runtime_policy -from truffle.app.app_type_pb2 import AppType __all__ = [ "__version__", @@ -14,6 +13,5 @@ "UploadResult", "resolve_mdns", "NewSessionStatus", - "AppType", "parse_runtime_policy", ] diff --git a/truffile/_version.py b/truffile/_version.py new file mode 100644 index 0000000..a621a39 --- /dev/null +++ b/truffile/_version.py @@ -0,0 +1,34 @@ +# file generated by setuptools-scm +# don't change, don't track in version control + +__all__ = [ + "__version__", + "__version_tuple__", + "version", + "version_tuple", + "__commit_id__", + "commit_id", +] + +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple + from typing import Union + + VERSION_TUPLE = Tuple[Union[int, str], ...] 
+ COMMIT_ID = Union[str, None] +else: + VERSION_TUPLE = object + COMMIT_ID = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE +commit_id: COMMIT_ID +__commit_id__: COMMIT_ID + +__version__ = version = '0.1.15.dev3' +__version_tuple__ = version_tuple = (0, 1, 15, 'dev3') + +__commit_id__ = commit_id = 'g0212865ef' diff --git a/truffile/cli.py b/truffile/cli.py index fa61753..ca489aa 100644 --- a/truffile/cli.py +++ b/truffile/cli.py @@ -1,21 +1,20 @@ import argparse import asyncio -import ast +import json +import re import signal import socket import sys import threading import time +from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer from pathlib import Path -import yaml - +import httpx from truffile.storage import StorageService from truffile.client import TruffleClient, resolve_mdns, NewSessionStatus - -import grpc -from truffle.infer.infer_pb2_grpc import InferenceServiceStub -from truffle.infer.model_pb2 import GetModelListRequest, Model +from truffile.schema import validate_app_dir +from truffile.deploy import build_deploy_plan, deploy_with_builder # ANSI colors @@ -38,6 +37,8 @@ class C: ARROW = "β†’" DOT = "β€’" WARN = "⚠" +TOOL_TAGS = ("", "") +TOOL_TAG_PATTERN = re.compile(r"\s*(.*?)\s*", re.DOTALL) class Spinner: @@ -261,204 +262,73 @@ def cmd_disconnect(args, storage: StorageService) -> int: return 0 -def check_python_syntax(file_path: Path) -> tuple[bool, str]: - try: - with open(file_path) as f: - source = f.read() - ast.parse(source) - return True, "" - except SyntaxError as e: - return False, f"Line {e.lineno}: {e.msg}" - - -def validate_app_dir(app_dir: Path) -> tuple[bool, dict | None, str | None, list[str]]: - """Validate app directory and return (valid, config, app_type, warnings).""" - warnings = [] - - truffile = app_dir / "truffile.yaml" - if not truffile.exists(): - error(f"No truffile.yaml found in {app_dir}") - return False, None, None, warnings - - try: - with 
open(truffile) as f: - config = yaml.safe_load(f) - except yaml.YAMLError as e: - error(f"Invalid truffile.yaml: {e}") - return False, None, None, warnings - - meta = config.get("metadata", {}) - if not meta.get("name"): - error("metadata.name is required in truffile.yaml") - return False, None, None, warnings - - cfg_type = meta.get("type", "").lower() - if cfg_type in ("background", "ambient"): - app_type = "ambient" - elif cfg_type in ("foreground", "focus"): - app_type = "focus" - else: - app_type = "focus" - warnings.append(f"No type specified in truffile.yaml, defaulting to focus") - - icon_file = meta.get("icon_file") - if icon_file: - icon_path = app_dir / icon_file - if not icon_path.exists(): - warnings.append(f"Icon file not found: {icon_file}") - else: - warnings.append("No icon specified in truffile.yaml") - - # Check files - either in steps or top-level files: - files_to_check = [] - for step in config.get("steps", []): - if step.get("type") == "files": - files_to_check.extend(step.get("files", [])) - # Also check top-level files: (simplified format) - files_to_check.extend(config.get("files", [])) - - for f in files_to_check: - src = app_dir / f["source"] - if not src.exists(): - error(f"Source file not found: {src}") - return False, None, None, warnings - if src.suffix == ".py": - ok, err = check_python_syntax(src) - if not ok: - error(f"Syntax error in {src.name}: {err}") - return False, None, None, warnings - - return True, config, app_type, warnings - - -async def _do_deploy(client: TruffleClient, config: dict, app_dir: Path, app_type: str, device: str, interactive: bool = False) -> int: - meta = config["metadata"] - name = meta["name"] - description = meta.get("description", "") - process = meta.get("process", {}) - cmd_list = process.get("cmd", ["python", "app.py"]) - cwd = process.get("working_directory", "/") - env_dict = process.get("environment", {}) - env = [f"{k}={v}" for k, v in env_dict.items()] - icon_file = meta.get("icon_file") - 
icon_path = (app_dir / icon_file) if icon_file and (app_dir / icon_file).exists() else None - - spinner = Spinner(f"Connecting to {device}") - spinner.start() - await client.connect() - spinner.stop(success=True) - - spinner = Spinner("Starting build session") - spinner.start() - await client.start_build() - await asyncio.sleep(5) - spinner.stop(success=True) - print(f" {C.DIM}Session: {client.app_uuid}{C.RESET}") - - # Always upload files first - files_to_upload = [] - for step in config.get("steps", []): - if step.get("type") == "files": - files_to_upload.extend(step.get("files", [])) - files_to_upload.extend(config.get("files", [])) - - for f in files_to_upload: - src = app_dir / f["source"] - dest = f["destination"] - spinner = Spinner(f"Uploading {src.name} {ARROW} {dest}") - spinner.start() - result = await client.upload(src, dest) - spinner.stop(success=True) - print(f" {C.DIM}{result.bytes} bytes, sha256={result.sha256[:12]}...{C.RESET}") - - # always run bash commands - bash_commands = [] - for step in config.get("steps", []): - if step.get("type") == "bash": - bash_commands.append((step.get("name", "bash"), step["run"])) - if config.get("run"): - bash_commands.append(("Install dependencies", config["run"])) - - for step_name, run_cmd in bash_commands: - info(f"Running: {step_name}") - log = ScrollingLog(height=6, prefix=" ") - exit_code = 0 - async for ev, data in client.exec_stream(run_cmd, cwd=cwd): - if ev == "log": - try: - import json - obj = json.loads(data) - line = obj.get("line", "") - except Exception: - line = data - log.add(line) - elif ev == "exit": - try: - import json - exit_code = int(json.loads(data).get("code", 0)) - except (ValueError, KeyError): - pass - log.finish() - if exit_code != 0: - error(f"Step '{step_name}' failed with exit code {exit_code}") - raise RuntimeError(f"Step '{step_name}' failed with exit code {exit_code}") - - if interactive: - # interactive mode: open shell after setup for testing/debugging - print() - 
info("Opening interactive shell (exit with Ctrl+D or 'exit' to finish deploy)") - ws_url = str(client.http_base or "").replace("http://", "ws://").replace("https://", "wss://") + "/term" - await _interactive_shell(ws_url) - print() - spinner = Spinner(f"Finishing as {app_type} app") - spinner.start() - - cmd = cmd_list[0] if cmd_list[0].startswith("/") else f"/usr/bin/{cmd_list[0]}" - - if app_type == "focus": - await client.finish_foreground( - name=name, - cmd=cmd, - args=cmd_list[1:], - cwd=cwd, - env=env, - description=description, - icon=icon_path, - ) - else: - default_schedule = meta.get("default_schedule") - await client.finish_background( - name=name, - cmd=cmd, - args=cmd_list[1:], - cwd=cwd, - env=env, - description=description, - icon=icon_path, - default_schedule=default_schedule, - ) - - spinner.stop(success=True) - print() - success(f"Deployed: {C.BOLD}{name}{C.RESET} ({app_type})") - return 0 - - async def cmd_deploy(args, storage: StorageService) -> int: app_path = args.path if args.path else "." 
app_dir = Path(app_path).resolve() interactive = args.interactive + dry_run = bool(getattr(args, "dry_run", False)) if not app_dir.exists() or not app_dir.is_dir(): error(f"{app_dir} is not a valid directory") return 1 info(f"Validating app in {app_dir.name}") - valid, config, app_type, warnings = validate_app_dir(app_dir) + valid, config, app_type, warnings, errors = validate_app_dir(app_dir) if not valid or not app_type: + for msg in errors: + error(msg) return 1 for w in warnings: warn(w) + + if dry_run: + try: + plan = build_deploy_plan(config=config, app_dir=app_dir, app_type=app_type) + except Exception as e: + error(f"Failed to build deploy plan: {e}") + return 1 + print() + print(f"{C.BOLD}Dry Run: Deploy Plan{C.RESET}") + print(f" Name: {plan['name']}") + print(f" Bundle ID: {plan['bundle_id']}") + print(f" Mode: {plan['finish_label']}") + print(f" App Dir: {app_dir}") + print(f" Exec CWD: {plan['exec_cwd']}") + if plan["icon_path"] is not None: + print(f" Icon: {plan['icon_path']}") + else: + print(f" Icon: {C.DIM}{C.RESET}") + + fg = plan["fg_payload"] + if fg is not None: + fg_keys = [e.split("=", 1)[0] for e in fg.get("env", []) if "=" in e] + print(f" Foreground Cmd: {fg['cmd']} {' '.join(fg.get('args', []))}".rstrip()) + print(f" Foreground Env Keys: {', '.join(fg_keys) if fg_keys else ''}") + + bg = plan["bg_payload"] + if bg is not None: + bg_keys = [e.split('=', 1)[0] for e in bg.get("env", []) if "=" in e] + print(f" Background Cmd: {bg['cmd']} {' '.join(bg.get('args', []))}".rstrip()) + print(f" Background Env Keys: {', '.join(bg_keys) if bg_keys else ''}") + if plan["default_schedule"] is not None: + print(f" Background Schedule: configured") + else: + print(f" Background Schedule: {C.DIM}{C.RESET}") + + files = plan["files_to_upload"] + print(f" Files To Upload: {len(files)}") + for f in files: + src = f.get("source", "") + dst = f.get("destination", "") + print(f" - {src} {ARROW} {dst}") + + cmds = plan["bash_commands"] + print(f" Bash Steps: 
{len(cmds)}") + for name, _cmd in cmds: + print(f" - {name}") + print() + success("Dry run complete (no device changes made)") + return 0 device = storage.state.last_used_device if not device: @@ -496,7 +366,26 @@ def handle_sigint(): loop.add_signal_handler(signal.SIGINT, handle_sigint) try: - deploy_task = asyncio.create_task(_do_deploy(client, config, app_dir, app_type, device, interactive)) + deploy_task = asyncio.create_task( + deploy_with_builder( + client=client, + config=config, + app_dir=app_dir, + app_type=app_type, + device=device, + interactive=interactive, + spinner_cls=Spinner, + scrolling_log_cls=ScrollingLog, + info=info, + success=success, + error=error, + color_dim=C.DIM, + color_reset=C.RESET, + color_bold=C.BOLD, + arrow=ARROW, + interactive_shell=_interactive_shell, + ) + ) return await deploy_task except asyncio.CancelledError: print() @@ -552,47 +441,55 @@ async def cmd_list_apps(storage: StorageService) -> int: try: await client.connect() - foreground, background = await client.get_all_apps() + apps = await client.get_all_apps() spinner.stop(success=True) - - if not foreground and not background: + + if not apps: print(f" {C.DIM}No apps installed{C.RESET}") return 0 - + + focus_apps = [app for app in apps if app.HasField("foreground")] + ambient_apps = [app for app in apps if app.HasField("background")] + both_apps = [app for app in apps if app.HasField("foreground") and app.HasField("background")] + print() - if foreground: + if focus_apps: print(f"{C.BOLD}Focus Apps{C.RESET}") - for app in foreground: + for app in focus_apps: print(f" {C.CYAN}{DOT}{C.RESET} {app.metadata.name}") setattr(app.metadata, "description", getattr(app.metadata, "description", "")) if hasattr(app.metadata, "description") and app.metadata.description: desc = app.metadata.description.strip().split('\n')[0][:55] print(f" {C.DIM}{desc}{C.RESET}") - - if background: - if foreground: + + if ambient_apps: + if focus_apps: print() print(f"{C.BOLD}Ambient Apps{C.RESET}") - 
for app in background: + for app in ambient_apps: schedule = "" - if app.runtime_policy.HasField("interval"): - secs = app.runtime_policy.interval.duration.seconds + policy = app.background.runtime_policy + if policy.HasField("interval"): + secs = policy.interval.duration.seconds if secs >= 3600: schedule = f"every {secs // 3600}h" elif secs >= 60: schedule = f"every {secs // 60}m" else: schedule = f"every {secs}s" - elif app.runtime_policy.HasField("always"): + elif policy.HasField("always"): schedule = "always" print(f" {C.CYAN}{DOT}{C.RESET} {app.metadata.name} {C.DIM}({schedule}){C.RESET}") setattr(app.metadata, "description", getattr(app.metadata, "description", "")) if hasattr(app.metadata, "description") and app.metadata.description: desc = app.metadata.description.strip().split('\n')[0][:55] print(f" {C.DIM}{desc}{C.RESET}") - + print() - print(f"{C.DIM}Total: {len(foreground)} focus, {len(background)} ambient{C.RESET}") + print( + f"{C.DIM}Total: {len(focus_apps)} focus, {len(ambient_apps)} ambient, " + f"{len(both_apps)} both{C.RESET}" + ) return 0 except Exception as e: @@ -628,14 +525,21 @@ async def cmd_delete(args, storage: StorageService) -> int: try: await client.connect() - foreground, background = await client.get_all_apps() + apps = await client.get_all_apps() spinner.stop(success=True) all_apps = [] - for app in foreground: - all_apps.append(("focus", app.uuid, app.metadata.name, app.metadata.description.strip().split('\n')[0][:55] if app.metadata.description else "")) - for app in background: - all_apps.append(("ambient", app.uuid, app.metadata.name, app.metadata.description.strip().split('\n')[0][:55] if app.metadata.description else "")) + for app in apps: + if app.HasField("foreground") and app.HasField("background"): + kind = "both" + elif app.HasField("foreground"): + kind = "focus" + elif app.HasField("background"): + kind = "ambient" + else: + kind = "unknown" + desc = app.metadata.description.strip().split('\n')[0][:55] if 
app.metadata.description else "" + all_apps.append((kind, app.uuid, app.metadata.name, desc)) if not all_apps: print(f" {C.DIM}No apps installed{C.RESET}") @@ -800,7 +704,7 @@ def cmd_list(args, storage: StorageService) -> int: async def cmd_models(storage: StorageService) -> int: - """List models on the connected device.""" + """List models on your Truffle.""" device = storage.state.last_used_device if not device: error("No device connected") @@ -815,140 +719,810 @@ async def cmd_models(storage: StorageService) -> int: except RuntimeError: spinner.fail(f"Could not resolve {device}.local") return 1 - + try: - channel = grpc.insecure_channel(f"{ip}:80") - stub = InferenceServiceStub(channel) - model_list = stub.GetModelList(GetModelListRequest(use_filter=False)) + url = f"http://{ip}/if2/v1/models" + with httpx.Client(timeout=15.0) as client: + resp = client.get(url) + resp.raise_for_status() + payload = resp.json() spinner.stop(success=True) except Exception as e: - spinner.fail(f"Failed to get models: {e}") + spinner.fail(f"Failed to get IF2 models: {e}") return 1 - - loaded = [m for m in model_list.models if m.state == Model.MODEL_STATE_LOADED] - available = [m for m in model_list.models if m.state == Model.MODEL_STATE_AVAILABLE] - + + models = payload.get("data", []) + if not isinstance(models, list): + spinner.fail("Invalid response: missing 'data' list") + return 1 + print() - print(f"{MUSHROOM} {C.BOLD}Models on {device}{C.RESET}") + print(f"{MUSHROOM} {C.BOLD}IF2 Models on {device}{C.RESET}") print() - - if loaded: - for m in loaded: - reasoner = f" {C.MAGENTA}reasoner{C.RESET}" if m.config.info.has_chain_of_thought else "" - print(f" {C.GREEN}{CHECK}{C.RESET} {m.name}{reasoner}") - print(f" {C.DIM}id: {m.uuid}{C.RESET}") - - if available: - for m in available: - print(f" {C.DIM}β—‹ {m.name} (not loaded){C.RESET}") - - if not loaded and not available: + + if not models: print(f" {C.DIM}No models found{C.RESET}") - - print() - total_mb = 
model_list.total_memory // (1024 * 1024) if model_list.total_memory else 0 - used_mb = model_list.used_memory // (1024 * 1024) if model_list.used_memory else 0 - print(f"{C.DIM}Memory: {used_mb}MB / {total_mb}MB{C.RESET}") - + return 0 + + for m in models: + if not isinstance(m, dict): + continue + model_id = m.get("id", "") + name = m.get("name", model_id) + uuid = m.get("uuid", "") + ctx = m.get("context_length", "") + arch = m.get("architecture", {}) + tokenizer = arch.get("tokenizer", "") if isinstance(arch, dict) else "" + max_batch = m.get("max_batch_size", "") + print(f" {C.GREEN}{CHECK}{C.RESET} {name}") + print(f" {C.DIM}id: {model_id}{C.RESET}") + print(f" {C.DIM}uuid: {uuid}{C.RESET}") + print(f" {C.DIM}context: {ctx}, tokenizer: {tokenizer}, max_batch: {max_batch}{C.RESET}") + return 0 -def cmd_proxy(args, storage: StorageService) -> int: - """Start the OpenAI-compatible proxy.""" - device = args.device if hasattr(args, 'device') and args.device else storage.state.last_used_device +async def _resolve_connected_device(storage: StorageService) -> tuple[str, str] | tuple[None, None]: + device = storage.state.last_used_device + if not device: + error("No device connected") + print(f" {C.DIM}Run: truffile connect {C.RESET}") + return None, None + try: + ip = await resolve_mdns(f"{device}.local") + except RuntimeError: + error(f"Could not resolve {device}.local") + return None, None + return device, ip + + +async def _default_model(ip: str) -> str | None: + try: + with httpx.Client(timeout=10.0) as client: + resp = client.get(f"http://{ip}/if2/v1/models") + resp.raise_for_status() + payload = resp.json() + models = payload.get("data", []) + if not isinstance(models, list) or not models: + return None + first = models[0] + if not isinstance(first, dict): + return None + return str(first.get("uuid") or first.get("id") or "") + except Exception: + return None + + +async def cmd_chat(args, storage: StorageService) -> int: + device, ip = await 
_resolve_connected_device(storage) + if not device or not ip: + return 1 + + prompt = args.prompt + if not prompt and args.prompt_words: + prompt = " ".join(args.prompt_words).strip() + if not prompt: + error("Missing prompt") + print(f" {C.DIM}Usage: truffile chat --prompt \"hello\"{C.RESET}") + print(f" {C.DIM}Or: truffile chat \"hello\"{C.RESET}") + return 1 + + model = args.model + if not model: + spinner = Spinner("Resolving default model") + spinner.start() + model = await _default_model(ip) + if not model: + spinner.fail("Failed to resolve default model from IF2") + return 1 + spinner.stop(success=True) + + stream = not args.no_stream and not args.json + messages: list[dict[str, str]] = [] + if args.system: + messages.append({"role": "system", "content": args.system}) + messages.append({"role": "user", "content": prompt}) + + payload: dict = { + "model": model, + "messages": messages, + "stream": stream, + "reasoning": {"enabled": bool(args.reasoning)}, + } + if args.max_tokens is not None: + payload["max_tokens"] = args.max_tokens + else: + payload["max_tokens"] = 512 + if args.temperature is not None: + payload["temperature"] = args.temperature + if args.top_p is not None: + payload["top_p"] = args.top_p + if stream: + payload["stream_options"] = {"include_usage": True} + + url = f"http://{ip}/if2/v1/chat/completions" + headers = {"Content-Type": "application/json"} + + spinner = Spinner(f"Connecting to {device}") + spinner.start() + try: + with httpx.Client(timeout=None) as client: + if stream: + with client.stream("POST", url, headers=headers, json=payload) as resp: + resp.raise_for_status() + spinner.stop(success=True) + usage_printed = False + for raw in resp.iter_lines(): + if not raw: + continue + line = raw.strip() + if not line.startswith("data:"): + continue + data = line[len("data:"):].strip() + if data == "[DONE]": + break + try: + evt = json.loads(data) + except Exception: + continue + + choices = evt.get("choices") + if isinstance(choices, 
list) and choices: + c0 = choices[0] + if isinstance(c0, dict): + delta = c0.get("delta", {}) + if isinstance(delta, dict): + txt = delta.get("content") + if isinstance(txt, str) and txt: + print(txt, end="", flush=True) + reasoning = delta.get("reasoning") + if args.reasoning and isinstance(reasoning, str) and reasoning: + print(reasoning, end="", flush=True) + + usage = evt.get("usage") + if isinstance(usage, dict) and not usage_printed: + usage_printed = True + print(f"\n{C.DIM}[usage] {usage}{C.RESET}", flush=True) + print() + else: + resp = client.post(url, headers=headers, json=payload, timeout=120.0) + resp.raise_for_status() + spinner.stop(success=True) + body = resp.json() + if args.json: + print(json.dumps(body, indent=2)) + else: + content = "" + try: + choices = body.get("choices", []) + if isinstance(choices, list) and choices: + msg = choices[0].get("message", {}) + if isinstance(msg, dict): + content = str(msg.get("content", "")) + except Exception: + content = "" + print(content) + return 0 + except Exception as e: + spinner.fail(f"Chat request failed: {e}") + return 1 + + +def _inject_reasoning_into_chunk(chunk: dict, state: dict) -> dict: + choices = chunk.get("choices") + if not isinstance(choices, list) or not choices: + return chunk + c0 = choices[0] + if not isinstance(c0, dict): + return chunk + delta = c0.get("delta") + if not isinstance(delta, dict): + return chunk + + reasoning = delta.get("reasoning") + content = delta.get("content") + merged = "" + + if isinstance(reasoning, str) and reasoning: + if not state.get("thinking_open", False): + merged += "\n" + state["thinking_open"] = True + merged += reasoning + + if isinstance(content, str) and content: + if state.get("thinking_open", False): + merged += "\n\n" + state["thinking_open"] = False + merged += content + + if merged: + delta["content"] = merged + if "reasoning" in delta: + del delta["reasoning"] + return chunk + + +def _normalize_finish_reason(fr: str | None) -> str | None: + if 
fr is None: + return None + s = str(fr).strip().lower() + if s in {"stop", "finish_stop"}: + return "stop" + if s in {"length", "finish_length"}: + return "length" + if s in {"tool_calls", "toolcalls", "finish_toolcalls"}: + return "tool_calls" + if s in {"content_filter"}: + return "content_filter" + return "stop" + + +def _normalize_usage_dict(usage: dict | None) -> dict | None: + if not isinstance(usage, dict): + return usage + if {"prompt_tokens", "completion_tokens", "total_tokens"}.issubset(set(usage.keys())): + return usage + tokens = usage.get("tokens") + if isinstance(tokens, dict): + prompt = int(tokens.get("prompt", 0) or 0) + completion = int(tokens.get("completion", 0) or 0) + out = dict(usage) + out["prompt_tokens"] = prompt + out["completion_tokens"] = completion + out["total_tokens"] = prompt + completion + return out + return usage + + +def _flatten_content(content: object) -> str: + if content is None: + return "" + if isinstance(content, str): + return content + if isinstance(content, list): + parts: list[str] = [] + for p in content: + if isinstance(p, dict) and p.get("type") == "text": + parts.append(str(p.get("text", ""))) + return "".join(parts) + return str(content) + + +def _extract_tool_calls_and_clean(text: str) -> tuple[list[dict], str]: + calls: list[dict] = [] + for m in TOOL_TAG_PATTERN.findall(text): + try: + obj = json.loads(m.strip()) + if isinstance(obj, dict): + calls.append(obj) + except Exception: + continue + cleaned = TOOL_TAG_PATTERN.sub("", text).strip() + return calls, cleaned + + +def _tool_prompt(tools_spec: list[dict]) -> str: + desc_lines: list[str] = [] + for t in tools_spec: + if not isinstance(t, dict) or t.get("type") != "function": + continue + fn = t.get("function", {}) + if not isinstance(fn, dict): + continue + name = fn.get("name") + if not isinstance(name, str) or not name: + continue + description = str(fn.get("description") or "") + params = fn.get("parameters") if isinstance(fn.get("parameters"), dict) 
else {"type": "object"} + desc_lines.append(f"{name}: {description}\nArg Schema: {json.dumps(params, indent=2)}") + if not desc_lines: + return "" + open_tag, close_tag = TOOL_TAGS + return ( + "You have access to the following tools:\n" + + "\n".join(desc_lines) + + "\nWhen you decide to use a tool, respond with a JSON object enclosed by " + + f"{open_tag} and {close_tag} tags in this format:\n" + + f"{open_tag}\n" + + '{\n "tool": "",\n "args": {}\n}\n' + + f"{close_tag}\n" + + "Only use tools listed above, and ensure your JSON is valid." + ) + + +def _serialize_tool_calls(tool_calls: list[dict]) -> str: + blocks: list[str] = [] + open_tag, close_tag = TOOL_TAGS + for tc in tool_calls: + if not isinstance(tc, dict) or tc.get("type") != "function": + continue + fn = tc.get("function", {}) + if not isinstance(fn, dict): + continue + name = fn.get("name") + if not isinstance(name, str) or not name: + continue + args_raw = fn.get("arguments") + args = {} + if isinstance(args_raw, str): + try: + maybe = json.loads(args_raw) + if isinstance(maybe, dict): + args = maybe + except Exception: + args = {"_raw": args_raw} + elif isinstance(args_raw, dict): + args = args_raw + blocks.append(f"{open_tag}\n{json.dumps({'tool': name, 'args': args})}\n{close_tag}") + return "\n".join(blocks) + + +def _massage_messages_for_tools(messages: list[dict], tools_spec: list[dict], tool_choice: object) -> list[dict]: + out: list[dict] = [] + prompt = _tool_prompt(tools_spec) if tool_choice != "none" else "" + injected = False + + tool_name_by_id: dict[str, str] = {} + for msg in messages: + if isinstance(msg, dict) and msg.get("role") == "assistant": + for tc in msg.get("tool_calls", []) or []: + if isinstance(tc, dict): + tc_id = tc.get("id") + fn = tc.get("function", {}) + if isinstance(tc_id, str) and isinstance(fn, dict) and isinstance(fn.get("name"), str): + tool_name_by_id[tc_id] = fn["name"] + + for msg in messages: + if not isinstance(msg, dict): + continue + role = 
msg.get("role") + content = _flatten_content(msg.get("content")) + + if role == "assistant" and isinstance(msg.get("tool_calls"), list): + serialized = _serialize_tool_calls(msg.get("tool_calls") or []) + if serialized: + content = (content + "\n" + serialized).strip() + + if role == "tool": + tool_name = msg.get("name") + if not isinstance(tool_name, str) or not tool_name: + tcid = msg.get("tool_call_id") + if isinstance(tcid, str): + tool_name = tool_name_by_id.get(tcid, "") + content = f' "tool" : "{tool_name or ""}" "output": "{content}" ' + + if role == "system" and prompt and not injected: + content = (content + "\n\n" + prompt).strip() + injected = True + + out.append({"role": role, "content": content}) + + if prompt and not injected: + out.insert(0, {"role": "system", "content": prompt}) + return out + + +class _ToolTagStreamFilter: + def __init__(self): + self.buf = "" + + def feed(self, text: str) -> str: + if not text: + return "" + s = self.buf + text + self.buf = "" + out: list[str] = [] + open_tag, close_tag = TOOL_TAGS + while s: + start = s.find(open_tag) + if start == -1: + keep = len(open_tag) - 1 + if len(s) > keep: + out.append(s[:-keep] if keep > 0 else s) + self.buf = s[-keep:] if keep > 0 else "" + else: + self.buf = s + break + if start > 0: + out.append(s[:start]) + s = s[start:] + end = s.find(close_tag) + if end == -1: + self.buf = s + break + s = s[end + len(close_tag):] + return "".join(out) + + def finalize(self) -> str: + if not self.buf: + return "" + open_tag, _ = TOOL_TAGS + if open_tag in self.buf: + self.buf = "" + return "" + tail = self.buf + self.buf = "" + return tail + + +def _inject_reasoning_into_response(body: dict) -> dict: + choices = body.get("choices") + if not isinstance(choices, list): + return body + for c in choices: + if not isinstance(c, dict): + continue + msg = c.get("message") + if not isinstance(msg, dict): + continue + reasoning = msg.get("reasoning") + content = msg.get("content", "") + if 
isinstance(reasoning, str) and reasoning: + content_text = content if isinstance(content, str) else str(content) + msg["content"] = f"\n{reasoning}\n\n{content_text}" + if "reasoning" in msg: + del msg["reasoning"] + return body + + +async def cmd_proxy(args, storage: StorageService) -> int: + device = args.device if args.device else storage.state.last_used_device if not device: error("No device specified or connected") print(f" {C.DIM}Run: truffile connect {C.RESET}") print(f" {C.DIM}Or: truffile proxy --device {C.RESET}") return 1 - - port = args.port if hasattr(args, 'port') else 8080 - host = args.host if hasattr(args, 'host') else "127.0.0.1" - debug = args.debug if hasattr(args, 'debug') else False - - spinner = None - + + spinner = Spinner(f"Resolving {device}.local") + spinner.start() try: - print(f"{MUSHROOM} {C.BOLD}Starting OpenAI proxy{C.RESET}") - print() - - spinner = Spinner(f"Resolving {device}.local") - spinner.start() - - hostname = f"{device}.local" - ip = socket.gethostbyname(hostname) - spinner.stop(success=True) - - grpc_address = f"{ip}:80" - - spinner = Spinner("Connecting to inference service") - spinner.start() - - from truffile.infer.proxy import OpenAIProxy, OpenAIProxyHandler - from http.server import ThreadingHTTPServer - - proxy = OpenAIProxy(grpc_address, include_debug=debug) - - channel = grpc.insecure_channel(grpc_address) - stub = InferenceServiceStub(channel) - model_list = stub.GetModelList(GetModelListRequest(use_filter=False)) - loaded = [m for m in model_list.models if m.state == Model.MODEL_STATE_LOADED] + ip = await resolve_mdns(f"{device}.local") spinner.stop(success=True) - spinner = None - - print(f" {C.DIM}Device: {device} ({ip}){C.RESET}") - print(f" {C.DIM}Models: {len(loaded)} loaded{C.RESET}") - - print() - print(f"{C.GREEN}{CHECK}{C.RESET} Proxy running at {C.BOLD}http://{host}:{port}/v1{C.RESET}") - print() - print(f" {C.DIM}Use with OpenAI SDK:{C.RESET}") - print(f" {C.CYAN}from openai import OpenAI{C.RESET}") - 
print(f" {C.CYAN}client = OpenAI(base_url=\"http://{host}:{port}/v1\", api_key=\"x\"){C.RESET}") - print() - print(f" {C.DIM}Or set environment variables:{C.RESET}") - print(f" {C.CYAN}export OPENAI_BASE_URL=http://{host}:{port}/v1{C.RESET}") - print(f" {C.CYAN}export OPENAI_API_KEY=anything{C.RESET}") - print() - print(f" {C.DIM}Press Ctrl+C to stop{C.RESET}") - print() - - class _Server(ThreadingHTTPServer): - def __init__(self, server_address, handler_cls): - super().__init__(server_address, handler_cls) - self.proxy = proxy - - server = _Server((host, port), OpenAIProxyHandler) + except RuntimeError: + spinner.fail(f"Could not resolve {device}.local") + return 1 + + target_base = f"http://{ip}" + host = args.host + port = args.port + include_think_tags = not args.no_think_tags + + class ProxyHandler(BaseHTTPRequestHandler): + protocol_version = "HTTP/1.1" + + def log_message(self, _format, *_args): + return + + def _send_json(self, code: int, body: dict): + raw = json.dumps(body).encode("utf-8") + self.send_response(code) + self.send_header("Content-Type", "application/json") + self.send_header("Content-Length", str(len(raw))) + self.end_headers() + self.wfile.write(raw) + + def _map_path(self, path: str) -> str | None: + if path == "/v1/models": + return "/if2/v1/models" + if path == "/v1/chat/completions": + return "/if2/v1/chat/completions" + return None + + def _forward_headers(self) -> dict[str, str]: + out: dict[str, str] = {"Content-Type": "application/json"} + auth = self.headers.get("Authorization") + if auth: + out["Authorization"] = auth + return out + + def do_GET(self): + mapped = self._map_path(self.path) + if not mapped: + self._send_json(404, {"error": {"message": "Not found"}}) + return + + try: + with httpx.Client(timeout=30.0) as client: + resp = client.get(f"{target_base}{mapped}", headers=self._forward_headers()) + self.send_response(resp.status_code) + self.send_header("Content-Type", resp.headers.get("content-type", "application/json")) + 
self.send_header("Content-Length", str(len(resp.content))) + self.end_headers() + self.wfile.write(resp.content) + except Exception as e: + self._send_json(502, {"error": {"message": f"Upstream GET failed: {e}"}}) + + def do_POST(self): + mapped = self._map_path(self.path) + if not mapped: + self._send_json(404, {"error": {"message": "Not found"}}) + return + + raw_body = b"" + try: + content_len = int(self.headers.get("Content-Length", "0")) + raw_body = self.rfile.read(content_len) if content_len > 0 else b"{}" + body = json.loads(raw_body.decode("utf-8")) + except Exception as e: + self._send_json(400, {"error": {"message": f"Invalid JSON body: {e}"}}) + return + + if mapped == "/if2/v1/chat/completions": + if "reasoning" not in body: + body["reasoning"] = {"enabled": False} + if isinstance(body.get("tools"), list): + messages = body.get("messages", []) + if isinstance(messages, list): + body["messages"] = _massage_messages_for_tools( + messages=messages, + tools_spec=body.get("tools") or [], + tool_choice=body.get("tool_choice"), + ) + # Let proxy map tool tags back to OpenAI tool_calls. 
+ body.pop("tools", None) + body.pop("tool_choice", None) + + stream_mode = bool(body.get("stream")) and mapped == "/if2/v1/chat/completions" + + try: + with httpx.Client(timeout=None) as client: + if stream_mode: + with client.stream( + "POST", + f"{target_base}{mapped}", + headers=self._forward_headers(), + json=body, + ) as resp: + self.send_response(resp.status_code) + self.send_header("Content-Type", "text/event-stream; charset=utf-8") + self.send_header("Cache-Control", "no-cache") + self.send_header("Connection", "keep-alive") + self.end_headers() + + state = {"thinking_open": False} + tool_filter = _ToolTagStreamFilter() + acc_text_parts: list[str] = [] + seen_finish_reason: str | None = None + stream_id = None + created = None + model_name = None + for raw_line in resp.iter_lines(): + line = raw_line if isinstance(raw_line, str) else raw_line.decode("utf-8", errors="replace") + if not line: + self.wfile.write(b"\n") + self.wfile.flush() + continue + if line.startswith("data:"): + payload = line[5:].strip() + if payload == "[DONE]": + clean_tail = tool_filter.finalize() + if clean_tail: + chunk = { + "choices": [{"index": 0, "delta": {"content": clean_tail}, "finish_reason": None}] + } + if stream_id is not None: + chunk["id"] = stream_id + if created is not None: + chunk["created"] = created + if model_name is not None: + chunk["model"] = model_name + out = f"data: {json.dumps(chunk, separators=(',', ':'))}\n\n" + self.wfile.write(out.encode("utf-8")) + + if acc_text_parts: + tool_calls, _clean = _extract_tool_calls_and_clean("".join(acc_text_parts)) + if tool_calls: + tc_list = [] + for i, tc in enumerate(tool_calls): + name = str(tc.get("tool", "")) + args = tc.get("args", {}) + if not isinstance(args, dict): + args = {"_raw": str(args)} + tc_list.append( + { + "id": f"call_{i+1}", + "type": "function", + "index": i, + "function": {"name": name, "arguments": json.dumps(args, separators=(',', ':'))}, + } + ) + tc_chunk = { + "choices": [{"index": 0, 
"delta": {"tool_calls": tc_list}, "finish_reason": None}] + } + if stream_id is not None: + tc_chunk["id"] = stream_id + if created is not None: + tc_chunk["created"] = created + if model_name is not None: + tc_chunk["model"] = model_name + out = f"data: {json.dumps(tc_chunk, separators=(',', ':'))}\n\n" + self.wfile.write(out.encode("utf-8")) + seen_finish_reason = "tool_calls" + + if seen_finish_reason is None: + fin = { + "choices": [{"index": 0, "delta": {}, "finish_reason": "stop"}] + } + if stream_id is not None: + fin["id"] = stream_id + if created is not None: + fin["created"] = created + if model_name is not None: + fin["model"] = model_name + out = f"data: {json.dumps(fin, separators=(',', ':'))}\n\n" + self.wfile.write(out.encode("utf-8")) + + if include_think_tags and state.get("thinking_open", False): + close_evt = { + "choices": [{"delta": {"content": "\n\n"}, "index": 0}] + } + out = f"data: {json.dumps(close_evt, separators=(',', ':'))}\n\n" + self.wfile.write(out.encode("utf-8")) + self.wfile.write(b"data: [DONE]\n\n") + self.wfile.flush() + break + try: + evt = json.loads(payload) + if stream_id is None and isinstance(evt, dict): + stream_id = evt.get("id") + created = evt.get("created") + model_name = evt.get("model") + if include_think_tags: + evt = _inject_reasoning_into_chunk(evt, state) + else: + # OpenAI-style proxy field for reasoning deltas. 
+ choices = evt.get("choices") + if isinstance(choices, list) and choices: + c0 = choices[0] + if isinstance(c0, dict): + delta = c0.get("delta") + if isinstance(delta, dict) and isinstance(delta.get("reasoning"), str): + delta["reasoning_content"] = delta.pop("reasoning") + choices = evt.get("choices") + if isinstance(choices, list) and choices: + c0 = choices[0] + if isinstance(c0, dict): + fr = c0.get("finish_reason") + mapped_fr = _normalize_finish_reason(fr) if fr is not None else None + if fr is not None: + c0["finish_reason"] = mapped_fr + seen_finish_reason = mapped_fr + delta = c0.get("delta") + if isinstance(delta, dict): + content = delta.get("content") + if isinstance(content, str) and content: + acc_text_parts.append(content) + filtered = tool_filter.feed(content) + if filtered != content: + if filtered: + delta["content"] = filtered + else: + delta.pop("content", None) + out = f"data: {json.dumps(evt, separators=(',', ':'))}\n\n" + except Exception: + out = f"{line}\n\n" + self.wfile.write(out.encode("utf-8")) + else: + self.wfile.write((line + "\n").encode("utf-8")) + self.wfile.flush() + else: + resp = client.post( + f"{target_base}{mapped}", + headers=self._forward_headers(), + json=body, + timeout=120.0, + ) + content = resp.content + if ( + mapped == "/if2/v1/chat/completions" + and include_think_tags + and "application/json" in resp.headers.get("content-type", "") + ): + try: + parsed = json.loads(content.decode("utf-8")) + parsed = _inject_reasoning_into_response(parsed) + choices = parsed.get("choices") + if isinstance(choices, list) and choices: + c0 = choices[0] + if isinstance(c0, dict): + msg = c0.get("message") + if isinstance(msg, dict): + msg_content = msg.get("content") + if isinstance(msg_content, str): + tool_calls, cleaned = _extract_tool_calls_and_clean(msg_content) + if tool_calls: + tc_list = [] + for i, tc in enumerate(tool_calls): + name = str(tc.get("tool", "")) + args = tc.get("args", {}) + if not isinstance(args, dict): + 
args = {"_raw": str(args)} + tc_list.append( + { + "id": f"call_{i+1}", + "type": "function", + "function": {"name": name, "arguments": json.dumps(args, separators=(',', ':'))}, + } + ) + msg["tool_calls"] = tc_list + msg["content"] = cleaned if cleaned else None + c0["finish_reason"] = "tool_calls" + fr = c0.get("finish_reason") + c0["finish_reason"] = _normalize_finish_reason(fr) if fr is not None else None + usage = parsed.get("usage") + if isinstance(usage, dict): + parsed["usage"] = _normalize_usage_dict(usage) + content = json.dumps(parsed).encode("utf-8") + except Exception: + pass + elif mapped == "/if2/v1/chat/completions" and "application/json" in resp.headers.get("content-type", ""): + try: + parsed = json.loads(content.decode("utf-8")) + choices = parsed.get("choices") + if isinstance(choices, list) and choices: + c0 = choices[0] + if isinstance(c0, dict): + msg = c0.get("message") + if isinstance(msg, dict): + msg_content = msg.get("content") + if isinstance(msg_content, str): + tool_calls, cleaned = _extract_tool_calls_and_clean(msg_content) + if tool_calls: + tc_list = [] + for i, tc in enumerate(tool_calls): + name = str(tc.get("tool", "")) + args = tc.get("args", {}) + if not isinstance(args, dict): + args = {"_raw": str(args)} + tc_list.append( + { + "id": f"call_{i+1}", + "type": "function", + "function": {"name": name, "arguments": json.dumps(args, separators=(',', ':'))}, + } + ) + msg["tool_calls"] = tc_list + msg["content"] = cleaned if cleaned else None + c0["finish_reason"] = "tool_calls" + fr = c0.get("finish_reason") + c0["finish_reason"] = _normalize_finish_reason(fr) if fr is not None else None + usage = parsed.get("usage") + if isinstance(usage, dict): + parsed["usage"] = _normalize_usage_dict(usage) + content = json.dumps(parsed).encode("utf-8") + except Exception: + pass + self.send_response(resp.status_code) + self.send_header("Content-Type", resp.headers.get("content-type", "application/json")) + self.send_header("Content-Length", 
str(len(content))) + self.end_headers() + self.wfile.write(content) + except Exception as e: + self._send_json(502, {"error": {"message": f"Upstream POST failed: {e}"}}) + + print(f"{MUSHROOM} {C.BOLD}truffile proxy{C.RESET}") + print() + print(f" {C.DIM}Device:{C.RESET} {device} ({ip})") + print(f" {C.DIM}Listen:{C.RESET} http://{host}:{port}") + print(f" {C.DIM}Upstream:{C.RESET} {target_base}/if2/v1/*") + print(f" {C.DIM}Reasoning tags:{C.RESET} {'on' if include_think_tags else 'off'}") + print() + print(f" {C.DIM}OpenAI-compatible base URL:{C.RESET}") + print(f" {C.CYAN}http://{host}:{port}/v1{C.RESET}") + print() + print(f" {C.DIM}Press Ctrl+C to stop{C.RESET}") + print() + + try: + server = ThreadingHTTPServer((host, port), ProxyHandler) server.serve_forever() - except KeyboardInterrupt: - if spinner: - spinner.running = False - sys.stdout.write("\r\033[K") - sys.stdout.flush() print(f"{C.RED}{CROSS} Cancelled{C.RESET}") return 130 - except socket.gaierror: - if spinner: - spinner.fail(f"Could not resolve {device}.local") - else: - error(f"Could not resolve {device}.local") - print(f" {C.DIM}Try: ping {device}.local{C.RESET}") - return 1 except OSError as e: - if spinner: - spinner.fail(str(e)) - else: - error(f"Could not start server: {e}") - print(f" {C.DIM}Port {port} may already be in use{C.RESET}") - return 1 - except Exception as e: - if spinner: - spinner.fail(str(e)) - else: - error(str(e)) + error(f"Could not start proxy: {e}") return 1 - + return 0 @@ -1061,6 +1635,25 @@ class FakeArgs: return 1 +def cmd_validate(args) -> int: + app_dir = Path(args.path).resolve() + if not app_dir.exists() or not app_dir.is_dir(): + error(f"{app_dir} is not a valid directory") + return 1 + + info(f"Validating app in {app_dir.name}") + valid, _config, app_type, warnings, errors = validate_app_dir(app_dir) + for w in warnings: + warn(w) + if not valid: + for e in errors: + error(e) + return 1 + + success(f"Validation passed ({app_type})") + return 0 + + def 
print_help(): print(f"{MUSHROOM} {C.BOLD}truffile{C.RESET} - TruffleOS SDK") print() @@ -1071,20 +1664,24 @@ def print_help(): print(f" {C.BLUE}connect{C.RESET} Connect to a Truffle device") print(f" {C.BLUE}disconnect{C.RESET} Disconnect and clear credentials") print(f" {C.BLUE}deploy{C.RESET} [path] Deploy an app (reads type from truffile.yaml)") + print(f" {C.BLUE}validate{C.RESET} [path] Validate app config and files") print(f" {C.BLUE}delete{C.RESET} Delete installed apps from device") print(f" {C.BLUE}list{C.RESET} List installed apps or devices") - print(f" {C.BLUE}models{C.RESET} List AI models on connected device") - print(f" {C.BLUE}proxy{C.RESET} Start OpenAI-compatible inference proxy") + print(f" {C.BLUE}models{C.RESET} List models on your Truffle") + print(f" {C.BLUE}chat{C.RESET} [prompt] Chat with any model on your Truffle") + print(f" {C.BLUE}proxy{C.RESET} Run OpenAI-compatible proxy") print() print(f"{C.BOLD}Examples:{C.RESET}") print(f" {C.DIM}truffile scan{C.RESET} {C.DIM}# find devices on network{C.RESET}") print(f" {C.DIM}truffile connect truffle-6272{C.RESET}") print(f" {C.DIM}truffile deploy ./my-app{C.RESET}") + print(f" {C.DIM}truffile deploy --dry-run ./my-app{C.RESET}") print(f" {C.DIM}truffile deploy{C.RESET} {C.DIM}# uses current directory{C.RESET}") + print(f" {C.DIM}truffile validate ./my-app{C.RESET}") print(f" {C.DIM}truffile list apps{C.RESET}") - print(f" {C.DIM}truffile models{C.RESET} {C.DIM}# show loaded models{C.RESET}") - print(f" {C.DIM}truffile proxy{C.RESET} {C.DIM}# start proxy on :8080{C.RESET}") - print(f" {C.DIM}truffile proxy --port 9000{C.RESET}") + print(f" {C.DIM}truffile models{C.RESET} {C.DIM}# show models on your Truffle{C.RESET}") + print(f" {C.DIM}truffile chat \"hello\"{C.RESET} {C.DIM}# run chat completion on your Truffle{C.RESET}") + print(f" {C.DIM}truffile proxy{C.RESET} {C.DIM}# run local /v1 proxy{C.RESET}") print() @@ -1112,6 +1709,10 @@ def main() -> int: p_deploy = subparsers.add_parser("deploy", 
add_help=False) p_deploy.add_argument("path", nargs="?", default=".") p_deploy.add_argument("-i", "--interactive", action="store_true", help="Interactive terminal mode") + p_deploy.add_argument("--dry-run", action="store_true", help="Show deploy plan without mutating device") + + p_validate = subparsers.add_parser("validate", add_help=False) + p_validate.add_argument("path", nargs="?", default=".") p_delete = subparsers.add_parser("delete", add_help=False) @@ -1119,12 +1720,24 @@ def main() -> int: p_list.add_argument("what", choices=["apps", "devices"], nargs="?") p_models = subparsers.add_parser("models", add_help=False) - + + p_chat = subparsers.add_parser("chat", add_help=False) + p_chat.add_argument("prompt_words", nargs="*", help="Prompt text (alternative to --prompt)") + p_chat.add_argument("-p", "--prompt", help="Prompt text") + p_chat.add_argument("-m", "--model", help="Model id/uuid (default: first model from IF2 list)") + p_chat.add_argument("--system", help="System prompt") + p_chat.add_argument("--reasoning", action="store_true", help="Enable reasoning mode") + p_chat.add_argument("--max-tokens", type=int, help="Max response tokens") + p_chat.add_argument("--temperature", type=float, help="Sampling temperature") + p_chat.add_argument("--top-p", type=float, help="Nucleus sampling top-p") + p_chat.add_argument("--no-stream", action="store_true", help="Disable streaming output") + p_chat.add_argument("--json", action="store_true", help="Print full JSON response (non-stream)") + p_proxy = subparsers.add_parser("proxy", add_help=False) - p_proxy.add_argument("--device", "-d", help="Device name (defaults to last connected)") - p_proxy.add_argument("--port", "-p", type=int, default=8080, help="Port to listen on") - p_proxy.add_argument("--host", default="127.0.0.1", help="Host to bind to") - p_proxy.add_argument("--debug", action="store_true", help="Include reasoning in responses") + p_proxy.add_argument("--device", "-d", help="Device name (default: last 
connected)") + p_proxy.add_argument("--host", default="127.0.0.1", help="Host to bind") + p_proxy.add_argument("--port", "-p", type=int, default=8080, help="Port to bind") + p_proxy.add_argument("--no-think-tags", action="store_true", help="Do not inject tags") args = parser.parse_args() @@ -1164,8 +1777,12 @@ def main() -> int: return cmd_list(args, storage) elif args.command == "models": return run_async(cmd_models(storage)) + elif args.command == "chat": + return run_async(cmd_chat(args, storage)) elif args.command == "proxy": - return cmd_proxy(args, storage) + return run_async(cmd_proxy(args, storage)) + elif args.command == "validate": + return cmd_validate(args) return 0 diff --git a/truffile/client.py b/truffile/client.py index 1608159..0ad8a67 100644 --- a/truffile/client.py +++ b/truffile/client.py @@ -1,362 +1,17 @@ -import asyncio -import json -import platform -import socket -from dataclasses import dataclass -from pathlib import Path -from typing import AsyncIterator -import grpc -from grpc import aio -import httpx -from google.protobuf import empty_pb2 -from truffle.os.truffleos_pb2_grpc import TruffleOSStub -from truffle.os.builder_pb2 import ( - StartBuildSessionRequest, - StartBuildSessionResponse, - FinishBuildSessionRequest, - FinishBuildSessionResponse, -) -from truffle.os.client_session_pb2 import ( - RegisterNewSessionRequest, - RegisterNewSessionResponse, +"""import surface for transport client APIs.""" + +from truffile.transport.client import ( + ExecResult, NewSessionStatus, + TruffleClient, + UploadResult, + resolve_mdns, ) -from truffle.os.client_metadata_pb2 import ClientMetadata -from truffle.os.app_queries_pb2 import GetAllAppsRequest, GetAllAppsResponse, DeleteAppRequest, DeleteAppResponse -from truffle.app.app_type_pb2 import AppType -from truffle.app.foreground_pb2 import ForegroundApp -from truffle.app.background_pb2 import BackgroundApp, BackgroundAppRuntimePolicy -from truffile.schedule import parse_runtime_policy - - -def 
get_client_metadata() -> ClientMetadata: - from truffile import __version__ - metadata = ClientMetadata() - metadata.device = platform.node() - metadata.platform = platform.platform() - metadata.version = f"truffile-{__version__}-{platform.python_version()}" - return metadata - - -async def resolve_mdns(hostname: str) -> str: - if ".local" not in hostname: - return hostname - loop = asyncio.get_event_loop() - try: - resolved = await loop.run_in_executor(None, socket.gethostbyname, hostname) - return resolved - except socket.gaierror as e: - raise RuntimeError(f"Failed to resolve {hostname} - is the device on the same network? ({e})") - - -@dataclass -class ExecResult: - exit_code: int - output: list[str] - - -@dataclass -class UploadResult: - path: str - bytes: int - sha256: str - - -class TruffleClient: - def __init__(self, address: str, token: str): - self.address = address - self.token = token - self.channel: aio.Channel | None = None - self.stub: TruffleOSStub | None = None - self.app_uuid: str | None = None - self.access_path: str | None = None - - @property - def http_base(self) -> str | None: - if not self.access_path: - return None - host = self.address if "://" in self.address else f"http://{self.address}" - return f"{host}/containers/{self.access_path}" - - @property - def _metadata(self) -> list: - return [("session", self.token)] - - async def connect(self, timeout: float = 15.0): - self.channel = aio.insecure_channel(self.address) - await asyncio.wait_for(self.channel.channel_ready(), timeout=timeout) - self.stub = TruffleOSStub(self.channel) - - def update_token(self, token: str): - self.token = token - - async def check_auth(self) -> bool: - if not self.stub or not self.token: - return False - try: - await self.stub.System_GetInfo(empty_pb2.Empty(), metadata=self._metadata) - return True - except aio.AioRpcError as e: - if e.code() == grpc.StatusCode.UNAUTHENTICATED: - return False - raise - - async def register_new_session(self, user_id: str) -> 
tuple[NewSessionStatus, str | None]: - if not self.stub: - raise RuntimeError("not connected") - req = RegisterNewSessionRequest() - req.user_id = user_id - req.metadata.CopyFrom(get_client_metadata()) - resp: RegisterNewSessionResponse = await self.stub.Client_RegisterNewSession(req) - if resp.status.error == NewSessionStatus.NEW_SESSION_SUCCESS: - self.token = resp.token - return resp.status, resp.token - return resp.status, None - - async def get_all_apps(self) -> tuple[list[ForegroundApp], list[BackgroundApp]]: - if not self.stub: - raise RuntimeError("not connected") - req = GetAllAppsRequest() - resp: GetAllAppsResponse = await self.stub.Apps_GetAll(req, metadata=self._metadata) - return list(resp.foreground_apps), list(resp.background_apps) - - async def delete_app(self, app_uuid: str) -> DeleteAppResponse: - if not self.stub: - raise RuntimeError("not connected") - req = DeleteAppRequest() - req.app_uuid = app_uuid - resp: DeleteAppResponse = await self.stub.Apps_DeleteApp(req, metadata=self._metadata) - return resp - - async def start_build(self, app_type: AppType = AppType.APP_TYPE_BACKGROUND) -> StartBuildSessionResponse: - if not self.stub: - raise RuntimeError("not connected") - req = StartBuildSessionRequest() - req.app_type = app_type - resp: StartBuildSessionResponse = await self.stub.Builder_StartBuildSession( - req, metadata=self._metadata - ) - self.app_uuid = resp.app_uuid - self.access_path = resp.access_path - return resp - - async def _sse_events(self, client: httpx.AsyncClient, url: str, body: dict) -> AsyncIterator[tuple[str, str]]: - async with client.stream("POST", url, json=body, timeout=None) as r: - r.raise_for_status() - event = "message" - data_parts = [] - async for raw in r.aiter_lines(): - if raw is None: - continue - line = raw.rstrip("\r") - if line == "": - if data_parts: - yield event, "\n".join(data_parts) - event, data_parts = "message", [] - continue - if line.startswith(":"): - continue - if line.startswith("event:"): - 
event = line[6:].strip() - elif line.startswith("data:"): - data_parts.append(line[5:].lstrip()) - if data_parts: - yield event, "\n".join(data_parts) - - async def exec(self, cmd: str, cwd: str = "/") -> ExecResult: - if not self.http_base: - raise RuntimeError("no active build session") - url = f"{self.http_base}/exec/stream" - body = {"cmd": ["bash", "-lc", f"cd {cwd} && {cmd}"], "cwd": cwd} - output = [] - exit_code = 0 - retries = 5 - backoff = 1.0 - async with httpx.AsyncClient(timeout=None) as client: - for attempt in range(retries): - try: - async for ev, data in self._sse_events(client, url, body): - if ev == "log": - try: - obj = json.loads(data) - line = obj.get("line", "") - except Exception: - line = data - output.append(line) - elif ev == "exit": - try: - exit_code = int(json.loads(data).get("code", 0)) - except Exception: - pass - return ExecResult(exit_code=exit_code, output=output) - except httpx.HTTPStatusError as e: - if e.response.status_code == 503 and attempt < retries - 1: - await asyncio.sleep(backoff * (attempt + 1)) - continue - raise - return ExecResult(exit_code=exit_code, output=output) - - async def exec_stream(self, cmd: str, cwd: str = "/") -> AsyncIterator[tuple[str, str]]: - if not self.http_base: - raise RuntimeError("no active build session") - url = f"{self.http_base}/exec/stream" - body = {"cmd": ["bash", "-lc", f"cd {cwd} && {cmd}"], "cwd": cwd} - retries = 5 - backoff = 1.0 - async with httpx.AsyncClient(timeout=None) as client: - for attempt in range(retries): - try: - async for ev, data in self._sse_events(client, url, body): - yield ev, data - return - except httpx.HTTPStatusError as e: - if e.response.status_code == 503 and attempt < retries - 1: - await asyncio.sleep(backoff * (attempt + 1)) - continue - raise - - async def upload(self, src: str | Path, dest: str) -> UploadResult: - if not self.http_base: - raise RuntimeError("no active build session") - path = Path(src).expanduser() - if not path.exists() or not 
path.is_file(): - raise FileNotFoundError(f"no such file: {path}") - url = f"{self.http_base}/upload" - retries = 5 - backoff = 1.0 - async with httpx.AsyncClient(timeout=None) as client: - for attempt in range(retries): - try: - with path.open("rb") as fh: - files = {"file": (path.name, fh)} - r = await client.post(url, params={"path": dest}, files=files) - r.raise_for_status() - data = r.json() - return UploadResult( - path=data.get("path", ""), - bytes=data.get("bytes", 0), - sha256=data.get("sha256", ""), - ) - except httpx.HTTPStatusError as e: - if e.response.status_code == 503 and attempt < retries - 1: - await asyncio.sleep(backoff * (attempt + 1)) - continue - raise - raise RuntimeError("upload failed after retries") - - def _load_icon(self, icon: str | Path | bytes | None) -> bytes | None: - if icon is None: - return None - if isinstance(icon, bytes): - return icon - path = Path(icon).expanduser() - if path.exists() and path.is_file(): - return path.read_bytes() - return None - - async def finish_foreground( - self, - name: str, - cmd: str, - args: list[str], - cwd: str = "/", - env: list[str] | None = None, - description: str = "", - icon: str | Path | bytes | None = None, - ) -> FinishBuildSessionResponse: - if not self.stub or not self.app_uuid: - raise RuntimeError("no active build session") - req = FinishBuildSessionRequest() - req.app_uuid = self.app_uuid - req.discard = False - req.foreground.metadata.name = name - if description: - req.foreground.metadata.description = description - icon_data = self._load_icon(icon) - if icon_data: - req.foreground.metadata.icon.png_data = icon_data - req.process.cmd = cmd - req.process.args.extend(args) - if env: - req.process.env.extend(env) - req.process.cwd = cwd - resp: FinishBuildSessionResponse = await self.stub.Builder_FinishBuildSession( - req, metadata=self._metadata - ) - self.app_uuid = None - self.access_path = None - if resp.HasField("error"): - raise RuntimeError(f"finish failed: {resp.error.error} 
- {resp.error.details}") - return resp - - async def finish_background( - self, - name: str, - cmd: str, - args: list[str], - cwd: str = "/", - env: list[str] | None = None, - description: str = "", - icon: str | Path | bytes | None = None, - default_schedule: dict | None = None, - ) -> FinishBuildSessionResponse: - if not self.stub or not self.app_uuid: - raise RuntimeError("no active build session") - req = FinishBuildSessionRequest() - req.app_uuid = self.app_uuid - req.discard = False - req.background.metadata.name = name - if description: - req.background.metadata.description = description - icon_data = self._load_icon(icon) - if icon_data: - req.background.metadata.icon.png_data = icon_data - - if default_schedule: - runtime_policy = parse_runtime_policy(default_schedule) - req.background.runtime_policy.CopyFrom(runtime_policy) - else: - req.background.runtime_policy.interval.duration.seconds = 60 - - req.process.cmd = cmd - req.process.args.extend(args) - if env: - req.process.env.extend(env) - req.process.cwd = cwd - resp: FinishBuildSessionResponse = await self.stub.Builder_FinishBuildSession( - req, metadata=self._metadata - ) - self.app_uuid = None - self.access_path = None - if resp.HasField("error"): - raise RuntimeError(f"finish failed: {resp.error.error} - {resp.error.details}") - return resp - - async def discard(self) -> FinishBuildSessionResponse | None: - if not self.stub or not self.app_uuid: - return None - req = FinishBuildSessionRequest() - req.app_uuid = self.app_uuid - req.discard = True - resp: FinishBuildSessionResponse = await self.stub.Builder_FinishBuildSession( - req, metadata=self._metadata - ) - self.app_uuid = None - self.access_path = None - return resp - - async def close(self): - if self.channel: - await self.channel.close() - self.channel = None - self.stub = None - - async def __aenter__(self): - await self.connect() - await self.start_build() - return self - async def __aexit__(self, exc_type, exc_val, exc_tb): - await 
self.discard() - await self.close() - return False +__all__ = [ + "ExecResult", + "NewSessionStatus", + "TruffleClient", + "UploadResult", + "resolve_mdns", +] diff --git a/truffile/deploy/__init__.py b/truffile/deploy/__init__.py new file mode 100644 index 0000000..bc40339 --- /dev/null +++ b/truffile/deploy/__init__.py @@ -0,0 +1,3 @@ +from .builder import build_deploy_plan, deploy_with_builder + +__all__ = ["build_deploy_plan", "deploy_with_builder"] diff --git a/truffile/deploy/builder.py b/truffile/deploy/builder.py new file mode 100644 index 0000000..561ccd4 --- /dev/null +++ b/truffile/deploy/builder.py @@ -0,0 +1,236 @@ +from __future__ import annotations + +import asyncio +import json +from pathlib import Path +from typing import Any, Callable + +from truffile.transport.client import TruffleClient + + +def _normalize_cmd(cmd_list: list[str]) -> tuple[str, list[str]]: + cmd = cmd_list[0] if cmd_list[0].startswith("/") else f"/usr/bin/{cmd_list[0]}" + return cmd, cmd_list[1:] + + +def _env_map_to_list(env_dict: dict[str, str] | None) -> list[str]: + if not env_dict: + return [] + return [f"{k}={v}" for k, v in env_dict.items()] + + +def _bundle_id_from_name(name: str) -> str: + raw = "".join(ch.lower() if ch.isalnum() else "." 
for ch in name).strip(".") + normalized = ".".join([part for part in raw.split(".") if part]) + return normalized or "truffle.app" + + +def _extract_process(process_cfg: dict[str, Any] | None) -> tuple[str, list[str], str, list[str]]: + proc = process_cfg or {} + cmd_list = list(proc.get("cmd", ["python", "app.py"])) + cmd, args = _normalize_cmd(cmd_list) + cwd = proc.get("working_directory", proc.get("cwd", "/")) + env = _env_map_to_list(proc.get("environment", proc.get("env"))) + return cmd, args, cwd, env + + +def build_deploy_plan( + *, + config: dict[str, Any], + app_dir: Path, + app_type: str, +) -> dict[str, Any]: + meta = config["metadata"] + name = meta["name"] + description = meta.get("description", "") + bundle_id = meta.get("bundle_id") or _bundle_id_from_name(name) + icon_file = meta.get("icon_file") + icon_path = (app_dir / icon_file) if icon_file and (app_dir / icon_file).exists() else None + + fg_cfg = meta.get("foreground") + bg_cfg = meta.get("background") + new_style = isinstance(fg_cfg, dict) or isinstance(bg_cfg, dict) + + if new_style: + has_fg = isinstance(fg_cfg, dict) + has_bg = isinstance(bg_cfg, dict) + else: + has_fg = app_type == "focus" + has_bg = app_type == "ambient" + + if not has_fg and not has_bg: + raise RuntimeError("App must define foreground and/or background process config") + + fg_payload = None + bg_payload = None + exec_cwd = "/" + if has_fg: + fg_process = fg_cfg.get("process") if isinstance(fg_cfg, dict) else meta.get("process") + fg_cmd, fg_args, fg_cwd, fg_env = _extract_process(fg_process) + fg_payload = {"cmd": fg_cmd, "args": fg_args, "cwd": fg_cwd, "env": fg_env} + exec_cwd = fg_cwd + if has_bg: + bg_process = bg_cfg.get("process") if isinstance(bg_cfg, dict) else meta.get("process") + bg_cmd, bg_args, bg_cwd, bg_env = _extract_process(bg_process) + bg_payload = {"cmd": bg_cmd, "args": bg_args, "cwd": bg_cwd, "env": bg_env} + if exec_cwd == "/" and bg_cwd: + exec_cwd = bg_cwd + + if has_fg and has_bg: + 
finish_label = "foreground+background" + elif has_fg: + finish_label = "foreground" + else: + finish_label = "background" + + default_schedule = None + if isinstance(bg_cfg, dict): + default_schedule = bg_cfg.get("default_schedule") + elif has_bg: + default_schedule = meta.get("default_schedule") + + files_to_upload = [] + for step in config.get("steps", []): + if isinstance(step, dict) and step.get("type") == "files": + files_to_upload.extend(step.get("files", [])) + files_to_upload.extend(config.get("files", [])) + + bash_commands = [] + for step in config.get("steps", []): + if isinstance(step, dict) and step.get("type") == "bash": + bash_commands.append((step.get("name", "bash"), step["run"])) + if config.get("run"): + bash_commands.append(("Install dependencies", config["run"])) + + return { + "name": name, + "description": description, + "bundle_id": bundle_id, + "icon_path": icon_path, + "fg_payload": fg_payload, + "bg_payload": bg_payload, + "exec_cwd": exec_cwd, + "finish_label": finish_label, + "default_schedule": default_schedule, + "files_to_upload": files_to_upload, + "bash_commands": bash_commands, + } + + +async def _wait_for_build_session_ready(client: TruffleClient, timeout_sec: float = 45.0) -> None: + deadline = asyncio.get_event_loop().time() + timeout_sec + last_error: Exception | None = None + while asyncio.get_event_loop().time() < deadline: + try: + result = await client.exec("echo ready", cwd="/") + if result.exit_code == 0: + return + except Exception as e: + last_error = e + await asyncio.sleep(1.0) + if last_error is not None: + raise RuntimeError(f"build session endpoint did not become ready in time: {last_error}") + raise RuntimeError("build session endpoint did not become ready in time") + + +async def deploy_with_builder( + *, + client: TruffleClient, + config: dict[str, Any], + app_dir: Path, + app_type: str, + device: str, + interactive: bool, + spinner_cls: Any, + scrolling_log_cls: Any, + info: Callable[[str], None], + success: 
async def deploy_with_builder(
    *,
    client: TruffleClient,
    config: dict[str, Any],
    app_dir: Path,
    app_type: str,
    device: str,
    interactive: bool,
    spinner_cls: Any,
    scrolling_log_cls: Any,
    info: Callable[[str], None],
    success: Callable[[str], None],
    error: Callable[[str], None],
    color_dim: str,
    color_reset: str,
    color_bold: str,
    arrow: str,
    interactive_shell: Callable[[str], Any],
) -> int:
    """Run a full app deploy against a device build session.

    Pipeline: build the deploy plan from ``config``, connect to ``device``,
    open a build session and wait for it, upload the plan's files, run its
    bash steps while streaming logs, optionally drop into an interactive
    shell, then finalize the app on the device.

    The UI hooks (``spinner_cls``, ``scrolling_log_cls``, ``info``/``success``/
    ``error`` callbacks, the ``color_*`` strings and ``arrow``) are injected so
    this function has no direct terminal dependency.

    Returns:
        0 on success.

    Raises:
        RuntimeError: when a bash step exits non-zero.
    """
    # All deploy inputs are precomputed into a plain dict by build_deploy_plan;
    # unpack once so the rest of the flow reads linearly.
    plan = build_deploy_plan(config=config, app_dir=app_dir, app_type=app_type)
    name = plan["name"]
    description = plan["description"]
    bundle_id = plan["bundle_id"]
    icon_path = plan["icon_path"]
    fg_payload = plan["fg_payload"]
    bg_payload = plan["bg_payload"]
    exec_cwd = plan["exec_cwd"]
    finish_label = plan["finish_label"]
    default_schedule = plan["default_schedule"]
    files_to_upload = plan["files_to_upload"]
    bash_commands = plan["bash_commands"]

    # Phase 1: connect to the device.
    spinner = spinner_cls(f"Connecting to {device}")
    spinner.start()
    await client.connect()
    spinner.stop(success=True)

    # Phase 2: open a build session and block until its exec endpoint answers.
    spinner = spinner_cls("Starting build session")
    spinner.start()
    await client.start_build()
    await _wait_for_build_session_ready(client)
    spinner.stop(success=True)
    print(f" {color_dim}Session: {client.app_uuid}{color_reset}")

    # Phase 3: upload every file declared by the plan's "files" steps.
    for f in files_to_upload:
        src = app_dir / f["source"]
        dest = f["destination"]
        spinner = spinner_cls(f"Uploading {src.name} {arrow} {dest}")
        spinner.start()
        result = await client.upload(src, dest)
        spinner.stop(success=True)
        print(f" {color_dim}{result.bytes} bytes, sha256={result.sha256[:12]}...{color_reset}")

    # Phase 4: run each bash step, streaming its output into a scrolling log.
    for step_name, run_cmd in bash_commands:
        info(f"Running: {step_name}")
        log = scrolling_log_cls(height=6, prefix=" ")
        exit_code = 0
        # exec_stream yields (event, payload) pairs; "log" carries a JSON
        # {"line": ...} blob (falling back to raw text), "exit" the exit code.
        async for ev, data in client.exec_stream(run_cmd, cwd=exec_cwd):
            if ev == "log":
                try:
                    import json
                    obj = json.loads(data)
                    line = obj.get("line", "")
                except Exception:
                    # Non-JSON log payloads are shown verbatim.
                    line = data
                log.add(line)
            elif ev == "exit":
                try:
                    import json
                    exit_code = int(json.loads(data).get("code", 0))
                except (ValueError, KeyError):
                    pass
        log.finish()
        if exit_code != 0:
            # Report and abort: later steps likely depend on this one.
            error(f"Step '{step_name}' failed with exit code {exit_code}")
            raise RuntimeError(f"Step '{step_name}' failed with exit code {exit_code}")

    # Phase 5 (optional): hand the session over to the user before finishing.
    if interactive:
        print()
        info("Opening interactive shell (exit with Ctrl+D or 'exit' to finish deploy)")
        # Terminal endpoint is the HTTP base with the scheme switched to websocket.
        ws_url = str(client.http_base or "").replace("http://", "ws://").replace("https://", "wss://") + "/term"
        await interactive_shell(ws_url)
        print()

    # Phase 6: finalize the app (registers fg/bg payloads and schedule).
    spinner = spinner_cls(f"Finishing as {finish_label} app")
    spinner.start()

    await client.finish_app(
        name=name,
        bundle_id=bundle_id,
        description=description,
        icon=icon_path,
        foreground=fg_payload,
        background=bg_payload,
        default_schedule=default_schedule,
    )

    spinner.stop(success=True)
    print()
    success(f"Deployed: {color_bold}{name}{color_reset} ({finish_label})")
    return 0
def clean_response(response: str) -> str:
    """Trim surrounding whitespace and strip stray decode artifacts from a model response."""
    # NOTE(review): the literal below looks like a mojibake'd U+FFFD
    # replacement character (its UTF-8 bytes re-decoded) — confirm against
    # the original file's encoding before relying on it.
    return response.strip().replace("οΏ½", "")
"content": {"type": "json_schema", "json_schema": tool.input_schema}, - "end": end, - } - - structural_tag = { - "type": "structural_tag", - "format": { - "type": "triggered_tags", - "triggers": [TOOL_TAGS[0]], - "tags": [get_tag_for_tool(tool) for tool in available_tools], - "stop_after_first": not allow_parallel, - }, - } - try: - fmt = ResponseFormat.STRUCTURAL_TAG - except AttributeError: - # older proto or server; fall back to prompt-only tool guidance. - return - req.cfg.response_format.format = fmt - req.cfg.response_format.schema = json.dumps(structural_tag, indent=0) - - -def _build_tool_call_response_format( - req, available_tools: List[Tool], allow_parallel: bool = False -) -> None: - def get_tag_for_tool(tool: Tool) -> dict: - begin = f"{TOOL_TAGS[0]}\n" + '{"tool": ' + f'"{tool.name}", "args": ' - end = "}" + f"{TOOL_TAGS[1]}\n" - return { - "begin": begin, - "content": {"type": "json_schema", "json_schema": tool.input_schema}, - "end": end, - } - structural_tag = { - "type": "structural_tag", - "format": { - "type": "sequence", - "elements": [ - { - "type": "tag", - "begin": "", - "content": {"type": "any_text"}, - "end": THINK_TAGS[1], - }, - { - "type": "triggered_tags", - "triggers": [TOOL_TAGS[0]], - "tags": [get_tag_for_tool(tool) for tool in available_tools], - "stop_after_first": not allow_parallel, - }, - ], - }, - } - try: - fmt = ResponseFormat.STRUCTURAL_TAG - except AttributeError: - # older proto or server; fall back to prompt-only tool guidance. 
- return - req.cfg.response_format.format = fmt - req.cfg.response_format.schema = json.dumps(structural_tag, indent=0) diff --git a/truffile/infer/proxy.py b/truffile/infer/proxy.py deleted file mode 100644 index 262d1f1..0000000 --- a/truffile/infer/proxy.py +++ /dev/null @@ -1,756 +0,0 @@ -#!/usr/bin/env python3 -"""Minimal OpenAI-compatible /v1/chat/completions proxy for Truffle gRPC inference.""" - -from __future__ import annotations - -import argparse -import json -import threading -import time -import uuid -import os -from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer -from typing import Any, Dict, List, Optional, Tuple - -import grpc - -from .common import THINK_TAGS, clean_response -from .prompts import ( - TOOL_TAGS, - AgentPromptBuilder, - _build_tool_call_response_format, - _build_tool_call_response_format_non_reasoning, -) -from .tooling import Tool - -from truffle.infer.convo.conversation_pb2 import Conversation, Message -from truffle.infer.finishreason_pb2 import FinishReason -from truffle.infer.gencfg_pb2 import ResponseFormat -from truffle.infer.irequest_pb2 import IRequest -from truffle.infer.infer_pb2_grpc import InferenceServiceStub -from truffle.infer.model_pb2 import GetModelListRequest, Model - - -_MODEL_LOCK = threading.Lock() -_MODEL_CACHE: Dict[str, Model] = {} -_MODEL_LIST: List[Model] = [] - - -def _now_ts() -> int: - return int(time.time()) - - -def _gen_id(prefix: str) -> str: - return f"{prefix}-{uuid.uuid4().hex}" - - -def _load_models(stub: InferenceServiceStub) -> None: - global _MODEL_CACHE, _MODEL_LIST - model_list = stub.GetModelList(GetModelListRequest(use_filter=False)) - models = [m for m in model_list.models if m.state == Model.MODEL_STATE_LOADED] - cache: Dict[str, Model] = {} - for m in models: - cache[m.uuid] = m - cache[m.name.lower()] = m - _MODEL_LIST = models - _MODEL_CACHE = cache - - -def _get_models(stub: InferenceServiceStub) -> List[Model]: - with _MODEL_LOCK: - if not _MODEL_LIST: - 
_load_models(stub) - return list(_MODEL_LIST) - - -def _resolve_model(stub: InferenceServiceStub, model_str: Optional[str]) -> Tuple[Model, bool]: - models = _get_models(stub) - model_key = (model_str or "").strip() - if model_key and model_key.lower() not in {"auto", "default"}: - with _MODEL_LOCK: - m = _MODEL_CACHE.get(model_key) or _MODEL_CACHE.get(model_key.lower()) - if m is not None: - return m, bool(m.config.info.has_chain_of_thought) - for m in models: - if m.config.info.has_chain_of_thought: - return m, True - if not models: - raise RuntimeError("No loaded models available") - return models[0], bool(models[0].config.info.has_chain_of_thought) - - -def _flatten_content(content: Any) -> str: - if content is None: - return "" - if isinstance(content, str): - return content - if isinstance(content, list): - parts: List[str] = [] - for p in content: - if isinstance(p, dict) and p.get("type") == "text": - parts.append(p.get("text") or "") - return "".join(parts) - return str(content) - - -def _build_tool_list(tools_spec: List[Dict[str, Any]]) -> List[Tool]: - tools: List[Tool] = [] - for t in tools_spec: - if t.get("type") != "function": - continue - fn = t.get("function", {}) - name = fn.get("name") - if not name: - continue - tools.append( - Tool( - name=name, - description=fn.get("description") or "", - input_schema=fn.get("parameters") or {"type": "object"}, - display_name=name, - ) - ) - return tools - - -def _tool_system_prompt(tools: List[Tool]) -> str: - tool_desc = "\n".join([t.get_for_system_prompt() for t in tools]) - return ( - "You have access to the following tools:\n" - f"{tool_desc}\n" - f"When you decide to use a tool, respond with a JSON object enclosed by {TOOL_TAGS[0]} and {TOOL_TAGS[1]} tags in this format:\n" - f"{TOOL_TAGS[0]}\n{{\n \"tool\": \"\",\n \"args\": {{}}\n}}\n{TOOL_TAGS[1]}\n" - "Only use tools listed above, and ensure your JSON is valid." 
def _serialize_tool_calls(tool_calls: List[Dict[str, Any]]) -> str:
    """Render OpenAI-format tool calls back into the model's tagged-JSON wire form.

    Skips entries whose ``type`` is not ``"function"``. String ``arguments``
    are parsed as JSON; unparseable strings are preserved under ``"_raw"``.
    Returns one tag block per call, joined by newlines.
    """
    # NOTE(review): TOOL_TAGS appears as a pair of empty strings in this
    # extraction — the angle-bracket tag literals were likely stripped;
    # confirm the real delimiters in the original source.
    chunks: List[str] = []
    for tc in tool_calls:
        if tc.get("type") != "function":
            continue
        fn = tc.get("function", {})
        name = fn.get("name")
        args_raw = fn.get("arguments")
        args: Any
        if isinstance(args_raw, str):
            try:
                args = json.loads(args_raw)
            except json.JSONDecodeError:
                # Keep malformed argument strings instead of dropping the call.
                args = {"_raw": args_raw}
        else:
            args = args_raw or {}
        payload = {"tool": name, "args": args}
        chunks.append(f"{TOOL_TAGS[0]}\n{json.dumps(payload)}\n{TOOL_TAGS[1]}")
    return "\n".join(chunks)
convo.messages.add(role=Message.ROLE_TOOL, content=content) - - return convo - - -def _safe_parse_cot(raw: str) -> Tuple[str, str]: - if THINK_TAGS[1] in raw: - pre, post = raw.split(THINK_TAGS[1], 1) - cot = pre.replace(THINK_TAGS[0], "").replace(THINK_TAGS[1], "").strip() - return cot, post - return "", raw - - -def _map_finish_reason(fr: Optional[int]) -> Optional[str]: - if fr is None: - return None - if fr == FinishReason.FINISH_STOP: - return "stop" - if fr == FinishReason.FINISH_LENGTH: - return "length" - if fr == FinishReason.FINISH_TOOLCALLS: - return "tool_calls" - return "stop" - - -def _usage_to_openai(usage: Any) -> Dict[str, int]: - tokens = getattr(usage, "tokens", None) - if tokens is None: - return {"prompt_tokens": 0, "completion_tokens": 0, "total_tokens": 0} - prompt = int(getattr(tokens, "prompt", 0)) - completion = int(getattr(tokens, "completion", 0)) - return { - "prompt_tokens": prompt, - "completion_tokens": completion, - "total_tokens": prompt + completion, - } - - -def _set_structural_tag(req: IRequest, structural_tag: Dict[str, Any]) -> bool: - try: - fmt = ResponseFormat.STRUCTURAL_TAG - except AttributeError: - # older proto or server; fall back to prompt-only constraints. - return False - req.cfg.response_format.format = fmt - req.cfg.response_format.schema = json.dumps(structural_tag, indent=0) - return True - - -class _StreamFilter: - """Streaming filter that separates visible content, reasoning, and toolcall tags. - - ``feed()`` and ``finalize()`` return a ``(visible, reasoning)`` tuple so - that callers can emit ``delta.reasoning_content`` alongside ``delta.content`` - in OpenAI-compatible SSE chunks (matching DeepSeek / OpenAI convention). 
- """ - - def __init__(self, hide_cot: bool = False) -> None: - self._buffer = "" - self._mode = "normal" # normal | think | toolcall - self._max_tag = max(len(""), len(""), len(""), len("")) - self._passed_cot = not hide_cot - self._cot_open_stripped = False # whether we've consumed the opening in phase 1 - - def finalize(self) -> Tuple[str, str]: - """Flush remaining buffer. Returns ``(visible, reasoning)``.""" - if not self._passed_cot: - # Stream ended mid-thinking (never saw ). - reasoning = self._buffer - self._buffer = "" - return "", reasoning - if self._mode == "think": - reasoning = self._buffer - self._buffer = "" - return "", reasoning - if self._mode == "toolcall": - self._buffer = "" - return "", "" - tail = self._buffer - self._buffer = "" - return tail, "" - - def feed(self, chunk: str) -> Tuple[str, str]: - """Process *chunk* and return ``(visible, reasoning)`` text.""" - if not chunk: - return "", "" - buf = self._buffer + chunk - reasoning_parts: List[str] = [] - - # Phase 1: skip initial CoT block for reasoner models, capturing it. - if not self._passed_cot: - # Strip opening tag once, before emitting any reasoning. - if not self._cot_open_stripped: - tag_pos = buf.find("") - if tag_pos != -1: - buf = buf[tag_pos + len(""):] - buf = buf.lstrip("\n") # drop leading newline after - self._cot_open_stripped = True - else: - # Haven't seen the full opening tag yet β€” could be split. - # Keep buffering without emitting anything as reasoning. - keep = len("") - 1 - self._buffer = buf[-keep:] if keep > 0 else "" - return "", "" - - end = buf.find("") - if end == -1: - keep = len("") - 1 - # Everything except the safety buffer is reasoning. - if len(buf) > keep: - text = buf[:-keep] if keep > 0 else buf - reasoning_parts.append(text) - self._buffer = buf[-keep:] if keep > 0 else "" - return "", "".join(reasoning_parts) - # Capture everything before as reasoning. 
- cot_text = buf[:end] - if cot_text: - reasoning_parts.append(cot_text) - buf = buf[end + len(""):] - self._passed_cot = True - - # Phase 2: state-machine pass over visible / think / toolcall segments. - out_parts: List[str] = [] - while buf: - if self._mode == "think": - end = buf.find("") - if end == -1: - keep = self._max_tag - 1 - if len(buf) > keep: - reasoning_parts.append(buf[:-keep]) - self._buffer = buf[-keep:] - return "".join(out_parts), "".join(reasoning_parts) - reasoning_parts.append(buf[:end]) - buf = buf[end + len(""):] - self._mode = "normal" - continue - - if self._mode == "toolcall": - end = buf.find("") - if end == -1: - self._buffer = buf[-(self._max_tag - 1):] - return "".join(out_parts), "".join(reasoning_parts) - buf = buf[end + len(""):] - self._mode = "normal" - continue - - next_think = buf.find("") - next_tool = buf.find("") - if next_think == -1 and next_tool == -1: - if len(buf) >= self._max_tag: - out_parts.append(buf[:-(self._max_tag - 1)]) - self._buffer = buf[-(self._max_tag - 1):] - else: - self._buffer = buf - return "".join(out_parts), "".join(reasoning_parts) - - if next_think == -1 or (next_tool != -1 and next_tool < next_think): - if next_tool > 0: - out_parts.append(buf[:next_tool]) - buf = buf[next_tool + len(""):] - self._mode = "toolcall" - continue - - if next_think > 0: - out_parts.append(buf[:next_think]) - buf = buf[next_think + len(""):] - self._mode = "think" - - self._buffer = "" - return "".join(out_parts), "".join(reasoning_parts) - - -class OpenAIProxy: - def __init__(self, grpc_address: str, include_debug: bool = False) -> None: - self.grpc_address = grpc_address - self.include_debug = include_debug - self.channel = grpc.insecure_channel(grpc_address) - self.stub = InferenceServiceStub(self.channel) - self.prompt_builder = AgentPromptBuilder() - - def build_request(self, payload: Dict[str, Any]) -> Tuple[IRequest, Model, bool, List[Tool], bool]: - model_name = payload.get("model") - model, is_reasoner = 
_resolve_model(self.stub, model_name) - - messages = list(payload.get("messages") or []) - tools_spec = list(payload.get("tools") or []) - tool_choice = payload.get("tool_choice") - tool_choice_name = None - if isinstance(tool_choice, dict): - fn = tool_choice.get("function") or {} - tool_choice_name = fn.get("name") - allow_tools = tool_choice != "none" - - tools = _build_tool_list(tools_spec) if allow_tools else [] - if tool_choice_name: - tools = [t for t in tools if t.name == tool_choice_name] - - if tools: - _apply_tool_prompt(messages, _tool_system_prompt(tools)) - - convo = _build_conversation(messages) - convo.model_uuid = model.uuid - - req = IRequest() - req.id = _gen_id("openai-proxy") - req.model_uuid = model.uuid - req.convo.CopyFrom(convo) - - if payload.get("max_tokens", 0) > 0: - req.cfg.max_tokens = int(payload["max_tokens"]) - else: - req.cfg.max_tokens = 16384 - if payload.get("temperature") is not None: - req.cfg.temp = float(payload["temperature"]) - if payload.get("top_p") is not None: - req.cfg.top_p = float(payload["top_p"]) - - response_format = payload.get("response_format") or {"type": "text"} - rf_type = response_format.get("type") if isinstance(response_format, dict) else "text" - - if tools: - if is_reasoner: - _build_tool_call_response_format(req, tools) - else: - _build_tool_call_response_format_non_reasoning(req, tools) - elif rf_type in {"json_schema", "json_object"}: - if rf_type == "json_schema": - schema = response_format.get("json_schema") - else: - schema = {"type": "object"} - if is_reasoner: - structural_tag = { - "type": "structural_tag", - "format": { - "type": "sequence", - "elements": [ - { - "type": "tag", - "begin": "", - "content": {"type": "any_text"}, - "end": THINK_TAGS[1], - }, - { - "type": "tag", - "begin": "", - "content": {"type": "json_schema", "json_schema": schema}, - "end": "", - }, - ], - }, - } - _set_structural_tag(req, structural_tag) - else: - req.cfg.response_format.format = ResponseFormat.JSON - 
req.cfg.response_format.schema = json.dumps(schema) - - stream = bool(payload.get("stream")) - return req, model, is_reasoner, tools, stream - - def run_sync(self, req: IRequest) -> Any: - return self.stub.GenerateSync(req) - - def run_stream(self, req: IRequest): - return self.stub.Generate(req) - - -class OpenAIProxyHandler(BaseHTTPRequestHandler): - server_version = "TruffleOpenAIProxy/0.1" - def _set_cors_headers(self) -> None: - self.send_header("Access-Control-Allow-Origin", "*") - self.send_header("Access-Control-Allow-Methods", "GET, POST, OPTIONS") - self.send_header("Access-Control-Allow-Headers", "Content-Type, Authorization") - - def _send_json(self, status: int, payload: Dict[str, Any]) -> None: - data = json.dumps(payload).encode("utf-8") - self.send_response(status) - self._set_cors_headers() - self.send_header("Content-Type", "application/json") - self.send_header("Content-Length", str(len(data))) - self.end_headers() - self.wfile.write(data) - def do_OPTIONS(self) -> None: - self.send_response(204) - self._set_cors_headers() - self.end_headers() - - def _read_body(self) -> Dict[str, Any]: - length = int(self.headers.get("Content-Length", "0")) - if length <= 0: - return {} - raw = self.rfile.read(length) - return json.loads(raw.decode("utf-8")) - - def _send_sse(self, payload: Dict[str, Any]) -> bool: - data = json.dumps(payload) - try: - self.wfile.write(f"data: {data}\n\n".encode("utf-8")) - self.wfile.flush() - except (BrokenPipeError, ConnectionResetError, OSError): - # Client disconnected; stop streaming gracefully. 
- self.close_connection = True - return False - return True - - def do_GET(self) -> None: - if self.path == "/health": - self._send_json(200, {"status": "ok"}) - return - if self.path in {"/v1/models", "/models"}: - proxy: OpenAIProxy = self.server.proxy # type: ignore[attr-defined] - models = _get_models(proxy.stub) - data = [ - {"id": m.uuid, "object": "model", "owned_by": m.provider or "truffle", "name": m.name} - for m in models - ] - self._send_json(200, {"object": "list", "data": data}) - return - if self.path.startswith("/v1/models/"): - proxy: OpenAIProxy = self.server.proxy # type: ignore[attr-defined] - model_id = self.path.split("/v1/models/", 1)[1] - models = _get_models(proxy.stub) - model = next((m for m in models if m.uuid == model_id or m.name == model_id), None) - if model is None: - self._send_json(404, {"error": {"message": "model not found", "type": "not_found_error"}}) - return - self._send_json( - 200, - { - "id": model.uuid, - "object": "model", - "owned_by": model.provider or "truffle", - "name": model.name, - }, - ) - return - self.send_error(404, "Not Found") - - def do_POST(self) -> None: - if self.path != "/v1/chat/completions": - self.send_error(404, "Not Found") - return - try: - payload = self._read_body() - except Exception as e: - print(f"\tError reading request body: {e}") - self._send_json(400, {"error": {"message": str(e), "type": "invalid_request_error"}}) - return - - proxy: OpenAIProxy = self.server.proxy # type: ignore[attr-defined] - - try: - req, model, is_reasoner, _tools, stream = proxy.build_request(payload) - except Exception as e: - print(f"\tError building request: {e}") - self._send_json(400, {"error": {"message": str(e), "type": "invalid_request_error"}}) - return - - if stream: - self.send_response(200) - self._set_cors_headers() - self.send_header("Content-Type", "text/event-stream; charset=utf-8") - self.send_header("Cache-Control", "no-cache, no-transform") - self.send_header("Connection", "keep-alive") - 
self.send_header("X-Accel-Buffering", "no") - self.end_headers() - - stream_id = _gen_id("chatcmpl") - created = _now_ts() - if not self._send_sse( - { - "id": stream_id, - "object": "chat.completion.chunk", - "created": created, - "model": model.name, - "choices": [ - {"index": 0, "delta": {"role": "assistant"}, "finish_reason": None} - ], - } - ): - return - - raw_content = "" - last_finish = None - filter_state = _StreamFilter(hide_cot=is_reasoner) - log_output = os.getenv("TRUFFLE_PROXY_LOG_STREAM_OUTPUT", "0") == "1" - if log_output: - print("Streaming output:") - for ir in proxy.run_stream(req): - raw_content += ir.content - if log_output: - print(ir.content, end="", flush=True) - if ir.HasField("finish_reason") and ir.finish_reason != FinishReason.FINISH_UNSPECIFIED: - last_finish = ir.finish_reason - visible, reasoning = filter_state.feed(ir.content) - delta: Dict[str, Any] = {} - if visible: - delta["content"] = visible - if reasoning: - delta["reasoning_content"] = reasoning - if delta: - if not self._send_sse( - { - "id": stream_id, - "object": "chat.completion.chunk", - "created": created, - "model": model.name, - "choices": [ - { - "index": 0, - "delta": delta, - "finish_reason": None, - } - ], - } - ): - return - tail, tail_reasoning = filter_state.finalize() - tail_delta: Dict[str, Any] = {} - if tail: - tail_delta["content"] = tail - if tail_reasoning: - tail_delta["reasoning_content"] = tail_reasoning - if tail_delta: - if not self._send_sse( - { - "id": stream_id, - "object": "chat.completion.chunk", - "created": created, - "model": model.name, - "choices": [ - { - "index": 0, - "delta": tail_delta, - "finish_reason": None, - } - ], - } - ): - return - _cot, after_cot = _safe_parse_cot(raw_content) - tool_calls, _clean = proxy.prompt_builder.extract_tool_calls(after_cot) - if tool_calls: - tc_list = [] - for i, tc in enumerate(tool_calls): - name = tc.get("tool") or "" - args = json.dumps(tc.get("args") or {}, separators=(",", ":")) - 
tc_list.append( - { - "id": f"call_{i+1}", - "type": "function", - "index": i, - "function": {"name": name, "arguments": args}, - } - ) - if not self._send_sse( - { - "id": stream_id, - "object": "chat.completion.chunk", - "created": created, - "model": model.name, - "choices": [ - { - "index": 0, - "delta": {"tool_calls": tc_list}, - "finish_reason": None, - } - ], - } - ): - return - finish_reason = _map_finish_reason(last_finish) or "stop" - if not self._send_sse( - { - "id": stream_id, - "object": "chat.completion.chunk", - "created": created, - "model": model.name, - "choices": [ - {"index": 0, "delta": {}, "finish_reason": finish_reason} - ], - } - ): - return - try: - self.wfile.write(b"data: [DONE]\n\n") - self.wfile.flush() - except (BrokenPipeError, ConnectionResetError, OSError): - self.close_connection = True - else: - self.close_connection = True - return - - resp = proxy.run_sync(req) - raw = resp.content - cot, after_cot = _safe_parse_cot(raw) - tool_calls, clean = proxy.prompt_builder.extract_tool_calls(after_cot) - message = clean_response(clean) - - finish_reason = _map_finish_reason(resp.finish_reason if resp.HasField("finish_reason") else None) - openai_tool_calls = [] - for i, tc in enumerate(tool_calls): - name = tc.get("tool") or "" - args = json.dumps(tc.get("args") or {}, separators=(",", ":")) - openai_tool_calls.append( - { - "id": f"call_{i+1}", - "type": "function", - "function": {"name": name, "arguments": args}, - } - ) - - msg: Dict[str, Any] = {"role": "assistant", "content": message} - if cot: - msg["reasoning_content"] = cot - if openai_tool_calls: - msg["tool_calls"] = openai_tool_calls - if not message: - msg["content"] = None - - response = { - "id": _gen_id("chatcmpl"), - "object": "chat.completion", - "created": _now_ts(), - "model": model.name, - "choices": [ - {"index": 0, "message": msg, "finish_reason": finish_reason} - ], - "usage": _usage_to_openai(resp.usage if resp.HasField("usage") else None), - } - - debug_req = 
def normalize_grpc_address(address: str, default_port: int = 80) -> str:
    """Resolve a '.local' mDNS hostname to an IP and ensure a ':port' suffix.

    Addresses already carrying a port are returned with that port intact;
    otherwise *default_port* is appended.

    Raises:
        RuntimeError: when mDNS resolution of a '.local' name fails.
    """
    import socket

    if ".local" in address:
        try:
            address = socket.gethostbyname(address)
        except socket.gaierror as e:
            raise RuntimeError(f"Failed to resolve mDNS address {address}: {e}")
    return address if ":" in address else f"{address}:{default_port}"
input_schema: Dict - display_name: str - - def schema_str(self, indent: int = 2) -> str: - return json.dumps(self.input_schema, indent=indent) - - def get_for_system_prompt(self) -> str: - return f"{self.name}: {self.description}\nArg Schema: {self.schema_str(indent=2)}" diff --git a/truffile/schema/__init__.py b/truffile/schema/__init__.py new file mode 100644 index 0000000..278b425 --- /dev/null +++ b/truffile/schema/__init__.py @@ -0,0 +1,4 @@ +from .app_config import validate_app_dir +from .runtime_policy import parse_runtime_policy + +__all__ = ["validate_app_dir", "parse_runtime_policy"] diff --git a/truffile/schema/app_config.py b/truffile/schema/app_config.py new file mode 100644 index 0000000..29bdd3e --- /dev/null +++ b/truffile/schema/app_config.py @@ -0,0 +1,178 @@ +from __future__ import annotations + +import ast +import re +from pathlib import Path +from typing import Any + +import yaml + + +def _check_python_syntax(file_path: Path) -> tuple[bool, str]: + try: + source = file_path.read_text(encoding="utf-8") + ast.parse(source) + return True, "" + except SyntaxError as e: + return False, f"Line {e.lineno}: {e.msg}" + + +_ENV_KEY_RE = re.compile(r"^[A-Za-z_][A-Za-z0-9_]*$") + + +def _validate_process_cfg( + process: Any, + *, + path: str, + warnings: list[str], + errors: list[str], +) -> None: + if not isinstance(process, dict): + errors.append(f"{path} must be an object") + return + + cmd = process.get("cmd") + if not isinstance(cmd, list) or len(cmd) == 0: + errors.append(f"{path}.cmd must be a non-empty list") + elif not all(isinstance(v, str) and v.strip() for v in cmd): + errors.append(f"{path}.cmd must be list[str] with non-empty values") + + for key in ("working_directory", "cwd"): + if key in process and not isinstance(process.get(key), str): + errors.append(f"{path}.{key} must be a string") + + env_obj = process.get("environment", process.get("env")) + if env_obj is None: + return + if not isinstance(env_obj, dict): + 
errors.append(f"{path}.environment must be a map") + return + for k, v in env_obj.items(): + if not isinstance(k, str): + errors.append(f"{path}.environment keys must be strings") + continue + if not _ENV_KEY_RE.match(k): + warnings.append(f"{path}.environment key '{k}' is non-standard") + if not isinstance(v, str): + errors.append(f"{path}.environment['{k}'] must be a string") + + +def validate_app_dir(app_dir: Path) -> tuple[bool, dict[str, Any] | None, str | None, list[str], list[str]]: + """Validate app directory and return (valid, config, app_type, warnings, errors).""" + warnings: list[str] = [] + errors: list[str] = [] + + truffile_path = app_dir / "truffile.yaml" + if not truffile_path.exists(): + errors.append(f"No truffile.yaml found in {app_dir}") + return False, None, None, warnings, errors + + try: + config = yaml.safe_load(truffile_path.read_text(encoding="utf-8")) + except yaml.YAMLError as e: + errors.append(f"Invalid truffile.yaml: {e}") + return False, None, None, warnings, errors + + if not isinstance(config, dict): + errors.append("truffile.yaml root must be a mapping") + return False, None, None, warnings, errors + + meta = config.get("metadata", {}) + if not isinstance(meta, dict): + errors.append("metadata must be a mapping") + return False, None, None, warnings, errors + + if not meta.get("name"): + errors.append("metadata.name is required in truffile.yaml") + return False, None, None, warnings, errors + + fg_cfg = meta.get("foreground") + bg_cfg = meta.get("background") + has_fg_cfg = isinstance(fg_cfg, dict) + has_bg_cfg = isinstance(bg_cfg, dict) + if has_fg_cfg or has_bg_cfg: + if has_fg_cfg and has_bg_cfg: + app_type = "hybrid" + elif has_fg_cfg: + app_type = "focus" + else: + app_type = "ambient" + else: + cfg_type = str(meta.get("type", "")).lower().strip() + if cfg_type in ("background", "ambient"): + app_type = "ambient" + elif cfg_type in ("foreground", "focus"): + app_type = "focus" + else: + app_type = "focus" + 
def validate_app_dir(app_dir: Path) -> tuple[bool, dict[str, Any] | None, str | None, list[str], list[str]]:
    """Validate app directory and return (valid, config, app_type, warnings, errors).

    Loads ``truffile.yaml``, derives the app type ("focus", "ambient" or
    "hybrid"), validates the foreground/background/legacy process configs,
    and syntax-checks every uploaded ``.py`` file. ``valid`` is True iff no
    errors were collected; warnings never fail validation.
    """
    warnings: list[str] = []
    errors: list[str] = []

    truffile_path = app_dir / "truffile.yaml"
    if not truffile_path.exists():
        errors.append(f"No truffile.yaml found in {app_dir}")
        return False, None, None, warnings, errors

    try:
        config = yaml.safe_load(truffile_path.read_text(encoding="utf-8"))
    except yaml.YAMLError as e:
        errors.append(f"Invalid truffile.yaml: {e}")
        return False, None, None, warnings, errors

    if not isinstance(config, dict):
        errors.append("truffile.yaml root must be a mapping")
        return False, None, None, warnings, errors

    meta = config.get("metadata", {})
    if not isinstance(meta, dict):
        errors.append("metadata must be a mapping")
        return False, None, None, warnings, errors

    if not meta.get("name"):
        errors.append("metadata.name is required in truffile.yaml")
        return False, None, None, warnings, errors

    # App type: explicit foreground/background sections take precedence over
    # the legacy metadata.type string; missing both defaults to "focus".
    fg_cfg = meta.get("foreground")
    bg_cfg = meta.get("background")
    has_fg_cfg = isinstance(fg_cfg, dict)
    has_bg_cfg = isinstance(bg_cfg, dict)
    if has_fg_cfg or has_bg_cfg:
        if has_fg_cfg and has_bg_cfg:
            app_type = "hybrid"
        elif has_fg_cfg:
            app_type = "focus"
        else:
            app_type = "ambient"
    else:
        cfg_type = str(meta.get("type", "")).lower().strip()
        if cfg_type in ("background", "ambient"):
            app_type = "ambient"
        elif cfg_type in ("foreground", "focus"):
            app_type = "focus"
        else:
            app_type = "focus"
            warnings.append("No type specified in truffile.yaml, defaulting to focus")

    if "bundle_id" not in meta:
        warnings.append("No metadata.bundle_id specified; using derived default from metadata.name")

    if has_fg_cfg:
        process = fg_cfg.get("process")
        _validate_process_cfg(
            process,
            path="metadata.foreground.process",
            warnings=warnings,
            errors=errors,
        )

    if has_bg_cfg:
        process = bg_cfg.get("process")
        _validate_process_cfg(
            process,
            path="metadata.background.process",
            warnings=warnings,
            errors=errors,
        )
        # A schedule is mandatory for explicit background sections.
        if not isinstance(bg_cfg.get("default_schedule"), dict):
            errors.append("metadata.background.default_schedule must be an object")
    if not has_fg_cfg and not has_bg_cfg:
        # Legacy single-process layout: process/schedule live directly on metadata.
        process = meta.get("process")
        _validate_process_cfg(
            process,
            path="metadata.process",
            warnings=warnings,
            errors=errors,
        )
        if app_type == "ambient" and "default_schedule" in meta and not isinstance(meta.get("default_schedule"), dict):
            errors.append("metadata.default_schedule must be an object when provided")

    # Icon is optional but always warned about so packagers notice.
    icon_file = meta.get("icon_file")
    if icon_file:
        icon_path = app_dir / str(icon_file)
        if not icon_path.exists():
            warnings.append(f"Icon file not found: {icon_file}")
    else:
        warnings.append("No icon specified in truffile.yaml")

    # Collect file entries from both "files"-type steps and the top-level list.
    files_to_check: list[dict[str, Any]] = []
    for step in config.get("steps", []):
        if isinstance(step, dict) and step.get("type") == "files":
            step_files = step.get("files", [])
            if isinstance(step_files, list):
                files_to_check.extend([f for f in step_files if isinstance(f, dict)])

    top_files = config.get("files", [])
    if isinstance(top_files, list):
        files_to_check.extend([f for f in top_files if isinstance(f, dict)])

    for f in files_to_check:
        source = f.get("source")
        if not isinstance(source, str):
            errors.append("files entries must include a string 'source'")
            continue

        src = app_dir / source
        if not src.exists():
            errors.append(f"Source file not found: {src}")
            continue

        # Catch syntax errors locally rather than after an upload round-trip.
        if src.suffix == ".py":
            ok, err = _check_python_syntax(src)
            if not ok:
                errors.append(f"Syntax error in {src.name}: {err}")

    return len(errors) == 0, config, app_type, warnings, errors
        # (completes the `if` cut at the previous line) only .py sources get a syntax check
        if src.suffix == ".py":
            ok, err = _check_python_syntax(src)
            if not ok:
                errors.append(f"Syntax error in {src.name}: {err}")

    # Valid iff no hard errors were collected; warnings are advisory only.
    return len(errors) == 0, config, app_type, warnings, errors


# ==== new file in patch: truffile/schema/runtime_policy.py ====
# Translates the YAML `default_schedule` config into a
# BackgroundAppRuntimePolicy protobuf message.

import re
from typing import Any, Dict, List, Optional, Tuple
from google.protobuf.duration_pb2 import Duration
from truffle.app import background_pb2

# Bit index per weekday inside the proto's 7-bit day_mask.
# NOTE(review): sat=0 ... sun=6 ordering is device-defined — confirm it matches
# the BackgroundAppRuntimePolicy proto's weekly_window semantics.
_DAY_BIT = {
    "sat": 0,
    "fri": 1,
    "thu": 2,
    "wed": 3,
    "tue": 4,
    "mon": 5,
    "sun": 6,
}

# Accepts "HH:MM" or "HH:MM:SS", with optional surrounding whitespace.
_TIME_RE = re.compile(r"^\s*(\d{1,2}):(\d{2})(?::(\d{2}))?\s*$")

def _parse_time_of_day(s: str, *, ctx: str):
    """Parse 'HH:MM[:SS]' into an (hour, minute, second) int tuple.

    Raises ValueError (message prefixed with *ctx*) on malformed or
    out-of-range input.
    """
    m = _TIME_RE.match(s or "")  # `s or ""` tolerates None input
    if not m:
        raise ValueError(f"{ctx}: invalid time '{s}', expected HH:MM or HH:MM:SS")
    hh = int(m.group(1))
    mm = int(m.group(2))
    ss = int(m.group(3) or "0")  # seconds are optional, default 0
    if not (0 <= hh <= 23): raise ValueError(f"{ctx}: hour out of range: {hh}")
    if not (0 <= mm <= 59): raise ValueError(f"{ctx}: minute out of range: {mm}")
    if not (0 <= ss <= 59): raise ValueError(f"{ctx}: second out of range: {ss}")
    return hh, mm, ss

def _set_time_of_day(msg_time_of_day, s: str, *, ctx: str) -> None:
    """Parse *s* and write hour/minute/second fields onto a TimeOfDay-style proto message."""
    hh, mm, ss = _parse_time_of_day(s, ctx=ctx)
    msg_time_of_day.hour = hh
    msg_time_of_day.minute = mm
    msg_time_of_day.second = ss

def _parse_daily_window(v: Any, *, ctx: str) -> Optional[Tuple[str, str]]:
    """Normalize a daily_window config value to validated (start, end) time strings.

    Accepts None (no window), 'HH:MM-HH:MM[:SS]', or {'start': ..., 'end': ...}.
    Returns the raw strings; proto encoding happens later via _set_time_of_day.
    """
    if v is None:
        return None
    if isinstance(v, str):
        parts = v.split("-", 1)
        if len(parts) != 2:
            raise ValueError(f"{ctx}: daily_window must be 'HH:MM-HH:MM[:SS]'")
        start_s = parts[0].strip()
        end_s = parts[1].strip()
        # validate both endpoints eagerly so errors carry the config path
        _parse_time_of_day(start_s, ctx=f"{ctx}.daily_window start")
        _parse_time_of_day(end_s, ctx=f"{ctx}.daily_window end")
        return start_s, end_s
    if isinstance(v, dict):
        start_s = v.get("start")
        end_s = v.get("end")
        if not isinstance(start_s, str) or not isinstance(end_s, str):
            # (body of the isinstance check cut at the previous line)
            raise ValueError(f"{ctx}: daily_window dict must have string start/end")
        _parse_time_of_day(start_s, ctx=f"{ctx}.daily_window start")
        _parse_time_of_day(end_s, ctx=f"{ctx}.daily_window end")
        return start_s, end_s
    raise ValueError(f"{ctx}: daily_window must be string or object")

def _day_mask_from_allowed_days(days: List[str], *, ctx: str) -> int:
    """Build the proto day_mask (bit set = day FORBIDDEN) from an allow-list.

    Day names are matched case-insensitively on their first three letters.
    Raises ValueError on unknown names or an empty list.
    """
    forbidden = 0
    allowed_bits = set()
    for d in days:
        if not isinstance(d, str):
            raise ValueError(f"{ctx}: day entries must be strings")
        k = d.strip().lower()[:3]  # "Monday" -> "mon"
        if k not in _DAY_BIT:
            raise ValueError(f"{ctx}: unknown day '{d}' (use sun/mon/tue/wed/thu/fri/sat)")
        allowed_bits.add(_DAY_BIT[k])
    if not allowed_bits:
        raise ValueError(f"{ctx}: allowed_days cannot be empty")
    # invert: every day NOT allowed becomes a forbidden bit
    for k, bit in _DAY_BIT.items():
        if bit not in allowed_bits:
            forbidden |= (1 << bit)
    return forbidden

def _day_mask_from_forbidden_days(days: List[str], *, ctx: str) -> int:
    """Build the proto day_mask directly from a forbid-list.

    Rejects masks that would forbid all seven days (the app could never run).
    """
    forbidden = 0
    for d in days:
        if not isinstance(d, str):
            raise ValueError(f"{ctx}: day entries must be strings")
        k = d.strip().lower()[:3]
        if k not in _DAY_BIT:
            raise ValueError(f"{ctx}: unknown day '{d}' (use sun/mon/tue/wed/thu/fri/sat)")
        forbidden |= (1 << _DAY_BIT[k])
    if forbidden == 0b1111111:
        raise ValueError(f"{ctx}: forbidden_days forbids all days (invalid)")
    return forbidden

# Integer magnitude followed by a single unit suffix, e.g. "15m", "2h", "500ms".
_DUR_RE = re.compile(r"^\s*(\d+)\s*(ms|s|m|h|d)\s*$", re.IGNORECASE)

def _parse_duration(s: str, *, ctx: str) -> Duration:
    """Parse a human duration string into a protobuf Duration.

    Only ms goes into nanos; all other units are whole seconds.
    """
    if not isinstance(s, str):
        raise ValueError(f"{ctx}: duration must be a string like '15m' or '2h'")
    m = _DUR_RE.match(s)
    if not m:
        raise ValueError(f"{ctx}: invalid duration '{s}' (use ms/s/m/h/d)")
    n = int(m.group(1))
    unit = m.group(2).lower()
    seconds = 0
    nanos = 0
    if unit == "ms":
        seconds = n // 1000
        nanos = (n % 1000) * 1_000_000
    elif unit == "s":
        seconds = n
    elif unit == "m":
        seconds = n * 60
    elif unit == "h":
        seconds = n * 3600
    elif unit == "d":
        # (body of the `elif unit == "d":` cut at the previous line)
        seconds = n * 86400
    dur = Duration()
    dur.seconds = seconds
    dur.nanos = nanos
    return dur


def parse_runtime_policy(schedule_cfg_data: Dict[str, Any]) -> background_pb2.BackgroundAppRuntimePolicy:
    """Convert a `default_schedule` config dict into a BackgroundAppRuntimePolicy.

    Supported `type` values: "always" (run continuously), "interval"
    (run every N, optionally restricted by a weekly/daily window), and
    "times" (run at fixed times of day). Raises ValueError on any
    malformed or inconsistent configuration.
    """
    if not isinstance(schedule_cfg_data, dict):
        raise ValueError("default_schedule must be an object")

    policy_type = schedule_cfg_data.get("type")
    if policy_type not in ("interval", "times", "always"):
        raise ValueError(f"Invalid default_schedule.type: {policy_type}")

    runtime_policy = background_pb2.BackgroundAppRuntimePolicy()

    if policy_type == "always":
        # SetInParent marks the empty `always` submessage as present (oneof selection)
        runtime_policy.always.SetInParent()
        return runtime_policy

    if policy_type == "interval":
        interval_obj = schedule_cfg_data.get("interval")
        if not isinstance(interval_obj, dict):
            raise ValueError("default_schedule.interval must be an object")

        dur_s = interval_obj.get("duration", None)
        if not isinstance(dur_s, str):
            raise ValueError("default_schedule.interval.duration must be a string")
        runtime_policy.interval.duration.CopyFrom(_parse_duration(dur_s, ctx="default_schedule.interval.duration"))

        # `schedule` is optional; treat explicit null the same as absent
        sched = interval_obj.get("schedule", {})
        if sched is None:
            sched = {}
        if not isinstance(sched, dict):
            raise ValueError("default_schedule.interval.schedule must be an object")

        allowed_days = sched.get("allowed_days")
        forbidden_days = sched.get("forbidden_days")
        # the two day-list styles are mutually exclusive
        if allowed_days is not None and forbidden_days is not None:
            raise ValueError("Provide only one of schedule.allowed_days or schedule.forbidden_days")

        if allowed_days is not None:
            if not isinstance(allowed_days, list):
                raise ValueError("schedule.allowed_days must be a list")
            runtime_policy.interval.schedule.weekly_window.day_mask = _day_mask_from_allowed_days(
                allowed_days, ctx="default_schedule.interval.schedule.allowed_days"
            )
        elif forbidden_days is not None:
            if not isinstance(forbidden_days, list):
                raise ValueError("schedule.forbidden_days must be a list")
            runtime_policy.interval.schedule.weekly_window.day_mask = _day_mask_from_forbidden_days(
                forbidden_days, ctx="default_schedule.interval.schedule.forbidden_days"
            )
        else:
            # 0 = no day restriction (no bit forbidden)
            runtime_policy.interval.schedule.weekly_window.day_mask = 0

        dw = _parse_daily_window(sched.get("daily_window"), ctx="default_schedule.interval.schedule")
        if dw is not None:
            start_s, end_s = dw
            # mark the submessage present even before fields are set
            runtime_policy.interval.schedule.daily_window.SetInParent()
            _set_time_of_day(runtime_policy.interval.schedule.daily_window.daily_start_time, start_s,
                             ctx="default_schedule.interval.schedule.daily_window.start")
            _set_time_of_day(runtime_policy.interval.schedule.daily_window.daily_end_time, end_s,
                             ctx="default_schedule.interval.schedule.daily_window.end")

        return runtime_policy

    if policy_type == "times":
        times_obj = schedule_cfg_data.get("times")
        if not isinstance(times_obj, dict):
            raise ValueError("default_schedule.times must be an object")

        run_times = times_obj.get("run_times")
        if not isinstance(run_times, list) or not run_times:
            raise ValueError("default_schedule.times.run_times must be a non-empty list of time strings")

        for i, t in enumerate(run_times):
            if not isinstance(t, str):
                raise ValueError("default_schedule.times.run_times must contain strings")
            tod = runtime_policy.times.run_times.add()
            _set_time_of_day(tod, t, ctx=f"default_schedule.times.run_times[{i}]")

        allowed_days = times_obj.get("allowed_days")
        forbidden_days = times_obj.get("forbidden_days")
        # same mutual-exclusion rule as the interval branch
        if allowed_days is not None and forbidden_days is not None:
            raise ValueError("Provide only one of times.allowed_days or times.forbidden_days")

        if allowed_days is not None:
            if not isinstance(allowed_days, list):
                raise ValueError("times.allowed_days must be a list")
            runtime_policy.times.weekly_window.day_mask = _day_mask_from_allowed_days(
                allowed_days, ctx="default_schedule.times.allowed_days"
            )
        elif forbidden_days is not None:
            if not isinstance(forbidden_days, list):
                # (body of the isinstance check cut at the previous line)
                raise ValueError("times.forbidden_days must be a list")
            runtime_policy.times.weekly_window.day_mask = _day_mask_from_forbidden_days(
                forbidden_days, ctx="default_schedule.times.forbidden_days"
            )
        else:
            runtime_policy.times.weekly_window.day_mask = 0

        return runtime_policy

    # defensive: every policy_type was handled above
    raise RuntimeError("unreachable")


# ==== new file in patch: truffile/transport/__init__.py ====
# Public surface of the transport package.
from .client import (
    ExecResult,
    NewSessionStatus,
    TruffleClient,
    UploadResult,
    resolve_mdns,
)

__all__ = [
    "ExecResult",
    "NewSessionStatus",
    "TruffleClient",
    "UploadResult",
    "resolve_mdns",
]


# ==== new file in patch: truffile/transport/client.py ====
# Async client for a TruffleOS device: gRPC control plane plus an HTTP/SSE
# channel into the build container.
import asyncio
import json
import platform
import socket
from dataclasses import dataclass
from pathlib import Path
from typing import AsyncIterator
import grpc
from grpc import aio
import httpx
from google.protobuf import empty_pb2
from truffle.os.truffleos_pb2_grpc import TruffleOSStub
from truffle.os.builder_pb2 import (
    StartBuildSessionRequest,
    StartBuildSessionResponse,
    FinishBuildSessionRequest,
    FinishBuildSessionResponse,
)
from truffle.os.client_session_pb2 import (
    RegisterNewSessionRequest,
    RegisterNewSessionResponse,
    NewSessionStatus,
)
from truffle.os.client_metadata_pb2 import ClientMetadata
from truffle.os.app_queries_pb2 import GetAllAppsRequest, GetAllAppsResponse, DeleteAppRequest, DeleteAppResponse
from truffle.app.app_pb2 import App
from truffle.app.background_pb2 import BackgroundApp, BackgroundAppRuntimePolicy
# NOTE(review): this patch adds parse_runtime_policy in
# truffile/schema/runtime_policy.py, yet imports it from truffile.schedule —
# confirm a truffile/schedule module exists and re-exports it, else this
# import fails at runtime.
from truffile.schedule import parse_runtime_policy


def get_client_metadata() -> ClientMetadata:
    """Build the ClientMetadata proto identifying this truffile client."""
    # local import — presumably avoids a circular import at module load; confirm
    from truffile import __version__
    metadata = ClientMetadata()
    metadata.device = platform.node()
    # (continues get_client_metadata cut at the previous line)
    metadata.platform = platform.platform()
    metadata.version = f"truffile-{__version__}-{platform.python_version()}"
    return metadata


async def resolve_mdns(hostname: str) -> str:
    """Resolve an mDNS (.local) hostname to an IP; pass other names through.

    Raises RuntimeError with a user-facing hint when resolution fails.
    """
    # NOTE(review): substring test also matches names merely containing
    # ".local" (e.g. "a.local.example"); endswith(".local") may be the
    # intent — confirm.
    if ".local" not in hostname:
        return hostname
    loop = asyncio.get_event_loop()
    try:
        # gethostbyname blocks, so run it on the default executor
        resolved = await loop.run_in_executor(None, socket.gethostbyname, hostname)
        return resolved
    except socket.gaierror as e:
        raise RuntimeError(f"Failed to resolve {hostname} - is the device on the same network? ({e})")


@dataclass
class ExecResult:
    # exit code reported by the remote command
    exit_code: int
    # collected log lines, in arrival order
    output: list[str]


@dataclass
class UploadResult:
    # remote path, byte count, and content hash echoed back by the device
    path: str
    bytes: int
    sha256: str


class TruffleClient:
    """gRPC + HTTP client for one TruffleOS device and one build session."""

    def __init__(self, address: str, token: str):
        self.address = address
        self.token = token
        self.channel: aio.Channel | None = None
        self.stub: TruffleOSStub | None = None
        # set by start_build(); cleared on finish/discard
        self.app_uuid: str | None = None
        self.access_path: str | None = None

    @property
    def http_base(self) -> str | None:
        """Base URL of the build container's HTTP API, or None outside a session."""
        if not self.access_path:
            return None
        host = self.address if "://" in self.address else f"http://{self.address}"
        return f"{host}/containers/{self.access_path}"

    @property
    def _metadata(self) -> list:
        # gRPC metadata carrying the session token on every authed call
        return [("session", self.token)]

    async def connect(self, timeout: float = 15.0):
        """Open the gRPC channel and wait (up to *timeout*) for readiness."""
        self.channel = aio.insecure_channel(self.address)
        await asyncio.wait_for(self.channel.channel_ready(), timeout=timeout)
        self.stub = TruffleOSStub(self.channel)

    def update_token(self, token: str):
        self.token = token

    async def check_auth(self) -> bool:
        """Return True iff the current token is accepted by the device.

        UNAUTHENTICATED maps to False; any other RPC error propagates.
        """
        if not self.stub or not self.token:
            return False
        try:
            await self.stub.System_GetInfo(empty_pb2.Empty(), metadata=self._metadata)
            return True
        except aio.AioRpcError as e:
            if e.code() == grpc.StatusCode.UNAUTHENTICATED:
                return False
            raise

    async def register_new_session(self, user_id: str) -> tuple[NewSessionStatus, str | None]:
        """Register this client with the device; returns (status, token-or-None)."""
        if not self.stub:
            raise RuntimeError("not connected")
        req = RegisterNewSessionRequest()
        req.user_id = user_id
        # (continues register_new_session cut at the previous line)
        req.metadata.CopyFrom(get_client_metadata())
        # note: intentionally NOT sending self._metadata — there is no token yet
        resp: RegisterNewSessionResponse = await self.stub.Client_RegisterNewSession(req)
        if resp.status.error == NewSessionStatus.NEW_SESSION_SUCCESS:
            self.token = resp.token
            return resp.status, resp.token
        return resp.status, None

    async def get_all_apps(self) -> list[App]:
        """List every app installed on the device."""
        if not self.stub:
            raise RuntimeError("not connected")
        req = GetAllAppsRequest()
        resp: GetAllAppsResponse = await self.stub.Apps_GetAll(req, metadata=self._metadata)
        return list(resp.apps)

    async def delete_app(self, app_uuid: str) -> DeleteAppResponse:
        """Delete the app identified by *app_uuid* from the device."""
        if not self.stub:
            raise RuntimeError("not connected")
        req = DeleteAppRequest()
        req.app_uuid = app_uuid
        resp: DeleteAppResponse = await self.stub.Apps_DeleteApp(req, metadata=self._metadata)
        return resp

    async def start_build(self) -> StartBuildSessionResponse:
        """Open a build session; records app_uuid and the container access path."""
        if not self.stub:
            raise RuntimeError("not connected")
        req = StartBuildSessionRequest()
        resp: StartBuildSessionResponse = await self.stub.Builder_StartBuildSession(
            req, metadata=self._metadata
        )
        self.app_uuid = resp.app_uuid
        self.access_path = resp.access_path
        return resp

    @staticmethod
    def _build_bundle_id(name: str) -> str:
        """Derive a dotted bundle id from a display name (fallback "truffle.app")."""
        # non-alphanumerics become dots, then runs of dots are collapsed
        raw = "".join(ch.lower() if ch.isalnum() else "." for ch in name).strip(".")
        # (continues _build_bundle_id cut at the previous line)
        normalized = ".".join([part for part in raw.split(".") if part])
        return normalized or "truffle.app"

    def _apply_metadata(
        self,
        *,
        req: FinishBuildSessionRequest,
        name: str,
        bundle_id: str | None,
        description: str,
        icon: str | Path | bytes | None,
    ) -> None:
        """Fill req.metadata from user-supplied fields (bundle_id derived if absent)."""
        req.metadata.name = name
        req.metadata.bundle_id = (bundle_id or self._build_bundle_id(name)).strip()
        if description:
            req.metadata.description = description
        icon_data = self._load_icon(icon)
        if icon_data:
            req.metadata.icon.png_data = icon_data

    @staticmethod
    def _apply_process(process_pb, *, cmd: str, args: list[str], cwd: str, env: list[str] | None) -> None:
        """Copy a process spec (cmd/args/env/cwd) onto a process proto message."""
        process_pb.cmd = cmd
        process_pb.args.extend(args)
        if env:
            process_pb.env.extend(env)
        process_pb.cwd = cwd

    async def _sse_events(self, client: httpx.AsyncClient, url: str, body: dict) -> AsyncIterator[tuple[str, str]]:
        """POST *body* to *url* and yield (event, data) pairs from the SSE response.

        Implements the server-sent-events framing: blank line flushes an
        event, ':' lines are comments, multiple data: lines are joined
        with newlines. A trailing unterminated event is flushed at EOF.
        """
        async with client.stream("POST", url, json=body, timeout=None) as r:
            r.raise_for_status()
            event = "message"
            data_parts = []
            async for raw in r.aiter_lines():
                if raw is None:
                    continue
                line = raw.rstrip("\r")
                if line == "":
                    # dispatch boundary: emit accumulated event, reset state
                    if data_parts:
                        yield event, "\n".join(data_parts)
                    event, data_parts = "message", []
                    continue
                if line.startswith(":"):
                    continue  # SSE comment/keep-alive
                if line.startswith("event:"):
                    event = line[6:].strip()
                elif line.startswith("data:"):
                    data_parts.append(line[5:].lstrip())
            if data_parts:
                yield event, "\n".join(data_parts)

    async def exec(self, cmd: str, cwd: str = "/") -> ExecResult:
        """Run a shell command in the build container, collecting all output.

        Retries up to 5 times on HTTP 503 (container warming up) with a
        linearly growing backoff.
        """
        if not self.http_base:
            raise RuntimeError("no active build session")
        url = f"{self.http_base}/exec/stream"
        # NOTE(review): cwd/cmd are interpolated unquoted into a bash -lc
        # string; a cwd containing spaces or metacharacters will break or
        # inject — confirm inputs are trusted.
        body = {"cmd": ["bash", "-lc", f"cd {cwd} && {cmd}"], "cwd": cwd}
        output = []
        exit_code = 0
        retries = 5
        backoff = 1.0
        async with httpx.AsyncClient(timeout=None) as client:
            for attempt in range(retries):
                try:
                    async for ev, data in self._sse_events(client, url, body):
                        if ev == "log":
                            try:
                                obj = json.loads(data)
                                # (continues exec's log handling cut at the previous line)
                                line = obj.get("line", "")
                            except Exception:
                                line = data  # non-JSON payload: keep raw text
                            output.append(line)
                        elif ev == "exit":
                            try:
                                exit_code = int(json.loads(data).get("code", 0))
                            except Exception:
                                pass  # malformed exit event: keep last known code
                    return ExecResult(exit_code=exit_code, output=output)
                except httpx.HTTPStatusError as e:
                    # 503 while the container spins up: back off and retry
                    if e.response.status_code == 503 and attempt < retries - 1:
                        await asyncio.sleep(backoff * (attempt + 1))
                        continue
                    raise
        return ExecResult(exit_code=exit_code, output=output)

    async def exec_stream(self, cmd: str, cwd: str = "/") -> AsyncIterator[tuple[str, str]]:
        """Like exec(), but yield (event, data) pairs as they arrive.

        Retries on 503 only; note a retry after events were already
        yielded would replay the stream from the start.
        """
        if not self.http_base:
            raise RuntimeError("no active build session")
        url = f"{self.http_base}/exec/stream"
        body = {"cmd": ["bash", "-lc", f"cd {cwd} && {cmd}"], "cwd": cwd}
        retries = 5
        backoff = 1.0
        async with httpx.AsyncClient(timeout=None) as client:
            for attempt in range(retries):
                try:
                    async for ev, data in self._sse_events(client, url, body):
                        yield ev, data
                    return
                except httpx.HTTPStatusError as e:
                    if e.response.status_code == 503 and attempt < retries - 1:
                        await asyncio.sleep(backoff * (attempt + 1))
                        continue
                    raise

    async def upload(self, src: str | Path, dest: str) -> UploadResult:
        """Upload local file *src* to container path *dest* (multipart POST).

        Raises FileNotFoundError for a missing/non-regular source file;
        retries on HTTP 503 like exec().
        """
        if not self.http_base:
            raise RuntimeError("no active build session")
        path = Path(src).expanduser()
        if not path.exists() or not path.is_file():
            raise FileNotFoundError(f"no such file: {path}")
        url = f"{self.http_base}/upload"
        retries = 5
        backoff = 1.0
        async with httpx.AsyncClient(timeout=None) as client:
            for attempt in range(retries):
                try:
                    # reopen per attempt so a retry reads from the start
                    with path.open("rb") as fh:
                        files = {"file": (path.name, fh)}
                        r = await client.post(url, params={"path": dest}, files=files)
                        r.raise_for_status()
                        data = r.json()
                        return UploadResult(
                            path=data.get("path", ""),
                            bytes=data.get("bytes", 0),
                            sha256=data.get("sha256", ""),
                        )
                except httpx.HTTPStatusError as e:
                    if e.response.status_code == 503 and attempt < retries - 1:
                        await asyncio.sleep(backoff * (attempt + 1))
                        # (continues upload's retry handler cut at the previous line)
                        continue
                    raise
        raise RuntimeError("upload failed after retries")

    def _load_icon(self, icon: str | Path | bytes | None) -> bytes | None:
        """Coerce an icon argument (path or raw bytes) to PNG bytes, else None."""
        if icon is None:
            return None
        if isinstance(icon, bytes):
            return icon
        path = Path(icon).expanduser()
        if path.exists() and path.is_file():
            return path.read_bytes()
        # unreadable path: silently drop the icon rather than fail the build
        return None

    async def finish_foreground(
        self,
        name: str,
        bundle_id: str | None,
        cmd: str,
        args: list[str],
        cwd: str = "/",
        env: list[str] | None = None,
        description: str = "",
        icon: str | Path | bytes | None = None,
    ) -> FinishBuildSessionResponse:
        """Convenience wrapper: finish the session as a foreground-only app."""
        return await self.finish_app(
            name=name,
            bundle_id=bundle_id,
            description=description,
            icon=icon,
            foreground={
                "cmd": cmd,
                "args": args,
                "cwd": cwd,
                "env": env or [],
            },
            background=None,
            default_schedule=None,
        )

    async def finish_background(
        self,
        name: str,
        bundle_id: str | None,
        cmd: str,
        args: list[str],
        cwd: str = "/",
        env: list[str] | None = None,
        description: str = "",
        icon: str | Path | bytes | None = None,
        default_schedule: dict | None = None,
    ) -> FinishBuildSessionResponse:
        """Convenience wrapper: finish the session as a background-only app."""
        return await self.finish_app(
            name=name,
            bundle_id=bundle_id,
            description=description,
            icon=icon,
            foreground=None,
            background={
                "cmd": cmd,
                "args": args,
                "cwd": cwd,
                "env": env or [],
            },
            default_schedule=default_schedule,
        )

    async def finish_app(
        self,
        *,
        name: str,
        bundle_id: str | None,
        description: str = "",
        icon: str | Path | bytes | None = None,
        foreground: dict | None,
        background: dict | None,
        default_schedule: dict | None,
    ) -> FinishBuildSessionResponse:
        """Commit the current build session as an installed app.

        At least one of *foreground*/*background* (process-spec dicts with
        cmd/args/cwd/env keys) is required. Raises RuntimeError if no
        session is active or the device reports a finish error.
        """
        if not self.stub or not self.app_uuid:
            raise RuntimeError("no active build session")
        if foreground is None and background is None:
            raise ValueError("finish_app requires foreground and/or background config")

        req = FinishBuildSessionRequest()
        req.app_uuid = self.app_uuid
        req.discard = False
        self._apply_metadata(
            req=req,
            name=name,
            bundle_id=bundle_id,
            description=description,
            icon=icon,
        )
        # (continues finish_app cut at the previous line)
        if foreground is not None:
            self._apply_process(
                req.foreground.process,
                cmd=foreground["cmd"],
                args=list(foreground.get("args", [])),
                cwd=foreground.get("cwd", "/"),
                env=list(foreground.get("env", [])),
            )

        if background is not None:
            self._apply_process(
                req.background.process,
                cmd=background["cmd"],
                args=list(background.get("args", [])),
                cwd=background.get("cwd", "/"),
                env=list(background.get("env", [])),
            )
            if default_schedule:
                runtime_policy = parse_runtime_policy(default_schedule)
                req.background.runtime_policy.CopyFrom(runtime_policy)
            else:
                # no schedule supplied: default to a 60-second interval
                req.background.runtime_policy.interval.duration.seconds = 60

        resp: FinishBuildSessionResponse = await self.stub.Builder_FinishBuildSession(
            req, metadata=self._metadata
        )
        # session is consumed regardless of outcome
        self.app_uuid = None
        self.access_path = None
        if resp.HasField("error"):
            raise RuntimeError(f"finish failed: {resp.error.error} - {resp.error.details}")
        return resp

    async def discard(self) -> FinishBuildSessionResponse | None:
        """Abandon the current build session; no-op (None) if none is active."""
        if not self.stub or not self.app_uuid:
            return None
        req = FinishBuildSessionRequest()
        req.app_uuid = self.app_uuid
        req.discard = True
        resp: FinishBuildSessionResponse = await self.stub.Builder_FinishBuildSession(
            req, metadata=self._metadata
        )
        self.app_uuid = None
        self.access_path = None
        return resp

    async def close(self):
        """Close the gRPC channel and drop the stub."""
        if self.channel:
            await self.channel.close()
            self.channel = None
            self.stub = None

    async def __aenter__(self):
        # context-manager entry: connect and immediately open a build session
        await self.connect()
        await self.start_build()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # any un-finished session is discarded on exit; exceptions propagate
        await self.discard()
        await self.close()
        return False


# ==== remainder of the patch touches generated protobuf modules ====
# diff --git a/truffle/app/app_build_pb2.py b/truffle/app/app_build_pb2.py
# index 28406bb..f371f65 100644
# --- a/truffle/app/app_build_pb2.py
# +++ b/truffle/app/app_build_pb2.py
# @@ -2,7 +2,7 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/app/app_build.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/app/app_build.proto' ) diff --git a/truffle/app/app_build_pb2_grpc.py b/truffle/app/app_build_pb2_grpc.py index 8a82ac6..a7be4f2 100644 --- a/truffle/app/app_build_pb2_grpc.py +++ b/truffle/app/app_build_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/app/app_build_pb2_grpc.py depends on' + + ' but the generated code in truffle/app/app_build_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/app/app_install_pb2.py b/truffle/app/app_install_pb2.py index 1f90b58..84253bc 100644 --- a/truffle/app/app_install_pb2.py +++ b/truffle/app/app_install_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/app/app_install.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/app/app_install.proto' ) @@ -23,21 +23,26 @@ from truffle.os import installer_pb2 as truffle_dot_os_dot_installer__pb2 +try: + truffle_dot_app_dot_app__pb2 = truffle_dot_os_dot_installer__pb2.truffle_dot_app_dot_app__pb2 +except AttributeError: + truffle_dot_app_dot_app__pb2 = truffle_dot_os_dot_installer__pb2.truffle.app.app_pb2 from truffle.app import app_build_pb2 as truffle_dot_app_dot_app__build__pb2 from truffle.app import background_pb2 as truffle_dot_app_dot_background__pb2 +from truffle.app import foreground_pb2 as truffle_dot_app_dot_foreground__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1dtruffle/app/app_install.proto\x12\x0btruffle.app\x1a\x1atruffle/os/installer.proto\x1a\x1btruffle/app/app_build.proto\x1a\x1ctruffle/app/background.proto\".\n\x1aGetFinalInstallInfoRequest\x12\x10\n\x08\x61pp_uuid\x18\x01 \x01(\t\"\xa6\x01\n\x1bGetFinalInstallInfoResponse\x12\x32\n\x0eprocess_config\x18\x01 \x01(\x0b\x32\x1a.truffle.app.ProcessConfig\x12\x42\n\x0c\x62g_rt_policy\x18\x02 \x01(\x0b\x32\'.truffle.app.BackgroundAppRuntimePolicyH\x00\x88\x01\x01\x42\x0f\n\r_bg_rt_policy2\xce\x01\n\x11\x41ppInstallService\x12O\n\nInstallApp\x12\x1d.truffle.os.AppInstallRequest\x1a\x1e.truffle.os.AppInstallResponse(\x01\x30\x01\x12h\n\x13GetFinalInstallInfo\x12\'.truffle.app.GetFinalInstallInfoRequest\x1a(.truffle.app.GetFinalInstallInfoResponseb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x1dtruffle/app/app_install.proto\x12\x0btruffle.app\x1a\x1atruffle/os/installer.proto\x1a\x1btruffle/app/app_build.proto\x1a\x1ctruffle/app/background.proto\x1a\x1ctruffle/app/foreground.proto\".\n\x1aGetFinalInstallInfoRequest\x12\x10\n\x08\x61pp_uuid\x18\x01 \x01(\t\"\xc3\x01\n\x1bGetFinalInstallInfoResponse\x12?\n\rbg_build_info\x18\x01 \x01(\x0b\x32#.truffle.app.BackgroundAppBuildInfoH\x00\x88\x01\x01\x12?\n\rfg_build_info\x18\x02 \x01(\x0b\x32#.truffle.app.ForegroundAppBuildInfoH\x01\x88\x01\x01\x42\x10\n\x0e_bg_build_infoB\x10\n\x0e_fg_build_info2\xce\x01\n\x11\x41ppInstallService\x12O\n\nInstallApp\x12\x1d.truffle.os.AppInstallRequest\x1a\x1e.truffle.os.AppInstallResponse(\x01\x30\x01\x12h\n\x13GetFinalInstallInfo\x12\'.truffle.app.GetFinalInstallInfoRequest\x1a(.truffle.app.GetFinalInstallInfoResponseb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.app.app_install_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: DESCRIPTOR._loaded_options = None - _globals['_GETFINALINSTALLINFOREQUEST']._serialized_start=133 - _globals['_GETFINALINSTALLINFOREQUEST']._serialized_end=179 - _globals['_GETFINALINSTALLINFORESPONSE']._serialized_start=182 - _globals['_GETFINALINSTALLINFORESPONSE']._serialized_end=348 - _globals['_APPINSTALLSERVICE']._serialized_start=351 - _globals['_APPINSTALLSERVICE']._serialized_end=557 + _globals['_GETFINALINSTALLINFOREQUEST']._serialized_start=163 + _globals['_GETFINALINSTALLINFOREQUEST']._serialized_end=209 + _globals['_GETFINALINSTALLINFORESPONSE']._serialized_start=212 + _globals['_GETFINALINSTALLINFORESPONSE']._serialized_end=407 + _globals['_APPINSTALLSERVICE']._serialized_start=410 + _globals['_APPINSTALLSERVICE']._serialized_end=616 # @@protoc_insertion_point(module_scope) diff --git a/truffle/app/app_install_pb2.pyi b/truffle/app/app_install_pb2.pyi index 1d428e6..989aa1b 
100644 --- a/truffle/app/app_install_pb2.pyi +++ b/truffle/app/app_install_pb2.pyi @@ -1,6 +1,8 @@ from truffle.os import installer_pb2 as _installer_pb2 +from truffle.app import app_pb2 as _app_pb2 from truffle.app import app_build_pb2 as _app_build_pb2 from truffle.app import background_pb2 as _background_pb2 +from truffle.app import foreground_pb2 as _foreground_pb2 from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from collections.abc import Mapping as _Mapping @@ -15,9 +17,9 @@ class GetFinalInstallInfoRequest(_message.Message): def __init__(self, app_uuid: _Optional[str] = ...) -> None: ... class GetFinalInstallInfoResponse(_message.Message): - __slots__ = ("process_config", "bg_rt_policy") - PROCESS_CONFIG_FIELD_NUMBER: _ClassVar[int] - BG_RT_POLICY_FIELD_NUMBER: _ClassVar[int] - process_config: _app_build_pb2.ProcessConfig - bg_rt_policy: _background_pb2.BackgroundAppRuntimePolicy - def __init__(self, process_config: _Optional[_Union[_app_build_pb2.ProcessConfig, _Mapping]] = ..., bg_rt_policy: _Optional[_Union[_background_pb2.BackgroundAppRuntimePolicy, _Mapping]] = ...) -> None: ... + __slots__ = ("bg_build_info", "fg_build_info") + BG_BUILD_INFO_FIELD_NUMBER: _ClassVar[int] + FG_BUILD_INFO_FIELD_NUMBER: _ClassVar[int] + bg_build_info: _background_pb2.BackgroundAppBuildInfo + fg_build_info: _foreground_pb2.ForegroundAppBuildInfo + def __init__(self, bg_build_info: _Optional[_Union[_background_pb2.BackgroundAppBuildInfo, _Mapping]] = ..., fg_build_info: _Optional[_Union[_foreground_pb2.ForegroundAppBuildInfo, _Mapping]] = ...) -> None: ... 
diff --git a/truffle/app/app_install_pb2_grpc.py b/truffle/app/app_install_pb2_grpc.py index 9170a0f..e888508 100644 --- a/truffle/app/app_install_pb2_grpc.py +++ b/truffle/app/app_install_pb2_grpc.py @@ -6,7 +6,7 @@ from truffle.app import app_install_pb2 as truffle_dot_app_dot_app__install__pb2 from truffle.os import installer_pb2 as truffle_dot_os_dot_installer__pb2 -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -19,7 +19,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/app/app_install_pb2_grpc.py depends on' + + ' but the generated code in truffle/app/app_install_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/app/app_pb2.py b/truffle/app/app_pb2.py new file mode 100644 index 0000000..bc62753 --- /dev/null +++ b/truffle/app/app_pb2.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE +# source: truffle/app/app.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'truffle/app/app.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from truffle.common import icon_pb2 as truffle_dot_common_dot_icon__pb2 +from truffle.app import foreground_pb2 as truffle_dot_app_dot_foreground__pb2 +from truffle.app import background_pb2 as truffle_dot_app_dot_background__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x15truffle/app/app.proto\x12\x0btruffle.app\x1a\x19truffle/common/icon.proto\x1a\x1ctruffle/app/foreground.proto\x1a\x1ctruffle/app/background.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"g\n\x0b\x41ppMetadata\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\"\n\x04icon\x18\x02 \x01(\x0b\x32\x14.truffle.common.Icon\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x11\n\tbundle_id\x18\x04 \x01(\t\"$\n\tAppConfig\x12\x17\n\x0f\x63\x61n_reconfigure\x18\x01 \x01(\x08\"\x8b\x03\n\x03\x41pp\x12\x0c\n\x04uuid\x18\x01 \x01(\t\x12*\n\x08metadata\x18\x02 \x01(\x0b\x32\x18.truffle.app.AppMetadata\x12\x33\n\nforeground\x18\x03 \x01(\x0b\x32\x1a.truffle.app.ForegroundAppH\x00\x88\x01\x01\x12\x33\n\nbackground\x18\x04 \x01(\x0b\x32\x1a.truffle.app.BackgroundAppH\x01\x88\x01\x01\x12)\n\x05\x65rror\x18\x05 \x01(\x0b\x32\x15.truffle.app.AppErrorH\x02\x88\x01\x01\x12&\n\x06\x63onfig\x18\x06 
\x01(\x0b\x32\x16.truffle.app.AppConfig\x12\x30\n\x0cinstalled_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x33\n\x0flast_updated_at\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\r\n\x0b_foregroundB\r\n\x0b_backgroundB\x08\n\x06_error\"\xc1\x01\n\x08\x41ppError\x12\x33\n\nerror_type\x18\x01 \x01(\x0e\x32\x1f.truffle.app.AppError.ErrorType\x12\x15\n\rerror_message\x18\x02 \x01(\t\"i\n\tErrorType\x12\x1a\n\x16\x41PP_ERROR_TYPE_INVALID\x10\x00\x12\x15\n\x11\x41PP_ERROR_RUNTIME\x10\x01\x12\x12\n\x0e\x41PP_ERROR_AUTH\x10\x02\x12\x15\n\x11\x41PP_ERROR_UNKNOWN\x10\x03\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.app.app_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals['_APPMETADATA']._serialized_start=158 + _globals['_APPMETADATA']._serialized_end=261 + _globals['_APPCONFIG']._serialized_start=263 + _globals['_APPCONFIG']._serialized_end=299 + _globals['_APP']._serialized_start=302 + _globals['_APP']._serialized_end=697 + _globals['_APPERROR']._serialized_start=700 + _globals['_APPERROR']._serialized_end=893 + _globals['_APPERROR_ERRORTYPE']._serialized_start=788 + _globals['_APPERROR_ERRORTYPE']._serialized_end=893 +# @@protoc_insertion_point(module_scope) diff --git a/truffle/app/app_pb2.pyi b/truffle/app/app_pb2.pyi new file mode 100644 index 0000000..a9d03e7 --- /dev/null +++ b/truffle/app/app_pb2.pyi @@ -0,0 +1,69 @@ +import datetime + +from truffle.common import icon_pb2 as _icon_pb2 +from truffle.app import foreground_pb2 as _foreground_pb2 +from truffle.app import background_pb2 as _background_pb2 +from google.protobuf import timestamp_pb2 as _timestamp_pb2 +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Mapping 
as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class AppMetadata(_message.Message): + __slots__ = ("name", "icon", "description", "bundle_id") + NAME_FIELD_NUMBER: _ClassVar[int] + ICON_FIELD_NUMBER: _ClassVar[int] + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + BUNDLE_ID_FIELD_NUMBER: _ClassVar[int] + name: str + icon: _icon_pb2.Icon + description: str + bundle_id: str + def __init__(self, name: _Optional[str] = ..., icon: _Optional[_Union[_icon_pb2.Icon, _Mapping]] = ..., description: _Optional[str] = ..., bundle_id: _Optional[str] = ...) -> None: ... + +class AppConfig(_message.Message): + __slots__ = ("can_reconfigure",) + CAN_RECONFIGURE_FIELD_NUMBER: _ClassVar[int] + can_reconfigure: bool + def __init__(self, can_reconfigure: bool = ...) -> None: ... + +class App(_message.Message): + __slots__ = ("uuid", "metadata", "foreground", "background", "error", "config", "installed_at", "last_updated_at") + UUID_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + FOREGROUND_FIELD_NUMBER: _ClassVar[int] + BACKGROUND_FIELD_NUMBER: _ClassVar[int] + ERROR_FIELD_NUMBER: _ClassVar[int] + CONFIG_FIELD_NUMBER: _ClassVar[int] + INSTALLED_AT_FIELD_NUMBER: _ClassVar[int] + LAST_UPDATED_AT_FIELD_NUMBER: _ClassVar[int] + uuid: str + metadata: AppMetadata + foreground: _foreground_pb2.ForegroundApp + background: _background_pb2.BackgroundApp + error: AppError + config: AppConfig + installed_at: _timestamp_pb2.Timestamp + last_updated_at: _timestamp_pb2.Timestamp + def __init__(self, uuid: _Optional[str] = ..., metadata: _Optional[_Union[AppMetadata, _Mapping]] = ..., foreground: _Optional[_Union[_foreground_pb2.ForegroundApp, _Mapping]] = ..., background: _Optional[_Union[_background_pb2.BackgroundApp, _Mapping]] = ..., error: _Optional[_Union[AppError, _Mapping]] = ..., config: _Optional[_Union[AppConfig, _Mapping]] = ..., installed_at: _Optional[_Union[datetime.datetime, 
_timestamp_pb2.Timestamp, _Mapping]] = ..., last_updated_at: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... + +class AppError(_message.Message): + __slots__ = ("error_type", "error_message") + class ErrorType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + APP_ERROR_TYPE_INVALID: _ClassVar[AppError.ErrorType] + APP_ERROR_RUNTIME: _ClassVar[AppError.ErrorType] + APP_ERROR_AUTH: _ClassVar[AppError.ErrorType] + APP_ERROR_UNKNOWN: _ClassVar[AppError.ErrorType] + APP_ERROR_TYPE_INVALID: AppError.ErrorType + APP_ERROR_RUNTIME: AppError.ErrorType + APP_ERROR_AUTH: AppError.ErrorType + APP_ERROR_UNKNOWN: AppError.ErrorType + ERROR_TYPE_FIELD_NUMBER: _ClassVar[int] + ERROR_MESSAGE_FIELD_NUMBER: _ClassVar[int] + error_type: AppError.ErrorType + error_message: str + def __init__(self, error_type: _Optional[_Union[AppError.ErrorType, str]] = ..., error_message: _Optional[str] = ...) -> None: ... diff --git a/truffle/app/system_pb2_grpc.py b/truffle/app/app_pb2_grpc.py similarity index 87% rename from truffle/app/system_pb2_grpc.py rename to truffle/app/app_pb2_grpc.py index ea78c67..fa08a1a 100644 --- a/truffle/app/system_pb2_grpc.py +++ b/truffle/app/app_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/app/system_pb2_grpc.py depends on' + + ' but the generated code in truffle/app/app_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' 
diff --git a/truffle/app/app_runtime_pb2.py b/truffle/app/app_runtime_pb2.py new file mode 100644 index 0000000..127582a --- /dev/null +++ b/truffle/app/app_runtime_pb2.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: truffle/app/app_runtime.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'truffle/app/app_runtime.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from truffle.app import app_pb2 as truffle_dot_app_dot_app__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1dtruffle/app/app_runtime.proto\x12\x0btruffle.app\x1a\x15truffle/app/app.proto\"r\n\x1c\x41ppRuntimeReportErrorRequest\x12\x10\n\x08\x61pp_uuid\x18\x01 \x01(\t\x12$\n\x05\x65rror\x18\x02 \x01(\x0b\x32\x15.truffle.app.AppError\x12\x1a\n\x12needs_intervention\x18\x03 \x01(\x08\"\x1f\n\x1d\x41ppRuntimeReportErrorResponse2y\n\x11\x41ppRuntimeService\x12\x64\n\x0bReportError\x12).truffle.app.AppRuntimeReportErrorRequest\x1a*.truffle.app.AppRuntimeReportErrorResponseb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.app.app_runtime_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals['_APPRUNTIMEREPORTERRORREQUEST']._serialized_start=69 + _globals['_APPRUNTIMEREPORTERRORREQUEST']._serialized_end=183 + 
_globals['_APPRUNTIMEREPORTERRORRESPONSE']._serialized_start=185 + _globals['_APPRUNTIMEREPORTERRORRESPONSE']._serialized_end=216 + _globals['_APPRUNTIMESERVICE']._serialized_start=218 + _globals['_APPRUNTIMESERVICE']._serialized_end=339 +# @@protoc_insertion_point(module_scope) diff --git a/truffle/app/app_runtime_pb2.pyi b/truffle/app/app_runtime_pb2.pyi new file mode 100644 index 0000000..764c6bf --- /dev/null +++ b/truffle/app/app_runtime_pb2.pyi @@ -0,0 +1,21 @@ +from truffle.app import app_pb2 as _app_pb2 +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class AppRuntimeReportErrorRequest(_message.Message): + __slots__ = ("app_uuid", "error", "needs_intervention") + APP_UUID_FIELD_NUMBER: _ClassVar[int] + ERROR_FIELD_NUMBER: _ClassVar[int] + NEEDS_INTERVENTION_FIELD_NUMBER: _ClassVar[int] + app_uuid: str + error: _app_pb2.AppError + needs_intervention: bool + def __init__(self, app_uuid: _Optional[str] = ..., error: _Optional[_Union[_app_pb2.AppError, _Mapping]] = ..., needs_intervention: bool = ...) -> None: ... + +class AppRuntimeReportErrorResponse(_message.Message): + __slots__ = () + def __init__(self) -> None: ... diff --git a/truffle/app/app_runtime_pb2_grpc.py b/truffle/app/app_runtime_pb2_grpc.py new file mode 100644 index 0000000..b95d456 --- /dev/null +++ b/truffle/app/app_runtime_pb2_grpc.py @@ -0,0 +1,97 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc +import warnings + +from truffle.app import app_runtime_pb2 as truffle_dot_app_dot_app__runtime__pb2 + +GRPC_GENERATED_VERSION = '1.76.0' +GRPC_VERSION = grpc.__version__ +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + raise RuntimeError( + f'The grpc package installed is at version {GRPC_VERSION},' + + ' but the generated code in truffle/app/app_runtime_pb2_grpc.py depends on' + + f' grpcio>={GRPC_GENERATED_VERSION}.' + + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' + ) + + +class AppRuntimeServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.ReportError = channel.unary_unary( + '/truffle.app.AppRuntimeService/ReportError', + request_serializer=truffle_dot_app_dot_app__runtime__pb2.AppRuntimeReportErrorRequest.SerializeToString, + response_deserializer=truffle_dot_app_dot_app__runtime__pb2.AppRuntimeReportErrorResponse.FromString, + _registered_method=True) + + +class AppRuntimeServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def ReportError(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_AppRuntimeServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'ReportError': grpc.unary_unary_rpc_method_handler( + servicer.ReportError, + request_deserializer=truffle_dot_app_dot_app__runtime__pb2.AppRuntimeReportErrorRequest.FromString, + response_serializer=truffle_dot_app_dot_app__runtime__pb2.AppRuntimeReportErrorResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'truffle.app.AppRuntimeService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('truffle.app.AppRuntimeService', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. 
+class AppRuntimeService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def ReportError(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/truffle.app.AppRuntimeService/ReportError', + truffle_dot_app_dot_app__runtime__pb2.AppRuntimeReportErrorRequest.SerializeToString, + truffle_dot_app_dot_app__runtime__pb2.AppRuntimeReportErrorResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/truffle/app/app_type_pb2.py b/truffle/app/app_type_pb2.py deleted file mode 100644 index 2c65e37..0000000 --- a/truffle/app/app_type_pb2.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE -# source: truffle/app/app_type.proto -# Protobuf Python Version: 6.30.0 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 6, - 30, - 0, - '', - 'truffle/app/app_type.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1atruffle/app/app_type.proto\x12\x0btruffle.app*f\n\x07\x41ppType\x12\x14\n\x10\x41PP_TYPE_INVALID\x10\x00\x12\x17\n\x13\x41PP_TYPE_FOREGROUND\x10\x01\x12\x17\n\x13\x41PP_TYPE_BACKGROUND\x10\x02\x12\x13\n\x0f\x41PP_TYPE_SYSTEM\x10\x03\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.app.app_type_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_APPTYPE']._serialized_start=43 - _globals['_APPTYPE']._serialized_end=145 -# @@protoc_insertion_point(module_scope) diff --git a/truffle/app/app_type_pb2.pyi b/truffle/app/app_type_pb2.pyi deleted file mode 100644 index 9f2aeb0..0000000 --- a/truffle/app/app_type_pb2.pyi +++ /dev/null @@ -1,16 +0,0 @@ -from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from typing import ClassVar as _ClassVar - -DESCRIPTOR: _descriptor.FileDescriptor - -class AppType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - APP_TYPE_INVALID: _ClassVar[AppType] - APP_TYPE_FOREGROUND: _ClassVar[AppType] - APP_TYPE_BACKGROUND: _ClassVar[AppType] - 
APP_TYPE_SYSTEM: _ClassVar[AppType] -APP_TYPE_INVALID: AppType -APP_TYPE_FOREGROUND: AppType -APP_TYPE_BACKGROUND: AppType -APP_TYPE_SYSTEM: AppType diff --git a/truffle/app/app_type_pb2_grpc.py b/truffle/app/app_type_pb2_grpc.py deleted file mode 100644 index 7886f3d..0000000 --- a/truffle/app/app_type_pb2_grpc.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc -import warnings - - -GRPC_GENERATED_VERSION = '1.72.0' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/app/app_type_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - ) diff --git a/truffle/app/background_feed_pb2.py b/truffle/app/background_feed_pb2.py deleted file mode 100644 index abc9691..0000000 --- a/truffle/app/background_feed_pb2.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE -# source: truffle/app/background_feed.proto -# Protobuf Python Version: 6.30.0 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 6, - 30, - 0, - '', - 'truffle/app/background_feed.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from truffle.common import content_pb2 as truffle_dot_common_dot_content__pb2 -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n!truffle/app/background_feed.proto\x12\x0btruffle.app\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1ctruffle/common/content.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xe3\x01\n\x08\x46\x65\x65\x64\x43\x61rd\x12\r\n\x05title\x18\x01 \x01(\t\x12\x0c\n\x04\x62ody\x18\x02 \x01(\t\x12\x32\n\rmedia_sources\x18\x03 \x03(\x0b\x32\x1b.truffle.common.MediaSource\x12\x12\n\nsource_uri\x18\x04 \x01(\t\x12\x35\n\x11\x63ontent_timestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\x08metadata\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x88\x01\x01\x42\x0b\n\t_metadata\"9\n\x0e\x42\x61\x63kgroundFeed\x12\'\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x16.truffle.app.FeedEntry\"\x8c\x01\n\tFeedEntry\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x10\n\x08\x61pp_uuid\x18\x02 \x01(\t\x12-\n\ttimestamp\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12#\n\x04\x63\x61rd\x18\x05 \x01(\x0b\x32\x15.truffle.app.FeedCard\x12\r\n\x05likes\x18\x07 
\x01(\x05\"?\n\x14\x46\x65\x65\x64\x45ntryTaskContext\x12\'\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x16.truffle.app.FeedEntryb\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.app.background_feed_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_FEEDCARD']._serialized_start=144 - _globals['_FEEDCARD']._serialized_end=371 - _globals['_BACKGROUNDFEED']._serialized_start=373 - _globals['_BACKGROUNDFEED']._serialized_end=430 - _globals['_FEEDENTRY']._serialized_start=433 - _globals['_FEEDENTRY']._serialized_end=573 - _globals['_FEEDENTRYTASKCONTEXT']._serialized_start=575 - _globals['_FEEDENTRYTASKCONTEXT']._serialized_end=638 -# @@protoc_insertion_point(module_scope) diff --git a/truffle/app/background_pb2.py b/truffle/app/background_pb2.py index 82b1a94..f481f34 100644 --- a/truffle/app/background_pb2.py +++ b/truffle/app/background_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/app/background.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/app/background.proto' ) @@ -25,11 +25,11 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 -from truffle.app import background_feed_pb2 as truffle_dot_app_dot_background__feed__pb2 from truffle.common import icon_pb2 as truffle_dot_common_dot_icon__pb2 +from truffle.app import app_build_pb2 as truffle_dot_app_dot_app__build__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1ctruffle/app/background.proto\x12\x0btruffle.app\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/descriptor.proto\x1a!truffle/app/background_feed.proto\x1a\x19truffle/common/icon.proto\"\x9b\x0b\n\x1a\x42\x61\x63kgroundAppRuntimePolicy\x12\x44\n\x08interval\x18\x01 \x01(\x0b\x32\x30.truffle.app.BackgroundAppRuntimePolicy.IntervalH\x00\x12\x46\n\x05times\x18\x02 \x01(\x0b\x32\x35.truffle.app.BackgroundAppRuntimePolicy.SpecificTimesH\x00\x12@\n\x06\x61lways\x18\x03 \x01(\x0b\x32..truffle.app.BackgroundAppRuntimePolicy.AlwaysH\x00\x12\x37\n\x14\x66\x65\x65\x64_entry_retention\x18\n \x01(\x0b\x32\x19.google.protobuf.Duration\x1a\x39\n\tTimeOfDay\x12\x0c\n\x04hour\x18\x01 \x01(\r\x12\x0e\n\x06minute\x18\x02 \x01(\r\x12\x0e\n\x06second\x18\x03 \x01(\r\x1a\xa5\x01\n\x0b\x44\x61ilyWindow\x12K\n\x10\x64\x61ily_start_time\x18\x01 
\x01(\x0b\x32\x31.truffle.app.BackgroundAppRuntimePolicy.TimeOfDay\x12I\n\x0e\x64\x61ily_end_time\x18\x02 \x01(\x0b\x32\x31.truffle.app.BackgroundAppRuntimePolicy.TimeOfDay\x1a\x91\x03\n\x0cWeeklyWindow\x12\x10\n\x08\x64\x61y_mask\x18\x01 \x01(\r\"\xee\x02\n\x05Masks\x12\x19\n\x15WEEKLY_WINDOW_DEFAULT\x10\x00\x12\x1a\n\x16WEEKLY_WINDOW_ALL_DAYS\x10\x00\x12\x1a\n\x16WEEKLY_WINDOW_SATURDAY\x10\x01\x12\x18\n\x14WEEKLY_WINDOW_FRIDAY\x10\x02\x12\x1a\n\x16WEEKLY_WINDOW_THURSDAY\x10\x04\x12\x1b\n\x17WEEKLY_WINDOW_WEDNESDAY\x10\x08\x12\x19\n\x15WEEKLY_WINDOW_TUESDAY\x10\x10\x12\x18\n\x14WEEKLY_WINDOW_MONDAY\x10 \x12\x18\n\x14WEEKLY_WINDOW_SUNDAY\x10@\x12\x1a\n\x16WEEKLY_WINDOW_WEEKENDS\x10\x41\x12\x1a\n\x16WEEKLY_WINDOW_WEEKDAYS\x10>\x12\x19\n\x15WEEKLY_WINDOW_NO_DAYS\x10\x7f\x12\x19\n\x15WEEKLY_WINDOW_INVALID\x10\x7f\x1a\x02\x10\x01\x1a\xbf\x02\n\x08Interval\x12+\n\x08\x64uration\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12K\n\x08schedule\x18\x02 \x01(\x0b\x32\x39.truffle.app.BackgroundAppRuntimePolicy.Interval.Schedule\x1a\xb8\x01\n\x08Schedule\x12N\n\x0c\x64\x61ily_window\x18\x01 \x01(\x0b\x32\x33.truffle.app.BackgroundAppRuntimePolicy.DailyWindowH\x00\x88\x01\x01\x12K\n\rweekly_window\x18\x02 \x01(\x0b\x32\x34.truffle.app.BackgroundAppRuntimePolicy.WeeklyWindowB\x0f\n\r_daily_window\x1a\xa2\x01\n\rSpecificTimes\x12\x44\n\trun_times\x18\x01 \x03(\x0b\x32\x31.truffle.app.BackgroundAppRuntimePolicy.TimeOfDay\x12K\n\rweekly_window\x18\x02 \x01(\x0b\x32\x34.truffle.app.BackgroundAppRuntimePolicy.WeeklyWindow\x1a\x08\n\x06\x41lwaysB\x06\n\x04whenJ\x04\x08\x04\x10\n\"\xe8\x01\n\rBackgroundApp\x12\x0c\n\x04uuid\x18\x01 \x01(\t\x12\x35\n\x08metadata\x18\x02 \x01(\x0b\x32#.truffle.app.BackgroundApp.Metadata\x12?\n\x0eruntime_policy\x18\x03 \x01(\x0b\x32\'.truffle.app.BackgroundAppRuntimePolicy\x1aQ\n\x08Metadata\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\"\n\x04icon\x18\x02 \x01(\x0b\x32\x14.truffle.common.Icon\x12\x13\n\x0b\x64\x65scription\x18\x03 
\x01(\t\"\xdc\x01\n\x19\x42\x61\x63kgroundAppNotification\x12\x16\n\x0e\x66\x65\x65\x64_entry_ids\x18\x02 \x03(\x04\x12\x43\n\toperation\x18\x03 \x01(\x0e\x32\x30.truffle.app.BackgroundAppNotification.Operation\"b\n\tOperation\x12\x15\n\x11OPERATION_INVALID\x10\x00\x12\x11\n\rOPERATION_ADD\x10\x01\x12\x14\n\x10OPERATION_DELETE\x10\x02\x12\x15\n\x11OPERATION_REFRESH\x10\x03\"\x90\x01\n\x16\x42\x61\x63kgroundAppBuildInfo\x12\x35\n\x08metadata\x18\x01 \x01(\x0b\x32#.truffle.app.BackgroundApp.Metadata\x12?\n\x0eruntime_policy\x18\x02 \x01(\x0b\x32\'.truffle.app.BackgroundAppRuntimePolicy\"L\n%BackgroundAppSubmitFeedContentRequest\x12#\n\x04\x63\x61rd\x18\x02 \x01(\x0b\x32\x15.truffle.app.FeedCard\"?\n&BackgroundAppSubmitFeedContentResponse\x12\x15\n\rfeed_entry_id\x18\x01 \x01(\x04\"\x1b\n\x19\x42\x61\x63kgroundAppOnRunRequest\"\x1c\n\x1a\x42\x61\x63kgroundAppOnRunResponse\"\x1b\n\x19\x42\x61\x63kgroundAppYieldRequest\"Y\n\x1a\x42\x61\x63kgroundAppYieldResponse\x12;\n\x17next_scheduled_run_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xeb\x01\n\x12\x42\x61\x63kgroundAppError\x12=\n\nerror_type\x18\x01 \x01(\x0e\x32).truffle.app.BackgroundAppError.ErrorType\x12\x15\n\rerror_message\x18\x02 \x01(\t\"\x7f\n\tErrorType\x12\x1d\n\x19\x42G_APP_ERROR_TYPE_INVALID\x10\x00\x12\x1d\n\x19\x42G_APP_ERROR_TYPE_RUNTIME\x10\x01\x12\x15\n\x11\x42G_APP_ERROR_AUTH\x10\x02\x12\x1d\n\x19\x42G_APP_ERROR_TYPE_UNKNOWN\x10\x03\"m\n\x1f\x42\x61\x63kgroundAppReportErrorRequest\x12.\n\x05\x65rror\x18\x01 \x01(\x0b\x32\x1f.truffle.app.BackgroundAppError\x12\x1a\n\x12needs_intervention\x18\x02 \x01(\x08\"\"\n 
BackgroundAppReportErrorResponse2\xb4\x03\n\x14\x42\x61\x63kgroundAppService\x12|\n\x11SubmitFeedContent\x12\x32.truffle.app.BackgroundAppSubmitFeedContentRequest\x1a\x33.truffle.app.BackgroundAppSubmitFeedContentResponse\x12X\n\x05OnRun\x12&.truffle.app.BackgroundAppOnRunRequest\x1a\'.truffle.app.BackgroundAppOnRunResponse\x12X\n\x05Yield\x12&.truffle.app.BackgroundAppYieldRequest\x1a\'.truffle.app.BackgroundAppYieldResponse\x12j\n\x0bReportError\x12,.truffle.app.BackgroundAppReportErrorRequest\x1a-.truffle.app.BackgroundAppReportErrorResponseb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1ctruffle/app/background.proto\x12\x0btruffle.app\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/descriptor.proto\x1a\x19truffle/common/icon.proto\x1a\x1btruffle/app/app_build.proto\"\x9b\x0b\n\x1a\x42\x61\x63kgroundAppRuntimePolicy\x12\x44\n\x08interval\x18\x01 \x01(\x0b\x32\x30.truffle.app.BackgroundAppRuntimePolicy.IntervalH\x00\x12\x46\n\x05times\x18\x02 \x01(\x0b\x32\x35.truffle.app.BackgroundAppRuntimePolicy.SpecificTimesH\x00\x12@\n\x06\x61lways\x18\x03 \x01(\x0b\x32..truffle.app.BackgroundAppRuntimePolicy.AlwaysH\x00\x12\x37\n\x14\x66\x65\x65\x64_entry_retention\x18\n \x01(\x0b\x32\x19.google.protobuf.Duration\x1a\x39\n\tTimeOfDay\x12\x0c\n\x04hour\x18\x01 \x01(\r\x12\x0e\n\x06minute\x18\x02 \x01(\r\x12\x0e\n\x06second\x18\x03 \x01(\r\x1a\xa5\x01\n\x0b\x44\x61ilyWindow\x12K\n\x10\x64\x61ily_start_time\x18\x01 \x01(\x0b\x32\x31.truffle.app.BackgroundAppRuntimePolicy.TimeOfDay\x12I\n\x0e\x64\x61ily_end_time\x18\x02 \x01(\x0b\x32\x31.truffle.app.BackgroundAppRuntimePolicy.TimeOfDay\x1a\x91\x03\n\x0cWeeklyWindow\x12\x10\n\x08\x64\x61y_mask\x18\x01 
\x01(\r\"\xee\x02\n\x05Masks\x12\x19\n\x15WEEKLY_WINDOW_DEFAULT\x10\x00\x12\x1a\n\x16WEEKLY_WINDOW_ALL_DAYS\x10\x00\x12\x1a\n\x16WEEKLY_WINDOW_SATURDAY\x10\x01\x12\x18\n\x14WEEKLY_WINDOW_FRIDAY\x10\x02\x12\x1a\n\x16WEEKLY_WINDOW_THURSDAY\x10\x04\x12\x1b\n\x17WEEKLY_WINDOW_WEDNESDAY\x10\x08\x12\x19\n\x15WEEKLY_WINDOW_TUESDAY\x10\x10\x12\x18\n\x14WEEKLY_WINDOW_MONDAY\x10 \x12\x18\n\x14WEEKLY_WINDOW_SUNDAY\x10@\x12\x1a\n\x16WEEKLY_WINDOW_WEEKENDS\x10\x41\x12\x1a\n\x16WEEKLY_WINDOW_WEEKDAYS\x10>\x12\x19\n\x15WEEKLY_WINDOW_NO_DAYS\x10\x7f\x12\x19\n\x15WEEKLY_WINDOW_INVALID\x10\x7f\x1a\x02\x10\x01\x1a\xbf\x02\n\x08Interval\x12+\n\x08\x64uration\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12K\n\x08schedule\x18\x02 \x01(\x0b\x32\x39.truffle.app.BackgroundAppRuntimePolicy.Interval.Schedule\x1a\xb8\x01\n\x08Schedule\x12N\n\x0c\x64\x61ily_window\x18\x01 \x01(\x0b\x32\x33.truffle.app.BackgroundAppRuntimePolicy.DailyWindowH\x00\x88\x01\x01\x12K\n\rweekly_window\x18\x02 \x01(\x0b\x32\x34.truffle.app.BackgroundAppRuntimePolicy.WeeklyWindowB\x0f\n\r_daily_window\x1a\xa2\x01\n\rSpecificTimes\x12\x44\n\trun_times\x18\x01 \x03(\x0b\x32\x31.truffle.app.BackgroundAppRuntimePolicy.TimeOfDay\x12K\n\rweekly_window\x18\x02 \x01(\x0b\x32\x34.truffle.app.BackgroundAppRuntimePolicy.WeeklyWindow\x1a\x08\n\x06\x41lwaysB\x06\n\x04whenJ\x04\x08\x04\x10\n\"P\n\rBackgroundApp\x12?\n\x0eruntime_policy\x18\x01 \x01(\x0b\x32\'.truffle.app.BackgroundAppRuntimePolicy\"\x86\x01\n\x16\x42\x61\x63kgroundAppBuildInfo\x12+\n\x07process\x18\x01 \x01(\x0b\x32\x1a.truffle.app.ProcessConfig\x12?\n\x0eruntime_policy\x18\x02 \x01(\x0b\x32\'.truffle.app.BackgroundAppRuntimePolicy\"\xb8\x01\n\x11\x42\x61\x63kgroundContext\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x0c\n\x04uris\x18\x02 \x03(\t\x12\x39\n\x08priority\x18\x03 
\x01(\x0e\x32\'.truffle.app.BackgroundContext.Priority\"I\n\x08Priority\x12\x18\n\x14PRIORITY_UNSPECIFIED\x10\x00\x12\x10\n\x0cPRIORITY_LOW\x10\x01\x12\x11\n\rPRIORITY_HIGH\x10\x03\"T\n!BackgroundAppSubmitContextRequest\x12/\n\x07\x63ontent\x18\x01 \x01(\x0b\x32\x1e.truffle.app.BackgroundContext\"$\n\"BackgroundAppSubmitContextResponse\"\x1b\n\x19\x42\x61\x63kgroundAppOnRunRequest\"\x1c\n\x1a\x42\x61\x63kgroundAppOnRunResponse\"\x1b\n\x19\x42\x61\x63kgroundAppYieldRequest\"Y\n\x1a\x42\x61\x63kgroundAppYieldResponse\x12;\n\x17next_scheduled_run_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\"\n BackgroundAppReportErrorResponse2\xb5\x02\n\x14\x42\x61\x63kgroundAppService\x12i\n\x06Submit\x12..truffle.app.BackgroundAppSubmitContextRequest\x1a/.truffle.app.BackgroundAppSubmitContextResponse\x12X\n\x05OnRun\x12&.truffle.app.BackgroundAppOnRunRequest\x1a\'.truffle.app.BackgroundAppOnRunResponse\x12X\n\x05Yield\x12&.truffle.app.BackgroundAppYieldRequest\x1a\'.truffle.app.BackgroundAppYieldResponseb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -38,54 +38,46 @@ DESCRIPTOR._loaded_options = None _globals['_BACKGROUNDAPPRUNTIMEPOLICY_WEEKLYWINDOW_MASKS']._loaded_options = None _globals['_BACKGROUNDAPPRUNTIMEPOLICY_WEEKLYWINDOW_MASKS']._serialized_options = b'\020\001' - _globals['_BACKGROUNDAPPRUNTIMEPOLICY']._serialized_start=207 - _globals['_BACKGROUNDAPPRUNTIMEPOLICY']._serialized_end=1642 - _globals['_BACKGROUNDAPPRUNTIMEPOLICY_TIMEOFDAY']._serialized_start=502 - _globals['_BACKGROUNDAPPRUNTIMEPOLICY_TIMEOFDAY']._serialized_end=559 - _globals['_BACKGROUNDAPPRUNTIMEPOLICY_DAILYWINDOW']._serialized_start=562 - _globals['_BACKGROUNDAPPRUNTIMEPOLICY_DAILYWINDOW']._serialized_end=727 - _globals['_BACKGROUNDAPPRUNTIMEPOLICY_WEEKLYWINDOW']._serialized_start=730 - _globals['_BACKGROUNDAPPRUNTIMEPOLICY_WEEKLYWINDOW']._serialized_end=1131 - 
_globals['_BACKGROUNDAPPRUNTIMEPOLICY_WEEKLYWINDOW_MASKS']._serialized_start=765 - _globals['_BACKGROUNDAPPRUNTIMEPOLICY_WEEKLYWINDOW_MASKS']._serialized_end=1131 - _globals['_BACKGROUNDAPPRUNTIMEPOLICY_INTERVAL']._serialized_start=1134 - _globals['_BACKGROUNDAPPRUNTIMEPOLICY_INTERVAL']._serialized_end=1453 - _globals['_BACKGROUNDAPPRUNTIMEPOLICY_INTERVAL_SCHEDULE']._serialized_start=1269 - _globals['_BACKGROUNDAPPRUNTIMEPOLICY_INTERVAL_SCHEDULE']._serialized_end=1453 - _globals['_BACKGROUNDAPPRUNTIMEPOLICY_SPECIFICTIMES']._serialized_start=1456 - _globals['_BACKGROUNDAPPRUNTIMEPOLICY_SPECIFICTIMES']._serialized_end=1618 - _globals['_BACKGROUNDAPPRUNTIMEPOLICY_ALWAYS']._serialized_start=1620 - _globals['_BACKGROUNDAPPRUNTIMEPOLICY_ALWAYS']._serialized_end=1628 - _globals['_BACKGROUNDAPP']._serialized_start=1645 - _globals['_BACKGROUNDAPP']._serialized_end=1877 - _globals['_BACKGROUNDAPP_METADATA']._serialized_start=1796 - _globals['_BACKGROUNDAPP_METADATA']._serialized_end=1877 - _globals['_BACKGROUNDAPPNOTIFICATION']._serialized_start=1880 - _globals['_BACKGROUNDAPPNOTIFICATION']._serialized_end=2100 - _globals['_BACKGROUNDAPPNOTIFICATION_OPERATION']._serialized_start=2002 - _globals['_BACKGROUNDAPPNOTIFICATION_OPERATION']._serialized_end=2100 - _globals['_BACKGROUNDAPPBUILDINFO']._serialized_start=2103 - _globals['_BACKGROUNDAPPBUILDINFO']._serialized_end=2247 - _globals['_BACKGROUNDAPPSUBMITFEEDCONTENTREQUEST']._serialized_start=2249 - _globals['_BACKGROUNDAPPSUBMITFEEDCONTENTREQUEST']._serialized_end=2325 - _globals['_BACKGROUNDAPPSUBMITFEEDCONTENTRESPONSE']._serialized_start=2327 - _globals['_BACKGROUNDAPPSUBMITFEEDCONTENTRESPONSE']._serialized_end=2390 - _globals['_BACKGROUNDAPPONRUNREQUEST']._serialized_start=2392 - _globals['_BACKGROUNDAPPONRUNREQUEST']._serialized_end=2419 - _globals['_BACKGROUNDAPPONRUNRESPONSE']._serialized_start=2421 - _globals['_BACKGROUNDAPPONRUNRESPONSE']._serialized_end=2449 - 
_globals['_BACKGROUNDAPPYIELDREQUEST']._serialized_start=2451 - _globals['_BACKGROUNDAPPYIELDREQUEST']._serialized_end=2478 - _globals['_BACKGROUNDAPPYIELDRESPONSE']._serialized_start=2480 - _globals['_BACKGROUNDAPPYIELDRESPONSE']._serialized_end=2569 - _globals['_BACKGROUNDAPPERROR']._serialized_start=2572 - _globals['_BACKGROUNDAPPERROR']._serialized_end=2807 - _globals['_BACKGROUNDAPPERROR_ERRORTYPE']._serialized_start=2680 - _globals['_BACKGROUNDAPPERROR_ERRORTYPE']._serialized_end=2807 - _globals['_BACKGROUNDAPPREPORTERRORREQUEST']._serialized_start=2809 - _globals['_BACKGROUNDAPPREPORTERRORREQUEST']._serialized_end=2918 - _globals['_BACKGROUNDAPPREPORTERRORRESPONSE']._serialized_start=2920 - _globals['_BACKGROUNDAPPREPORTERRORRESPONSE']._serialized_end=2954 - _globals['_BACKGROUNDAPPSERVICE']._serialized_start=2957 - _globals['_BACKGROUNDAPPSERVICE']._serialized_end=3393 + _globals['_BACKGROUNDAPPRUNTIMEPOLICY']._serialized_start=201 + _globals['_BACKGROUNDAPPRUNTIMEPOLICY']._serialized_end=1636 + _globals['_BACKGROUNDAPPRUNTIMEPOLICY_TIMEOFDAY']._serialized_start=496 + _globals['_BACKGROUNDAPPRUNTIMEPOLICY_TIMEOFDAY']._serialized_end=553 + _globals['_BACKGROUNDAPPRUNTIMEPOLICY_DAILYWINDOW']._serialized_start=556 + _globals['_BACKGROUNDAPPRUNTIMEPOLICY_DAILYWINDOW']._serialized_end=721 + _globals['_BACKGROUNDAPPRUNTIMEPOLICY_WEEKLYWINDOW']._serialized_start=724 + _globals['_BACKGROUNDAPPRUNTIMEPOLICY_WEEKLYWINDOW']._serialized_end=1125 + _globals['_BACKGROUNDAPPRUNTIMEPOLICY_WEEKLYWINDOW_MASKS']._serialized_start=759 + _globals['_BACKGROUNDAPPRUNTIMEPOLICY_WEEKLYWINDOW_MASKS']._serialized_end=1125 + _globals['_BACKGROUNDAPPRUNTIMEPOLICY_INTERVAL']._serialized_start=1128 + _globals['_BACKGROUNDAPPRUNTIMEPOLICY_INTERVAL']._serialized_end=1447 + _globals['_BACKGROUNDAPPRUNTIMEPOLICY_INTERVAL_SCHEDULE']._serialized_start=1263 + _globals['_BACKGROUNDAPPRUNTIMEPOLICY_INTERVAL_SCHEDULE']._serialized_end=1447 + 
_globals['_BACKGROUNDAPPRUNTIMEPOLICY_SPECIFICTIMES']._serialized_start=1450 + _globals['_BACKGROUNDAPPRUNTIMEPOLICY_SPECIFICTIMES']._serialized_end=1612 + _globals['_BACKGROUNDAPPRUNTIMEPOLICY_ALWAYS']._serialized_start=1614 + _globals['_BACKGROUNDAPPRUNTIMEPOLICY_ALWAYS']._serialized_end=1622 + _globals['_BACKGROUNDAPP']._serialized_start=1638 + _globals['_BACKGROUNDAPP']._serialized_end=1718 + _globals['_BACKGROUNDAPPBUILDINFO']._serialized_start=1721 + _globals['_BACKGROUNDAPPBUILDINFO']._serialized_end=1855 + _globals['_BACKGROUNDCONTEXT']._serialized_start=1858 + _globals['_BACKGROUNDCONTEXT']._serialized_end=2042 + _globals['_BACKGROUNDCONTEXT_PRIORITY']._serialized_start=1969 + _globals['_BACKGROUNDCONTEXT_PRIORITY']._serialized_end=2042 + _globals['_BACKGROUNDAPPSUBMITCONTEXTREQUEST']._serialized_start=2044 + _globals['_BACKGROUNDAPPSUBMITCONTEXTREQUEST']._serialized_end=2128 + _globals['_BACKGROUNDAPPSUBMITCONTEXTRESPONSE']._serialized_start=2130 + _globals['_BACKGROUNDAPPSUBMITCONTEXTRESPONSE']._serialized_end=2166 + _globals['_BACKGROUNDAPPONRUNREQUEST']._serialized_start=2168 + _globals['_BACKGROUNDAPPONRUNREQUEST']._serialized_end=2195 + _globals['_BACKGROUNDAPPONRUNRESPONSE']._serialized_start=2197 + _globals['_BACKGROUNDAPPONRUNRESPONSE']._serialized_end=2225 + _globals['_BACKGROUNDAPPYIELDREQUEST']._serialized_start=2227 + _globals['_BACKGROUNDAPPYIELDREQUEST']._serialized_end=2254 + _globals['_BACKGROUNDAPPYIELDRESPONSE']._serialized_start=2256 + _globals['_BACKGROUNDAPPYIELDRESPONSE']._serialized_end=2345 + _globals['_BACKGROUNDAPPREPORTERRORRESPONSE']._serialized_start=2347 + _globals['_BACKGROUNDAPPREPORTERRORRESPONSE']._serialized_end=2381 + _globals['_BACKGROUNDAPPSERVICE']._serialized_start=2384 + _globals['_BACKGROUNDAPPSERVICE']._serialized_end=2693 # @@protoc_insertion_point(module_scope) diff --git a/truffle/app/background_pb2.pyi b/truffle/app/background_pb2.pyi index 7847843..a22e289 100644 --- a/truffle/app/background_pb2.pyi +++ 
b/truffle/app/background_pb2.pyi @@ -1,8 +1,10 @@ +import datetime + from google.protobuf import timestamp_pb2 as _timestamp_pb2 from google.protobuf import duration_pb2 as _duration_pb2 from google.protobuf import descriptor_pb2 as _descriptor_pb2 -from truffle.app import background_feed_pb2 as _background_feed_pb2 from truffle.common import icon_pb2 as _icon_pb2 +from truffle.app import app_build_pb2 as _app_build_pb2 from google.protobuf.internal import containers as _containers from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper from google.protobuf import descriptor as _descriptor @@ -76,7 +78,7 @@ class BackgroundAppRuntimePolicy(_message.Message): SCHEDULE_FIELD_NUMBER: _ClassVar[int] duration: _duration_pb2.Duration schedule: BackgroundAppRuntimePolicy.Interval.Schedule - def __init__(self, duration: _Optional[_Union[_duration_pb2.Duration, _Mapping]] = ..., schedule: _Optional[_Union[BackgroundAppRuntimePolicy.Interval.Schedule, _Mapping]] = ...) -> None: ... + def __init__(self, duration: _Optional[_Union[datetime.timedelta, _duration_pb2.Duration, _Mapping]] = ..., schedule: _Optional[_Union[BackgroundAppRuntimePolicy.Interval.Schedule, _Mapping]] = ...) -> None: ... class SpecificTimes(_message.Message): __slots__ = ("run_times", "weekly_window") RUN_TIMES_FIELD_NUMBER: _ClassVar[int] @@ -95,64 +97,49 @@ class BackgroundAppRuntimePolicy(_message.Message): times: BackgroundAppRuntimePolicy.SpecificTimes always: BackgroundAppRuntimePolicy.Always feed_entry_retention: _duration_pb2.Duration - def __init__(self, interval: _Optional[_Union[BackgroundAppRuntimePolicy.Interval, _Mapping]] = ..., times: _Optional[_Union[BackgroundAppRuntimePolicy.SpecificTimes, _Mapping]] = ..., always: _Optional[_Union[BackgroundAppRuntimePolicy.Always, _Mapping]] = ..., feed_entry_retention: _Optional[_Union[_duration_pb2.Duration, _Mapping]] = ...) -> None: ... 
+ def __init__(self, interval: _Optional[_Union[BackgroundAppRuntimePolicy.Interval, _Mapping]] = ..., times: _Optional[_Union[BackgroundAppRuntimePolicy.SpecificTimes, _Mapping]] = ..., always: _Optional[_Union[BackgroundAppRuntimePolicy.Always, _Mapping]] = ..., feed_entry_retention: _Optional[_Union[datetime.timedelta, _duration_pb2.Duration, _Mapping]] = ...) -> None: ... class BackgroundApp(_message.Message): - __slots__ = ("uuid", "metadata", "runtime_policy") - class Metadata(_message.Message): - __slots__ = ("name", "icon", "description") - NAME_FIELD_NUMBER: _ClassVar[int] - ICON_FIELD_NUMBER: _ClassVar[int] - DESCRIPTION_FIELD_NUMBER: _ClassVar[int] - name: str - icon: _icon_pb2.Icon - description: str - def __init__(self, name: _Optional[str] = ..., icon: _Optional[_Union[_icon_pb2.Icon, _Mapping]] = ..., description: _Optional[str] = ...) -> None: ... - UUID_FIELD_NUMBER: _ClassVar[int] - METADATA_FIELD_NUMBER: _ClassVar[int] + __slots__ = ("runtime_policy",) RUNTIME_POLICY_FIELD_NUMBER: _ClassVar[int] - uuid: str - metadata: BackgroundApp.Metadata runtime_policy: BackgroundAppRuntimePolicy - def __init__(self, uuid: _Optional[str] = ..., metadata: _Optional[_Union[BackgroundApp.Metadata, _Mapping]] = ..., runtime_policy: _Optional[_Union[BackgroundAppRuntimePolicy, _Mapping]] = ...) -> None: ... 
- -class BackgroundAppNotification(_message.Message): - __slots__ = ("feed_entry_ids", "operation") - class Operation(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - OPERATION_INVALID: _ClassVar[BackgroundAppNotification.Operation] - OPERATION_ADD: _ClassVar[BackgroundAppNotification.Operation] - OPERATION_DELETE: _ClassVar[BackgroundAppNotification.Operation] - OPERATION_REFRESH: _ClassVar[BackgroundAppNotification.Operation] - OPERATION_INVALID: BackgroundAppNotification.Operation - OPERATION_ADD: BackgroundAppNotification.Operation - OPERATION_DELETE: BackgroundAppNotification.Operation - OPERATION_REFRESH: BackgroundAppNotification.Operation - FEED_ENTRY_IDS_FIELD_NUMBER: _ClassVar[int] - OPERATION_FIELD_NUMBER: _ClassVar[int] - feed_entry_ids: _containers.RepeatedScalarFieldContainer[int] - operation: BackgroundAppNotification.Operation - def __init__(self, feed_entry_ids: _Optional[_Iterable[int]] = ..., operation: _Optional[_Union[BackgroundAppNotification.Operation, str]] = ...) -> None: ... + def __init__(self, runtime_policy: _Optional[_Union[BackgroundAppRuntimePolicy, _Mapping]] = ...) -> None: ... class BackgroundAppBuildInfo(_message.Message): - __slots__ = ("metadata", "runtime_policy") - METADATA_FIELD_NUMBER: _ClassVar[int] + __slots__ = ("process", "runtime_policy") + PROCESS_FIELD_NUMBER: _ClassVar[int] RUNTIME_POLICY_FIELD_NUMBER: _ClassVar[int] - metadata: BackgroundApp.Metadata + process: _app_build_pb2.ProcessConfig runtime_policy: BackgroundAppRuntimePolicy - def __init__(self, metadata: _Optional[_Union[BackgroundApp.Metadata, _Mapping]] = ..., runtime_policy: _Optional[_Union[BackgroundAppRuntimePolicy, _Mapping]] = ...) -> None: ... + def __init__(self, process: _Optional[_Union[_app_build_pb2.ProcessConfig, _Mapping]] = ..., runtime_policy: _Optional[_Union[BackgroundAppRuntimePolicy, _Mapping]] = ...) -> None: ... 
+ +class BackgroundContext(_message.Message): + __slots__ = ("content", "uris", "priority") + class Priority(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + PRIORITY_UNSPECIFIED: _ClassVar[BackgroundContext.Priority] + PRIORITY_LOW: _ClassVar[BackgroundContext.Priority] + PRIORITY_HIGH: _ClassVar[BackgroundContext.Priority] + PRIORITY_UNSPECIFIED: BackgroundContext.Priority + PRIORITY_LOW: BackgroundContext.Priority + PRIORITY_HIGH: BackgroundContext.Priority + CONTENT_FIELD_NUMBER: _ClassVar[int] + URIS_FIELD_NUMBER: _ClassVar[int] + PRIORITY_FIELD_NUMBER: _ClassVar[int] + content: str + uris: _containers.RepeatedScalarFieldContainer[str] + priority: BackgroundContext.Priority + def __init__(self, content: _Optional[str] = ..., uris: _Optional[_Iterable[str]] = ..., priority: _Optional[_Union[BackgroundContext.Priority, str]] = ...) -> None: ... -class BackgroundAppSubmitFeedContentRequest(_message.Message): - __slots__ = ("card",) - CARD_FIELD_NUMBER: _ClassVar[int] - card: _background_feed_pb2.FeedCard - def __init__(self, card: _Optional[_Union[_background_feed_pb2.FeedCard, _Mapping]] = ...) -> None: ... +class BackgroundAppSubmitContextRequest(_message.Message): + __slots__ = ("content",) + CONTENT_FIELD_NUMBER: _ClassVar[int] + content: BackgroundContext + def __init__(self, content: _Optional[_Union[BackgroundContext, _Mapping]] = ...) -> None: ... -class BackgroundAppSubmitFeedContentResponse(_message.Message): - __slots__ = ("feed_entry_id",) - FEED_ENTRY_ID_FIELD_NUMBER: _ClassVar[int] - feed_entry_id: int - def __init__(self, feed_entry_id: _Optional[int] = ...) -> None: ... +class BackgroundAppSubmitContextResponse(_message.Message): + __slots__ = () + def __init__(self) -> None: ... 
class BackgroundAppOnRunRequest(_message.Message): __slots__ = () @@ -170,33 +157,7 @@ class BackgroundAppYieldResponse(_message.Message): __slots__ = ("next_scheduled_run_time",) NEXT_SCHEDULED_RUN_TIME_FIELD_NUMBER: _ClassVar[int] next_scheduled_run_time: _timestamp_pb2.Timestamp - def __init__(self, next_scheduled_run_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... - -class BackgroundAppError(_message.Message): - __slots__ = ("error_type", "error_message") - class ErrorType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - BG_APP_ERROR_TYPE_INVALID: _ClassVar[BackgroundAppError.ErrorType] - BG_APP_ERROR_TYPE_RUNTIME: _ClassVar[BackgroundAppError.ErrorType] - BG_APP_ERROR_AUTH: _ClassVar[BackgroundAppError.ErrorType] - BG_APP_ERROR_TYPE_UNKNOWN: _ClassVar[BackgroundAppError.ErrorType] - BG_APP_ERROR_TYPE_INVALID: BackgroundAppError.ErrorType - BG_APP_ERROR_TYPE_RUNTIME: BackgroundAppError.ErrorType - BG_APP_ERROR_AUTH: BackgroundAppError.ErrorType - BG_APP_ERROR_TYPE_UNKNOWN: BackgroundAppError.ErrorType - ERROR_TYPE_FIELD_NUMBER: _ClassVar[int] - ERROR_MESSAGE_FIELD_NUMBER: _ClassVar[int] - error_type: BackgroundAppError.ErrorType - error_message: str - def __init__(self, error_type: _Optional[_Union[BackgroundAppError.ErrorType, str]] = ..., error_message: _Optional[str] = ...) -> None: ... - -class BackgroundAppReportErrorRequest(_message.Message): - __slots__ = ("error", "needs_intervention") - ERROR_FIELD_NUMBER: _ClassVar[int] - NEEDS_INTERVENTION_FIELD_NUMBER: _ClassVar[int] - error: BackgroundAppError - needs_intervention: bool - def __init__(self, error: _Optional[_Union[BackgroundAppError, _Mapping]] = ..., needs_intervention: bool = ...) -> None: ... + def __init__(self, next_scheduled_run_time: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... 
class BackgroundAppReportErrorResponse(_message.Message): __slots__ = () diff --git a/truffle/app/background_pb2_grpc.py b/truffle/app/background_pb2_grpc.py index a731773..5f1f658 100644 --- a/truffle/app/background_pb2_grpc.py +++ b/truffle/app/background_pb2_grpc.py @@ -5,7 +5,7 @@ from truffle.app import background_pb2 as truffle_dot_app_dot_background__pb2 -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -18,7 +18,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/app/background_pb2_grpc.py depends on' + + ' but the generated code in truffle/app/background_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' @@ -37,10 +37,10 @@ def __init__(self, channel): Args: channel: A grpc.Channel. 
""" - self.SubmitFeedContent = channel.unary_unary( - '/truffle.app.BackgroundAppService/SubmitFeedContent', - request_serializer=truffle_dot_app_dot_background__pb2.BackgroundAppSubmitFeedContentRequest.SerializeToString, - response_deserializer=truffle_dot_app_dot_background__pb2.BackgroundAppSubmitFeedContentResponse.FromString, + self.Submit = channel.unary_unary( + '/truffle.app.BackgroundAppService/Submit', + request_serializer=truffle_dot_app_dot_background__pb2.BackgroundAppSubmitContextRequest.SerializeToString, + response_deserializer=truffle_dot_app_dot_background__pb2.BackgroundAppSubmitContextResponse.FromString, _registered_method=True) self.OnRun = channel.unary_unary( '/truffle.app.BackgroundAppService/OnRun', @@ -52,11 +52,6 @@ def __init__(self, channel): request_serializer=truffle_dot_app_dot_background__pb2.BackgroundAppYieldRequest.SerializeToString, response_deserializer=truffle_dot_app_dot_background__pb2.BackgroundAppYieldResponse.FromString, _registered_method=True) - self.ReportError = channel.unary_unary( - '/truffle.app.BackgroundAppService/ReportError', - request_serializer=truffle_dot_app_dot_background__pb2.BackgroundAppReportErrorRequest.SerializeToString, - response_deserializer=truffle_dot_app_dot_background__pb2.BackgroundAppReportErrorResponse.FromString, - _registered_method=True) class BackgroundAppServiceServicer(object): @@ -65,8 +60,8 @@ class BackgroundAppServiceServicer(object): gated by per app api key, available in the environment when the app is run. 
""" - def SubmitFeedContent(self, request, context): - """post to curator/feed + def Submit(self, request, context): + """post context """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') @@ -85,19 +80,13 @@ def Yield(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') - def ReportError(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - def add_BackgroundAppServiceServicer_to_server(servicer, server): rpc_method_handlers = { - 'SubmitFeedContent': grpc.unary_unary_rpc_method_handler( - servicer.SubmitFeedContent, - request_deserializer=truffle_dot_app_dot_background__pb2.BackgroundAppSubmitFeedContentRequest.FromString, - response_serializer=truffle_dot_app_dot_background__pb2.BackgroundAppSubmitFeedContentResponse.SerializeToString, + 'Submit': grpc.unary_unary_rpc_method_handler( + servicer.Submit, + request_deserializer=truffle_dot_app_dot_background__pb2.BackgroundAppSubmitContextRequest.FromString, + response_serializer=truffle_dot_app_dot_background__pb2.BackgroundAppSubmitContextResponse.SerializeToString, ), 'OnRun': grpc.unary_unary_rpc_method_handler( servicer.OnRun, @@ -109,11 +98,6 @@ def add_BackgroundAppServiceServicer_to_server(servicer, server): request_deserializer=truffle_dot_app_dot_background__pb2.BackgroundAppYieldRequest.FromString, response_serializer=truffle_dot_app_dot_background__pb2.BackgroundAppYieldResponse.SerializeToString, ), - 'ReportError': grpc.unary_unary_rpc_method_handler( - servicer.ReportError, - request_deserializer=truffle_dot_app_dot_background__pb2.BackgroundAppReportErrorRequest.FromString, - response_serializer=truffle_dot_app_dot_background__pb2.BackgroundAppReportErrorResponse.SerializeToString, - ), } 
generic_handler = grpc.method_handlers_generic_handler( 'truffle.app.BackgroundAppService', rpc_method_handlers) @@ -129,7 +113,7 @@ class BackgroundAppService(object): """ @staticmethod - def SubmitFeedContent(request, + def Submit(request, target, options=(), channel_credentials=None, @@ -142,9 +126,9 @@ def SubmitFeedContent(request, return grpc.experimental.unary_unary( request, target, - '/truffle.app.BackgroundAppService/SubmitFeedContent', - truffle_dot_app_dot_background__pb2.BackgroundAppSubmitFeedContentRequest.SerializeToString, - truffle_dot_app_dot_background__pb2.BackgroundAppSubmitFeedContentResponse.FromString, + '/truffle.app.BackgroundAppService/Submit', + truffle_dot_app_dot_background__pb2.BackgroundAppSubmitContextRequest.SerializeToString, + truffle_dot_app_dot_background__pb2.BackgroundAppSubmitContextResponse.FromString, options, channel_credentials, insecure, @@ -208,30 +192,3 @@ def Yield(request, timeout, metadata, _registered_method=True) - - @staticmethod - def ReportError(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/truffle.app.BackgroundAppService/ReportError', - truffle_dot_app_dot_background__pb2.BackgroundAppReportErrorRequest.SerializeToString, - truffle_dot_app_dot_background__pb2.BackgroundAppReportErrorResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) diff --git a/truffle/app/curator_pb2.py b/truffle/app/curator_pb2.py deleted file mode 100644 index 3893185..0000000 --- a/truffle/app/curator_pb2.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE -# source: truffle/app/curator.proto -# Protobuf Python Version: 6.30.0 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 6, - 30, - 0, - '', - 'truffle/app/curator.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from truffle.app import background_feed_pb2 as truffle_dot_app_dot_background__feed__pb2 -from truffle.app import background_pb2 as truffle_dot_app_dot_background__pb2 -from truffle.os import background_feed_queries_pb2 as truffle_dot_os_dot_background__feed__queries__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19truffle/app/curator.proto\x12\x0btruffle.app\x1a!truffle/app/background_feed.proto\x1a\x1ctruffle/app/background.proto\x1a(truffle/os/background_feed_queries.proto\"\x16\n\x14TakeFeedbackResponse\"q\n\x13TakeFeedbackRequest\x12;\n\x08\x66\x65\x65\x64\x62\x61\x63k\x18\x01 \x01(\x0b\x32).truffle.os.BackgroundFeedFeedbackRequest\x12\x1d\n\x15\x66\x65\x65\x64\x62\x61\x63k_request_uuid\x18\x02 \x01(\t\"\xae\x01\n\rFeedOperation\x12\x15\n\rfeed_entry_id\x18\x01 \x01(\x04\x12\x43\n\toperation\x18\x02 \x01(\x0e\x32\x30.truffle.app.BackgroundAppNotification.Operation\x12\x30\n\x0cupdated_card\x18\x03 \x01(\x0b\x32\x15.truffle.app.FeedCardH\x00\x88\x01\x01\x42\x0f\n\r_updated_card\"@\n\x14HandleNewPostRequest\x12(\n\x08new_post\x18\x01 \x01(\x0b\x32\x16.truffle.app.FeedEntry\"\x17\n\x15HandleNewPostResponse\"\"\n\x0c\x43uratorState\x12\x12\n\nstate_json\x18\x01 \x01(\t\"\x87\x01\n\x12\x46\x65\x65\x64\x43ontrolRequest\x12\x38\n\x10last_known_state\x18\x01 
\x01(\x0b\x32\x19.truffle.app.CuratorStateH\x00\x88\x01\x01\x12\"\n\x1a\x63urator_user_session_token\x18\x02 \x01(\tB\x13\n\x11_last_known_state\"2\n\x11\x46\x65\x65\x64\x62\x61\x63kProcessed\x12\x1d\n\x15\x66\x65\x65\x64\x62\x61\x63k_request_uuid\x18\x01 \x01(\t\"\xbe\x01\n\x13\x46\x65\x65\x64\x43ontrolResponse\x12/\n\toperation\x18\x01 \x01(\x0b\x32\x1a.truffle.app.FeedOperationH\x00\x12\x32\n\rstate_to_save\x18\x02 \x01(\x0b\x32\x19.truffle.app.CuratorStateH\x00\x12\x37\n\rfeedback_done\x18\x03 \x01(\x0b\x32\x1e.truffle.app.FeedbackProcessedH\x00\x42\t\n\x07\x63ontrol2\x91\x02\n\x0e\x43uratorService\x12S\n\x0cTakeFeedback\x12 .truffle.app.TakeFeedbackRequest\x1a!.truffle.app.TakeFeedbackResponse\x12V\n\rHandleNewPost\x12!.truffle.app.HandleNewPostRequest\x1a\".truffle.app.HandleNewPostResponse\x12R\n\x0b\x46\x65\x65\x64\x43ontrol\x12\x1f.truffle.app.FeedControlRequest\x1a .truffle.app.FeedControlResponse0\x01\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.app.curator_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_TAKEFEEDBACKRESPONSE']._serialized_start=149 - _globals['_TAKEFEEDBACKRESPONSE']._serialized_end=171 - _globals['_TAKEFEEDBACKREQUEST']._serialized_start=173 - _globals['_TAKEFEEDBACKREQUEST']._serialized_end=286 - _globals['_FEEDOPERATION']._serialized_start=289 - _globals['_FEEDOPERATION']._serialized_end=463 - _globals['_HANDLENEWPOSTREQUEST']._serialized_start=465 - _globals['_HANDLENEWPOSTREQUEST']._serialized_end=529 - _globals['_HANDLENEWPOSTRESPONSE']._serialized_start=531 - _globals['_HANDLENEWPOSTRESPONSE']._serialized_end=554 - _globals['_CURATORSTATE']._serialized_start=556 - _globals['_CURATORSTATE']._serialized_end=590 - _globals['_FEEDCONTROLREQUEST']._serialized_start=593 - _globals['_FEEDCONTROLREQUEST']._serialized_end=728 - 
_globals['_FEEDBACKPROCESSED']._serialized_start=730 - _globals['_FEEDBACKPROCESSED']._serialized_end=780 - _globals['_FEEDCONTROLRESPONSE']._serialized_start=783 - _globals['_FEEDCONTROLRESPONSE']._serialized_end=973 - _globals['_CURATORSERVICE']._serialized_start=976 - _globals['_CURATORSERVICE']._serialized_end=1249 -# @@protoc_insertion_point(module_scope) diff --git a/truffle/app/curator_pb2.pyi b/truffle/app/curator_pb2.pyi deleted file mode 100644 index 7317d86..0000000 --- a/truffle/app/curator_pb2.pyi +++ /dev/null @@ -1,71 +0,0 @@ -from truffle.app import background_feed_pb2 as _background_feed_pb2 -from truffle.app import background_pb2 as _background_pb2 -from truffle.os import background_feed_queries_pb2 as _background_feed_queries_pb2 -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from collections.abc import Mapping as _Mapping -from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union - -DESCRIPTOR: _descriptor.FileDescriptor - -class TakeFeedbackResponse(_message.Message): - __slots__ = () - def __init__(self) -> None: ... - -class TakeFeedbackRequest(_message.Message): - __slots__ = ("feedback", "feedback_request_uuid") - FEEDBACK_FIELD_NUMBER: _ClassVar[int] - FEEDBACK_REQUEST_UUID_FIELD_NUMBER: _ClassVar[int] - feedback: _background_feed_queries_pb2.BackgroundFeedFeedbackRequest - feedback_request_uuid: str - def __init__(self, feedback: _Optional[_Union[_background_feed_queries_pb2.BackgroundFeedFeedbackRequest, _Mapping]] = ..., feedback_request_uuid: _Optional[str] = ...) -> None: ... 
- -class FeedOperation(_message.Message): - __slots__ = ("feed_entry_id", "operation", "updated_card") - FEED_ENTRY_ID_FIELD_NUMBER: _ClassVar[int] - OPERATION_FIELD_NUMBER: _ClassVar[int] - UPDATED_CARD_FIELD_NUMBER: _ClassVar[int] - feed_entry_id: int - operation: _background_pb2.BackgroundAppNotification.Operation - updated_card: _background_feed_pb2.FeedCard - def __init__(self, feed_entry_id: _Optional[int] = ..., operation: _Optional[_Union[_background_pb2.BackgroundAppNotification.Operation, str]] = ..., updated_card: _Optional[_Union[_background_feed_pb2.FeedCard, _Mapping]] = ...) -> None: ... - -class HandleNewPostRequest(_message.Message): - __slots__ = ("new_post",) - NEW_POST_FIELD_NUMBER: _ClassVar[int] - new_post: _background_feed_pb2.FeedEntry - def __init__(self, new_post: _Optional[_Union[_background_feed_pb2.FeedEntry, _Mapping]] = ...) -> None: ... - -class HandleNewPostResponse(_message.Message): - __slots__ = () - def __init__(self) -> None: ... - -class CuratorState(_message.Message): - __slots__ = ("state_json",) - STATE_JSON_FIELD_NUMBER: _ClassVar[int] - state_json: str - def __init__(self, state_json: _Optional[str] = ...) -> None: ... - -class FeedControlRequest(_message.Message): - __slots__ = ("last_known_state", "curator_user_session_token") - LAST_KNOWN_STATE_FIELD_NUMBER: _ClassVar[int] - CURATOR_USER_SESSION_TOKEN_FIELD_NUMBER: _ClassVar[int] - last_known_state: CuratorState - curator_user_session_token: str - def __init__(self, last_known_state: _Optional[_Union[CuratorState, _Mapping]] = ..., curator_user_session_token: _Optional[str] = ...) -> None: ... - -class FeedbackProcessed(_message.Message): - __slots__ = ("feedback_request_uuid",) - FEEDBACK_REQUEST_UUID_FIELD_NUMBER: _ClassVar[int] - feedback_request_uuid: str - def __init__(self, feedback_request_uuid: _Optional[str] = ...) -> None: ... 
- -class FeedControlResponse(_message.Message): - __slots__ = ("operation", "state_to_save", "feedback_done") - OPERATION_FIELD_NUMBER: _ClassVar[int] - STATE_TO_SAVE_FIELD_NUMBER: _ClassVar[int] - FEEDBACK_DONE_FIELD_NUMBER: _ClassVar[int] - operation: FeedOperation - state_to_save: CuratorState - feedback_done: FeedbackProcessed - def __init__(self, operation: _Optional[_Union[FeedOperation, _Mapping]] = ..., state_to_save: _Optional[_Union[CuratorState, _Mapping]] = ..., feedback_done: _Optional[_Union[FeedbackProcessed, _Mapping]] = ...) -> None: ... diff --git a/truffle/app/curator_pb2_grpc.py b/truffle/app/curator_pb2_grpc.py deleted file mode 100644 index 000ab71..0000000 --- a/truffle/app/curator_pb2_grpc.py +++ /dev/null @@ -1,183 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc -import warnings - -from truffle.app import curator_pb2 as truffle_dot_app_dot_curator__pb2 - -GRPC_GENERATED_VERSION = '1.72.0' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/app/curator_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - ) - - -class CuratorServiceStub(object): - """Missing associated documentation comment in .proto file.""" - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.TakeFeedback = channel.unary_unary( - '/truffle.app.CuratorService/TakeFeedback', - request_serializer=truffle_dot_app_dot_curator__pb2.TakeFeedbackRequest.SerializeToString, - response_deserializer=truffle_dot_app_dot_curator__pb2.TakeFeedbackResponse.FromString, - _registered_method=True) - self.HandleNewPost = channel.unary_unary( - '/truffle.app.CuratorService/HandleNewPost', - request_serializer=truffle_dot_app_dot_curator__pb2.HandleNewPostRequest.SerializeToString, - response_deserializer=truffle_dot_app_dot_curator__pb2.HandleNewPostResponse.FromString, - _registered_method=True) - self.FeedControl = channel.unary_stream( - '/truffle.app.CuratorService/FeedControl', - request_serializer=truffle_dot_app_dot_curator__pb2.FeedControlRequest.SerializeToString, - response_deserializer=truffle_dot_app_dot_curator__pb2.FeedControlResponse.FromString, - _registered_method=True) - - -class CuratorServiceServicer(object): - """Missing associated documentation comment in .proto file.""" - - def TakeFeedback(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def HandleNewPost(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def FeedControl(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_CuratorServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - 'TakeFeedback': grpc.unary_unary_rpc_method_handler( - servicer.TakeFeedback, - 
request_deserializer=truffle_dot_app_dot_curator__pb2.TakeFeedbackRequest.FromString, - response_serializer=truffle_dot_app_dot_curator__pb2.TakeFeedbackResponse.SerializeToString, - ), - 'HandleNewPost': grpc.unary_unary_rpc_method_handler( - servicer.HandleNewPost, - request_deserializer=truffle_dot_app_dot_curator__pb2.HandleNewPostRequest.FromString, - response_serializer=truffle_dot_app_dot_curator__pb2.HandleNewPostResponse.SerializeToString, - ), - 'FeedControl': grpc.unary_stream_rpc_method_handler( - servicer.FeedControl, - request_deserializer=truffle_dot_app_dot_curator__pb2.FeedControlRequest.FromString, - response_serializer=truffle_dot_app_dot_curator__pb2.FeedControlResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'truffle.app.CuratorService', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) - server.add_registered_method_handlers('truffle.app.CuratorService', rpc_method_handlers) - - - # This class is part of an EXPERIMENTAL API. 
-class CuratorService(object): - """Missing associated documentation comment in .proto file.""" - - @staticmethod - def TakeFeedback(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/truffle.app.CuratorService/TakeFeedback', - truffle_dot_app_dot_curator__pb2.TakeFeedbackRequest.SerializeToString, - truffle_dot_app_dot_curator__pb2.TakeFeedbackResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def HandleNewPost(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/truffle.app.CuratorService/HandleNewPost', - truffle_dot_app_dot_curator__pb2.HandleNewPostRequest.SerializeToString, - truffle_dot_app_dot_curator__pb2.HandleNewPostResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def FeedControl(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_stream( - request, - target, - '/truffle.app.CuratorService/FeedControl', - truffle_dot_app_dot_curator__pb2.FeedControlRequest.SerializeToString, - truffle_dot_app_dot_curator__pb2.FeedControlResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) diff --git a/truffle/app/default_app_manifest_pb2.py 
b/truffle/app/default_app_manifest_pb2.py index 1ae3978..a996440 100644 --- a/truffle/app/default_app_manifest_pb2.py +++ b/truffle/app/default_app_manifest_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! # NO CHECKED-IN PROTOBUF GENCODE # source: truffle/app/default_app_manifest.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/app/default_app_manifest.proto' ) @@ -22,20 +22,20 @@ _sym_db = _symbol_database.Default() -from truffle.app import app_type_pb2 as truffle_dot_app_dot_app__type__pb2 from truffle.common import icon_pb2 as truffle_dot_common_dot_icon__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from truffle.app import app_pb2 as truffle_dot_app_dot_app__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&truffle/app/default_app_manifest.proto\x12\x0btruffle.app\x1a\x1atruffle/app/app_type.proto\x1a\x19truffle/common/icon.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xb7\x02\n\x12\x44\x65\x66\x61ultAppManifest\x12\x0f\n\x07version\x18\x01 \x01(\t\x12\x30\n\x0cgenerated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x38\n\x04\x61pps\x18\x03 \x03(\x0b\x32*.truffle.app.DefaultAppManifest.DefaultApp\x1a\xa3\x01\n\nDefaultApp\x12&\n\x08\x61pp_type\x18\x01 \x01(\x0e\x32\x14.truffle.app.AppType\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x12\n\nbundle_url\x18\x03 \x01(\t\x12\"\n\x04icon\x18\x04 \x01(\x0b\x32\x14.truffle.common.Icon\x12\x12\n\nbundle_md5\x18\x05 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\tb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n&truffle/app/default_app_manifest.proto\x12\x0btruffle.app\x1a\x19truffle/common/icon.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x15truffle/app/app.proto\"\xbd\x02\n\x12\x44\x65\x66\x61ultAppManifest\x12\x0f\n\x07version\x18\x01 \x01(\t\x12\x30\n\x0cgenerated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x38\n\x04\x61pps\x18\x03 \x03(\x0b\x32*.truffle.app.DefaultAppManifest.DefaultApp\x1a\xa9\x01\n\nDefaultApp\x12\r\n\x05index\x18\x01 \x01(\r\x12\x12\n\nbundle_url\x18\x02 \x01(\t\x12*\n\x08metadata\x18\x03 \x01(\x0b\x32\x18.truffle.app.AppMetadata\x12\x12\n\nbundle_md5\x18\x04 \x01(\t\x12\x1b\n\x13provides_foreground\x18\x05 \x01(\x08\x12\x1b\n\x13provides_background\x18\x06 \x01(\x08\x62\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.app.default_app_manifest_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: DESCRIPTOR._loaded_options = None - _globals['_DEFAULTAPPMANIFEST']._serialized_start=144 - _globals['_DEFAULTAPPMANIFEST']._serialized_end=455 - _globals['_DEFAULTAPPMANIFEST_DEFAULTAPP']._serialized_start=292 - _globals['_DEFAULTAPPMANIFEST_DEFAULTAPP']._serialized_end=455 + _globals['_DEFAULTAPPMANIFEST']._serialized_start=139 + _globals['_DEFAULTAPPMANIFEST']._serialized_end=456 + _globals['_DEFAULTAPPMANIFEST_DEFAULTAPP']._serialized_start=287 + _globals['_DEFAULTAPPMANIFEST_DEFAULTAPP']._serialized_end=456 # @@protoc_insertion_point(module_scope) diff --git a/truffle/app/default_app_manifest_pb2.pyi b/truffle/app/default_app_manifest_pb2.pyi index a06eb47..aa96a8d 100644 --- a/truffle/app/default_app_manifest_pb2.pyi +++ b/truffle/app/default_app_manifest_pb2.pyi @@ -1,6 +1,8 @@ -from truffle.app import app_type_pb2 as _app_type_pb2 +import datetime + from truffle.common import icon_pb2 as _icon_pb2 from google.protobuf import timestamp_pb2 as _timestamp_pb2 +from truffle.app 
import app_pb2 as _app_pb2 from google.protobuf.internal import containers as _containers from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message @@ -12,24 +14,24 @@ DESCRIPTOR: _descriptor.FileDescriptor class DefaultAppManifest(_message.Message): __slots__ = ("version", "generated_at", "apps") class DefaultApp(_message.Message): - __slots__ = ("app_type", "name", "bundle_url", "icon", "bundle_md5", "description") - APP_TYPE_FIELD_NUMBER: _ClassVar[int] - NAME_FIELD_NUMBER: _ClassVar[int] + __slots__ = ("index", "bundle_url", "metadata", "bundle_md5", "provides_foreground", "provides_background") + INDEX_FIELD_NUMBER: _ClassVar[int] BUNDLE_URL_FIELD_NUMBER: _ClassVar[int] - ICON_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] BUNDLE_MD5_FIELD_NUMBER: _ClassVar[int] - DESCRIPTION_FIELD_NUMBER: _ClassVar[int] - app_type: _app_type_pb2.AppType - name: str + PROVIDES_FOREGROUND_FIELD_NUMBER: _ClassVar[int] + PROVIDES_BACKGROUND_FIELD_NUMBER: _ClassVar[int] + index: int bundle_url: str - icon: _icon_pb2.Icon + metadata: _app_pb2.AppMetadata bundle_md5: str - description: str - def __init__(self, app_type: _Optional[_Union[_app_type_pb2.AppType, str]] = ..., name: _Optional[str] = ..., bundle_url: _Optional[str] = ..., icon: _Optional[_Union[_icon_pb2.Icon, _Mapping]] = ..., bundle_md5: _Optional[str] = ..., description: _Optional[str] = ...) -> None: ... + provides_foreground: bool + provides_background: bool + def __init__(self, index: _Optional[int] = ..., bundle_url: _Optional[str] = ..., metadata: _Optional[_Union[_app_pb2.AppMetadata, _Mapping]] = ..., bundle_md5: _Optional[str] = ..., provides_foreground: bool = ..., provides_background: bool = ...) -> None: ... 
VERSION_FIELD_NUMBER: _ClassVar[int] GENERATED_AT_FIELD_NUMBER: _ClassVar[int] APPS_FIELD_NUMBER: _ClassVar[int] version: str generated_at: _timestamp_pb2.Timestamp apps: _containers.RepeatedCompositeFieldContainer[DefaultAppManifest.DefaultApp] - def __init__(self, version: _Optional[str] = ..., generated_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., apps: _Optional[_Iterable[_Union[DefaultAppManifest.DefaultApp, _Mapping]]] = ...) -> None: ... + def __init__(self, version: _Optional[str] = ..., generated_at: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., apps: _Optional[_Iterable[_Union[DefaultAppManifest.DefaultApp, _Mapping]]] = ...) -> None: ... diff --git a/truffle/app/default_app_manifest_pb2_grpc.py b/truffle/app/default_app_manifest_pb2_grpc.py index 8507aef..f33f561 100644 --- a/truffle/app/default_app_manifest_pb2_grpc.py +++ b/truffle/app/default_app_manifest_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/app/default_app_manifest_pb2_grpc.py depends on' + + ' but the generated code in truffle/app/default_app_manifest_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/app/foreground_pb2.py b/truffle/app/foreground_pb2.py index 730276d..d358d53 100644 --- a/truffle/app/foreground_pb2.py +++ b/truffle/app/foreground_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/app/foreground.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/app/foreground.proto' ) @@ -22,20 +22,20 @@ _sym_db = _symbol_database.Default() -from truffle.common import icon_pb2 as truffle_dot_common_dot_icon__pb2 +from truffle.app import app_build_pb2 as truffle_dot_app_dot_app__build__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1ctruffle/app/foreground.proto\x12\x0btruffle.app\x1a\x19truffle/common/icon.proto\"\xa7\x01\n\rForegroundApp\x12\x0c\n\x04uuid\x18\x01 \x01(\t\x12\x35\n\x08metadata\x18\x02 \x01(\x0b\x32#.truffle.app.ForegroundApp.Metadata\x1aQ\n\x08Metadata\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\"\n\x04icon\x18\x02 \x01(\x0b\x32\x14.truffle.common.Icon\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\"O\n\x16\x46oregroundAppBuildInfo\x12\x35\n\x08metadata\x18\x01 \x01(\x0b\x32#.truffle.app.ForegroundApp.Metadatab\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1ctruffle/app/foreground.proto\x12\x0btruffle.app\x1a\x1btruffle/app/app_build.proto\"\xa5\x01\n\rForegroundApp\x12\x41\n\x0f\x61vailable_tools\x18\x01 \x03(\x0b\x32(.truffle.app.ForegroundApp.AvailableTool\x1aQ\n\rAvailableTool\x12\x11\n\ttool_name\x18\x01 \x01(\t\x12\x18\n\x10tool_description\x18\x02 \x01(\t\x12\x13\n\x0b\x61rgs_schema\x18\x03 \x01(\t\"E\n\x16\x46oregroundAppBuildInfo\x12+\n\x07process\x18\x01 \x01(\x0b\x32\x1a.truffle.app.ProcessConfigb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.app.foreground_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: 
DESCRIPTOR._loaded_options = None - _globals['_FOREGROUNDAPP']._serialized_start=73 + _globals['_FOREGROUNDAPP']._serialized_start=75 _globals['_FOREGROUNDAPP']._serialized_end=240 - _globals['_FOREGROUNDAPP_METADATA']._serialized_start=159 - _globals['_FOREGROUNDAPP_METADATA']._serialized_end=240 + _globals['_FOREGROUNDAPP_AVAILABLETOOL']._serialized_start=159 + _globals['_FOREGROUNDAPP_AVAILABLETOOL']._serialized_end=240 _globals['_FOREGROUNDAPPBUILDINFO']._serialized_start=242 - _globals['_FOREGROUNDAPPBUILDINFO']._serialized_end=321 + _globals['_FOREGROUNDAPPBUILDINFO']._serialized_end=311 # @@protoc_insertion_point(module_scope) diff --git a/truffle/app/foreground_pb2.pyi b/truffle/app/foreground_pb2.pyi index e946641..ffe121b 100644 --- a/truffle/app/foreground_pb2.pyi +++ b/truffle/app/foreground_pb2.pyi @@ -1,30 +1,29 @@ -from truffle.common import icon_pb2 as _icon_pb2 +from truffle.app import app_build_pb2 as _app_build_pb2 +from google.protobuf.internal import containers as _containers from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message -from collections.abc import Mapping as _Mapping +from collections.abc import Iterable as _Iterable, Mapping as _Mapping from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union DESCRIPTOR: _descriptor.FileDescriptor class ForegroundApp(_message.Message): - __slots__ = ("uuid", "metadata") - class Metadata(_message.Message): - __slots__ = ("name", "icon", "description") - NAME_FIELD_NUMBER: _ClassVar[int] - ICON_FIELD_NUMBER: _ClassVar[int] - DESCRIPTION_FIELD_NUMBER: _ClassVar[int] - name: str - icon: _icon_pb2.Icon - description: str - def __init__(self, name: _Optional[str] = ..., icon: _Optional[_Union[_icon_pb2.Icon, _Mapping]] = ..., description: _Optional[str] = ...) -> None: ... 
- UUID_FIELD_NUMBER: _ClassVar[int] - METADATA_FIELD_NUMBER: _ClassVar[int] - uuid: str - metadata: ForegroundApp.Metadata - def __init__(self, uuid: _Optional[str] = ..., metadata: _Optional[_Union[ForegroundApp.Metadata, _Mapping]] = ...) -> None: ... + __slots__ = ("available_tools",) + class AvailableTool(_message.Message): + __slots__ = ("tool_name", "tool_description", "args_schema") + TOOL_NAME_FIELD_NUMBER: _ClassVar[int] + TOOL_DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + ARGS_SCHEMA_FIELD_NUMBER: _ClassVar[int] + tool_name: str + tool_description: str + args_schema: str + def __init__(self, tool_name: _Optional[str] = ..., tool_description: _Optional[str] = ..., args_schema: _Optional[str] = ...) -> None: ... + AVAILABLE_TOOLS_FIELD_NUMBER: _ClassVar[int] + available_tools: _containers.RepeatedCompositeFieldContainer[ForegroundApp.AvailableTool] + def __init__(self, available_tools: _Optional[_Iterable[_Union[ForegroundApp.AvailableTool, _Mapping]]] = ...) -> None: ... class ForegroundAppBuildInfo(_message.Message): - __slots__ = ("metadata",) - METADATA_FIELD_NUMBER: _ClassVar[int] - metadata: ForegroundApp.Metadata - def __init__(self, metadata: _Optional[_Union[ForegroundApp.Metadata, _Mapping]] = ...) -> None: ... + __slots__ = ("process",) + PROCESS_FIELD_NUMBER: _ClassVar[int] + process: _app_build_pb2.ProcessConfig + def __init__(self, process: _Optional[_Union[_app_build_pb2.ProcessConfig, _Mapping]] = ...) -> None: ... 
diff --git a/truffle/app/foreground_pb2_grpc.py b/truffle/app/foreground_pb2_grpc.py index b0258f3..31cdf1f 100644 --- a/truffle/app/foreground_pb2_grpc.py +++ b/truffle/app/foreground_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/app/foreground_pb2_grpc.py depends on' + + ' but the generated code in truffle/app/foreground_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/app/system_pb2.py b/truffle/app/system_pb2.py deleted file mode 100644 index fa84ae9..0000000 --- a/truffle/app/system_pb2.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE -# source: truffle/app/system.proto -# Protobuf Python Version: 6.30.0 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 6, - 30, - 0, - '', - 'truffle/app/system.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from truffle.app import background_pb2 as truffle_dot_app_dot_background__pb2 -from truffle.app import app_build_pb2 as truffle_dot_app_dot_app__build__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x18truffle/app/system.proto\x12\x0btruffle.app\x1a\x1ctruffle/app/background.proto\x1a\x1btruffle/app/app_build.proto\"\xc5\x01\n\tSystemApp\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x12\n\nsource_dir\x18\x02 \x01(\t\x12+\n\x07process\x18\x03 \x01(\x0b\x32\x1a.truffle.app.ProcessConfig\x12\x0f\n\x07no_ckpt\x18\x04 \x01(\x08\x12\x45\n\x0fschedule_policy\x18\x05 \x01(\x0b\x32\'.truffle.app.BackgroundAppRuntimePolicyH\x00\x88\x01\x01\x42\x12\n\x10_schedule_policyb\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.app.system_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_SYSTEMAPP']._serialized_start=101 - _globals['_SYSTEMAPP']._serialized_end=298 -# @@protoc_insertion_point(module_scope) diff --git a/truffle/app/system_pb2.pyi b/truffle/app/system_pb2.pyi deleted file mode 100644 index 1529d8c..0000000 --- a/truffle/app/system_pb2.pyi +++ /dev/null @@ -1,22 +0,0 @@ -from truffle.app import background_pb2 as _background_pb2 
-from truffle.app import app_build_pb2 as _app_build_pb2 -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from collections.abc import Mapping as _Mapping -from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union - -DESCRIPTOR: _descriptor.FileDescriptor - -class SystemApp(_message.Message): - __slots__ = ("key", "source_dir", "process", "no_ckpt", "schedule_policy") - KEY_FIELD_NUMBER: _ClassVar[int] - SOURCE_DIR_FIELD_NUMBER: _ClassVar[int] - PROCESS_FIELD_NUMBER: _ClassVar[int] - NO_CKPT_FIELD_NUMBER: _ClassVar[int] - SCHEDULE_POLICY_FIELD_NUMBER: _ClassVar[int] - key: str - source_dir: str - process: _app_build_pb2.ProcessConfig - no_ckpt: bool - schedule_policy: _background_pb2.BackgroundAppRuntimePolicy - def __init__(self, key: _Optional[str] = ..., source_dir: _Optional[str] = ..., process: _Optional[_Union[_app_build_pb2.ProcessConfig, _Mapping]] = ..., no_ckpt: bool = ..., schedule_policy: _Optional[_Union[_background_pb2.BackgroundAppRuntimePolicy, _Mapping]] = ...) -> None: ... diff --git a/truffle/app/task_runtime_pb2.py b/truffle/app/task_runtime_pb2.py deleted file mode 100644 index 9a46ed1..0000000 --- a/truffle/app/task_runtime_pb2.py +++ /dev/null @@ -1,128 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE -# source: truffle/app/task_runtime.proto -# Protobuf Python Version: 6.30.0 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 6, - 30, - 0, - '', - 'truffle/app/task_runtime.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from truffle.os import task_pb2 as truffle_dot_os_dot_task__pb2 -try: - truffle_dot_os_dot_task__info__pb2 = truffle_dot_os_dot_task__pb2.truffle_dot_os_dot_task__info__pb2 -except AttributeError: - truffle_dot_os_dot_task__info__pb2 = truffle_dot_os_dot_task__pb2.truffle.os.task_info_pb2 -try: - truffle_dot_os_dot_task__user__response__pb2 = truffle_dot_os_dot_task__pb2.truffle_dot_os_dot_task__user__response__pb2 -except AttributeError: - truffle_dot_os_dot_task__user__response__pb2 = truffle_dot_os_dot_task__pb2.truffle.os.task_user_response_pb2 -try: - truffle_dot_os_dot_task__step__pb2 = truffle_dot_os_dot_task__pb2.truffle_dot_os_dot_task__step__pb2 -except AttributeError: - truffle_dot_os_dot_task__step__pb2 = truffle_dot_os_dot_task__pb2.truffle.os.task_step_pb2 -try: - truffle_dot_common_dot_content__pb2 = truffle_dot_os_dot_task__pb2.truffle_dot_common_dot_content__pb2 -except AttributeError: - truffle_dot_common_dot_content__pb2 = truffle_dot_os_dot_task__pb2.truffle.common.content_pb2 -from truffle.os import task_user_response_pb2 as truffle_dot_os_dot_task__user__response__pb2 -from truffle.os import task_actions_pb2 as truffle_dot_os_dot_task__actions__pb2 -try: - truffle_dot_os_dot_task__pb2 = truffle_dot_os_dot_task__actions__pb2.truffle_dot_os_dot_task__pb2 -except 
AttributeError: - truffle_dot_os_dot_task__pb2 = truffle_dot_os_dot_task__actions__pb2.truffle.os.task_pb2 -try: - truffle_dot_os_dot_task__info__pb2 = truffle_dot_os_dot_task__actions__pb2.truffle_dot_os_dot_task__info__pb2 -except AttributeError: - truffle_dot_os_dot_task__info__pb2 = truffle_dot_os_dot_task__actions__pb2.truffle.os.task_info_pb2 -try: - truffle_dot_os_dot_task__user__response__pb2 = truffle_dot_os_dot_task__actions__pb2.truffle_dot_os_dot_task__user__response__pb2 -except AttributeError: - truffle_dot_os_dot_task__user__response__pb2 = truffle_dot_os_dot_task__actions__pb2.truffle.os.task_user_response_pb2 -try: - truffle_dot_os_dot_task__step__pb2 = truffle_dot_os_dot_task__actions__pb2.truffle_dot_os_dot_task__step__pb2 -except AttributeError: - truffle_dot_os_dot_task__step__pb2 = truffle_dot_os_dot_task__actions__pb2.truffle.os.task_step_pb2 -try: - truffle_dot_common_dot_content__pb2 = truffle_dot_os_dot_task__actions__pb2.truffle_dot_common_dot_content__pb2 -except AttributeError: - truffle_dot_common_dot_content__pb2 = truffle_dot_os_dot_task__actions__pb2.truffle.common.content_pb2 -try: - truffle_dot_os_dot_task__target__pb2 = truffle_dot_os_dot_task__actions__pb2.truffle_dot_os_dot_task__target__pb2 -except AttributeError: - truffle_dot_os_dot_task__target__pb2 = truffle_dot_os_dot_task__actions__pb2.truffle.os.task_target_pb2 -try: - truffle_dot_os_dot_task__options__pb2 = truffle_dot_os_dot_task__actions__pb2.truffle_dot_os_dot_task__options__pb2 -except AttributeError: - truffle_dot_os_dot_task__options__pb2 = truffle_dot_os_dot_task__actions__pb2.truffle.os.task_options_pb2 -try: - truffle_dot_os_dot_task__user__response__pb2 = truffle_dot_os_dot_task__actions__pb2.truffle_dot_os_dot_task__user__response__pb2 -except AttributeError: - truffle_dot_os_dot_task__user__response__pb2 = truffle_dot_os_dot_task__actions__pb2.truffle.os.task_user_response_pb2 -try: - truffle_dot_common_dot_tool__provider__pb2 = 
truffle_dot_os_dot_task__actions__pb2.truffle_dot_common_dot_tool__provider__pb2 -except AttributeError: - truffle_dot_common_dot_tool__provider__pb2 = truffle_dot_os_dot_task__actions__pb2.truffle.common.tool_provider_pb2 -from truffle.os import task_step_pb2 as truffle_dot_os_dot_task__step__pb2 -try: - truffle_dot_common_dot_content__pb2 = truffle_dot_os_dot_task__step__pb2.truffle_dot_common_dot_content__pb2 -except AttributeError: - truffle_dot_common_dot_content__pb2 = truffle_dot_os_dot_task__step__pb2.truffle.common.content_pb2 -from truffle.infer.convo import conversation_pb2 as truffle_dot_infer_dot_convo_dot_conversation__pb2 -try: - truffle_dot_infer_dot_convo_dot_msg__pb2 = truffle_dot_infer_dot_convo_dot_conversation__pb2.truffle_dot_infer_dot_convo_dot_msg__pb2 -except AttributeError: - truffle_dot_infer_dot_convo_dot_msg__pb2 = truffle_dot_infer_dot_convo_dot_conversation__pb2.truffle.infer.convo.msg_pb2 -from truffle.app import background_feed_pb2 as truffle_dot_app_dot_background__feed__pb2 -from truffle.common import file_pb2 as truffle_dot_common_dot_file__pb2 -from truffle.os import task_options_pb2 as truffle_dot_os_dot_task__options__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1etruffle/app/task_runtime.proto\x12\x0btruffle.app\x1a\x15truffle/os/task.proto\x1a#truffle/os/task_user_response.proto\x1a\x1dtruffle/os/task_actions.proto\x1a\x1atruffle/os/task_step.proto\x1a&truffle/infer/convo/conversation.proto\x1a!truffle/app/background_feed.proto\x1a\x19truffle/common/file.proto\x1a\x1dtruffle/os/task_options.proto\"\xad\x01\n\rToolsProvider\x12:\n\nmcp_server\x18\x01 \x01(\x0b\x32$.truffle.app.ToolsProvider.MCPServerH\x00\x1aT\n\tMCPServer\x12\x0c\n\x04uuid\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x64ress\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\r\x12\x11\n\x04path\x18\x04 \x01(\tH\x00\x88\x01\x01\x42\x07\n\x05_pathB\n\n\x08provider\"K\n\x16\x41\x64\x64ToolProviderRequest\x12\x31\n\rtool_provider\x18\x01 
\x01(\x0b\x32\x1a.truffle.app.ToolsProvider\"U\n\x17\x41\x64\x64ToolProviderResponse\x12:\n\x16\x63urrent_tool_providers\x18\x01 \x03(\x0b\x32\x1a.truffle.app.ToolsProvider\"2\n\x19RemoveToolProviderRequest\x12\x15\n\rprovider_uuid\x18\x01 \x01(\t\"X\n\x1aRemoveToolProviderResponse\x12:\n\x16\x63urrent_tool_providers\x18\x01 \x03(\x0b\x32\x1a.truffle.app.ToolsProvider\"h\n\x11TaskContextUpdate\x12\x37\n\x0clatest_convo\x18\x01 \x01(\x0b\x32!.truffle.infer.convo.Conversation\x12\x1a\n\x12\x61ssociated_node_id\x18\x02 \x01(\x05\"\xd5\x01\n\x07NewTask\x12-\n\x0cuser_message\x18\x01 \x01(\x0b\x32\x17.truffle.os.UserMessage\x12:\n\x0e\x61ttached_files\x18\x02 \x03(\x0b\x32\".truffle.common.AttachedFileIntent\x12\x45\n\x15\x61ttached_feed_entries\x18\x03 \x01(\x0b\x32!.truffle.app.FeedEntryTaskContextH\x00\x88\x01\x01\x42\x18\n\x16_attached_feed_entries\"b\n\x08PrevTask\x12\x1e\n\x04task\x18\x01 \x01(\x0b\x32\x10.truffle.os.Task\x12\x36\n\x0elatest_context\x18\x02 \x01(\x0b\x32\x1e.truffle.app.TaskContextUpdate\"\xd5\x01\n\x10StartTaskRequest\x12(\n\x08new_task\x18\x01 \x01(\x0b\x32\x14.truffle.app.NewTaskH\x00\x12*\n\tprev_task\x18\x02 \x01(\x0b\x32\x15.truffle.app.PrevTaskH\x00\x12(\n\x07options\x18\x03 \x01(\x0b\x32\x17.truffle.os.TaskOptions\x12\x32\n\x0etool_providers\x18\x04 \x03(\x0b\x32\x1a.truffle.app.ToolsProviderB\r\n\x0btask_source\"\x87\x01\n\x10TaskRuntimeError\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x14\n\x07\x64\x65tails\x18\x02 \x01(\tH\x00\x88\x01\x01\x12%\n\x18\x61ssociated_provider_uuid\x18\x03 \x01(\tH\x01\x88\x01\x01\x42\n\n\x08_detailsB\x1b\n\x19_associated_provider_uuid\"\xc4\x01\n\x11TaskRuntimeUpdate\x12\x33\n\x0btask_update\x18\x01 \x01(\x0b\x32\x1c.truffle.os.TaskStreamUpdateH\x00\x12\x36\n\rruntime_error\x18\x02 \x01(\x0b\x32\x1d.truffle.app.TaskRuntimeErrorH\x00\x12\x38\n\x0e\x63ontext_update\x18\x03 
\x01(\x0b\x32\x1e.truffle.app.TaskContextUpdateH\x00\x42\x08\n\x06update2\xd3\x03\n\x12TaskRuntimeService\x12K\n\x08OpenTask\x12\x1d.truffle.app.StartTaskRequest\x1a\x1e.truffle.app.TaskRuntimeUpdate0\x01\x12S\n\x0fHandleInterrupt\x12 .truffle.os.InterruptTaskRequest\x1a\x1e.truffle.os.TaskActionResponse\x12V\n\x12HandleUserResponse\x12 .truffle.os.RespondToTaskRequest\x1a\x1e.truffle.os.TaskActionResponse\x12\\\n\x0f\x41\x64\x64ToolProvider\x12#.truffle.app.AddToolProviderRequest\x1a$.truffle.app.AddToolProviderResponse\x12\x65\n\x12RemoveToolProvider\x12&.truffle.app.RemoveToolProviderRequest\x1a\'.truffle.app.RemoveToolProviderResponseb\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.app.task_runtime_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_TOOLSPROVIDER']._serialized_start=300 - _globals['_TOOLSPROVIDER']._serialized_end=473 - _globals['_TOOLSPROVIDER_MCPSERVER']._serialized_start=377 - _globals['_TOOLSPROVIDER_MCPSERVER']._serialized_end=461 - _globals['_ADDTOOLPROVIDERREQUEST']._serialized_start=475 - _globals['_ADDTOOLPROVIDERREQUEST']._serialized_end=550 - _globals['_ADDTOOLPROVIDERRESPONSE']._serialized_start=552 - _globals['_ADDTOOLPROVIDERRESPONSE']._serialized_end=637 - _globals['_REMOVETOOLPROVIDERREQUEST']._serialized_start=639 - _globals['_REMOVETOOLPROVIDERREQUEST']._serialized_end=689 - _globals['_REMOVETOOLPROVIDERRESPONSE']._serialized_start=691 - _globals['_REMOVETOOLPROVIDERRESPONSE']._serialized_end=779 - _globals['_TASKCONTEXTUPDATE']._serialized_start=781 - _globals['_TASKCONTEXTUPDATE']._serialized_end=885 - _globals['_NEWTASK']._serialized_start=888 - _globals['_NEWTASK']._serialized_end=1101 - _globals['_PREVTASK']._serialized_start=1103 - _globals['_PREVTASK']._serialized_end=1201 - _globals['_STARTTASKREQUEST']._serialized_start=1204 - 
_globals['_STARTTASKREQUEST']._serialized_end=1417 - _globals['_TASKRUNTIMEERROR']._serialized_start=1420 - _globals['_TASKRUNTIMEERROR']._serialized_end=1555 - _globals['_TASKRUNTIMEUPDATE']._serialized_start=1558 - _globals['_TASKRUNTIMEUPDATE']._serialized_end=1754 - _globals['_TASKRUNTIMESERVICE']._serialized_start=1757 - _globals['_TASKRUNTIMESERVICE']._serialized_end=2224 -# @@protoc_insertion_point(module_scope) diff --git a/truffle/app/task_runtime_pb2.pyi b/truffle/app/task_runtime_pb2.pyi deleted file mode 100644 index dce11c1..0000000 --- a/truffle/app/task_runtime_pb2.pyi +++ /dev/null @@ -1,124 +0,0 @@ -from truffle.os import task_pb2 as _task_pb2 -from truffle.os import task_info_pb2 as _task_info_pb2 -from truffle.os import task_user_response_pb2 as _task_user_response_pb2 -from truffle.os import task_step_pb2 as _task_step_pb2 -from truffle.os import task_user_response_pb2 as _task_user_response_pb2_1 -from truffle.os import task_actions_pb2 as _task_actions_pb2 -from truffle.os import task_pb2 as _task_pb2_1 -from truffle.os import task_target_pb2 as _task_target_pb2 -from truffle.os import task_options_pb2 as _task_options_pb2 -from truffle.os import task_user_response_pb2 as _task_user_response_pb2_1_1 -from truffle.common import tool_provider_pb2 as _tool_provider_pb2 -from truffle.os import task_step_pb2 as _task_step_pb2_1 -from truffle.common import content_pb2 as _content_pb2 -from truffle.infer.convo import conversation_pb2 as _conversation_pb2 -from truffle.infer.convo import msg_pb2 as _msg_pb2 -from truffle.app import background_feed_pb2 as _background_feed_pb2 -from truffle.common import file_pb2 as _file_pb2 -from truffle.os import task_options_pb2 as _task_options_pb2_1 -from google.protobuf.internal import containers as _containers -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from collections.abc import Iterable as _Iterable, Mapping as _Mapping -from typing import ClassVar 
as _ClassVar, Optional as _Optional, Union as _Union - -DESCRIPTOR: _descriptor.FileDescriptor - -class ToolsProvider(_message.Message): - __slots__ = ("mcp_server",) - class MCPServer(_message.Message): - __slots__ = ("uuid", "address", "port", "path") - UUID_FIELD_NUMBER: _ClassVar[int] - ADDRESS_FIELD_NUMBER: _ClassVar[int] - PORT_FIELD_NUMBER: _ClassVar[int] - PATH_FIELD_NUMBER: _ClassVar[int] - uuid: str - address: str - port: int - path: str - def __init__(self, uuid: _Optional[str] = ..., address: _Optional[str] = ..., port: _Optional[int] = ..., path: _Optional[str] = ...) -> None: ... - MCP_SERVER_FIELD_NUMBER: _ClassVar[int] - mcp_server: ToolsProvider.MCPServer - def __init__(self, mcp_server: _Optional[_Union[ToolsProvider.MCPServer, _Mapping]] = ...) -> None: ... - -class AddToolProviderRequest(_message.Message): - __slots__ = ("tool_provider",) - TOOL_PROVIDER_FIELD_NUMBER: _ClassVar[int] - tool_provider: ToolsProvider - def __init__(self, tool_provider: _Optional[_Union[ToolsProvider, _Mapping]] = ...) -> None: ... - -class AddToolProviderResponse(_message.Message): - __slots__ = ("current_tool_providers",) - CURRENT_TOOL_PROVIDERS_FIELD_NUMBER: _ClassVar[int] - current_tool_providers: _containers.RepeatedCompositeFieldContainer[ToolsProvider] - def __init__(self, current_tool_providers: _Optional[_Iterable[_Union[ToolsProvider, _Mapping]]] = ...) -> None: ... - -class RemoveToolProviderRequest(_message.Message): - __slots__ = ("provider_uuid",) - PROVIDER_UUID_FIELD_NUMBER: _ClassVar[int] - provider_uuid: str - def __init__(self, provider_uuid: _Optional[str] = ...) -> None: ... - -class RemoveToolProviderResponse(_message.Message): - __slots__ = ("current_tool_providers",) - CURRENT_TOOL_PROVIDERS_FIELD_NUMBER: _ClassVar[int] - current_tool_providers: _containers.RepeatedCompositeFieldContainer[ToolsProvider] - def __init__(self, current_tool_providers: _Optional[_Iterable[_Union[ToolsProvider, _Mapping]]] = ...) -> None: ... 
- -class TaskContextUpdate(_message.Message): - __slots__ = ("latest_convo", "associated_node_id") - LATEST_CONVO_FIELD_NUMBER: _ClassVar[int] - ASSOCIATED_NODE_ID_FIELD_NUMBER: _ClassVar[int] - latest_convo: _conversation_pb2.Conversation - associated_node_id: int - def __init__(self, latest_convo: _Optional[_Union[_conversation_pb2.Conversation, _Mapping]] = ..., associated_node_id: _Optional[int] = ...) -> None: ... - -class NewTask(_message.Message): - __slots__ = ("user_message", "attached_files", "attached_feed_entries") - USER_MESSAGE_FIELD_NUMBER: _ClassVar[int] - ATTACHED_FILES_FIELD_NUMBER: _ClassVar[int] - ATTACHED_FEED_ENTRIES_FIELD_NUMBER: _ClassVar[int] - user_message: _task_user_response_pb2_1_1.UserMessage - attached_files: _containers.RepeatedCompositeFieldContainer[_file_pb2.AttachedFileIntent] - attached_feed_entries: _background_feed_pb2.FeedEntryTaskContext - def __init__(self, user_message: _Optional[_Union[_task_user_response_pb2_1_1.UserMessage, _Mapping]] = ..., attached_files: _Optional[_Iterable[_Union[_file_pb2.AttachedFileIntent, _Mapping]]] = ..., attached_feed_entries: _Optional[_Union[_background_feed_pb2.FeedEntryTaskContext, _Mapping]] = ...) -> None: ... - -class PrevTask(_message.Message): - __slots__ = ("task", "latest_context") - TASK_FIELD_NUMBER: _ClassVar[int] - LATEST_CONTEXT_FIELD_NUMBER: _ClassVar[int] - task: _task_pb2_1.Task - latest_context: TaskContextUpdate - def __init__(self, task: _Optional[_Union[_task_pb2_1.Task, _Mapping]] = ..., latest_context: _Optional[_Union[TaskContextUpdate, _Mapping]] = ...) -> None: ... 
- -class StartTaskRequest(_message.Message): - __slots__ = ("new_task", "prev_task", "options", "tool_providers") - NEW_TASK_FIELD_NUMBER: _ClassVar[int] - PREV_TASK_FIELD_NUMBER: _ClassVar[int] - OPTIONS_FIELD_NUMBER: _ClassVar[int] - TOOL_PROVIDERS_FIELD_NUMBER: _ClassVar[int] - new_task: NewTask - prev_task: PrevTask - options: _task_options_pb2_1.TaskOptions - tool_providers: _containers.RepeatedCompositeFieldContainer[ToolsProvider] - def __init__(self, new_task: _Optional[_Union[NewTask, _Mapping]] = ..., prev_task: _Optional[_Union[PrevTask, _Mapping]] = ..., options: _Optional[_Union[_task_options_pb2_1.TaskOptions, _Mapping]] = ..., tool_providers: _Optional[_Iterable[_Union[ToolsProvider, _Mapping]]] = ...) -> None: ... - -class TaskRuntimeError(_message.Message): - __slots__ = ("error", "details", "associated_provider_uuid") - ERROR_FIELD_NUMBER: _ClassVar[int] - DETAILS_FIELD_NUMBER: _ClassVar[int] - ASSOCIATED_PROVIDER_UUID_FIELD_NUMBER: _ClassVar[int] - error: str - details: str - associated_provider_uuid: str - def __init__(self, error: _Optional[str] = ..., details: _Optional[str] = ..., associated_provider_uuid: _Optional[str] = ...) -> None: ... - -class TaskRuntimeUpdate(_message.Message): - __slots__ = ("task_update", "runtime_error", "context_update") - TASK_UPDATE_FIELD_NUMBER: _ClassVar[int] - RUNTIME_ERROR_FIELD_NUMBER: _ClassVar[int] - CONTEXT_UPDATE_FIELD_NUMBER: _ClassVar[int] - task_update: _task_pb2_1.TaskStreamUpdate - runtime_error: TaskRuntimeError - context_update: TaskContextUpdate - def __init__(self, task_update: _Optional[_Union[_task_pb2_1.TaskStreamUpdate, _Mapping]] = ..., runtime_error: _Optional[_Union[TaskRuntimeError, _Mapping]] = ..., context_update: _Optional[_Union[TaskContextUpdate, _Mapping]] = ...) -> None: ... 
diff --git a/truffle/app/task_runtime_pb2_grpc.py b/truffle/app/task_runtime_pb2_grpc.py deleted file mode 100644 index 0fe0e2b..0000000 --- a/truffle/app/task_runtime_pb2_grpc.py +++ /dev/null @@ -1,274 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc -import warnings - -from truffle.app import task_runtime_pb2 as truffle_dot_app_dot_task__runtime__pb2 -from truffle.os import task_actions_pb2 as truffle_dot_os_dot_task__actions__pb2 -from truffle.os import task_user_response_pb2 as truffle_dot_os_dot_task__user__response__pb2 - -GRPC_GENERATED_VERSION = '1.72.0' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/app/task_runtime_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - ) - - -class TaskRuntimeServiceStub(object): - """task == foreground app, bear with me - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.OpenTask = channel.unary_stream( - '/truffle.app.TaskRuntimeService/OpenTask', - request_serializer=truffle_dot_app_dot_task__runtime__pb2.StartTaskRequest.SerializeToString, - response_deserializer=truffle_dot_app_dot_task__runtime__pb2.TaskRuntimeUpdate.FromString, - _registered_method=True) - self.HandleInterrupt = channel.unary_unary( - '/truffle.app.TaskRuntimeService/HandleInterrupt', - request_serializer=truffle_dot_os_dot_task__actions__pb2.InterruptTaskRequest.SerializeToString, - response_deserializer=truffle_dot_os_dot_task__actions__pb2.TaskActionResponse.FromString, - _registered_method=True) - self.HandleUserResponse = channel.unary_unary( - '/truffle.app.TaskRuntimeService/HandleUserResponse', - request_serializer=truffle_dot_os_dot_task__user__response__pb2.RespondToTaskRequest.SerializeToString, - response_deserializer=truffle_dot_os_dot_task__actions__pb2.TaskActionResponse.FromString, - _registered_method=True) - self.AddToolProvider = channel.unary_unary( - '/truffle.app.TaskRuntimeService/AddToolProvider', - request_serializer=truffle_dot_app_dot_task__runtime__pb2.AddToolProviderRequest.SerializeToString, - response_deserializer=truffle_dot_app_dot_task__runtime__pb2.AddToolProviderResponse.FromString, - _registered_method=True) - self.RemoveToolProvider = channel.unary_unary( - '/truffle.app.TaskRuntimeService/RemoveToolProvider', - request_serializer=truffle_dot_app_dot_task__runtime__pb2.RemoveToolProviderRequest.SerializeToString, - response_deserializer=truffle_dot_app_dot_task__runtime__pb2.RemoveToolProviderResponse.FromString, - _registered_method=True) - - -class TaskRuntimeServiceServicer(object): - """task == foreground app, bear with me - """ - - def OpenTask(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def 
HandleInterrupt(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def HandleUserResponse(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def AddToolProvider(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def RemoveToolProvider(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_TaskRuntimeServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - 'OpenTask': grpc.unary_stream_rpc_method_handler( - servicer.OpenTask, - request_deserializer=truffle_dot_app_dot_task__runtime__pb2.StartTaskRequest.FromString, - response_serializer=truffle_dot_app_dot_task__runtime__pb2.TaskRuntimeUpdate.SerializeToString, - ), - 'HandleInterrupt': grpc.unary_unary_rpc_method_handler( - servicer.HandleInterrupt, - request_deserializer=truffle_dot_os_dot_task__actions__pb2.InterruptTaskRequest.FromString, - response_serializer=truffle_dot_os_dot_task__actions__pb2.TaskActionResponse.SerializeToString, - ), - 'HandleUserResponse': grpc.unary_unary_rpc_method_handler( - servicer.HandleUserResponse, - request_deserializer=truffle_dot_os_dot_task__user__response__pb2.RespondToTaskRequest.FromString, - 
response_serializer=truffle_dot_os_dot_task__actions__pb2.TaskActionResponse.SerializeToString, - ), - 'AddToolProvider': grpc.unary_unary_rpc_method_handler( - servicer.AddToolProvider, - request_deserializer=truffle_dot_app_dot_task__runtime__pb2.AddToolProviderRequest.FromString, - response_serializer=truffle_dot_app_dot_task__runtime__pb2.AddToolProviderResponse.SerializeToString, - ), - 'RemoveToolProvider': grpc.unary_unary_rpc_method_handler( - servicer.RemoveToolProvider, - request_deserializer=truffle_dot_app_dot_task__runtime__pb2.RemoveToolProviderRequest.FromString, - response_serializer=truffle_dot_app_dot_task__runtime__pb2.RemoveToolProviderResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'truffle.app.TaskRuntimeService', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) - server.add_registered_method_handlers('truffle.app.TaskRuntimeService', rpc_method_handlers) - - - # This class is part of an EXPERIMENTAL API. 
-class TaskRuntimeService(object): - """task == foreground app, bear with me - """ - - @staticmethod - def OpenTask(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_stream( - request, - target, - '/truffle.app.TaskRuntimeService/OpenTask', - truffle_dot_app_dot_task__runtime__pb2.StartTaskRequest.SerializeToString, - truffle_dot_app_dot_task__runtime__pb2.TaskRuntimeUpdate.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def HandleInterrupt(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/truffle.app.TaskRuntimeService/HandleInterrupt', - truffle_dot_os_dot_task__actions__pb2.InterruptTaskRequest.SerializeToString, - truffle_dot_os_dot_task__actions__pb2.TaskActionResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def HandleUserResponse(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/truffle.app.TaskRuntimeService/HandleUserResponse', - truffle_dot_os_dot_task__user__response__pb2.RespondToTaskRequest.SerializeToString, - truffle_dot_os_dot_task__actions__pb2.TaskActionResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - 
@staticmethod - def AddToolProvider(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/truffle.app.TaskRuntimeService/AddToolProvider', - truffle_dot_app_dot_task__runtime__pb2.AddToolProviderRequest.SerializeToString, - truffle_dot_app_dot_task__runtime__pb2.AddToolProviderResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def RemoveToolProvider(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/truffle.app.TaskRuntimeService/RemoveToolProvider', - truffle_dot_app_dot_task__runtime__pb2.RemoveToolProviderRequest.SerializeToString, - truffle_dot_app_dot_task__runtime__pb2.RemoveToolProviderResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) diff --git a/truffle/common/content_pb2.py b/truffle/common/content_pb2.py index 2eeffb8..075e214 100644 --- a/truffle/common/content_pb2.py +++ b/truffle/common/content_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/common/content.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/common/content.proto' ) diff --git a/truffle/common/content_pb2_grpc.py b/truffle/common/content_pb2_grpc.py index 308017e..9f80a64 100644 --- a/truffle/common/content_pb2_grpc.py +++ b/truffle/common/content_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/common/content_pb2_grpc.py depends on' + + ' but the generated code in truffle/common/content_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/common/file_pb2.py b/truffle/common/file_pb2.py index 604fc59..6d6ee4f 100644 --- a/truffle/common/file_pb2.py +++ b/truffle/common/file_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/common/file.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/common/file.proto' ) diff --git a/truffle/common/file_pb2_grpc.py b/truffle/common/file_pb2_grpc.py index d037a40..bfc7eb8 100644 --- a/truffle/common/file_pb2_grpc.py +++ b/truffle/common/file_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/common/file_pb2_grpc.py depends on' + + ' but the generated code in truffle/common/file_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/common/icon_pb2.py b/truffle/common/icon_pb2.py index 25b47f3..2318742 100644 --- a/truffle/common/icon_pb2.py +++ b/truffle/common/icon_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/common/icon.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/common/icon.proto' ) diff --git a/truffle/common/icon_pb2_grpc.py b/truffle/common/icon_pb2_grpc.py index 7c5c6b0..2d156fd 100644 --- a/truffle/common/icon_pb2_grpc.py +++ b/truffle/common/icon_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/common/icon_pb2_grpc.py depends on' + + ' but the generated code in truffle/common/icon_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/common/led_states_pb2.py b/truffle/common/led_states_pb2.py deleted file mode 100644 index 1ebad80..0000000 --- a/truffle/common/led_states_pb2.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE -# source: truffle/common/led_states.proto -# Protobuf Python Version: 6.30.0 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 6, - 30, - 0, - '', - 'truffle/common/led_states.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1ftruffle/common/led_states.proto\x12\x0etruffle.common\"C\n\tLedStatus\x12\'\n\x05state\x18\x01 \x01(\x0e\x32\x18.truffle.common.LedState\x12\r\n\x05\x63olor\x18\x02 \x01(\r*\xfd\x02\n\x08LedState\x12\x15\n\x11LED_STATE_INVALID\x10\x00\x12\x16\n\x12LED_STATE_DISABLED\x10\x00\x12\x11\n\rLED_STATE_OFF\x10\x01\x12\x15\n\x11LED_STATE_STARTUP\x10\x02\x12\x1e\n\x1aLED_STATE_READY_TO_CONNECT\x10\x03\x12\x18\n\x14LED_STATE_CONNECTING\x10\x04\x12\x17\n\x13LED_STATE_CONNECTED\x10\x05\x12\x13\n\x0fLED_STATE_ERROR\x10\x06\x12\x17\n\x13LED_STATE_REASONING\x10\x07\x12\x12\n\x0eLED_STATE_IDLE\x10\x08\x12\x14\n\x10LED_STATE_TYPING\x10\t\x12\x1d\n\x19LED_STATE_RESPOND_TO_USER\x10\n\x12\x15\n\x11LED_STATE_ONBOARD\x10\x0b\x12\x19\n\x15LED_STATE_ONCE_ACTION\x10\x0c\x12\x18\n\x14LED_STATE_ONCE_FLAIR\x10\r\x1a\x02\x10\x01\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.common.led_states_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_LEDSTATE']._loaded_options = None - _globals['_LEDSTATE']._serialized_options = b'\020\001' - _globals['_LEDSTATE']._serialized_start=121 - 
_globals['_LEDSTATE']._serialized_end=502 - _globals['_LEDSTATUS']._serialized_start=51 - _globals['_LEDSTATUS']._serialized_end=118 -# @@protoc_insertion_point(module_scope) diff --git a/truffle/common/led_states_pb2.pyi b/truffle/common/led_states_pb2.pyi deleted file mode 100644 index 3e2f122..0000000 --- a/truffle/common/led_states_pb2.pyi +++ /dev/null @@ -1,47 +0,0 @@ -from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union - -DESCRIPTOR: _descriptor.FileDescriptor - -class LedState(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - LED_STATE_INVALID: _ClassVar[LedState] - LED_STATE_DISABLED: _ClassVar[LedState] - LED_STATE_OFF: _ClassVar[LedState] - LED_STATE_STARTUP: _ClassVar[LedState] - LED_STATE_READY_TO_CONNECT: _ClassVar[LedState] - LED_STATE_CONNECTING: _ClassVar[LedState] - LED_STATE_CONNECTED: _ClassVar[LedState] - LED_STATE_ERROR: _ClassVar[LedState] - LED_STATE_REASONING: _ClassVar[LedState] - LED_STATE_IDLE: _ClassVar[LedState] - LED_STATE_TYPING: _ClassVar[LedState] - LED_STATE_RESPOND_TO_USER: _ClassVar[LedState] - LED_STATE_ONBOARD: _ClassVar[LedState] - LED_STATE_ONCE_ACTION: _ClassVar[LedState] - LED_STATE_ONCE_FLAIR: _ClassVar[LedState] -LED_STATE_INVALID: LedState -LED_STATE_DISABLED: LedState -LED_STATE_OFF: LedState -LED_STATE_STARTUP: LedState -LED_STATE_READY_TO_CONNECT: LedState -LED_STATE_CONNECTING: LedState -LED_STATE_CONNECTED: LedState -LED_STATE_ERROR: LedState -LED_STATE_REASONING: LedState -LED_STATE_IDLE: LedState -LED_STATE_TYPING: LedState -LED_STATE_RESPOND_TO_USER: LedState -LED_STATE_ONBOARD: LedState -LED_STATE_ONCE_ACTION: LedState -LED_STATE_ONCE_FLAIR: LedState - -class LedStatus(_message.Message): - __slots__ = ("state", "color") - STATE_FIELD_NUMBER: _ClassVar[int] - COLOR_FIELD_NUMBER: 
_ClassVar[int] - state: LedState - color: int - def __init__(self, state: _Optional[_Union[LedState, str]] = ..., color: _Optional[int] = ...) -> None: ... diff --git a/truffle/common/led_states_pb2_grpc.py b/truffle/common/led_states_pb2_grpc.py deleted file mode 100644 index 276c1ac..0000000 --- a/truffle/common/led_states_pb2_grpc.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc -import warnings - - -GRPC_GENERATED_VERSION = '1.72.0' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/common/led_states_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - ) diff --git a/truffle/common/tool_provider_pb2.py b/truffle/common/tool_provider_pb2.py index 024bb84..a96461b 100644 --- a/truffle/common/tool_provider_pb2.py +++ b/truffle/common/tool_provider_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/common/tool_provider.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/common/tool_provider.proto' ) diff --git a/truffle/common/tool_provider_pb2_grpc.py b/truffle/common/tool_provider_pb2_grpc.py index 1d2335c..a578d03 100644 --- a/truffle/common/tool_provider_pb2_grpc.py +++ b/truffle/common/tool_provider_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/common/tool_provider_pb2_grpc.py depends on' + + ' but the generated code in truffle/common/tool_provider_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/infer/__init__.py b/truffle/infer/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/truffle/infer/convo/__init__.py b/truffle/infer/convo/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/truffle/infer/convo/conversation_pb2.py b/truffle/infer/convo/conversation_pb2.py deleted file mode 100644 index 5ba3cc2..0000000 --- a/truffle/infer/convo/conversation_pb2.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE -# source: truffle/infer/convo/conversation.proto -# Protobuf Python Version: 6.30.0 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 6, - 30, - 0, - '', - 'truffle/infer/convo/conversation.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from truffle.infer.convo import msg_pb2 as truffle_dot_infer_dot_convo_dot_msg__pb2 - -from truffle.infer.convo.msg_pb2 import * - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&truffle/infer/convo/conversation.proto\x12\x13truffle.infer.convo\x1a\x1dtruffle/infer/convo/msg.proto\"R\n\x0c\x43onversation\x12.\n\x08messages\x18\x01 \x03(\x0b\x32\x1c.truffle.infer.convo.Message\x12\x12\n\nmodel_uuid\x18\x03 \x01(\t\"3\n\x0c\x42uiltContext\x12\x0f\n\x07\x63ontext\x18\x01 \x01(\t\x12\x12\n\nmodel_uuid\x18\x02 \x01(\tP\x00\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.infer.convo.conversation_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_CONVERSATION']._serialized_start=94 - _globals['_CONVERSATION']._serialized_end=176 - _globals['_BUILTCONTEXT']._serialized_start=178 - _globals['_BUILTCONTEXT']._serialized_end=229 -# @@protoc_insertion_point(module_scope) diff --git a/truffle/infer/convo/conversation_pb2.pyi b/truffle/infer/convo/conversation_pb2.pyi deleted file mode 100644 index 8366e78..0000000 --- a/truffle/infer/convo/conversation_pb2.pyi +++ /dev/null @@ -1,25 +0,0 @@ -from 
truffle.infer.convo import msg_pb2 as _msg_pb2 -from google.protobuf.internal import containers as _containers -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from collections.abc import Iterable as _Iterable, Mapping as _Mapping -from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union -from truffle.infer.convo.msg_pb2 import Message as Message - -DESCRIPTOR: _descriptor.FileDescriptor - -class Conversation(_message.Message): - __slots__ = ("messages", "model_uuid") - MESSAGES_FIELD_NUMBER: _ClassVar[int] - MODEL_UUID_FIELD_NUMBER: _ClassVar[int] - messages: _containers.RepeatedCompositeFieldContainer[_msg_pb2.Message] - model_uuid: str - def __init__(self, messages: _Optional[_Iterable[_Union[_msg_pb2.Message, _Mapping]]] = ..., model_uuid: _Optional[str] = ...) -> None: ... - -class BuiltContext(_message.Message): - __slots__ = ("context", "model_uuid") - CONTEXT_FIELD_NUMBER: _ClassVar[int] - MODEL_UUID_FIELD_NUMBER: _ClassVar[int] - context: str - model_uuid: str - def __init__(self, context: _Optional[str] = ..., model_uuid: _Optional[str] = ...) -> None: ... diff --git a/truffle/infer/convo/conversation_pb2_grpc.py b/truffle/infer/convo/conversation_pb2_grpc.py deleted file mode 100644 index e3a3b17..0000000 --- a/truffle/infer/convo/conversation_pb2_grpc.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-"""Client and server classes corresponding to protobuf-defined services.""" -import grpc -import warnings - - -GRPC_GENERATED_VERSION = '1.72.0' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/infer/convo/conversation_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - ) diff --git a/truffle/infer/convo/msg_pb2.py b/truffle/infer/convo/msg_pb2.py deleted file mode 100644 index 2ec02cd..0000000 --- a/truffle/infer/convo/msg_pb2.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE -# source: truffle/infer/convo/msg.proto -# Protobuf Python Version: 6.30.0 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 6, - 30, - 0, - '', - 'truffle/infer/convo/msg.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1dtruffle/infer/convo/msg.proto\x12\x13truffle.infer.convo\"\xa8\x01\n\x07Message\x12/\n\x04role\x18\x01 \x01(\x0e\x32!.truffle.infer.convo.Message.Role\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\t\"[\n\x04Role\x12\x10\n\x0cROLE_INVALID\x10\x00\x12\x0f\n\x0bROLE_SYSTEM\x10\x01\x12\r\n\tROLE_USER\x10\x02\x12\x12\n\x0eROLE_ASSISTANT\x10\x03\x12\r\n\tROLE_TOOL\x10\x04\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.infer.convo.msg_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_MESSAGE']._serialized_start=55 - _globals['_MESSAGE']._serialized_end=223 - _globals['_MESSAGE_ROLE']._serialized_start=132 - _globals['_MESSAGE_ROLE']._serialized_end=223 -# @@protoc_insertion_point(module_scope) diff --git a/truffle/infer/convo/msg_pb2.pyi b/truffle/infer/convo/msg_pb2.pyi deleted file mode 100644 index 1594747..0000000 --- a/truffle/infer/convo/msg_pb2.pyi +++ /dev/null @@ -1,26 +0,0 @@ -from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from 
typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union - -DESCRIPTOR: _descriptor.FileDescriptor - -class Message(_message.Message): - __slots__ = ("role", "content") - class Role(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - ROLE_INVALID: _ClassVar[Message.Role] - ROLE_SYSTEM: _ClassVar[Message.Role] - ROLE_USER: _ClassVar[Message.Role] - ROLE_ASSISTANT: _ClassVar[Message.Role] - ROLE_TOOL: _ClassVar[Message.Role] - ROLE_INVALID: Message.Role - ROLE_SYSTEM: Message.Role - ROLE_USER: Message.Role - ROLE_ASSISTANT: Message.Role - ROLE_TOOL: Message.Role - ROLE_FIELD_NUMBER: _ClassVar[int] - CONTENT_FIELD_NUMBER: _ClassVar[int] - role: Message.Role - content: str - def __init__(self, role: _Optional[_Union[Message.Role, str]] = ..., content: _Optional[str] = ...) -> None: ... diff --git a/truffle/infer/convo/msg_pb2_grpc.py b/truffle/infer/convo/msg_pb2_grpc.py deleted file mode 100644 index db9c591..0000000 --- a/truffle/infer/convo/msg_pb2_grpc.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc -import warnings - - -GRPC_GENERATED_VERSION = '1.72.0' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/infer/convo/msg_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' 
- ) diff --git a/truffle/infer/embedding_pb2.py b/truffle/infer/embedding_pb2.py deleted file mode 100644 index 42cda6d..0000000 --- a/truffle/infer/embedding_pb2.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# NO CHECKED-IN PROTOBUF GENCODE -# source: truffle/infer/embedding.proto -# Protobuf Python Version: 6.30.0 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 6, - 30, - 0, - '', - 'truffle/infer/embedding.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1dtruffle/infer/embedding.proto\x12\rtruffle.infer\")\n\nEmbeddable\x12\x0c\n\x04text\x18\x01 \x01(\t\x12\r\n\x05query\x18\x02 \x01(\x08\"V\n\x10\x45mbeddingRequest\x12.\n\x0b\x65mbeddables\x18\x01 \x03(\x0b\x32\x19.truffle.infer.Embeddable\x12\x12\n\nmodel_uuid\x18\x02 \x01(\t\"H\n\x11\x45mbeddingResponse\x12\x12\n\nembeddings\x18\x01 \x03(\x0c\x12\x0b\n\x03\x64im\x18\x02 \x01(\x04\x12\x12\n\ndtype_size\x18\x03 \x01(\x04\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.infer.embedding_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_EMBEDDABLE']._serialized_start=48 - _globals['_EMBEDDABLE']._serialized_end=89 - _globals['_EMBEDDINGREQUEST']._serialized_start=91 - _globals['_EMBEDDINGREQUEST']._serialized_end=177 - _globals['_EMBEDDINGRESPONSE']._serialized_start=179 - 
_globals['_EMBEDDINGRESPONSE']._serialized_end=251 -# @@protoc_insertion_point(module_scope) diff --git a/truffle/infer/embedding_pb2.pyi b/truffle/infer/embedding_pb2.pyi deleted file mode 100644 index 6608a08..0000000 --- a/truffle/infer/embedding_pb2.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from google.protobuf.internal import containers as _containers -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from collections.abc import Iterable as _Iterable, Mapping as _Mapping -from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union - -DESCRIPTOR: _descriptor.FileDescriptor - -class Embeddable(_message.Message): - __slots__ = ("text", "query") - TEXT_FIELD_NUMBER: _ClassVar[int] - QUERY_FIELD_NUMBER: _ClassVar[int] - text: str - query: bool - def __init__(self, text: _Optional[str] = ..., query: bool = ...) -> None: ... - -class EmbeddingRequest(_message.Message): - __slots__ = ("embeddables", "model_uuid") - EMBEDDABLES_FIELD_NUMBER: _ClassVar[int] - MODEL_UUID_FIELD_NUMBER: _ClassVar[int] - embeddables: _containers.RepeatedCompositeFieldContainer[Embeddable] - model_uuid: str - def __init__(self, embeddables: _Optional[_Iterable[_Union[Embeddable, _Mapping]]] = ..., model_uuid: _Optional[str] = ...) -> None: ... - -class EmbeddingResponse(_message.Message): - __slots__ = ("embeddings", "dim", "dtype_size") - EMBEDDINGS_FIELD_NUMBER: _ClassVar[int] - DIM_FIELD_NUMBER: _ClassVar[int] - DTYPE_SIZE_FIELD_NUMBER: _ClassVar[int] - embeddings: _containers.RepeatedScalarFieldContainer[bytes] - dim: int - dtype_size: int - def __init__(self, embeddings: _Optional[_Iterable[bytes]] = ..., dim: _Optional[int] = ..., dtype_size: _Optional[int] = ...) -> None: ... 
diff --git a/truffle/infer/embedding_pb2_grpc.py b/truffle/infer/embedding_pb2_grpc.py deleted file mode 100644 index b32d5d9..0000000 --- a/truffle/infer/embedding_pb2_grpc.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc -import warnings - - -GRPC_GENERATED_VERSION = '1.72.0' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/infer/embedding_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - ) diff --git a/truffle/infer/finishreason_pb2.py b/truffle/infer/finishreason_pb2.py deleted file mode 100644 index ea0eba0..0000000 --- a/truffle/infer/finishreason_pb2.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE -# source: truffle/infer/finishreason.proto -# Protobuf Python Version: 6.30.0 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 6, - 30, - 0, - '', - 'truffle/infer/finishreason.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n truffle/infer/finishreason.proto\x12\rtruffle.infer*\xab\x01\n\x0c\x46inishReason\x12\x16\n\x12\x46INISH_UNSPECIFIED\x10\x00\x12\x0f\n\x0b\x46INISH_STOP\x10\x01\x12\x11\n\rFINISH_LENGTH\x10\x02\x12\x14\n\x10\x46INISH_TOOLCALLS\x10\x03\x12\x10\n\x0c\x46INISH_ERROR\x10\x04\x12\x10\n\x0c\x46INISH_ABORT\x10\x05\x12\x12\n\x0e\x46INISH_UNKNOWN\x10\x06\x12\x11\n\rFINISH_GOAWAY\x10\x07\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.infer.finishreason_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_FINISHREASON']._serialized_start=52 - _globals['_FINISHREASON']._serialized_end=223 -# @@protoc_insertion_point(module_scope) diff --git a/truffle/infer/finishreason_pb2.pyi b/truffle/infer/finishreason_pb2.pyi deleted file mode 100644 index c244ecd..0000000 --- a/truffle/infer/finishreason_pb2.pyi +++ /dev/null @@ -1,24 +0,0 @@ -from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from typing import ClassVar as _ClassVar - -DESCRIPTOR: _descriptor.FileDescriptor - -class FinishReason(int, 
metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - FINISH_UNSPECIFIED: _ClassVar[FinishReason] - FINISH_STOP: _ClassVar[FinishReason] - FINISH_LENGTH: _ClassVar[FinishReason] - FINISH_TOOLCALLS: _ClassVar[FinishReason] - FINISH_ERROR: _ClassVar[FinishReason] - FINISH_ABORT: _ClassVar[FinishReason] - FINISH_UNKNOWN: _ClassVar[FinishReason] - FINISH_GOAWAY: _ClassVar[FinishReason] -FINISH_UNSPECIFIED: FinishReason -FINISH_STOP: FinishReason -FINISH_LENGTH: FinishReason -FINISH_TOOLCALLS: FinishReason -FINISH_ERROR: FinishReason -FINISH_ABORT: FinishReason -FINISH_UNKNOWN: FinishReason -FINISH_GOAWAY: FinishReason diff --git a/truffle/infer/finishreason_pb2_grpc.py b/truffle/infer/finishreason_pb2_grpc.py deleted file mode 100644 index 853a3ef..0000000 --- a/truffle/infer/finishreason_pb2_grpc.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc -import warnings - - -GRPC_GENERATED_VERSION = '1.72.0' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/infer/finishreason_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - ) diff --git a/truffle/infer/gencfg_pb2.py b/truffle/infer/gencfg_pb2.py deleted file mode 100644 index 1218626..0000000 --- a/truffle/infer/gencfg_pb2.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE -# source: truffle/infer/gencfg.proto -# Protobuf Python Version: 6.30.0 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 6, - 30, - 0, - '', - 'truffle/infer/gencfg.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1atruffle/infer/gencfg.proto\x12\rtruffle.infer\"\xcf\x02\n\x0eResponseFormat\x12\x34\n\x06\x66ormat\x18\x01 \x01(\x0e\x32$.truffle.infer.ResponseFormat.Format\x12\x13\n\x06schema\x18\x02 \x01(\tH\x00\x88\x01\x01\x12\x45\n\x0c\x65xperimental\x18\x03 \x01(\x0b\x32*.truffle.infer.ResponseFormat.ExperimentalH\x01\x88\x01\x01\x1aS\n\x0c\x45xperimental\x12\x17\n\x0f\x61\x64\x64itional_ebnf\x18\x01 \x01(\t\x12\x14\n\x0cold_root_key\x18\x02 \x01(\t\x12\x14\n\x0cnew_root_key\x18\x03 \x01(\t\":\n\x06\x46ormat\x12\x08\n\x04TEXT\x10\x00\x12\x08\n\x04JSON\x10\x01\x12\x08\n\x04\x45\x42NF\x10\x02\x12\x12\n\x0eSTRUCTURAL_TAG\x10\x03\x42\t\n\x07_schemaB\x0f\n\r_experimental\"\xda\x03\n\x10GenerationConfig\x12\x11\n\x04temp\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x12\n\x05top_p\x18\x02 \x01(\x01H\x01\x88\x01\x01\x12\x19\n\x0c\x66req_penalty\x18\x03 \x01(\x01H\x02\x88\x01\x01\x12\x19\n\x0cpres_penalty\x18\x04 \x01(\x01H\x03\x88\x01\x01\x12\x18\n\x0brep_penalty\x18\x05 \x01(\x01H\x04\x88\x01\x01\x12\x11\n\x04seed\x18\x06 \x01(\x05H\x05\x88\x01\x01\x12\x17\n\nmax_tokens\x18\x07 \x01(\rH\x06\x88\x01\x01\x12\x11\n\tstop_strs\x18\x08 \x03(\t\x12\x10\n\x08stop_ids\x18\t \x03(\r\x12\x36\n\x0fresponse_format\x18\n 
\x01(\x0b\x32\x1d.truffle.infer.ResponseFormat\x12\x34\n\x05\x64\x65\x62ug\x18\x0b \x01(\x0b\x32%.truffle.infer.GenerationConfig.Debug\x1a\x33\n\x05\x44\x65\x62ug\x12\x12\n\nignore_eos\x18\x01 \x01(\x08\x12\x16\n\x0epinned_context\x18\x02 \x01(\x08\x42\x07\n\x05_tempB\x08\n\x06_top_pB\x0f\n\r_freq_penaltyB\x0f\n\r_pres_penaltyB\x0e\n\x0c_rep_penaltyB\x07\n\x05_seedB\r\n\x0b_max_tokens\"Y\n\x15ValidateConfigRequest\x12,\n\x03\x63\x66g\x18\x01 \x01(\x0b\x32\x1f.truffle.infer.GenerationConfig\x12\x12\n\nmodel_uuid\x18\x02 \x01(\t\"H\n\x16ValidateConfigResponse\x12\r\n\x05valid\x18\x01 \x01(\x08\x12\r\n\x05\x65rror\x18\x02 \x01(\t\x12\x10\n\x08warnings\x18\x03 \x03(\tb\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.infer.gencfg_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_RESPONSEFORMAT']._serialized_start=46 - _globals['_RESPONSEFORMAT']._serialized_end=381 - _globals['_RESPONSEFORMAT_EXPERIMENTAL']._serialized_start=210 - _globals['_RESPONSEFORMAT_EXPERIMENTAL']._serialized_end=293 - _globals['_RESPONSEFORMAT_FORMAT']._serialized_start=295 - _globals['_RESPONSEFORMAT_FORMAT']._serialized_end=353 - _globals['_GENERATIONCONFIG']._serialized_start=384 - _globals['_GENERATIONCONFIG']._serialized_end=858 - _globals['_GENERATIONCONFIG_DEBUG']._serialized_start=714 - _globals['_GENERATIONCONFIG_DEBUG']._serialized_end=765 - _globals['_VALIDATECONFIGREQUEST']._serialized_start=860 - _globals['_VALIDATECONFIGREQUEST']._serialized_end=949 - _globals['_VALIDATECONFIGRESPONSE']._serialized_start=951 - _globals['_VALIDATECONFIGRESPONSE']._serialized_end=1023 -# @@protoc_insertion_point(module_scope) diff --git a/truffle/infer/gencfg_pb2.pyi b/truffle/infer/gencfg_pb2.pyi deleted file mode 100644 index a2a6830..0000000 --- a/truffle/infer/gencfg_pb2.pyi +++ /dev/null @@ -1,88 +0,0 @@ -from 
google.protobuf.internal import containers as _containers -from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from collections.abc import Iterable as _Iterable, Mapping as _Mapping -from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union - -DESCRIPTOR: _descriptor.FileDescriptor - -class ResponseFormat(_message.Message): - __slots__ = ("format", "schema", "experimental") - class Format(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - TEXT: _ClassVar[ResponseFormat.Format] - JSON: _ClassVar[ResponseFormat.Format] - EBNF: _ClassVar[ResponseFormat.Format] - STRUCTURAL_TAG: _ClassVar[ResponseFormat.Format] - TEXT: ResponseFormat.Format - JSON: ResponseFormat.Format - EBNF: ResponseFormat.Format - STRUCTURAL_TAG: ResponseFormat.Format - class Experimental(_message.Message): - __slots__ = ("additional_ebnf", "old_root_key", "new_root_key") - ADDITIONAL_EBNF_FIELD_NUMBER: _ClassVar[int] - OLD_ROOT_KEY_FIELD_NUMBER: _ClassVar[int] - NEW_ROOT_KEY_FIELD_NUMBER: _ClassVar[int] - additional_ebnf: str - old_root_key: str - new_root_key: str - def __init__(self, additional_ebnf: _Optional[str] = ..., old_root_key: _Optional[str] = ..., new_root_key: _Optional[str] = ...) -> None: ... - FORMAT_FIELD_NUMBER: _ClassVar[int] - SCHEMA_FIELD_NUMBER: _ClassVar[int] - EXPERIMENTAL_FIELD_NUMBER: _ClassVar[int] - format: ResponseFormat.Format - schema: str - experimental: ResponseFormat.Experimental - def __init__(self, format: _Optional[_Union[ResponseFormat.Format, str]] = ..., schema: _Optional[str] = ..., experimental: _Optional[_Union[ResponseFormat.Experimental, _Mapping]] = ...) -> None: ... 
- -class GenerationConfig(_message.Message): - __slots__ = ("temp", "top_p", "freq_penalty", "pres_penalty", "rep_penalty", "seed", "max_tokens", "stop_strs", "stop_ids", "response_format", "debug") - class Debug(_message.Message): - __slots__ = ("ignore_eos", "pinned_context") - IGNORE_EOS_FIELD_NUMBER: _ClassVar[int] - PINNED_CONTEXT_FIELD_NUMBER: _ClassVar[int] - ignore_eos: bool - pinned_context: bool - def __init__(self, ignore_eos: bool = ..., pinned_context: bool = ...) -> None: ... - TEMP_FIELD_NUMBER: _ClassVar[int] - TOP_P_FIELD_NUMBER: _ClassVar[int] - FREQ_PENALTY_FIELD_NUMBER: _ClassVar[int] - PRES_PENALTY_FIELD_NUMBER: _ClassVar[int] - REP_PENALTY_FIELD_NUMBER: _ClassVar[int] - SEED_FIELD_NUMBER: _ClassVar[int] - MAX_TOKENS_FIELD_NUMBER: _ClassVar[int] - STOP_STRS_FIELD_NUMBER: _ClassVar[int] - STOP_IDS_FIELD_NUMBER: _ClassVar[int] - RESPONSE_FORMAT_FIELD_NUMBER: _ClassVar[int] - DEBUG_FIELD_NUMBER: _ClassVar[int] - temp: float - top_p: float - freq_penalty: float - pres_penalty: float - rep_penalty: float - seed: int - max_tokens: int - stop_strs: _containers.RepeatedScalarFieldContainer[str] - stop_ids: _containers.RepeatedScalarFieldContainer[int] - response_format: ResponseFormat - debug: GenerationConfig.Debug - def __init__(self, temp: _Optional[float] = ..., top_p: _Optional[float] = ..., freq_penalty: _Optional[float] = ..., pres_penalty: _Optional[float] = ..., rep_penalty: _Optional[float] = ..., seed: _Optional[int] = ..., max_tokens: _Optional[int] = ..., stop_strs: _Optional[_Iterable[str]] = ..., stop_ids: _Optional[_Iterable[int]] = ..., response_format: _Optional[_Union[ResponseFormat, _Mapping]] = ..., debug: _Optional[_Union[GenerationConfig.Debug, _Mapping]] = ...) -> None: ... 
- -class ValidateConfigRequest(_message.Message): - __slots__ = ("cfg", "model_uuid") - CFG_FIELD_NUMBER: _ClassVar[int] - MODEL_UUID_FIELD_NUMBER: _ClassVar[int] - cfg: GenerationConfig - model_uuid: str - def __init__(self, cfg: _Optional[_Union[GenerationConfig, _Mapping]] = ..., model_uuid: _Optional[str] = ...) -> None: ... - -class ValidateConfigResponse(_message.Message): - __slots__ = ("valid", "error", "warnings") - VALID_FIELD_NUMBER: _ClassVar[int] - ERROR_FIELD_NUMBER: _ClassVar[int] - WARNINGS_FIELD_NUMBER: _ClassVar[int] - valid: bool - error: str - warnings: _containers.RepeatedScalarFieldContainer[str] - def __init__(self, valid: bool = ..., error: _Optional[str] = ..., warnings: _Optional[_Iterable[str]] = ...) -> None: ... diff --git a/truffle/infer/infer_pb2.py b/truffle/infer/infer_pb2.py deleted file mode 100644 index dec4d15..0000000 --- a/truffle/infer/infer_pb2.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE -# source: truffle/infer/infer.proto -# Protobuf Python Version: 6.30.0 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 6, - 30, - 0, - '', - 'truffle/infer/infer.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from truffle.infer import irequest_pb2 as truffle_dot_infer_dot_irequest__pb2 -from truffle.infer import iresponse_pb2 as truffle_dot_infer_dot_iresponse__pb2 -from truffle.infer import model_pb2 as truffle_dot_infer_dot_model__pb2 -from truffle.infer import embedding_pb2 as truffle_dot_infer_dot_embedding__pb2 -from truffle.infer.convo import conversation_pb2 as truffle_dot_infer_dot_convo_dot_conversation__pb2 -try: - truffle_dot_infer_dot_convo_dot_msg__pb2 = truffle_dot_infer_dot_convo_dot_conversation__pb2.truffle_dot_infer_dot_convo_dot_msg__pb2 -except AttributeError: - truffle_dot_infer_dot_convo_dot_msg__pb2 = truffle_dot_infer_dot_convo_dot_conversation__pb2.truffle.infer.convo.msg_pb2 -from truffle.infer import tokenize_pb2 as truffle_dot_infer_dot_tokenize__pb2 -from truffle.infer import gencfg_pb2 as truffle_dot_infer_dot_gencfg__pb2 - -from truffle.infer.irequest_pb2 import * -from truffle.infer.iresponse_pb2 import * -from truffle.infer.model_pb2 import * -from truffle.infer.embedding_pb2 import * -from truffle.infer.convo.conversation_pb2 import * -from truffle.infer.tokenize_pb2 import * -from truffle.infer.gencfg_pb2 import * - -DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x19truffle/infer/infer.proto\x12\rtruffle.infer\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1ctruffle/infer/irequest.proto\x1a\x1dtruffle/infer/iresponse.proto\x1a\x19truffle/infer/model.proto\x1a\x1dtruffle/infer/embedding.proto\x1a&truffle/infer/convo/conversation.proto\x1a\x1ctruffle/infer/tokenize.proto\x1a\x1atruffle/infer/gencfg.proto2\xfb\x08\n\x10InferenceService\x12?\n\x08Generate\x12\x17.truffle.infer.IRequest\x1a\x18.truffle.infer.IResponse0\x01\x12\x41\n\x0cGenerateSync\x12\x17.truffle.infer.IRequest\x1a\x18.truffle.infer.IResponse\x12L\n\rGenerateBatch\x12\x1c.truffle.infer.BatchIRequest\x1a\x1d.truffle.infer.BatchIResponse\x12J\n\x05\x45mbed\x12\x1f.truffle.infer.EmbeddingRequest\x1a .truffle.infer.EmbeddingResponse\x12Q\n\x0c\x45mbedQueries\x12\x1f.truffle.infer.EmbeddingRequest\x1a .truffle.infer.EmbeddingResponse\x12L\n\x0cGetModelList\x12\".truffle.infer.GetModelListRequest\x1a\x18.truffle.infer.ModelList\x12@\n\x08GetModel\x12\x1e.truffle.infer.GetModelRequest\x1a\x14.truffle.infer.Model\x12N\n\tSetModels\x12\x1f.truffle.infer.SetModelsRequest\x1a .truffle.infer.SetModelsResponse\x12P\n\rGetModelState\x12\x1e.truffle.infer.GetModelRequest\x1a\x1f.truffle.infer.ModelStateUpdate\x12W\n\x12OnModelStateChange\x12\x1e.truffle.infer.GetModelRequest\x1a\x1f.truffle.infer.ModelStateUpdate0\x01\x12R\n\x15GetEmbeddingModelList\x12\x16.google.protobuf.Empty\x1a!.truffle.infer.EmbeddingModelList\x12Z\n\x15GetEmbeddingModelInfo\x12\x1e.truffle.infer.GetModelRequest\x1a!.truffle.infer.EmbeddingModelInfo\x12R\n\nBuildConvo\x12!.truffle.infer.convo.Conversation\x1a!.truffle.infer.convo.BuiltContext\x12g\n\x18ValidateGenerationConfig\x12$.truffle.infer.ValidateConfigRequest\x1a%.truffle.infer.ValidateConfigResponseP\x01P\x02P\x03P\x04P\x05P\x06P\x07\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 
'truffle.infer.infer_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_INFERENCESERVICE']._serialized_start=291 - _globals['_INFERENCESERVICE']._serialized_end=1438 -# @@protoc_insertion_point(module_scope) diff --git a/truffle/infer/infer_pb2.pyi b/truffle/infer/infer_pb2.pyi deleted file mode 100644 index 27a1470..0000000 --- a/truffle/infer/infer_pb2.pyi +++ /dev/null @@ -1,43 +0,0 @@ -from google.protobuf import empty_pb2 as _empty_pb2 -from truffle.infer import irequest_pb2 as _irequest_pb2 -from truffle.infer import iresponse_pb2 as _iresponse_pb2 -from truffle.infer import model_pb2 as _model_pb2 -from truffle.infer import embedding_pb2 as _embedding_pb2 -from truffle.infer.convo import conversation_pb2 as _conversation_pb2 -from truffle.infer.convo import msg_pb2 as _msg_pb2 -from truffle.infer import tokenize_pb2 as _tokenize_pb2 -from truffle.infer import gencfg_pb2 as _gencfg_pb2 -from google.protobuf import descriptor as _descriptor -from typing import ClassVar as _ClassVar -from truffle.infer.irequest_pb2 import IRequest as IRequest -from truffle.infer.irequest_pb2 import BatchIRequest as BatchIRequest -from truffle.infer.irequest_pb2 import RequestPriority as RequestPriority -from truffle.infer.iresponse_pb2 import IResponse as IResponse -from truffle.infer.iresponse_pb2 import BatchIResponse as BatchIResponse -from truffle.infer.model_pb2 import EmbeddingModelInfo as EmbeddingModelInfo -from truffle.infer.model_pb2 import EmbeddingModelList as EmbeddingModelList -from truffle.infer.model_pb2 import ModelConfig as ModelConfig -from truffle.infer.model_pb2 import Model as Model -from truffle.infer.model_pb2 import ModelStateUpdate as ModelStateUpdate -from truffle.infer.model_pb2 import ModelList as ModelList -from truffle.infer.model_pb2 import GetModelRequest as GetModelRequest -from truffle.infer.model_pb2 import GetModelListRequest as GetModelListRequest -from truffle.infer.model_pb2 import 
SetModelsResponse as SetModelsResponse -from truffle.infer.model_pb2 import SetModelsRequest as SetModelsRequest -from truffle.infer.embedding_pb2 import Embeddable as Embeddable -from truffle.infer.embedding_pb2 import EmbeddingRequest as EmbeddingRequest -from truffle.infer.embedding_pb2 import EmbeddingResponse as EmbeddingResponse -from truffle.infer.convo.conversation_pb2 import Conversation as Conversation -from truffle.infer.convo.conversation_pb2 import BuiltContext as BuiltContext -from truffle.infer.tokenize_pb2 import TokenizeRequest as TokenizeRequest -from truffle.infer.tokenize_pb2 import TokenizeResponse as TokenizeResponse -from truffle.infer.gencfg_pb2 import ResponseFormat as ResponseFormat -from truffle.infer.gencfg_pb2 import GenerationConfig as GenerationConfig -from truffle.infer.gencfg_pb2 import ValidateConfigRequest as ValidateConfigRequest -from truffle.infer.gencfg_pb2 import ValidateConfigResponse as ValidateConfigResponse - -DESCRIPTOR: _descriptor.FileDescriptor -REQUEST_PRIORITY_UNSPECIFIED: _irequest_pb2.RequestPriority -REQUEST_PRIORITY_LOW: _irequest_pb2.RequestPriority -REQUEST_PRIORITY_NORMAL: _irequest_pb2.RequestPriority -REQUEST_PRIORITY_REALTIME: _irequest_pb2.RequestPriority diff --git a/truffle/infer/infer_pb2_grpc.py b/truffle/infer/infer_pb2_grpc.py deleted file mode 100644 index 6c6b6a4..0000000 --- a/truffle/infer/infer_pb2_grpc.py +++ /dev/null @@ -1,701 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-"""Client and server classes corresponding to protobuf-defined services.""" -import grpc -import warnings - -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from truffle.infer.convo import conversation_pb2 as truffle_dot_infer_dot_convo_dot_conversation__pb2 -from truffle.infer import embedding_pb2 as truffle_dot_infer_dot_embedding__pb2 -from truffle.infer import gencfg_pb2 as truffle_dot_infer_dot_gencfg__pb2 -from truffle.infer import irequest_pb2 as truffle_dot_infer_dot_irequest__pb2 -from truffle.infer import iresponse_pb2 as truffle_dot_infer_dot_iresponse__pb2 -from truffle.infer import model_pb2 as truffle_dot_infer_dot_model__pb2 - -GRPC_GENERATED_VERSION = '1.72.0' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/infer/infer_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - ) - - -class InferenceServiceStub(object): - """ - Defines the main gRPC service for all AI inference operations. - This service is the primary entry point for clients to interact with generative - models, create embeddings, manage model configurations, and use other related - utility functions. It consolidates all necessary data structures from other - .proto files into a single, cohesive API. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.Generate = channel.unary_stream( - '/truffle.infer.InferenceService/Generate', - request_serializer=truffle_dot_infer_dot_irequest__pb2.IRequest.SerializeToString, - response_deserializer=truffle_dot_infer_dot_iresponse__pb2.IResponse.FromString, - _registered_method=True) - self.GenerateSync = channel.unary_unary( - '/truffle.infer.InferenceService/GenerateSync', - request_serializer=truffle_dot_infer_dot_irequest__pb2.IRequest.SerializeToString, - response_deserializer=truffle_dot_infer_dot_iresponse__pb2.IResponse.FromString, - _registered_method=True) - self.GenerateBatch = channel.unary_unary( - '/truffle.infer.InferenceService/GenerateBatch', - request_serializer=truffle_dot_infer_dot_irequest__pb2.BatchIRequest.SerializeToString, - response_deserializer=truffle_dot_infer_dot_iresponse__pb2.BatchIResponse.FromString, - _registered_method=True) - self.Embed = channel.unary_unary( - '/truffle.infer.InferenceService/Embed', - request_serializer=truffle_dot_infer_dot_embedding__pb2.EmbeddingRequest.SerializeToString, - response_deserializer=truffle_dot_infer_dot_embedding__pb2.EmbeddingResponse.FromString, - _registered_method=True) - self.EmbedQueries = channel.unary_unary( - '/truffle.infer.InferenceService/EmbedQueries', - request_serializer=truffle_dot_infer_dot_embedding__pb2.EmbeddingRequest.SerializeToString, - response_deserializer=truffle_dot_infer_dot_embedding__pb2.EmbeddingResponse.FromString, - _registered_method=True) - self.GetModelList = channel.unary_unary( - '/truffle.infer.InferenceService/GetModelList', - request_serializer=truffle_dot_infer_dot_model__pb2.GetModelListRequest.SerializeToString, - response_deserializer=truffle_dot_infer_dot_model__pb2.ModelList.FromString, - _registered_method=True) - self.GetModel = channel.unary_unary( - '/truffle.infer.InferenceService/GetModel', - request_serializer=truffle_dot_infer_dot_model__pb2.GetModelRequest.SerializeToString, - 
response_deserializer=truffle_dot_infer_dot_model__pb2.Model.FromString, - _registered_method=True) - self.SetModels = channel.unary_unary( - '/truffle.infer.InferenceService/SetModels', - request_serializer=truffle_dot_infer_dot_model__pb2.SetModelsRequest.SerializeToString, - response_deserializer=truffle_dot_infer_dot_model__pb2.SetModelsResponse.FromString, - _registered_method=True) - self.GetModelState = channel.unary_unary( - '/truffle.infer.InferenceService/GetModelState', - request_serializer=truffle_dot_infer_dot_model__pb2.GetModelRequest.SerializeToString, - response_deserializer=truffle_dot_infer_dot_model__pb2.ModelStateUpdate.FromString, - _registered_method=True) - self.OnModelStateChange = channel.unary_stream( - '/truffle.infer.InferenceService/OnModelStateChange', - request_serializer=truffle_dot_infer_dot_model__pb2.GetModelRequest.SerializeToString, - response_deserializer=truffle_dot_infer_dot_model__pb2.ModelStateUpdate.FromString, - _registered_method=True) - self.GetEmbeddingModelList = channel.unary_unary( - '/truffle.infer.InferenceService/GetEmbeddingModelList', - request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - response_deserializer=truffle_dot_infer_dot_model__pb2.EmbeddingModelList.FromString, - _registered_method=True) - self.GetEmbeddingModelInfo = channel.unary_unary( - '/truffle.infer.InferenceService/GetEmbeddingModelInfo', - request_serializer=truffle_dot_infer_dot_model__pb2.GetModelRequest.SerializeToString, - response_deserializer=truffle_dot_infer_dot_model__pb2.EmbeddingModelInfo.FromString, - _registered_method=True) - self.BuildConvo = channel.unary_unary( - '/truffle.infer.InferenceService/BuildConvo', - request_serializer=truffle_dot_infer_dot_convo_dot_conversation__pb2.Conversation.SerializeToString, - response_deserializer=truffle_dot_infer_dot_convo_dot_conversation__pb2.BuiltContext.FromString, - _registered_method=True) - self.ValidateGenerationConfig = channel.unary_unary( - 
'/truffle.infer.InferenceService/ValidateGenerationConfig', - request_serializer=truffle_dot_infer_dot_gencfg__pb2.ValidateConfigRequest.SerializeToString, - response_deserializer=truffle_dot_infer_dot_gencfg__pb2.ValidateConfigResponse.FromString, - _registered_method=True) - - -class InferenceServiceServicer(object): - """ - Defines the main gRPC service for all AI inference operations. - This service is the primary entry point for clients to interact with generative - models, create embeddings, manage model configurations, and use other related - utility functions. It consolidates all necessary data structures from other - .proto files into a single, cohesive API. - """ - - def Generate(self, request, context): - """Starts a generation task that streams responses back to the client. - This is suitable for interactive applications where responses are displayed - as they are generated. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GenerateSync(self, request, context): - """Performs a generation task and returns the full response in a single message. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GenerateBatch(self, request, context): - """Processes a batch of inference requests in parallel. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def Embed(self, request, context): - """Generates embeddings for a given set of inputs. Embeddings are numerical - representations of text that can be used for semantic search, clustering, - and other machine learning tasks. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def EmbedQueries(self, request, context): - """A specialized version of Embed for generating embeddings for - search queries. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetModelList(self, request, context): - """Retrieves a list of all available models. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetModel(self, request, context): - """Fetches detailed information about a specific model. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def SetModels(self, request, context): - """Configures model parameters such as context length, batch size etc. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetModelState(self, request, context): - """Gets the current state of a model, such as loading, loaded, unloaded etc. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def OnModelStateChange(self, request, context): - """Subscribes to updates on the state of a model to avoid polling. - pass an empty ID to get updates for all models. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetEmbeddingModelList(self, request, context): - """Retrieves a list of all available embedding models. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetEmbeddingModelInfo(self, request, context): - """Gets detailed information about a specific embedding model (input length, dimension size etc). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def BuildConvo(self, request, context): - """Builds a context from a conversation. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def ValidateGenerationConfig(self, request, context): - """Validates a generation configuration to ensure that it is compatible with - the models. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_InferenceServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - 'Generate': grpc.unary_stream_rpc_method_handler( - servicer.Generate, - request_deserializer=truffle_dot_infer_dot_irequest__pb2.IRequest.FromString, - response_serializer=truffle_dot_infer_dot_iresponse__pb2.IResponse.SerializeToString, - ), - 'GenerateSync': grpc.unary_unary_rpc_method_handler( - servicer.GenerateSync, - request_deserializer=truffle_dot_infer_dot_irequest__pb2.IRequest.FromString, - response_serializer=truffle_dot_infer_dot_iresponse__pb2.IResponse.SerializeToString, - ), - 'GenerateBatch': grpc.unary_unary_rpc_method_handler( - servicer.GenerateBatch, - request_deserializer=truffle_dot_infer_dot_irequest__pb2.BatchIRequest.FromString, - response_serializer=truffle_dot_infer_dot_iresponse__pb2.BatchIResponse.SerializeToString, - ), - 'Embed': grpc.unary_unary_rpc_method_handler( - servicer.Embed, - 
request_deserializer=truffle_dot_infer_dot_embedding__pb2.EmbeddingRequest.FromString, - response_serializer=truffle_dot_infer_dot_embedding__pb2.EmbeddingResponse.SerializeToString, - ), - 'EmbedQueries': grpc.unary_unary_rpc_method_handler( - servicer.EmbedQueries, - request_deserializer=truffle_dot_infer_dot_embedding__pb2.EmbeddingRequest.FromString, - response_serializer=truffle_dot_infer_dot_embedding__pb2.EmbeddingResponse.SerializeToString, - ), - 'GetModelList': grpc.unary_unary_rpc_method_handler( - servicer.GetModelList, - request_deserializer=truffle_dot_infer_dot_model__pb2.GetModelListRequest.FromString, - response_serializer=truffle_dot_infer_dot_model__pb2.ModelList.SerializeToString, - ), - 'GetModel': grpc.unary_unary_rpc_method_handler( - servicer.GetModel, - request_deserializer=truffle_dot_infer_dot_model__pb2.GetModelRequest.FromString, - response_serializer=truffle_dot_infer_dot_model__pb2.Model.SerializeToString, - ), - 'SetModels': grpc.unary_unary_rpc_method_handler( - servicer.SetModels, - request_deserializer=truffle_dot_infer_dot_model__pb2.SetModelsRequest.FromString, - response_serializer=truffle_dot_infer_dot_model__pb2.SetModelsResponse.SerializeToString, - ), - 'GetModelState': grpc.unary_unary_rpc_method_handler( - servicer.GetModelState, - request_deserializer=truffle_dot_infer_dot_model__pb2.GetModelRequest.FromString, - response_serializer=truffle_dot_infer_dot_model__pb2.ModelStateUpdate.SerializeToString, - ), - 'OnModelStateChange': grpc.unary_stream_rpc_method_handler( - servicer.OnModelStateChange, - request_deserializer=truffle_dot_infer_dot_model__pb2.GetModelRequest.FromString, - response_serializer=truffle_dot_infer_dot_model__pb2.ModelStateUpdate.SerializeToString, - ), - 'GetEmbeddingModelList': grpc.unary_unary_rpc_method_handler( - servicer.GetEmbeddingModelList, - request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - 
response_serializer=truffle_dot_infer_dot_model__pb2.EmbeddingModelList.SerializeToString, - ), - 'GetEmbeddingModelInfo': grpc.unary_unary_rpc_method_handler( - servicer.GetEmbeddingModelInfo, - request_deserializer=truffle_dot_infer_dot_model__pb2.GetModelRequest.FromString, - response_serializer=truffle_dot_infer_dot_model__pb2.EmbeddingModelInfo.SerializeToString, - ), - 'BuildConvo': grpc.unary_unary_rpc_method_handler( - servicer.BuildConvo, - request_deserializer=truffle_dot_infer_dot_convo_dot_conversation__pb2.Conversation.FromString, - response_serializer=truffle_dot_infer_dot_convo_dot_conversation__pb2.BuiltContext.SerializeToString, - ), - 'ValidateGenerationConfig': grpc.unary_unary_rpc_method_handler( - servicer.ValidateGenerationConfig, - request_deserializer=truffle_dot_infer_dot_gencfg__pb2.ValidateConfigRequest.FromString, - response_serializer=truffle_dot_infer_dot_gencfg__pb2.ValidateConfigResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'truffle.infer.InferenceService', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) - server.add_registered_method_handlers('truffle.infer.InferenceService', rpc_method_handlers) - - - # This class is part of an EXPERIMENTAL API. -class InferenceService(object): - """ - Defines the main gRPC service for all AI inference operations. - This service is the primary entry point for clients to interact with generative - models, create embeddings, manage model configurations, and use other related - utility functions. It consolidates all necessary data structures from other - .proto files into a single, cohesive API. 
- """ - - @staticmethod - def Generate(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_stream( - request, - target, - '/truffle.infer.InferenceService/Generate', - truffle_dot_infer_dot_irequest__pb2.IRequest.SerializeToString, - truffle_dot_infer_dot_iresponse__pb2.IResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def GenerateSync(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/truffle.infer.InferenceService/GenerateSync', - truffle_dot_infer_dot_irequest__pb2.IRequest.SerializeToString, - truffle_dot_infer_dot_iresponse__pb2.IResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def GenerateBatch(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/truffle.infer.InferenceService/GenerateBatch', - truffle_dot_infer_dot_irequest__pb2.BatchIRequest.SerializeToString, - truffle_dot_infer_dot_iresponse__pb2.BatchIResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def Embed(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - 
wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/truffle.infer.InferenceService/Embed', - truffle_dot_infer_dot_embedding__pb2.EmbeddingRequest.SerializeToString, - truffle_dot_infer_dot_embedding__pb2.EmbeddingResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def EmbedQueries(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/truffle.infer.InferenceService/EmbedQueries', - truffle_dot_infer_dot_embedding__pb2.EmbeddingRequest.SerializeToString, - truffle_dot_infer_dot_embedding__pb2.EmbeddingResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def GetModelList(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/truffle.infer.InferenceService/GetModelList', - truffle_dot_infer_dot_model__pb2.GetModelListRequest.SerializeToString, - truffle_dot_infer_dot_model__pb2.ModelList.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def GetModel(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - 
'/truffle.infer.InferenceService/GetModel', - truffle_dot_infer_dot_model__pb2.GetModelRequest.SerializeToString, - truffle_dot_infer_dot_model__pb2.Model.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def SetModels(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/truffle.infer.InferenceService/SetModels', - truffle_dot_infer_dot_model__pb2.SetModelsRequest.SerializeToString, - truffle_dot_infer_dot_model__pb2.SetModelsResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def GetModelState(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/truffle.infer.InferenceService/GetModelState', - truffle_dot_infer_dot_model__pb2.GetModelRequest.SerializeToString, - truffle_dot_infer_dot_model__pb2.ModelStateUpdate.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def OnModelStateChange(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_stream( - request, - target, - '/truffle.infer.InferenceService/OnModelStateChange', - truffle_dot_infer_dot_model__pb2.GetModelRequest.SerializeToString, - 
truffle_dot_infer_dot_model__pb2.ModelStateUpdate.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def GetEmbeddingModelList(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/truffle.infer.InferenceService/GetEmbeddingModelList', - google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - truffle_dot_infer_dot_model__pb2.EmbeddingModelList.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def GetEmbeddingModelInfo(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/truffle.infer.InferenceService/GetEmbeddingModelInfo', - truffle_dot_infer_dot_model__pb2.GetModelRequest.SerializeToString, - truffle_dot_infer_dot_model__pb2.EmbeddingModelInfo.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def BuildConvo(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/truffle.infer.InferenceService/BuildConvo', - truffle_dot_infer_dot_convo_dot_conversation__pb2.Conversation.SerializeToString, - truffle_dot_infer_dot_convo_dot_conversation__pb2.BuiltContext.FromString, - options, - channel_credentials, - insecure, 
- call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def ValidateGenerationConfig(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/truffle.infer.InferenceService/ValidateGenerationConfig', - truffle_dot_infer_dot_gencfg__pb2.ValidateConfigRequest.SerializeToString, - truffle_dot_infer_dot_gencfg__pb2.ValidateConfigResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) diff --git a/truffle/infer/irequest_pb2.py b/truffle/infer/irequest_pb2.py deleted file mode 100644 index df2bafa..0000000 --- a/truffle/infer/irequest_pb2.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE -# source: truffle/infer/irequest.proto -# Protobuf Python Version: 6.30.0 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 6, - 30, - 0, - '', - 'truffle/infer/irequest.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from truffle.infer import gencfg_pb2 as truffle_dot_infer_dot_gencfg__pb2 -from truffle.infer.convo import conversation_pb2 as truffle_dot_infer_dot_convo_dot_conversation__pb2 -try: - truffle_dot_infer_dot_convo_dot_msg__pb2 = truffle_dot_infer_dot_convo_dot_conversation__pb2.truffle_dot_infer_dot_convo_dot_msg__pb2 -except AttributeError: - truffle_dot_infer_dot_convo_dot_msg__pb2 = truffle_dot_infer_dot_convo_dot_conversation__pb2.truffle.infer.convo.msg_pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1ctruffle/infer/irequest.proto\x12\rtruffle.infer\x1a\x1atruffle/infer/gencfg.proto\x1a&truffle/infer/convo/conversation.proto\"\xd8\x01\n\x08IRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\r\n\x03raw\x18\x02 \x01(\tH\x00\x12\x32\n\x05\x63onvo\x18\x03 \x01(\x0b\x32!.truffle.infer.convo.ConversationH\x00\x12,\n\x03\x63\x66g\x18\x04 \x01(\x0b\x32\x1f.truffle.infer.GenerationConfig\x12\x12\n\nmodel_uuid\x18\x05 \x01(\t\x12\x30\n\x08priority\x18\x06 \x01(\x0e\x32\x1e.truffle.infer.RequestPriorityB\t\n\x07\x63ontext\":\n\rBatchIRequest\x12)\n\x08requests\x18\x01 \x03(\x0b\x32\x17.truffle.infer.IRequest*\x89\x01\n\x0fRequestPriority\x12 
\n\x1cREQUEST_PRIORITY_UNSPECIFIED\x10\x00\x12\x18\n\x14REQUEST_PRIORITY_LOW\x10\x01\x12\x1b\n\x17REQUEST_PRIORITY_NORMAL\x10\x02\x12\x1d\n\x19REQUEST_PRIORITY_REALTIME\x10\x03\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.infer.irequest_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_REQUESTPRIORITY']._serialized_start=395 - _globals['_REQUESTPRIORITY']._serialized_end=532 - _globals['_IREQUEST']._serialized_start=116 - _globals['_IREQUEST']._serialized_end=332 - _globals['_BATCHIREQUEST']._serialized_start=334 - _globals['_BATCHIREQUEST']._serialized_end=392 -# @@protoc_insertion_point(module_scope) diff --git a/truffle/infer/irequest_pb2.pyi b/truffle/infer/irequest_pb2.pyi deleted file mode 100644 index 8332e8c..0000000 --- a/truffle/infer/irequest_pb2.pyi +++ /dev/null @@ -1,44 +0,0 @@ -from truffle.infer import gencfg_pb2 as _gencfg_pb2 -from truffle.infer.convo import conversation_pb2 as _conversation_pb2 -from truffle.infer.convo import msg_pb2 as _msg_pb2 -from google.protobuf.internal import containers as _containers -from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from collections.abc import Iterable as _Iterable, Mapping as _Mapping -from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union - -DESCRIPTOR: _descriptor.FileDescriptor - -class RequestPriority(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - REQUEST_PRIORITY_UNSPECIFIED: _ClassVar[RequestPriority] - REQUEST_PRIORITY_LOW: _ClassVar[RequestPriority] - REQUEST_PRIORITY_NORMAL: _ClassVar[RequestPriority] - REQUEST_PRIORITY_REALTIME: _ClassVar[RequestPriority] -REQUEST_PRIORITY_UNSPECIFIED: RequestPriority -REQUEST_PRIORITY_LOW: RequestPriority 
-REQUEST_PRIORITY_NORMAL: RequestPriority -REQUEST_PRIORITY_REALTIME: RequestPriority - -class IRequest(_message.Message): - __slots__ = ("id", "raw", "convo", "cfg", "model_uuid", "priority") - ID_FIELD_NUMBER: _ClassVar[int] - RAW_FIELD_NUMBER: _ClassVar[int] - CONVO_FIELD_NUMBER: _ClassVar[int] - CFG_FIELD_NUMBER: _ClassVar[int] - MODEL_UUID_FIELD_NUMBER: _ClassVar[int] - PRIORITY_FIELD_NUMBER: _ClassVar[int] - id: str - raw: str - convo: _conversation_pb2.Conversation - cfg: _gencfg_pb2.GenerationConfig - model_uuid: str - priority: RequestPriority - def __init__(self, id: _Optional[str] = ..., raw: _Optional[str] = ..., convo: _Optional[_Union[_conversation_pb2.Conversation, _Mapping]] = ..., cfg: _Optional[_Union[_gencfg_pb2.GenerationConfig, _Mapping]] = ..., model_uuid: _Optional[str] = ..., priority: _Optional[_Union[RequestPriority, str]] = ...) -> None: ... - -class BatchIRequest(_message.Message): - __slots__ = ("requests",) - REQUESTS_FIELD_NUMBER: _ClassVar[int] - requests: _containers.RepeatedCompositeFieldContainer[IRequest] - def __init__(self, requests: _Optional[_Iterable[_Union[IRequest, _Mapping]]] = ...) -> None: ... diff --git a/truffle/infer/irequest_pb2_grpc.py b/truffle/infer/irequest_pb2_grpc.py deleted file mode 100644 index de17794..0000000 --- a/truffle/infer/irequest_pb2_grpc.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-"""Client and server classes corresponding to protobuf-defined services.""" -import grpc -import warnings - - -GRPC_GENERATED_VERSION = '1.72.0' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/infer/irequest_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - ) diff --git a/truffle/infer/iresponse_pb2.py b/truffle/infer/iresponse_pb2.py deleted file mode 100644 index 5c6ad2a..0000000 --- a/truffle/infer/iresponse_pb2.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE -# source: truffle/infer/iresponse.proto -# Protobuf Python Version: 6.30.0 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 6, - 30, - 0, - '', - 'truffle/infer/iresponse.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from truffle.infer import usage_pb2 as truffle_dot_infer_dot_usage__pb2 -from truffle.infer import finishreason_pb2 as truffle_dot_infer_dot_finishreason__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1dtruffle/infer/iresponse.proto\x12\rtruffle.infer\x1a\x19truffle/infer/usage.proto\x1a truffle/infer/finishreason.proto\"\xa7\x01\n\tIResponse\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\t\x12(\n\x05usage\x18\x03 \x01(\x0b\x32\x14.truffle.infer.UsageH\x00\x88\x01\x01\x12\x37\n\rfinish_reason\x18\x04 \x01(\x0e\x32\x1b.truffle.infer.FinishReasonH\x01\x88\x01\x01\x42\x08\n\x06_usageB\x10\n\x0e_finish_reason\"=\n\x0e\x42\x61tchIResponse\x12+\n\tresponses\x18\x01 \x03(\x0b\x32\x18.truffle.infer.IResponseb\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.infer.iresponse_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_IRESPONSE']._serialized_start=110 - _globals['_IRESPONSE']._serialized_end=277 - _globals['_BATCHIRESPONSE']._serialized_start=279 - _globals['_BATCHIRESPONSE']._serialized_end=340 -# @@protoc_insertion_point(module_scope) diff --git a/truffle/infer/iresponse_pb2.pyi 
b/truffle/infer/iresponse_pb2.pyi deleted file mode 100644 index 43d8a58..0000000 --- a/truffle/infer/iresponse_pb2.pyi +++ /dev/null @@ -1,27 +0,0 @@ -from truffle.infer import usage_pb2 as _usage_pb2 -from truffle.infer import finishreason_pb2 as _finishreason_pb2 -from google.protobuf.internal import containers as _containers -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from collections.abc import Iterable as _Iterable, Mapping as _Mapping -from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union - -DESCRIPTOR: _descriptor.FileDescriptor - -class IResponse(_message.Message): - __slots__ = ("id", "content", "usage", "finish_reason") - ID_FIELD_NUMBER: _ClassVar[int] - CONTENT_FIELD_NUMBER: _ClassVar[int] - USAGE_FIELD_NUMBER: _ClassVar[int] - FINISH_REASON_FIELD_NUMBER: _ClassVar[int] - id: str - content: str - usage: _usage_pb2.Usage - finish_reason: _finishreason_pb2.FinishReason - def __init__(self, id: _Optional[str] = ..., content: _Optional[str] = ..., usage: _Optional[_Union[_usage_pb2.Usage, _Mapping]] = ..., finish_reason: _Optional[_Union[_finishreason_pb2.FinishReason, str]] = ...) -> None: ... - -class BatchIResponse(_message.Message): - __slots__ = ("responses",) - RESPONSES_FIELD_NUMBER: _ClassVar[int] - responses: _containers.RepeatedCompositeFieldContainer[IResponse] - def __init__(self, responses: _Optional[_Iterable[_Union[IResponse, _Mapping]]] = ...) -> None: ... diff --git a/truffle/infer/iresponse_pb2_grpc.py b/truffle/infer/iresponse_pb2_grpc.py deleted file mode 100644 index 1be00b7..0000000 --- a/truffle/infer/iresponse_pb2_grpc.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-"""Client and server classes corresponding to protobuf-defined services.""" -import grpc -import warnings - - -GRPC_GENERATED_VERSION = '1.72.0' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/infer/iresponse_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - ) diff --git a/truffle/infer/model_pb2.py b/truffle/infer/model_pb2.py deleted file mode 100644 index 7253b61..0000000 --- a/truffle/infer/model_pb2.py +++ /dev/null @@ -1,66 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE -# source: truffle/infer/model.proto -# Protobuf Python Version: 6.30.0 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 6, - 30, - 0, - '', - 'truffle/infer/model.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19truffle/infer/model.proto\x12\rtruffle.infer\"\xda\x01\n\x12\x45mbeddingModelInfo\x12\n\n\x02id\x18\x01 \x01(\x05\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x38\n\x06\x63onfig\x18\x04 \x01(\x0b\x32(.truffle.infer.EmbeddingModelInfo.Config\x12\x0c\n\x04uuid\x18\x05 \x01(\t\x1aQ\n\x06\x43onfig\x12\x18\n\x10max_input_length\x18\x01 \x01(\x03\x12\x16\n\x0emax_batch_size\x18\x02 \x01(\x03\x12\x15\n\rembedding_dim\x18\x03 \x01(\x03\"G\n\x12\x45mbeddingModelList\x12\x31\n\x06models\x18\x01 \x03(\x0b\x32!.truffle.infer.EmbeddingModelInfo\"\xba\x04\n\x0bModelConfig\x12=\n\x04info\x18\x01 \x01(\x0b\x32*.truffle.infer.ModelConfig.ModelConfigInfoH\x00\x88\x01\x01\x12\x1b\n\x0e\x63ontext_length\x18\x02 \x01(\rH\x01\x88\x01\x01\x12\x1b\n\x0emax_batch_size\x18\x03 \x01(\rH\x02\x88\x01\x01\x12\x16\n\tdata_type\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x13\n\x06loaded\x18\x05 \x01(\x08H\x04\x88\x01\x01\x1a\xbc\x02\n\x0fModelConfigInfo\x12 \n\x18\x63ontext_length_limit_max\x18\x01 \x01(\r\x12 \n\x18\x63ontext_length_limit_min\x18\x02 \x01(\r\x12\x1c\n\x14model_context_length\x18\n \x01(\r\x12\x1c\n\x14\x62\x61tch_size_limit_max\x18\x03 \x01(\r\x12\x1c\n\x14\x62\x61tch_size_limit_min\x18\x04 
\x01(\r\x12\x1c\n\x14has_chain_of_thought\x18\x05 \x01(\x08\x12\x12\n\nis_agentic\x18\x06 \x01(\x08\x12\x1b\n\x13memory_usage_params\x18\x07 \x01(\x03\x12\x1e\n\x16memory_usage_inference\x18\x08 \x01(\x03\x12\x1c\n\x14\x61vailable_data_types\x18\t \x03(\tB\x07\n\x05_infoB\x11\n\x0f_context_lengthB\x11\n\x0f_max_batch_sizeB\x0c\n\n_data_typeB\t\n\x07_loaded\"\xa0\x02\n\x05Model\x12\x0c\n\x04uuid\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08provider\x18\x03 \x01(\t\x12*\n\x06\x63onfig\x18\x04 \x01(\x0b\x32\x1a.truffle.infer.ModelConfig\x12.\n\x05state\x18\x05 \x01(\x0e\x32\x1f.truffle.infer.Model.ModelState\"\x8c\x01\n\nModelState\x12\x17\n\x13MODEL_STATE_INVALID\x10\x00\x12\x19\n\x15MODEL_STATE_AVAILABLE\x10\x01\x12\x17\n\x13MODEL_STATE_LOADING\x10\x02\x12\x19\n\x15MODEL_STATE_UNLOADING\x10\x03\x12\x16\n\x12MODEL_STATE_LOADED\x10\x04\"z\n\x10ModelStateUpdate\x12\x12\n\nmodel_uuid\x18\x01 \x01(\t\x12.\n\x05state\x18\x02 \x01(\x0e\x32\x1f.truffle.infer.Model.ModelState\x12\x15\n\x08progress\x18\x03 \x01(\x05H\x00\x88\x01\x01\x42\x0b\n\t_progress\"\\\n\tModelList\x12$\n\x06models\x18\x01 \x03(\x0b\x32\x14.truffle.infer.Model\x12\x14\n\x0ctotal_memory\x18\x02 \x01(\x04\x12\x13\n\x0bused_memory\x18\x03 \x01(\x04\"%\n\x0fGetModelRequest\x12\x12\n\nmodel_uuid\x18\x01 \x01(\t\"m\n\x13GetModelListRequest\x12\x12\n\nuse_filter\x18\x01 \x01(\x08\x12\x13\n\tavailable\x18\x02 \x01(\x08H\x00\x12\x10\n\x06loaded\x18\x03 \x01(\x08H\x00\x12\x11\n\x07\x61gentic\x18\x04 \x01(\x08H\x00\x42\x08\n\x06\x66ilter\"\x88\x02\n\x11SetModelsResponse\x12\x41\n\x04\x63ode\x18\x01 \x01(\x0e\x32\x33.truffle.infer.SetModelsResponse.SetModelsErrorCode\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x33\n\x0cupdated_list\x18\x03 
\x01(\x0b\x32\x18.truffle.infer.ModelListH\x00\x88\x01\x01\"Y\n\x12SetModelsErrorCode\x12\x06\n\x02OK\x10\x00\x12\x12\n\x0eINVALID_CONFIG\x10\x01\x12\x15\n\x11NOT_ENOUGH_MEMORY\x10\x02\x12\x10\n\x0cMODEL_IN_USE\x10\x03\x42\x0f\n\r_updated_list\"\x9d\x01\n\x10SetModelsRequest\x12=\n\x07updates\x18\x01 \x03(\x0b\x32,.truffle.infer.SetModelsRequest.UpdatesEntry\x1aJ\n\x0cUpdatesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.truffle.infer.ModelConfig:\x02\x38\x01\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.infer.model_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_SETMODELSREQUEST_UPDATESENTRY']._loaded_options = None - _globals['_SETMODELSREQUEST_UPDATESENTRY']._serialized_options = b'8\001' - _globals['_EMBEDDINGMODELINFO']._serialized_start=45 - _globals['_EMBEDDINGMODELINFO']._serialized_end=263 - _globals['_EMBEDDINGMODELINFO_CONFIG']._serialized_start=182 - _globals['_EMBEDDINGMODELINFO_CONFIG']._serialized_end=263 - _globals['_EMBEDDINGMODELLIST']._serialized_start=265 - _globals['_EMBEDDINGMODELLIST']._serialized_end=336 - _globals['_MODELCONFIG']._serialized_start=339 - _globals['_MODELCONFIG']._serialized_end=909 - _globals['_MODELCONFIG_MODELCONFIGINFO']._serialized_start=521 - _globals['_MODELCONFIG_MODELCONFIGINFO']._serialized_end=837 - _globals['_MODEL']._serialized_start=912 - _globals['_MODEL']._serialized_end=1200 - _globals['_MODEL_MODELSTATE']._serialized_start=1060 - _globals['_MODEL_MODELSTATE']._serialized_end=1200 - _globals['_MODELSTATEUPDATE']._serialized_start=1202 - _globals['_MODELSTATEUPDATE']._serialized_end=1324 - _globals['_MODELLIST']._serialized_start=1326 - _globals['_MODELLIST']._serialized_end=1418 - _globals['_GETMODELREQUEST']._serialized_start=1420 - _globals['_GETMODELREQUEST']._serialized_end=1457 - 
_globals['_GETMODELLISTREQUEST']._serialized_start=1459 - _globals['_GETMODELLISTREQUEST']._serialized_end=1568 - _globals['_SETMODELSRESPONSE']._serialized_start=1571 - _globals['_SETMODELSRESPONSE']._serialized_end=1835 - _globals['_SETMODELSRESPONSE_SETMODELSERRORCODE']._serialized_start=1729 - _globals['_SETMODELSRESPONSE_SETMODELSERRORCODE']._serialized_end=1818 - _globals['_SETMODELSREQUEST']._serialized_start=1838 - _globals['_SETMODELSREQUEST']._serialized_end=1995 - _globals['_SETMODELSREQUEST_UPDATESENTRY']._serialized_start=1921 - _globals['_SETMODELSREQUEST_UPDATESENTRY']._serialized_end=1995 -# @@protoc_insertion_point(module_scope) diff --git a/truffle/infer/model_pb2.pyi b/truffle/infer/model_pb2.pyi deleted file mode 100644 index 218c5d7..0000000 --- a/truffle/infer/model_pb2.pyi +++ /dev/null @@ -1,171 +0,0 @@ -from google.protobuf.internal import containers as _containers -from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from collections.abc import Iterable as _Iterable, Mapping as _Mapping -from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union - -DESCRIPTOR: _descriptor.FileDescriptor - -class EmbeddingModelInfo(_message.Message): - __slots__ = ("id", "name", "version", "config", "uuid") - class Config(_message.Message): - __slots__ = ("max_input_length", "max_batch_size", "embedding_dim") - MAX_INPUT_LENGTH_FIELD_NUMBER: _ClassVar[int] - MAX_BATCH_SIZE_FIELD_NUMBER: _ClassVar[int] - EMBEDDING_DIM_FIELD_NUMBER: _ClassVar[int] - max_input_length: int - max_batch_size: int - embedding_dim: int - def __init__(self, max_input_length: _Optional[int] = ..., max_batch_size: _Optional[int] = ..., embedding_dim: _Optional[int] = ...) -> None: ... 
- ID_FIELD_NUMBER: _ClassVar[int] - NAME_FIELD_NUMBER: _ClassVar[int] - VERSION_FIELD_NUMBER: _ClassVar[int] - CONFIG_FIELD_NUMBER: _ClassVar[int] - UUID_FIELD_NUMBER: _ClassVar[int] - id: int - name: str - version: str - config: EmbeddingModelInfo.Config - uuid: str - def __init__(self, id: _Optional[int] = ..., name: _Optional[str] = ..., version: _Optional[str] = ..., config: _Optional[_Union[EmbeddingModelInfo.Config, _Mapping]] = ..., uuid: _Optional[str] = ...) -> None: ... - -class EmbeddingModelList(_message.Message): - __slots__ = ("models",) - MODELS_FIELD_NUMBER: _ClassVar[int] - models: _containers.RepeatedCompositeFieldContainer[EmbeddingModelInfo] - def __init__(self, models: _Optional[_Iterable[_Union[EmbeddingModelInfo, _Mapping]]] = ...) -> None: ... - -class ModelConfig(_message.Message): - __slots__ = ("info", "context_length", "max_batch_size", "data_type", "loaded") - class ModelConfigInfo(_message.Message): - __slots__ = ("context_length_limit_max", "context_length_limit_min", "model_context_length", "batch_size_limit_max", "batch_size_limit_min", "has_chain_of_thought", "is_agentic", "memory_usage_params", "memory_usage_inference", "available_data_types") - CONTEXT_LENGTH_LIMIT_MAX_FIELD_NUMBER: _ClassVar[int] - CONTEXT_LENGTH_LIMIT_MIN_FIELD_NUMBER: _ClassVar[int] - MODEL_CONTEXT_LENGTH_FIELD_NUMBER: _ClassVar[int] - BATCH_SIZE_LIMIT_MAX_FIELD_NUMBER: _ClassVar[int] - BATCH_SIZE_LIMIT_MIN_FIELD_NUMBER: _ClassVar[int] - HAS_CHAIN_OF_THOUGHT_FIELD_NUMBER: _ClassVar[int] - IS_AGENTIC_FIELD_NUMBER: _ClassVar[int] - MEMORY_USAGE_PARAMS_FIELD_NUMBER: _ClassVar[int] - MEMORY_USAGE_INFERENCE_FIELD_NUMBER: _ClassVar[int] - AVAILABLE_DATA_TYPES_FIELD_NUMBER: _ClassVar[int] - context_length_limit_max: int - context_length_limit_min: int - model_context_length: int - batch_size_limit_max: int - batch_size_limit_min: int - has_chain_of_thought: bool - is_agentic: bool - memory_usage_params: int - memory_usage_inference: int - available_data_types: 
_containers.RepeatedScalarFieldContainer[str] - def __init__(self, context_length_limit_max: _Optional[int] = ..., context_length_limit_min: _Optional[int] = ..., model_context_length: _Optional[int] = ..., batch_size_limit_max: _Optional[int] = ..., batch_size_limit_min: _Optional[int] = ..., has_chain_of_thought: bool = ..., is_agentic: bool = ..., memory_usage_params: _Optional[int] = ..., memory_usage_inference: _Optional[int] = ..., available_data_types: _Optional[_Iterable[str]] = ...) -> None: ... - INFO_FIELD_NUMBER: _ClassVar[int] - CONTEXT_LENGTH_FIELD_NUMBER: _ClassVar[int] - MAX_BATCH_SIZE_FIELD_NUMBER: _ClassVar[int] - DATA_TYPE_FIELD_NUMBER: _ClassVar[int] - LOADED_FIELD_NUMBER: _ClassVar[int] - info: ModelConfig.ModelConfigInfo - context_length: int - max_batch_size: int - data_type: str - loaded: bool - def __init__(self, info: _Optional[_Union[ModelConfig.ModelConfigInfo, _Mapping]] = ..., context_length: _Optional[int] = ..., max_batch_size: _Optional[int] = ..., data_type: _Optional[str] = ..., loaded: bool = ...) -> None: ... 
- -class Model(_message.Message): - __slots__ = ("uuid", "name", "provider", "config", "state") - class ModelState(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - MODEL_STATE_INVALID: _ClassVar[Model.ModelState] - MODEL_STATE_AVAILABLE: _ClassVar[Model.ModelState] - MODEL_STATE_LOADING: _ClassVar[Model.ModelState] - MODEL_STATE_UNLOADING: _ClassVar[Model.ModelState] - MODEL_STATE_LOADED: _ClassVar[Model.ModelState] - MODEL_STATE_INVALID: Model.ModelState - MODEL_STATE_AVAILABLE: Model.ModelState - MODEL_STATE_LOADING: Model.ModelState - MODEL_STATE_UNLOADING: Model.ModelState - MODEL_STATE_LOADED: Model.ModelState - UUID_FIELD_NUMBER: _ClassVar[int] - NAME_FIELD_NUMBER: _ClassVar[int] - PROVIDER_FIELD_NUMBER: _ClassVar[int] - CONFIG_FIELD_NUMBER: _ClassVar[int] - STATE_FIELD_NUMBER: _ClassVar[int] - uuid: str - name: str - provider: str - config: ModelConfig - state: Model.ModelState - def __init__(self, uuid: _Optional[str] = ..., name: _Optional[str] = ..., provider: _Optional[str] = ..., config: _Optional[_Union[ModelConfig, _Mapping]] = ..., state: _Optional[_Union[Model.ModelState, str]] = ...) -> None: ... - -class ModelStateUpdate(_message.Message): - __slots__ = ("model_uuid", "state", "progress") - MODEL_UUID_FIELD_NUMBER: _ClassVar[int] - STATE_FIELD_NUMBER: _ClassVar[int] - PROGRESS_FIELD_NUMBER: _ClassVar[int] - model_uuid: str - state: Model.ModelState - progress: int - def __init__(self, model_uuid: _Optional[str] = ..., state: _Optional[_Union[Model.ModelState, str]] = ..., progress: _Optional[int] = ...) -> None: ... 
- -class ModelList(_message.Message): - __slots__ = ("models", "total_memory", "used_memory") - MODELS_FIELD_NUMBER: _ClassVar[int] - TOTAL_MEMORY_FIELD_NUMBER: _ClassVar[int] - USED_MEMORY_FIELD_NUMBER: _ClassVar[int] - models: _containers.RepeatedCompositeFieldContainer[Model] - total_memory: int - used_memory: int - def __init__(self, models: _Optional[_Iterable[_Union[Model, _Mapping]]] = ..., total_memory: _Optional[int] = ..., used_memory: _Optional[int] = ...) -> None: ... - -class GetModelRequest(_message.Message): - __slots__ = ("model_uuid",) - MODEL_UUID_FIELD_NUMBER: _ClassVar[int] - model_uuid: str - def __init__(self, model_uuid: _Optional[str] = ...) -> None: ... - -class GetModelListRequest(_message.Message): - __slots__ = ("use_filter", "available", "loaded", "agentic") - USE_FILTER_FIELD_NUMBER: _ClassVar[int] - AVAILABLE_FIELD_NUMBER: _ClassVar[int] - LOADED_FIELD_NUMBER: _ClassVar[int] - AGENTIC_FIELD_NUMBER: _ClassVar[int] - use_filter: bool - available: bool - loaded: bool - agentic: bool - def __init__(self, use_filter: bool = ..., available: bool = ..., loaded: bool = ..., agentic: bool = ...) -> None: ... 
- -class SetModelsResponse(_message.Message): - __slots__ = ("code", "message", "updated_list") - class SetModelsErrorCode(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - OK: _ClassVar[SetModelsResponse.SetModelsErrorCode] - INVALID_CONFIG: _ClassVar[SetModelsResponse.SetModelsErrorCode] - NOT_ENOUGH_MEMORY: _ClassVar[SetModelsResponse.SetModelsErrorCode] - MODEL_IN_USE: _ClassVar[SetModelsResponse.SetModelsErrorCode] - OK: SetModelsResponse.SetModelsErrorCode - INVALID_CONFIG: SetModelsResponse.SetModelsErrorCode - NOT_ENOUGH_MEMORY: SetModelsResponse.SetModelsErrorCode - MODEL_IN_USE: SetModelsResponse.SetModelsErrorCode - CODE_FIELD_NUMBER: _ClassVar[int] - MESSAGE_FIELD_NUMBER: _ClassVar[int] - UPDATED_LIST_FIELD_NUMBER: _ClassVar[int] - code: SetModelsResponse.SetModelsErrorCode - message: str - updated_list: ModelList - def __init__(self, code: _Optional[_Union[SetModelsResponse.SetModelsErrorCode, str]] = ..., message: _Optional[str] = ..., updated_list: _Optional[_Union[ModelList, _Mapping]] = ...) -> None: ... - -class SetModelsRequest(_message.Message): - __slots__ = ("updates",) - class UpdatesEntry(_message.Message): - __slots__ = ("key", "value") - KEY_FIELD_NUMBER: _ClassVar[int] - VALUE_FIELD_NUMBER: _ClassVar[int] - key: str - value: ModelConfig - def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[ModelConfig, _Mapping]] = ...) -> None: ... - UPDATES_FIELD_NUMBER: _ClassVar[int] - updates: _containers.MessageMap[str, ModelConfig] - def __init__(self, updates: _Optional[_Mapping[str, ModelConfig]] = ...) -> None: ... diff --git a/truffle/infer/model_pb2_grpc.py b/truffle/infer/model_pb2_grpc.py deleted file mode 100644 index 05e4dd8..0000000 --- a/truffle/infer/model_pb2_grpc.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-"""Client and server classes corresponding to protobuf-defined services.""" -import grpc -import warnings - - -GRPC_GENERATED_VERSION = '1.72.0' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/infer/model_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - ) diff --git a/truffle/infer/tokenize_pb2.py b/truffle/infer/tokenize_pb2.py deleted file mode 100644 index 9d13153..0000000 --- a/truffle/infer/tokenize_pb2.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE -# source: truffle/infer/tokenize.proto -# Protobuf Python Version: 6.30.0 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 6, - 30, - 0, - '', - 'truffle/infer/tokenize.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1ctruffle/infer/tokenize.proto\x12\rtruffle.infer\"4\n\x0fTokenizeRequest\x12\r\n\x05texts\x18\x01 \x03(\t\x12\x12\n\nmodel_uuid\x18\x02 \x01(\t\"#\n\x10TokenizeResponse\x12\x0f\n\x07lengths\x18\x01 \x03(\rb\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.infer.tokenize_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_TOKENIZEREQUEST']._serialized_start=47 - _globals['_TOKENIZEREQUEST']._serialized_end=99 - _globals['_TOKENIZERESPONSE']._serialized_start=101 - _globals['_TOKENIZERESPONSE']._serialized_end=136 -# @@protoc_insertion_point(module_scope) diff --git a/truffle/infer/tokenize_pb2.pyi b/truffle/infer/tokenize_pb2.pyi deleted file mode 100644 index 8f9a640..0000000 --- a/truffle/infer/tokenize_pb2.pyi +++ /dev/null @@ -1,21 +0,0 @@ -from google.protobuf.internal import containers as _containers -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from collections.abc import Iterable as _Iterable -from typing import ClassVar as _ClassVar, Optional as _Optional - -DESCRIPTOR: _descriptor.FileDescriptor - 
-class TokenizeRequest(_message.Message): - __slots__ = ("texts", "model_uuid") - TEXTS_FIELD_NUMBER: _ClassVar[int] - MODEL_UUID_FIELD_NUMBER: _ClassVar[int] - texts: _containers.RepeatedScalarFieldContainer[str] - model_uuid: str - def __init__(self, texts: _Optional[_Iterable[str]] = ..., model_uuid: _Optional[str] = ...) -> None: ... - -class TokenizeResponse(_message.Message): - __slots__ = ("lengths",) - LENGTHS_FIELD_NUMBER: _ClassVar[int] - lengths: _containers.RepeatedScalarFieldContainer[int] - def __init__(self, lengths: _Optional[_Iterable[int]] = ...) -> None: ... diff --git a/truffle/infer/tokenize_pb2_grpc.py b/truffle/infer/tokenize_pb2_grpc.py deleted file mode 100644 index 25b9a25..0000000 --- a/truffle/infer/tokenize_pb2_grpc.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc -import warnings - - -GRPC_GENERATED_VERSION = '1.72.0' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/infer/tokenize_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - ) diff --git a/truffle/infer/usage_pb2.py b/truffle/infer/usage_pb2.py deleted file mode 100644 index 60361b0..0000000 --- a/truffle/infer/usage_pb2.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE -# source: truffle/infer/usage.proto -# Protobuf Python Version: 6.30.0 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 6, - 30, - 0, - '', - 'truffle/infer/usage.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19truffle/infer/usage.proto\x12\rtruffle.infer\"\xbb\x02\n\x05Usage\x12\x12\n\ntotal_time\x18\x01 \x01(\x01\x12\x14\n\x0cprefill_time\x18\x02 \x01(\x01\x12\x13\n\x0b\x64\x65\x63ode_time\x18\x03 \x01(\x01\x12\x0c\n\x04ttft\x18\x04 \x01(\x01\x12\x1b\n\x13inter_token_latency\x18\x05 \x01(\x01\x12\x12\n\ndecode_tps\x18\x06 \x01(\x01\x12\x13\n\x0bprefill_tps\x18\x07 \x01(\x01\x12+\n\x06tokens\x18\x08 \x01(\x0b\x32\x1b.truffle.infer.Usage.Tokens\x1ar\n\x06Tokens\x12\x0e\n\x06prompt\x18\x01 \x01(\x04\x12\x12\n\ncompletion\x18\x02 \x01(\x04\x12\x0f\n\x07prefill\x18\x03 \x01(\x04\x12\x0e\n\x06\x64\x65\x63ode\x18\x04 \x01(\x04\x12\x14\n\x0cjump_forward\x18\x05 \x01(\x04\x12\r\n\x05image\x18\x06 \x01(\x04\"R\n\x0bSystemUsage\x12\x13\n\x0b\x64\x65vice_name\x18\x01 \x01(\t\x12\x14\n\x0ctotal_memory\x18\x02 \x01(\x04\x12\x18\n\x10\x61vailable_memory\x18\x03 \x01(\x04\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.infer.usage_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_USAGE']._serialized_start=45 - _globals['_USAGE']._serialized_end=360 - 
_globals['_USAGE_TOKENS']._serialized_start=246 - _globals['_USAGE_TOKENS']._serialized_end=360 - _globals['_SYSTEMUSAGE']._serialized_start=362 - _globals['_SYSTEMUSAGE']._serialized_end=444 -# @@protoc_insertion_point(module_scope) diff --git a/truffle/infer/usage_pb2.pyi b/truffle/infer/usage_pb2.pyi deleted file mode 100644 index bb2758e..0000000 --- a/truffle/infer/usage_pb2.pyi +++ /dev/null @@ -1,51 +0,0 @@ -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from collections.abc import Mapping as _Mapping -from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union - -DESCRIPTOR: _descriptor.FileDescriptor - -class Usage(_message.Message): - __slots__ = ("total_time", "prefill_time", "decode_time", "ttft", "inter_token_latency", "decode_tps", "prefill_tps", "tokens") - class Tokens(_message.Message): - __slots__ = ("prompt", "completion", "prefill", "decode", "jump_forward", "image") - PROMPT_FIELD_NUMBER: _ClassVar[int] - COMPLETION_FIELD_NUMBER: _ClassVar[int] - PREFILL_FIELD_NUMBER: _ClassVar[int] - DECODE_FIELD_NUMBER: _ClassVar[int] - JUMP_FORWARD_FIELD_NUMBER: _ClassVar[int] - IMAGE_FIELD_NUMBER: _ClassVar[int] - prompt: int - completion: int - prefill: int - decode: int - jump_forward: int - image: int - def __init__(self, prompt: _Optional[int] = ..., completion: _Optional[int] = ..., prefill: _Optional[int] = ..., decode: _Optional[int] = ..., jump_forward: _Optional[int] = ..., image: _Optional[int] = ...) -> None: ... 
- TOTAL_TIME_FIELD_NUMBER: _ClassVar[int] - PREFILL_TIME_FIELD_NUMBER: _ClassVar[int] - DECODE_TIME_FIELD_NUMBER: _ClassVar[int] - TTFT_FIELD_NUMBER: _ClassVar[int] - INTER_TOKEN_LATENCY_FIELD_NUMBER: _ClassVar[int] - DECODE_TPS_FIELD_NUMBER: _ClassVar[int] - PREFILL_TPS_FIELD_NUMBER: _ClassVar[int] - TOKENS_FIELD_NUMBER: _ClassVar[int] - total_time: float - prefill_time: float - decode_time: float - ttft: float - inter_token_latency: float - decode_tps: float - prefill_tps: float - tokens: Usage.Tokens - def __init__(self, total_time: _Optional[float] = ..., prefill_time: _Optional[float] = ..., decode_time: _Optional[float] = ..., ttft: _Optional[float] = ..., inter_token_latency: _Optional[float] = ..., decode_tps: _Optional[float] = ..., prefill_tps: _Optional[float] = ..., tokens: _Optional[_Union[Usage.Tokens, _Mapping]] = ...) -> None: ... - -class SystemUsage(_message.Message): - __slots__ = ("device_name", "total_memory", "available_memory") - DEVICE_NAME_FIELD_NUMBER: _ClassVar[int] - TOTAL_MEMORY_FIELD_NUMBER: _ClassVar[int] - AVAILABLE_MEMORY_FIELD_NUMBER: _ClassVar[int] - device_name: str - total_memory: int - available_memory: int - def __init__(self, device_name: _Optional[str] = ..., total_memory: _Optional[int] = ..., available_memory: _Optional[int] = ...) -> None: ... diff --git a/truffle/infer/usage_pb2_grpc.py b/truffle/infer/usage_pb2_grpc.py deleted file mode 100644 index 0e4108f..0000000 --- a/truffle/infer/usage_pb2_grpc.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-"""Client and server classes corresponding to protobuf-defined services.""" -import grpc -import warnings - - -GRPC_GENERATED_VERSION = '1.72.0' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/infer/usage_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - ) diff --git a/truffle/os/app_queries_pb2.py b/truffle/os/app_queries_pb2.py index 1b0f73c..4ea7a94 100644 --- a/truffle/os/app_queries_pb2.py +++ b/truffle/os/app_queries_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/app_queries.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/app_queries.proto' ) @@ -22,31 +22,22 @@ _sym_db = _symbol_database.Default() -from truffle.app import foreground_pb2 as truffle_dot_app_dot_foreground__pb2 -from truffle.app import background_pb2 as truffle_dot_app_dot_background__pb2 +from truffle.app import app_pb2 as truffle_dot_app_dot_app__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1ctruffle/os/app_queries.proto\x12\ntruffle.os\x1a\x1ctruffle/app/foreground.proto\x1a\x1ctruffle/app/background.proto\")\n\x18GetForegroundAppsRequest\x12\r\n\x05uuids\x18\x01 \x03(\t\"E\n\x19GetForegroundAppsResponse\x12(\n\x04\x61pps\x18\x01 \x03(\x0b\x32\x1a.truffle.app.ForegroundApp\")\n\x18GetBackgroundAppsRequest\x12\r\n\x05uuids\x18\x01 \x03(\t\"E\n\x19GetBackgroundAppsResponse\x12(\n\x04\x61pps\x18\x01 \x03(\x0b\x32\x1a.truffle.app.BackgroundApp\"\x13\n\x11GetAllAppsRequest\"~\n\x12GetAllAppsResponse\x12\x33\n\x0f\x66oreground_apps\x18\x01 \x03(\x0b\x32\x1a.truffle.app.ForegroundApp\x12\x33\n\x0f\x62\x61\x63kground_apps\x18\x02 \x03(\x0b\x32\x1a.truffle.app.BackgroundApp\"$\n\x10\x44\x65leteAppRequest\x12\x10\n\x08\x61pp_uuid\x18\x01 \x01(\t\"\x13\n\x11\x44\x65leteAppResponseb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1ctruffle/os/app_queries.proto\x12\ntruffle.os\x1a\x15truffle/app/app.proto\"\x13\n\x11GetAllAppsRequest\"4\n\x12GetAllAppsResponse\x12\x1e\n\x04\x61pps\x18\x01 \x03(\x0b\x32\x10.truffle.app.App\"$\n\x10\x44\x65leteAppRequest\x12\x10\n\x08\x61pp_uuid\x18\x01 \x01(\t\"\x13\n\x11\x44\x65leteAppResponseb\x06proto3') 
_globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.os.app_queries_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: DESCRIPTOR._loaded_options = None - _globals['_GETFOREGROUNDAPPSREQUEST']._serialized_start=104 - _globals['_GETFOREGROUNDAPPSREQUEST']._serialized_end=145 - _globals['_GETFOREGROUNDAPPSRESPONSE']._serialized_start=147 - _globals['_GETFOREGROUNDAPPSRESPONSE']._serialized_end=216 - _globals['_GETBACKGROUNDAPPSREQUEST']._serialized_start=218 - _globals['_GETBACKGROUNDAPPSREQUEST']._serialized_end=259 - _globals['_GETBACKGROUNDAPPSRESPONSE']._serialized_start=261 - _globals['_GETBACKGROUNDAPPSRESPONSE']._serialized_end=330 - _globals['_GETALLAPPSREQUEST']._serialized_start=332 - _globals['_GETALLAPPSREQUEST']._serialized_end=351 - _globals['_GETALLAPPSRESPONSE']._serialized_start=353 - _globals['_GETALLAPPSRESPONSE']._serialized_end=479 - _globals['_DELETEAPPREQUEST']._serialized_start=481 - _globals['_DELETEAPPREQUEST']._serialized_end=517 - _globals['_DELETEAPPRESPONSE']._serialized_start=519 - _globals['_DELETEAPPRESPONSE']._serialized_end=538 + _globals['_GETALLAPPSREQUEST']._serialized_start=67 + _globals['_GETALLAPPSREQUEST']._serialized_end=86 + _globals['_GETALLAPPSRESPONSE']._serialized_start=88 + _globals['_GETALLAPPSRESPONSE']._serialized_end=140 + _globals['_DELETEAPPREQUEST']._serialized_start=142 + _globals['_DELETEAPPREQUEST']._serialized_end=178 + _globals['_DELETEAPPRESPONSE']._serialized_start=180 + _globals['_DELETEAPPRESPONSE']._serialized_end=199 # @@protoc_insertion_point(module_scope) diff --git a/truffle/os/app_queries_pb2.pyi b/truffle/os/app_queries_pb2.pyi index d18e1f9..e1f2157 100644 --- a/truffle/os/app_queries_pb2.pyi +++ b/truffle/os/app_queries_pb2.pyi @@ -1,5 +1,4 @@ -from truffle.app import foreground_pb2 as _foreground_pb2 -from truffle.app import background_pb2 as _background_pb2 +from truffle.app import app_pb2 as 
_app_pb2 from google.protobuf.internal import containers as _containers from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message @@ -8,41 +7,15 @@ from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union DESCRIPTOR: _descriptor.FileDescriptor -class GetForegroundAppsRequest(_message.Message): - __slots__ = ("uuids",) - UUIDS_FIELD_NUMBER: _ClassVar[int] - uuids: _containers.RepeatedScalarFieldContainer[str] - def __init__(self, uuids: _Optional[_Iterable[str]] = ...) -> None: ... - -class GetForegroundAppsResponse(_message.Message): - __slots__ = ("apps",) - APPS_FIELD_NUMBER: _ClassVar[int] - apps: _containers.RepeatedCompositeFieldContainer[_foreground_pb2.ForegroundApp] - def __init__(self, apps: _Optional[_Iterable[_Union[_foreground_pb2.ForegroundApp, _Mapping]]] = ...) -> None: ... - -class GetBackgroundAppsRequest(_message.Message): - __slots__ = ("uuids",) - UUIDS_FIELD_NUMBER: _ClassVar[int] - uuids: _containers.RepeatedScalarFieldContainer[str] - def __init__(self, uuids: _Optional[_Iterable[str]] = ...) -> None: ... - -class GetBackgroundAppsResponse(_message.Message): - __slots__ = ("apps",) - APPS_FIELD_NUMBER: _ClassVar[int] - apps: _containers.RepeatedCompositeFieldContainer[_background_pb2.BackgroundApp] - def __init__(self, apps: _Optional[_Iterable[_Union[_background_pb2.BackgroundApp, _Mapping]]] = ...) -> None: ... - class GetAllAppsRequest(_message.Message): __slots__ = () def __init__(self) -> None: ... 
class GetAllAppsResponse(_message.Message): - __slots__ = ("foreground_apps", "background_apps") - FOREGROUND_APPS_FIELD_NUMBER: _ClassVar[int] - BACKGROUND_APPS_FIELD_NUMBER: _ClassVar[int] - foreground_apps: _containers.RepeatedCompositeFieldContainer[_foreground_pb2.ForegroundApp] - background_apps: _containers.RepeatedCompositeFieldContainer[_background_pb2.BackgroundApp] - def __init__(self, foreground_apps: _Optional[_Iterable[_Union[_foreground_pb2.ForegroundApp, _Mapping]]] = ..., background_apps: _Optional[_Iterable[_Union[_background_pb2.BackgroundApp, _Mapping]]] = ...) -> None: ... + __slots__ = ("apps",) + APPS_FIELD_NUMBER: _ClassVar[int] + apps: _containers.RepeatedCompositeFieldContainer[_app_pb2.App] + def __init__(self, apps: _Optional[_Iterable[_Union[_app_pb2.App, _Mapping]]] = ...) -> None: ... class DeleteAppRequest(_message.Message): __slots__ = ("app_uuid",) diff --git a/truffle/os/app_queries_pb2_grpc.py b/truffle/os/app_queries_pb2_grpc.py index 18983d9..e6864a7 100644 --- a/truffle/os/app_queries_pb2_grpc.py +++ b/truffle/os/app_queries_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/app_queries_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/app_queries_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/background_feed_pb2.py b/truffle/os/background_feed_pb2.py new file mode 100644 index 0000000..d609505 --- /dev/null +++ b/truffle/os/background_feed_pb2.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. 
DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: truffle/os/background_feed.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'truffle/os/background_feed.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from truffle.common import content_pb2 as truffle_dot_common_dot_content__pb2 +from truffle.os import proactivity_pb2 as truffle_dot_os_dot_proactivity__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n truffle/os/background_feed.proto\x12\ntruffle.os\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1ctruffle/common/content.proto\x1a\x1ctruffle/os/proactivity.proto\"\xdc\x01\n\x08\x46\x65\x65\x64\x43\x61rd\x12\r\n\x05title\x18\x01 \x01(\t\x12\x0c\n\x04\x62ody\x18\x02 \x01(\t\x12\x32\n\rmedia_sources\x18\x03 \x03(\x0b\x32\x1b.truffle.common.MediaSource\x12\x12\n\nsource_uri\x18\x04 \x01(\t\x12.\n\ncreated_at\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\x08metadata\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x88\x01\x01\x42\x0b\n\t_metadata\"8\n\x0e\x42\x61\x63kgroundFeed\x12&\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x15.truffle.os.FeedEntry\"\xce\x01\n\x15\x46\x65\x65\x64\x45ntryNotification\x12\x11\n\tentry_ids\x18\x01 \x03(\x04\x12>\n\toperation\x18\x02 
\x01(\x0e\x32+.truffle.os.FeedEntryNotification.Operation\"b\n\tOperation\x12\x15\n\x11OPERATION_INVALID\x10\x00\x12\x11\n\rOPERATION_ADD\x10\x01\x12\x14\n\x10OPERATION_DELETE\x10\x02\x12\x15\n\x11OPERATION_REFRESH\x10\x03\"\xb0\x01\n\tFeedEntry\x12\n\n\x02id\x18\x01 \x01(\x04\x12$\n\x04\x63\x61rd\x18\x02 \x01(\x0b\x32\x14.truffle.os.FeedCardH\x00\x12\x37\n\x10proactive_action\x18\x03 \x01(\x0b\x32\x1b.truffle.os.ProactiveActionH\x00\x12-\n\ttimestamp\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\t\n\x07\x63ontent\">\n\x14\x46\x65\x65\x64\x45ntryTaskContext\x12&\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x15.truffle.os.FeedEntryb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.os.background_feed_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals['_FEEDCARD']._serialized_start=172 + _globals['_FEEDCARD']._serialized_end=392 + _globals['_BACKGROUNDFEED']._serialized_start=394 + _globals['_BACKGROUNDFEED']._serialized_end=450 + _globals['_FEEDENTRYNOTIFICATION']._serialized_start=453 + _globals['_FEEDENTRYNOTIFICATION']._serialized_end=659 + _globals['_FEEDENTRYNOTIFICATION_OPERATION']._serialized_start=561 + _globals['_FEEDENTRYNOTIFICATION_OPERATION']._serialized_end=659 + _globals['_FEEDENTRY']._serialized_start=662 + _globals['_FEEDENTRY']._serialized_end=838 + _globals['_FEEDENTRYTASKCONTEXT']._serialized_start=840 + _globals['_FEEDENTRYTASKCONTEXT']._serialized_end=902 +# @@protoc_insertion_point(module_scope) diff --git a/truffle/app/background_feed_pb2.pyi b/truffle/os/background_feed_pb2.pyi similarity index 50% rename from truffle/app/background_feed_pb2.pyi rename to truffle/os/background_feed_pb2.pyi index f9bbf1d..3513f11 100644 --- a/truffle/app/background_feed_pb2.pyi +++ b/truffle/os/background_feed_pb2.pyi @@ -1,7 +1,11 @@ +import datetime + from google.protobuf import timestamp_pb2 as 
_timestamp_pb2 -from truffle.common import content_pb2 as _content_pb2 from google.protobuf import struct_pb2 as _struct_pb2 +from truffle.common import content_pb2 as _content_pb2 +from truffle.os import proactivity_pb2 as _proactivity_pb2 from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from collections.abc import Iterable as _Iterable, Mapping as _Mapping @@ -10,20 +14,20 @@ from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union DESCRIPTOR: _descriptor.FileDescriptor class FeedCard(_message.Message): - __slots__ = ("title", "body", "media_sources", "source_uri", "content_timestamp", "metadata") + __slots__ = ("title", "body", "media_sources", "source_uri", "created_at", "metadata") TITLE_FIELD_NUMBER: _ClassVar[int] BODY_FIELD_NUMBER: _ClassVar[int] MEDIA_SOURCES_FIELD_NUMBER: _ClassVar[int] SOURCE_URI_FIELD_NUMBER: _ClassVar[int] - CONTENT_TIMESTAMP_FIELD_NUMBER: _ClassVar[int] + CREATED_AT_FIELD_NUMBER: _ClassVar[int] METADATA_FIELD_NUMBER: _ClassVar[int] title: str body: str media_sources: _containers.RepeatedCompositeFieldContainer[_content_pb2.MediaSource] source_uri: str - content_timestamp: _timestamp_pb2.Timestamp + created_at: _timestamp_pb2.Timestamp metadata: _struct_pb2.Struct - def __init__(self, title: _Optional[str] = ..., body: _Optional[str] = ..., media_sources: _Optional[_Iterable[_Union[_content_pb2.MediaSource, _Mapping]]] = ..., source_uri: _Optional[str] = ..., content_timestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., metadata: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... 
+ def __init__(self, title: _Optional[str] = ..., body: _Optional[str] = ..., media_sources: _Optional[_Iterable[_Union[_content_pb2.MediaSource, _Mapping]]] = ..., source_uri: _Optional[str] = ..., created_at: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., metadata: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... class BackgroundFeed(_message.Message): __slots__ = ("entries",) @@ -31,19 +35,35 @@ class BackgroundFeed(_message.Message): entries: _containers.RepeatedCompositeFieldContainer[FeedEntry] def __init__(self, entries: _Optional[_Iterable[_Union[FeedEntry, _Mapping]]] = ...) -> None: ... +class FeedEntryNotification(_message.Message): + __slots__ = ("entry_ids", "operation") + class Operation(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + OPERATION_INVALID: _ClassVar[FeedEntryNotification.Operation] + OPERATION_ADD: _ClassVar[FeedEntryNotification.Operation] + OPERATION_DELETE: _ClassVar[FeedEntryNotification.Operation] + OPERATION_REFRESH: _ClassVar[FeedEntryNotification.Operation] + OPERATION_INVALID: FeedEntryNotification.Operation + OPERATION_ADD: FeedEntryNotification.Operation + OPERATION_DELETE: FeedEntryNotification.Operation + OPERATION_REFRESH: FeedEntryNotification.Operation + ENTRY_IDS_FIELD_NUMBER: _ClassVar[int] + OPERATION_FIELD_NUMBER: _ClassVar[int] + entry_ids: _containers.RepeatedScalarFieldContainer[int] + operation: FeedEntryNotification.Operation + def __init__(self, entry_ids: _Optional[_Iterable[int]] = ..., operation: _Optional[_Union[FeedEntryNotification.Operation, str]] = ...) -> None: ... 
+ class FeedEntry(_message.Message): - __slots__ = ("id", "app_uuid", "timestamp", "card", "likes") + __slots__ = ("id", "card", "proactive_action", "timestamp") ID_FIELD_NUMBER: _ClassVar[int] - APP_UUID_FIELD_NUMBER: _ClassVar[int] - TIMESTAMP_FIELD_NUMBER: _ClassVar[int] CARD_FIELD_NUMBER: _ClassVar[int] - LIKES_FIELD_NUMBER: _ClassVar[int] + PROACTIVE_ACTION_FIELD_NUMBER: _ClassVar[int] + TIMESTAMP_FIELD_NUMBER: _ClassVar[int] id: int - app_uuid: str - timestamp: _timestamp_pb2.Timestamp card: FeedCard - likes: int - def __init__(self, id: _Optional[int] = ..., app_uuid: _Optional[str] = ..., timestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., card: _Optional[_Union[FeedCard, _Mapping]] = ..., likes: _Optional[int] = ...) -> None: ... + proactive_action: _proactivity_pb2.ProactiveAction + timestamp: _timestamp_pb2.Timestamp + def __init__(self, id: _Optional[int] = ..., card: _Optional[_Union[FeedCard, _Mapping]] = ..., proactive_action: _Optional[_Union[_proactivity_pb2.ProactiveAction, _Mapping]] = ..., timestamp: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... 
class FeedEntryTaskContext(_message.Message): __slots__ = ("entries",) diff --git a/truffle/app/background_feed_pb2_grpc.py b/truffle/os/background_feed_pb2_grpc.py similarity index 86% rename from truffle/app/background_feed_pb2_grpc.py rename to truffle/os/background_feed_pb2_grpc.py index eb25b60..cc8f5ef 100644 --- a/truffle/app/background_feed_pb2_grpc.py +++ b/truffle/os/background_feed_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/app/background_feed_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/background_feed_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/background_feed_queries_pb2.py b/truffle/os/background_feed_queries_pb2.py index a3d91d3..5fa8b9f 100644 --- a/truffle/os/background_feed_queries_pb2.py +++ b/truffle/os/background_feed_queries_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/background_feed_queries.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/background_feed_queries.proto' ) @@ -23,10 +23,10 @@ from truffle.app import background_pb2 as truffle_dot_app_dot_background__pb2 -from truffle.app import background_feed_pb2 as truffle_dot_app_dot_background__feed__pb2 +from truffle.os import background_feed_pb2 as truffle_dot_os_dot_background__feed__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n(truffle/os/background_feed_queries.proto\x12\ntruffle.os\x1a\x1ctruffle/app/background.proto\x1a!truffle/app/background_feed.proto\"Z\n\x18GetBackgroundFeedRequest\x12\x17\n\x0ftarget_entry_id\x18\x01 \x01(\x04\x12\x12\n\nmax_before\x18\x02 \x01(\x05\x12\x11\n\tmax_after\x18\x03 \x01(\x05\"D\n\x19GetBackgroundFeedResponse\x12\'\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x16.truffle.app.FeedEntry\"J\n\x1eLikeBackgroundFeedEntryRequest\x12\x15\n\rfeed_entry_id\x18\x01 \x01(\x04\x12\x11\n\tincrement\x18\x02 \x01(\x05\"9\n\x1fLikeBackgroundFeedEntryResponse\x12\x16\n\x0enew_like_count\x18\x01 \x01(\x05\"\x1d\n\x1bGetLatestFeedEntryIDRequest\"<\n\x1cGetLatestFeedEntryIDResponse\x12\x1c\n\x14latest_feed_entry_id\x18\x01 \x01(\x04\"R\n\x1d\x42\x61\x63kgroundFeedFeedbackRequest\x12\x1f\n\x17\x61ssociated_feed_entries\x18\x01 \x03(\x04\x12\x10\n\x08\x66\x65\x65\x64\x62\x61\x63k\x18\x02 \x01(\t\"7\n\x1e\x42\x61\x63kgroundFeedFeedbackResponse\x12\x15\n\rfeedback_uuid\x18\x01 \x01(\tb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n(truffle/os/background_feed_queries.proto\x12\ntruffle.os\x1a\x1ctruffle/app/background.proto\x1a 
truffle/os/background_feed.proto\"\x8a\x01\n\x18GetBackgroundFeedRequest\x12\x17\n\x0ftarget_entry_id\x18\x01 \x01(\x04\x12\x12\n\nmax_before\x18\x02 \x01(\x05\x12\x11\n\tmax_after\x18\x03 \x01(\x05\x12\x17\n\x0finclude_actions\x18\x04 \x01(\x08\x12\x15\n\rinclude_cards\x18\x05 \x01(\x08\"C\n\x19GetBackgroundFeedResponse\x12&\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x15.truffle.os.FeedEntry\"\x1d\n\x1bGetLatestFeedEntryIDRequest\"<\n\x1cGetLatestFeedEntryIDResponse\x12\x1c\n\x14latest_feed_entry_id\x18\x01 \x01(\x04\x62\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -34,19 +34,11 @@ if not _descriptor._USE_C_DESCRIPTORS: DESCRIPTOR._loaded_options = None _globals['_GETBACKGROUNDFEEDREQUEST']._serialized_start=121 - _globals['_GETBACKGROUNDFEEDREQUEST']._serialized_end=211 - _globals['_GETBACKGROUNDFEEDRESPONSE']._serialized_start=213 - _globals['_GETBACKGROUNDFEEDRESPONSE']._serialized_end=281 - _globals['_LIKEBACKGROUNDFEEDENTRYREQUEST']._serialized_start=283 - _globals['_LIKEBACKGROUNDFEEDENTRYREQUEST']._serialized_end=357 - _globals['_LIKEBACKGROUNDFEEDENTRYRESPONSE']._serialized_start=359 - _globals['_LIKEBACKGROUNDFEEDENTRYRESPONSE']._serialized_end=416 - _globals['_GETLATESTFEEDENTRYIDREQUEST']._serialized_start=418 - _globals['_GETLATESTFEEDENTRYIDREQUEST']._serialized_end=447 - _globals['_GETLATESTFEEDENTRYIDRESPONSE']._serialized_start=449 - _globals['_GETLATESTFEEDENTRYIDRESPONSE']._serialized_end=509 - _globals['_BACKGROUNDFEEDFEEDBACKREQUEST']._serialized_start=511 - _globals['_BACKGROUNDFEEDFEEDBACKREQUEST']._serialized_end=593 - _globals['_BACKGROUNDFEEDFEEDBACKRESPONSE']._serialized_start=595 - _globals['_BACKGROUNDFEEDFEEDBACKRESPONSE']._serialized_end=650 + _globals['_GETBACKGROUNDFEEDREQUEST']._serialized_end=259 + _globals['_GETBACKGROUNDFEEDRESPONSE']._serialized_start=261 + _globals['_GETBACKGROUNDFEEDRESPONSE']._serialized_end=328 + _globals['_GETLATESTFEEDENTRYIDREQUEST']._serialized_start=330 + 
_globals['_GETLATESTFEEDENTRYIDREQUEST']._serialized_end=359 + _globals['_GETLATESTFEEDENTRYIDRESPONSE']._serialized_start=361 + _globals['_GETLATESTFEEDENTRYIDRESPONSE']._serialized_end=421 # @@protoc_insertion_point(module_scope) diff --git a/truffle/os/background_feed_queries_pb2.pyi b/truffle/os/background_feed_queries_pb2.pyi index 9ee2d57..7313d33 100644 --- a/truffle/os/background_feed_queries_pb2.pyi +++ b/truffle/os/background_feed_queries_pb2.pyi @@ -1,5 +1,5 @@ from truffle.app import background_pb2 as _background_pb2 -from truffle.app import background_feed_pb2 as _background_feed_pb2 +from truffle.os import background_feed_pb2 as _background_feed_pb2 from google.protobuf.internal import containers as _containers from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message @@ -9,14 +9,18 @@ from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union DESCRIPTOR: _descriptor.FileDescriptor class GetBackgroundFeedRequest(_message.Message): - __slots__ = ("target_entry_id", "max_before", "max_after") + __slots__ = ("target_entry_id", "max_before", "max_after", "include_actions", "include_cards") TARGET_ENTRY_ID_FIELD_NUMBER: _ClassVar[int] MAX_BEFORE_FIELD_NUMBER: _ClassVar[int] MAX_AFTER_FIELD_NUMBER: _ClassVar[int] + INCLUDE_ACTIONS_FIELD_NUMBER: _ClassVar[int] + INCLUDE_CARDS_FIELD_NUMBER: _ClassVar[int] target_entry_id: int max_before: int max_after: int - def __init__(self, target_entry_id: _Optional[int] = ..., max_before: _Optional[int] = ..., max_after: _Optional[int] = ...) -> None: ... + include_actions: bool + include_cards: bool + def __init__(self, target_entry_id: _Optional[int] = ..., max_before: _Optional[int] = ..., max_after: _Optional[int] = ..., include_actions: bool = ..., include_cards: bool = ...) -> None: ... 
class GetBackgroundFeedResponse(_message.Message): __slots__ = ("entries",) @@ -24,20 +28,6 @@ class GetBackgroundFeedResponse(_message.Message): entries: _containers.RepeatedCompositeFieldContainer[_background_feed_pb2.FeedEntry] def __init__(self, entries: _Optional[_Iterable[_Union[_background_feed_pb2.FeedEntry, _Mapping]]] = ...) -> None: ... -class LikeBackgroundFeedEntryRequest(_message.Message): - __slots__ = ("feed_entry_id", "increment") - FEED_ENTRY_ID_FIELD_NUMBER: _ClassVar[int] - INCREMENT_FIELD_NUMBER: _ClassVar[int] - feed_entry_id: int - increment: int - def __init__(self, feed_entry_id: _Optional[int] = ..., increment: _Optional[int] = ...) -> None: ... - -class LikeBackgroundFeedEntryResponse(_message.Message): - __slots__ = ("new_like_count",) - NEW_LIKE_COUNT_FIELD_NUMBER: _ClassVar[int] - new_like_count: int - def __init__(self, new_like_count: _Optional[int] = ...) -> None: ... - class GetLatestFeedEntryIDRequest(_message.Message): __slots__ = () def __init__(self) -> None: ... @@ -47,17 +37,3 @@ class GetLatestFeedEntryIDResponse(_message.Message): LATEST_FEED_ENTRY_ID_FIELD_NUMBER: _ClassVar[int] latest_feed_entry_id: int def __init__(self, latest_feed_entry_id: _Optional[int] = ...) -> None: ... - -class BackgroundFeedFeedbackRequest(_message.Message): - __slots__ = ("associated_feed_entries", "feedback") - ASSOCIATED_FEED_ENTRIES_FIELD_NUMBER: _ClassVar[int] - FEEDBACK_FIELD_NUMBER: _ClassVar[int] - associated_feed_entries: _containers.RepeatedScalarFieldContainer[int] - feedback: str - def __init__(self, associated_feed_entries: _Optional[_Iterable[int]] = ..., feedback: _Optional[str] = ...) -> None: ... - -class BackgroundFeedFeedbackResponse(_message.Message): - __slots__ = ("feedback_uuid",) - FEEDBACK_UUID_FIELD_NUMBER: _ClassVar[int] - feedback_uuid: str - def __init__(self, feedback_uuid: _Optional[str] = ...) -> None: ... 
diff --git a/truffle/os/background_feed_queries_pb2_grpc.py b/truffle/os/background_feed_queries_pb2_grpc.py index d152381..e6fa230 100644 --- a/truffle/os/background_feed_queries_pb2_grpc.py +++ b/truffle/os/background_feed_queries_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/background_feed_queries_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/background_feed_queries_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/builder_pb2.py b/truffle/os/builder_pb2.py index 866f33f..dde6b87 100644 --- a/truffle/os/builder_pb2.py +++ b/truffle/os/builder_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/builder.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/builder.proto' ) @@ -22,27 +22,27 @@ _sym_db = _symbol_database.Default() -from truffle.app import app_type_pb2 as truffle_dot_app_dot_app__type__pb2 from truffle.app import background_pb2 as truffle_dot_app_dot_background__pb2 from truffle.app import foreground_pb2 as truffle_dot_app_dot_foreground__pb2 from truffle.app import app_build_pb2 as truffle_dot_app_dot_app__build__pb2 +from truffle.app import app_pb2 as truffle_dot_app_dot_app__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x18truffle/os/builder.proto\x12\ntruffle.os\x1a\x1atruffle/app/app_type.proto\x1a\x1ctruffle/app/background.proto\x1a\x1ctruffle/app/foreground.proto\x1a\x1btruffle/app/app_build.proto\"f\n\x18StartBuildSessionRequest\x12\x15\n\x08\x61pp_uuid\x18\x01 \x01(\tH\x00\x88\x01\x01\x12&\n\x08\x61pp_type\x18\x02 \x01(\x0e\x32\x14.truffle.app.AppTypeB\x0b\n\t_app_uuid\"B\n\x19StartBuildSessionResponse\x12\x13\n\x0b\x61\x63\x63\x65ss_path\x18\x01 \x01(\t\x12\x10\n\x08\x61pp_uuid\x18\x02 \x01(\t\"\xef\x01\n\x19\x46inishBuildSessionRequest\x12\x10\n\x08\x61pp_uuid\x18\x01 \x01(\t\x12\x0f\n\x07\x64iscard\x18\x02 \x01(\x08\x12\x39\n\nforeground\x18\x03 \x01(\x0b\x32#.truffle.app.ForegroundAppBuildInfoH\x00\x12\x39\n\nbackground\x18\x04 \x01(\x0b\x32#.truffle.app.BackgroundAppBuildInfoH\x00\x12+\n\x07process\x18\x05 \x01(\x0b\x32\x1a.truffle.app.ProcessConfigB\x0c\n\nbuild_info\"D\n\x11\x42uildSessionError\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x14\n\x07\x64\x65tails\x18\x02 
\x01(\tH\x00\x88\x01\x01\x42\n\n\x08_details\"Y\n\x1a\x46inishBuildSessionResponse\x12\x31\n\x05\x65rror\x18\x01 \x01(\x0b\x32\x1d.truffle.os.BuildSessionErrorH\x00\x88\x01\x01\x42\x08\n\x06_errorb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x18truffle/os/builder.proto\x12\ntruffle.os\x1a\x1ctruffle/app/background.proto\x1a\x1ctruffle/app/foreground.proto\x1a\x1btruffle/app/app_build.proto\x1a\x15truffle/app/app.proto\">\n\x18StartBuildSessionRequest\x12\x15\n\x08\x61pp_uuid\x18\x01 \x01(\tH\x00\x88\x01\x01\x42\x0b\n\t_app_uuid\"B\n\x19StartBuildSessionResponse\x12\x13\n\x0b\x61\x63\x63\x65ss_path\x18\x01 \x01(\t\x12\x10\n\x08\x61pp_uuid\x18\x02 \x01(\t\"\x84\x02\n\x19\x46inishBuildSessionRequest\x12\x10\n\x08\x61pp_uuid\x18\x01 \x01(\t\x12\x0f\n\x07\x64iscard\x18\x02 \x01(\x08\x12*\n\x08metadata\x18\x03 \x01(\x0b\x32\x18.truffle.app.AppMetadata\x12<\n\nforeground\x18\x04 \x01(\x0b\x32#.truffle.app.ForegroundAppBuildInfoH\x00\x88\x01\x01\x12<\n\nbackground\x18\x05 \x01(\x0b\x32#.truffle.app.BackgroundAppBuildInfoH\x01\x88\x01\x01\x42\r\n\x0b_foregroundB\r\n\x0b_background\"D\n\x11\x42uildSessionError\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x14\n\x07\x64\x65tails\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\n\n\x08_details\"Y\n\x1a\x46inishBuildSessionResponse\x12\x31\n\x05\x65rror\x18\x01 \x01(\x0b\x32\x1d.truffle.os.BuildSessionErrorH\x00\x88\x01\x01\x42\x08\n\x06_errorb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.os.builder_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: DESCRIPTOR._loaded_options = None - _globals['_STARTBUILDSESSIONREQUEST']._serialized_start=157 - _globals['_STARTBUILDSESSIONREQUEST']._serialized_end=259 - _globals['_STARTBUILDSESSIONRESPONSE']._serialized_start=261 - _globals['_STARTBUILDSESSIONRESPONSE']._serialized_end=327 - _globals['_FINISHBUILDSESSIONREQUEST']._serialized_start=330 - 
_globals['_FINISHBUILDSESSIONREQUEST']._serialized_end=569 - _globals['_BUILDSESSIONERROR']._serialized_start=571 - _globals['_BUILDSESSIONERROR']._serialized_end=639 - _globals['_FINISHBUILDSESSIONRESPONSE']._serialized_start=641 - _globals['_FINISHBUILDSESSIONRESPONSE']._serialized_end=730 + _globals['_STARTBUILDSESSIONREQUEST']._serialized_start=152 + _globals['_STARTBUILDSESSIONREQUEST']._serialized_end=214 + _globals['_STARTBUILDSESSIONRESPONSE']._serialized_start=216 + _globals['_STARTBUILDSESSIONRESPONSE']._serialized_end=282 + _globals['_FINISHBUILDSESSIONREQUEST']._serialized_start=285 + _globals['_FINISHBUILDSESSIONREQUEST']._serialized_end=545 + _globals['_BUILDSESSIONERROR']._serialized_start=547 + _globals['_BUILDSESSIONERROR']._serialized_end=615 + _globals['_FINISHBUILDSESSIONRESPONSE']._serialized_start=617 + _globals['_FINISHBUILDSESSIONRESPONSE']._serialized_end=706 # @@protoc_insertion_point(module_scope) diff --git a/truffle/os/builder_pb2.pyi b/truffle/os/builder_pb2.pyi index f36a285..e62fd5a 100644 --- a/truffle/os/builder_pb2.pyi +++ b/truffle/os/builder_pb2.pyi @@ -1,7 +1,7 @@ -from truffle.app import app_type_pb2 as _app_type_pb2 from truffle.app import background_pb2 as _background_pb2 from truffle.app import foreground_pb2 as _foreground_pb2 from truffle.app import app_build_pb2 as _app_build_pb2 +from truffle.app import app_pb2 as _app_pb2 from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from collections.abc import Mapping as _Mapping @@ -10,12 +10,10 @@ from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union DESCRIPTOR: _descriptor.FileDescriptor class StartBuildSessionRequest(_message.Message): - __slots__ = ("app_uuid", "app_type") + __slots__ = ("app_uuid",) APP_UUID_FIELD_NUMBER: _ClassVar[int] - APP_TYPE_FIELD_NUMBER: _ClassVar[int] app_uuid: str - app_type: _app_type_pb2.AppType - def __init__(self, app_uuid: _Optional[str] = ..., app_type: 
_Optional[_Union[_app_type_pb2.AppType, str]] = ...) -> None: ... + def __init__(self, app_uuid: _Optional[str] = ...) -> None: ... class StartBuildSessionResponse(_message.Message): __slots__ = ("access_path", "app_uuid") @@ -26,18 +24,18 @@ class StartBuildSessionResponse(_message.Message): def __init__(self, access_path: _Optional[str] = ..., app_uuid: _Optional[str] = ...) -> None: ... class FinishBuildSessionRequest(_message.Message): - __slots__ = ("app_uuid", "discard", "foreground", "background", "process") + __slots__ = ("app_uuid", "discard", "metadata", "foreground", "background") APP_UUID_FIELD_NUMBER: _ClassVar[int] DISCARD_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] FOREGROUND_FIELD_NUMBER: _ClassVar[int] BACKGROUND_FIELD_NUMBER: _ClassVar[int] - PROCESS_FIELD_NUMBER: _ClassVar[int] app_uuid: str discard: bool + metadata: _app_pb2.AppMetadata foreground: _foreground_pb2.ForegroundAppBuildInfo background: _background_pb2.BackgroundAppBuildInfo - process: _app_build_pb2.ProcessConfig - def __init__(self, app_uuid: _Optional[str] = ..., discard: bool = ..., foreground: _Optional[_Union[_foreground_pb2.ForegroundAppBuildInfo, _Mapping]] = ..., background: _Optional[_Union[_background_pb2.BackgroundAppBuildInfo, _Mapping]] = ..., process: _Optional[_Union[_app_build_pb2.ProcessConfig, _Mapping]] = ...) -> None: ... + def __init__(self, app_uuid: _Optional[str] = ..., discard: bool = ..., metadata: _Optional[_Union[_app_pb2.AppMetadata, _Mapping]] = ..., foreground: _Optional[_Union[_foreground_pb2.ForegroundAppBuildInfo, _Mapping]] = ..., background: _Optional[_Union[_background_pb2.BackgroundAppBuildInfo, _Mapping]] = ...) -> None: ... 
class BuildSessionError(_message.Message): __slots__ = ("error", "details") diff --git a/truffle/os/builder_pb2_grpc.py b/truffle/os/builder_pb2_grpc.py index d5be29b..9a3c71e 100644 --- a/truffle/os/builder_pb2_grpc.py +++ b/truffle/os/builder_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/builder_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/builder_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/classification_pb2.py b/truffle/os/classification_pb2.py deleted file mode 100644 index 0489117..0000000 --- a/truffle/os/classification_pb2.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE -# source: truffle/os/classification.proto -# Protobuf Python Version: 6.30.0 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 6, - 30, - 0, - '', - 'truffle/os/classification.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1ftruffle/os/classification.proto\x12\ntruffle.os\"K\n\x0f\x43lassifyRequest\x12\x0e\n\x06prompt\x18\x01 \x01(\t\x12\x18\n\x0bmax_results\x18\x02 \x01(\x05H\x00\x88\x01\x01\x42\x0e\n\x0c_max_results\"\x81\x01\n\x10\x43lassifyResponse\x12<\n\x07results\x18\x01 \x03(\x0b\x32+.truffle.os.ClassifyResponse.ClassifyResult\x1a/\n\x0e\x43lassifyResult\x12\x0e\n\x06\x61pp_id\x18\x01 \x01(\t\x12\r\n\x05score\x18\x02 \x01(\x02\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.os.classification_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_CLASSIFYREQUEST']._serialized_start=47 - _globals['_CLASSIFYREQUEST']._serialized_end=122 - _globals['_CLASSIFYRESPONSE']._serialized_start=125 - _globals['_CLASSIFYRESPONSE']._serialized_end=254 - _globals['_CLASSIFYRESPONSE_CLASSIFYRESULT']._serialized_start=207 - _globals['_CLASSIFYRESPONSE_CLASSIFYRESULT']._serialized_end=254 -# @@protoc_insertion_point(module_scope) diff --git a/truffle/os/classification_pb2.pyi b/truffle/os/classification_pb2.pyi deleted file mode 100644 index 4bca042..0000000 --- 
a/truffle/os/classification_pb2.pyi +++ /dev/null @@ -1,28 +0,0 @@ -from google.protobuf.internal import containers as _containers -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from collections.abc import Iterable as _Iterable, Mapping as _Mapping -from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union - -DESCRIPTOR: _descriptor.FileDescriptor - -class ClassifyRequest(_message.Message): - __slots__ = ("prompt", "max_results") - PROMPT_FIELD_NUMBER: _ClassVar[int] - MAX_RESULTS_FIELD_NUMBER: _ClassVar[int] - prompt: str - max_results: int - def __init__(self, prompt: _Optional[str] = ..., max_results: _Optional[int] = ...) -> None: ... - -class ClassifyResponse(_message.Message): - __slots__ = ("results",) - class ClassifyResult(_message.Message): - __slots__ = ("app_id", "score") - APP_ID_FIELD_NUMBER: _ClassVar[int] - SCORE_FIELD_NUMBER: _ClassVar[int] - app_id: str - score: float - def __init__(self, app_id: _Optional[str] = ..., score: _Optional[float] = ...) -> None: ... - RESULTS_FIELD_NUMBER: _ClassVar[int] - results: _containers.RepeatedCompositeFieldContainer[ClassifyResponse.ClassifyResult] - def __init__(self, results: _Optional[_Iterable[_Union[ClassifyResponse.ClassifyResult, _Mapping]]] = ...) -> None: ... diff --git a/truffle/os/classification_pb2_grpc.py b/truffle/os/classification_pb2_grpc.py deleted file mode 100644 index 62c9799..0000000 --- a/truffle/os/classification_pb2_grpc.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-"""Client and server classes corresponding to protobuf-defined services.""" -import grpc -import warnings - - -GRPC_GENERATED_VERSION = '1.72.0' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/classification_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - ) diff --git a/truffle/os/client_metadata_pb2.py b/truffle/os/client_metadata_pb2.py index 9ba5066..d04c359 100644 --- a/truffle/os/client_metadata_pb2.py +++ b/truffle/os/client_metadata_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/client_metadata.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/client_metadata.proto' ) diff --git a/truffle/os/client_metadata_pb2_grpc.py b/truffle/os/client_metadata_pb2_grpc.py index 66437f4..3adeae5 100644 --- a/truffle/os/client_metadata_pb2_grpc.py +++ b/truffle/os/client_metadata_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/client_metadata_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/client_metadata_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/client_session_pb2.py b/truffle/os/client_session_pb2.py index 0eaf851..b8d9c42 100644 --- a/truffle/os/client_session_pb2.py +++ b/truffle/os/client_session_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/client_session.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/client_session.proto' ) diff --git a/truffle/os/client_session_pb2.pyi b/truffle/os/client_session_pb2.pyi index a1a2178..23dc24b 100644 --- a/truffle/os/client_session_pb2.pyi +++ b/truffle/os/client_session_pb2.pyi @@ -1,3 +1,5 @@ +import datetime + from truffle.os import client_metadata_pb2 as _client_metadata_pb2 from google.protobuf import timestamp_pb2 as _timestamp_pb2 from google.protobuf.internal import containers as _containers @@ -43,7 +45,7 @@ class NewSessionVerification(_message.Message): verification_token: str expires_at: _timestamp_pb2.Timestamp requesting_client: _client_metadata_pb2.ClientMetadata - def __init__(self, verification_token: _Optional[str] = ..., expires_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., requesting_client: _Optional[_Union[_client_metadata_pb2.ClientMetadata, _Mapping]] = ...) -> None: ... + def __init__(self, verification_token: _Optional[str] = ..., expires_at: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., requesting_client: _Optional[_Union[_client_metadata_pb2.ClientMetadata, _Mapping]] = ...) -> None: ... 
class VerifyNewSessionRequest(_message.Message): __slots__ = ("verification_token", "allow") diff --git a/truffle/os/client_session_pb2_grpc.py b/truffle/os/client_session_pb2_grpc.py index d244186..d94ebaf 100644 --- a/truffle/os/client_session_pb2_grpc.py +++ b/truffle/os/client_session_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/client_session_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/client_session_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/client_state_pb2.py b/truffle/os/client_state_pb2.py index f1fe300..59da1b3 100644 --- a/truffle/os/client_state_pb2.py +++ b/truffle/os/client_state_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/client_state.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/client_state.proto' ) diff --git a/truffle/os/client_state_pb2_grpc.py b/truffle/os/client_state_pb2_grpc.py index 0fd1040..92d9bc3 100644 --- a/truffle/os/client_state_pb2_grpc.py +++ b/truffle/os/client_state_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/client_state_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/client_state_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/client_user_pb2.py b/truffle/os/client_user_pb2.py index 1ccf5e6..d2cb9a3 100644 --- a/truffle/os/client_user_pb2.py +++ b/truffle/os/client_user_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/client_user.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/client_user.proto' ) diff --git a/truffle/os/client_user_pb2_grpc.py b/truffle/os/client_user_pb2_grpc.py index 0a6955b..732739c 100644 --- a/truffle/os/client_user_pb2_grpc.py +++ b/truffle/os/client_user_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/client_user_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/client_user_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/hardware_control_pb2.py b/truffle/os/hardware_control_pb2.py index 9f28a54..25f2ab5 100644 --- a/truffle/os/hardware_control_pb2.py +++ b/truffle/os/hardware_control_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/hardware_control.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/hardware_control.proto' ) diff --git a/truffle/os/hardware_control_pb2_grpc.py b/truffle/os/hardware_control_pb2_grpc.py index e98792b..0cabd14 100644 --- a/truffle/os/hardware_control_pb2_grpc.py +++ b/truffle/os/hardware_control_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/hardware_control_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/hardware_control_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/hardware_info_pb2.py b/truffle/os/hardware_info_pb2.py index bcf246c..f98e4da 100644 --- a/truffle/os/hardware_info_pb2.py +++ b/truffle/os/hardware_info_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/hardware_info.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/hardware_info.proto' ) @@ -23,17 +23,16 @@ from truffle.os import hardware_network_pb2 as truffle_dot_os_dot_hardware__network__pb2 -from truffle.common import led_states_pb2 as truffle_dot_common_dot_led__states__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1etruffle/os/hardware_info.proto\x12\ntruffle.os\x1a!truffle/os/hardware_network.proto\x1a\x1ftruffle/common/led_states.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xa8\x02\n\x0cHardwareInfo\x12\x10\n\x08hostname\x18\x01 \x01(\t\x12\x12\n\nip_address\x18\x02 \x01(\t\x12\x13\n\x0bmac_address\x18\x03 \x01(\t\x12\x39\n\x0enetwork_status\x18\x04 \x01(\x0e\x32!.truffle.os.HardwareNetworkStatus\x12\x42\n\x14\x63urrent_wifi_network\x18\x05 \x01(\x0b\x32\x1f.truffle.os.HardwareWifiNetworkH\x00\x88\x01\x01\x12\x15\n\rserial_number\x18\n \x01(\t\x12.\n\nstart_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x17\n\x15_current_wifi_networkb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1etruffle/os/hardware_info.proto\x12\ntruffle.os\x1a!truffle/os/hardware_network.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xba\x02\n\x0cHardwareInfo\x12\x10\n\x08hostname\x18\x01 \x01(\t\x12\x12\n\nip_address\x18\x02 \x01(\t\x12\x13\n\x0bmac_address\x18\x03 \x01(\t\x12\x39\n\x0enetwork_status\x18\x04 \x01(\x0e\x32!.truffle.os.HardwareNetworkStatus\x12\x42\n\x14\x63urrent_wifi_network\x18\x05 
\x01(\x0b\x32\x1f.truffle.os.HardwareWifiNetworkH\x00\x88\x01\x01\x12\x15\n\rserial_number\x18\n \x01(\t\x12.\n\nstart_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x10\n\x08timezone\x18\x0c \x01(\tB\x17\n\x15_current_wifi_networkb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.os.hardware_info_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: DESCRIPTOR._loaded_options = None - _globals['_HARDWAREINFO']._serialized_start=148 - _globals['_HARDWAREINFO']._serialized_end=444 + _globals['_HARDWAREINFO']._serialized_start=115 + _globals['_HARDWAREINFO']._serialized_end=429 # @@protoc_insertion_point(module_scope) diff --git a/truffle/os/hardware_info_pb2.pyi b/truffle/os/hardware_info_pb2.pyi index 5042bff..5381f44 100644 --- a/truffle/os/hardware_info_pb2.pyi +++ b/truffle/os/hardware_info_pb2.pyi @@ -1,5 +1,6 @@ +import datetime + from truffle.os import hardware_network_pb2 as _hardware_network_pb2 -from truffle.common import led_states_pb2 as _led_states_pb2 from google.protobuf import timestamp_pb2 as _timestamp_pb2 from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message @@ -9,7 +10,7 @@ from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union DESCRIPTOR: _descriptor.FileDescriptor class HardwareInfo(_message.Message): - __slots__ = ("hostname", "ip_address", "mac_address", "network_status", "current_wifi_network", "serial_number", "start_time") + __slots__ = ("hostname", "ip_address", "mac_address", "network_status", "current_wifi_network", "serial_number", "start_time", "timezone") HOSTNAME_FIELD_NUMBER: _ClassVar[int] IP_ADDRESS_FIELD_NUMBER: _ClassVar[int] MAC_ADDRESS_FIELD_NUMBER: _ClassVar[int] @@ -17,6 +18,7 @@ class HardwareInfo(_message.Message): CURRENT_WIFI_NETWORK_FIELD_NUMBER: _ClassVar[int] SERIAL_NUMBER_FIELD_NUMBER: _ClassVar[int] 
START_TIME_FIELD_NUMBER: _ClassVar[int] + TIMEZONE_FIELD_NUMBER: _ClassVar[int] hostname: str ip_address: str mac_address: str @@ -24,4 +26,5 @@ class HardwareInfo(_message.Message): current_wifi_network: _hardware_network_pb2.HardwareWifiNetwork serial_number: str start_time: _timestamp_pb2.Timestamp - def __init__(self, hostname: _Optional[str] = ..., ip_address: _Optional[str] = ..., mac_address: _Optional[str] = ..., network_status: _Optional[_Union[_hardware_network_pb2.HardwareNetworkStatus, str]] = ..., current_wifi_network: _Optional[_Union[_hardware_network_pb2.HardwareWifiNetwork, _Mapping]] = ..., serial_number: _Optional[str] = ..., start_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... + timezone: str + def __init__(self, hostname: _Optional[str] = ..., ip_address: _Optional[str] = ..., mac_address: _Optional[str] = ..., network_status: _Optional[_Union[_hardware_network_pb2.HardwareNetworkStatus, str]] = ..., current_wifi_network: _Optional[_Union[_hardware_network_pb2.HardwareWifiNetwork, _Mapping]] = ..., serial_number: _Optional[str] = ..., start_time: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., timezone: _Optional[str] = ...) -> None: ... diff --git a/truffle/os/hardware_info_pb2_grpc.py b/truffle/os/hardware_info_pb2_grpc.py index 2b1cec8..0920be0 100644 --- a/truffle/os/hardware_info_pb2_grpc.py +++ b/truffle/os/hardware_info_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/hardware_info_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/hardware_info_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' 
+ f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/hardware_network_pb2.py b/truffle/os/hardware_network_pb2.py index b961935..a93c1d4 100644 --- a/truffle/os/hardware_network_pb2.py +++ b/truffle/os/hardware_network_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! # NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/hardware_network.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/hardware_network.proto' ) diff --git a/truffle/os/hardware_network_pb2_grpc.py b/truffle/os/hardware_network_pb2_grpc.py index 83be82a..122a5b5 100644 --- a/truffle/os/hardware_network_pb2_grpc.py +++ b/truffle/os/hardware_network_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/hardware_network_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/hardware_network_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' 
diff --git a/truffle/os/hardware_settings_pb2.py b/truffle/os/hardware_settings_pb2.py index 73b02f9..2dc5c8b 100644 --- a/truffle/os/hardware_settings_pb2.py +++ b/truffle/os/hardware_settings_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! # NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/hardware_settings.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/hardware_settings.proto' ) diff --git a/truffle/os/hardware_settings_pb2_grpc.py b/truffle/os/hardware_settings_pb2_grpc.py index c3ea15a..ed803c6 100644 --- a/truffle/os/hardware_settings_pb2_grpc.py +++ b/truffle/os/hardware_settings_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/hardware_settings_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/hardware_settings_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/hardware_stats_pb2.py b/truffle/os/hardware_stats_pb2.py index f2deb13..6cfab76 100644 --- a/truffle/os/hardware_stats_pb2.py +++ b/truffle/os/hardware_stats_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/hardware_stats.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/hardware_stats.proto' ) diff --git a/truffle/os/hardware_stats_pb2_grpc.py b/truffle/os/hardware_stats_pb2_grpc.py index e5ba7dd..122c216 100644 --- a/truffle/os/hardware_stats_pb2_grpc.py +++ b/truffle/os/hardware_stats_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/hardware_stats_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/hardware_stats_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/installer_pb2.py b/truffle/os/installer_pb2.py index 82cf1b8..4ac2a76 100644 --- a/truffle/os/installer_pb2.py +++ b/truffle/os/installer_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/installer.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/installer.proto' ) @@ -22,12 +22,11 @@ _sym_db = _symbol_database.Default() -from truffle.app import foreground_pb2 as truffle_dot_app_dot_foreground__pb2 -from truffle.app import background_pb2 as truffle_dot_app_dot_background__pb2 -from truffle.app import app_type_pb2 as truffle_dot_app_dot_app__type__pb2 +from truffle.app import app_pb2 as truffle_dot_app_dot_app__pb2 +from truffle.app.app_pb2 import * -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1atruffle/os/installer.proto\x12\ntruffle.os\x1a\x1ctruffle/app/foreground.proto\x1a\x1ctruffle/app/background.proto\x1a\x1atruffle/app/app_type.proto\"\x87\x01\n\x10\x41ppInstallSource\x12\x35\n\x0bsource_type\x18\x01 \x01(\x0e\x32 .truffle.os.AppInstallSourceType\x12\x10\n\x03url\x18\x02 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08git_hash\x18\x03 \x01(\tH\x01\x88\x01\x01\x42\x06\n\x04_urlB\x0b\n\t_git_hash\"\xc3\x07\n\x0f\x41ppInstallModal\x12\x12\n\nstep_index\x18\x01 \x01(\x05\x12\x11\n\tstep_name\x18\x02 \x01(\t\x12\x41\n\rwelcome_modal\x18\n \x01(\x0b\x32(.truffle.os.AppInstallModal.WelcomeModalH\x00\x12H\n\x11text_fields_modal\x18\x0b \x01(\x0b\x32+.truffle.os.AppInstallModal.TextFieldsModalH\x00\x12\x39\n\tvnc_modal\x18\x0c \x01(\x0b\x32$.truffle.os.AppInstallModal.VNCModalH\x00\x12?\n\x0c\x66inish_modal\x18\x0e \x01(\x0b\x32\'.truffle.os.AppInstallModal.FinishModalH\x00\x12H\n\x11upload_file_modal\x18\x0f \x01(\x0b\x32+.truffle.os.AppInstallModal.UploadFileModalH\x00\x1a\'\n\x0cWelcomeModal\x12\x17\n\x0fwelcome_message\x18\x01 
\x01(\t\x1a\xca\x02\n\x0fTextFieldsModal\x12\x14\n\x0cinstructions\x18\x01 \x01(\t\x12G\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x37.truffle.os.AppInstallModal.TextFieldsModal.FieldsEntry\x1ar\n\tTextField\x12\r\n\x05label\x18\x01 \x01(\t\x12\x13\n\x0bplaceholder\x18\x02 \x01(\t\x12\x13\n\x0bis_password\x18\x03 \x01(\x08\x12\x1a\n\rdefault_value\x18\x04 \x01(\tH\x00\x88\x01\x01\x42\x10\n\x0e_default_value\x1a\x64\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x44\n\x05value\x18\x02 \x01(\x0b\x32\x35.truffle.os.AppInstallModal.TextFieldsModal.TextField:\x02\x38\x01\x1aR\n\x08VNCModal\x12\x14\n\x0cinstructions\x18\x01 \x01(\t\x12\x14\n\x0cvnc_uri_path\x18\x02 \x01(\t\x12\x1a\n\x12\x63loses_on_complete\x18\x03 \x01(\x08\x1a*\n\x0fUploadFileModal\x12\x17\n\x0fupload_uri_path\x18\x01 \x01(\t\x1a\x37\n\x0b\x46inishModal\x12\x10\n\x08\x61pp_uuid\x18\x01 \x01(\t\x12\x16\n\x0e\x66inish_message\x18\x02 \x01(\tB\x07\n\x05modal\"(\n\x0f\x41ppInstallError\x12\x15\n\rerror_message\x18\x01 \x01(\t\",\n\x11\x41ppInstallLoading\x12\x17\n\x0floading_message\x18\x01 \x01(\t\"\xde\x01\n\x12\x41ppInstallMetadata\x12\x0c\n\x04uuid\x18\x01 \x01(\t\x12&\n\x08\x61pp_type\x18\x02 \x01(\x0e\x32\x14.truffle.app.AppType\x12\x42\n\x13\x66oreground_metadata\x18\x03 \x01(\x0b\x32#.truffle.app.ForegroundApp.MetadataH\x00\x12\x42\n\x13\x62\x61\x63kground_metadata\x18\x04 \x01(\x0b\x32#.truffle.app.BackgroundApp.MetadataH\x00\x42\n\n\x08metadata\"\xc1\x03\n\x14\x41ppInstallUserAction\x12;\n\x04next\x18\x01 \x01(\x0b\x32+.truffle.os.AppInstallUserAction.NextActionH\x00\x12N\n\x0btext_fields\x18\x02 \x01(\x0b\x32\x37.truffle.os.AppInstallUserAction.SubmitTextFieldsActionH\x00\x12=\n\x05\x61\x62ort\x18\x03 \x01(\x0b\x32,.truffle.os.AppInstallUserAction.AbortActionH\x00\x1a\x0c\n\nNextAction\x1a\xb5\x01\n\x16SubmitTextFieldsAction\x12\x64\n\x0f\x66ield_responses\x18\x01 
\x03(\x0b\x32K.truffle.os.AppInstallUserAction.SubmitTextFieldsAction.FieldResponsesEntry\x1a\x35\n\x13\x46ieldResponsesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\r\n\x0b\x41\x62ortActionB\x08\n\x06\x61\x63tion\"\xde\x01\n\x11\x41ppInstallRequest\x12\x42\n\tstart_new\x18\x01 \x01(\x0b\x32-.truffle.os.AppInstallRequest.StartNewInstallH\x00\x12\x37\n\x0buser_action\x18\x03 \x01(\x0b\x32 .truffle.os.AppInstallUserActionH\x00\x1a?\n\x0fStartNewInstall\x12,\n\x06source\x18\x01 \x01(\x0b\x32\x1c.truffle.os.AppInstallSourceB\x0b\n\toperation\"\x83\x02\n\x12\x41ppInstallResponse\x12\x34\n\rinstall_modal\x18\x01 \x01(\x0b\x32\x1b.truffle.os.AppInstallModalH\x00\x12\x34\n\rinstall_error\x18\x02 \x01(\x0b\x32\x1b.truffle.os.AppInstallErrorH\x00\x12\x38\n\x0finstall_loading\x18\x03 \x01(\x0b\x32\x1d.truffle.os.AppInstallLoadingH\x00\x12:\n\x10install_metadata\x18\x04 \x01(\x0b\x32\x1e.truffle.os.AppInstallMetadataH\x00\x42\x0b\n\toperation*\xa3\x01\n\x14\x41ppInstallSourceType\x12\'\n#APP_INSTALL_SOURCE_TYPE_UNSPECIFIED\x10\x00\x12\x1f\n\x1b\x41PP_INSTALL_SOURCE_TYPE_URL\x10\x01\x12 \n\x1c\x41PP_INSTALL_SOURCE_TYPE_FILE\x10\x02\x12\x1f\n\x1b\x41PP_INSTALL_SOURCE_TYPE_GIT\x10\x03\x62\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1atruffle/os/installer.proto\x12\ntruffle.os\x1a\x15truffle/app/app.proto\"\x87\x01\n\x10\x41ppInstallSource\x12\x35\n\x0bsource_type\x18\x01 \x01(\x0e\x32 .truffle.os.AppInstallSourceType\x12\x10\n\x03url\x18\x02 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08git_hash\x18\x03 \x01(\tH\x01\x88\x01\x01\x42\x06\n\x04_urlB\x0b\n\t_git_hash\"\xd9\x08\n\x0f\x41ppInstallModal\x12\x12\n\nstep_index\x18\x01 \x01(\x05\x12\x11\n\tstep_name\x18\x02 \x01(\t\x12\x41\n\rwelcome_modal\x18\n \x01(\x0b\x32(.truffle.os.AppInstallModal.WelcomeModalH\x00\x12H\n\x11text_fields_modal\x18\x0b \x01(\x0b\x32+.truffle.os.AppInstallModal.TextFieldsModalH\x00\x12\x39\n\tvnc_modal\x18\x0c 
\x01(\x0b\x32$.truffle.os.AppInstallModal.VNCModalH\x00\x12?\n\x0c\x66inish_modal\x18\x0e \x01(\x0b\x32\'.truffle.os.AppInstallModal.FinishModalH\x00\x12H\n\x11upload_file_modal\x18\x0f \x01(\x0b\x32+.truffle.os.AppInstallModal.UploadFileModalH\x00\x12=\n\x0boauth_modal\x18\x10 \x01(\x0b\x32&.truffle.os.AppInstallModal.OAuthModalH\x00\x1a\'\n\x0cWelcomeModal\x12\x17\n\x0fwelcome_message\x18\x01 \x01(\t\x1a\xca\x02\n\x0fTextFieldsModal\x12\x14\n\x0cinstructions\x18\x01 \x01(\t\x12G\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x37.truffle.os.AppInstallModal.TextFieldsModal.FieldsEntry\x1ar\n\tTextField\x12\r\n\x05label\x18\x01 \x01(\t\x12\x13\n\x0bplaceholder\x18\x02 \x01(\t\x12\x13\n\x0bis_password\x18\x03 \x01(\x08\x12\x1a\n\rdefault_value\x18\x04 \x01(\tH\x00\x88\x01\x01\x42\x10\n\x0e_default_value\x1a\x64\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x44\n\x05value\x18\x02 \x01(\x0b\x32\x35.truffle.os.AppInstallModal.TextFieldsModal.TextField:\x02\x38\x01\x1aR\n\x08VNCModal\x12\x14\n\x0cinstructions\x18\x01 \x01(\t\x12\x14\n\x0cvnc_uri_path\x18\x02 \x01(\t\x12\x1a\n\x12\x63loses_on_complete\x18\x03 \x01(\x08\x1aU\n\nOAuthModal\x12\x14\n\x0cinstructions\x18\x01 \x01(\t\x12\x10\n\x08provider\x18\x02 \x01(\t\x12\x10\n\x08\x61uth_url\x18\x03 \x01(\t\x12\r\n\x05state\x18\x04 \x01(\t\x1a*\n\x0fUploadFileModal\x12\x17\n\x0fupload_uri_path\x18\x01 \x01(\t\x1a\x37\n\x0b\x46inishModal\x12\x10\n\x08\x61pp_uuid\x18\x01 \x01(\t\x12\x16\n\x0e\x66inish_message\x18\x02 \x01(\tB\x07\n\x05modal\"(\n\x0f\x41ppInstallError\x12\x15\n\rerror_message\x18\x01 \x01(\t\",\n\x11\x41ppInstallLoading\x12\x17\n\x0floading_message\x18\x01 \x01(\t\"\xb1\x01\n\x0e\x41ppInstallHint\x12\x34\n\x08ui_state\x18\x01 \x01(\x0e\x32\".truffle.os.AppInstallHint.UiState\"i\n\x07UiState\x12\x18\n\x14UI_STATE_UNSPECIFIED\x10\x00\x12#\n\x1fUI_STATE_USER_INTERACTION_READY\x10\x01\x12\x1f\n\x1bUI_STATE_MOVE_TO_BACKGROUND\x10\x02\"~\n\x12\x41ppInstallMetadata\x12\x0c\n\x04uuid\x18\x01 
\x01(\t\x12*\n\x08metadata\x18\x02 \x01(\x0b\x32\x18.truffle.app.AppMetadata\x12\x16\n\x0ehas_foreground\x18\x03 \x01(\x08\x12\x16\n\x0ehas_background\x18\x04 \x01(\x08\"\xb8\x04\n\x14\x41ppInstallUserAction\x12;\n\x04next\x18\x01 \x01(\x0b\x32+.truffle.os.AppInstallUserAction.NextActionH\x00\x12N\n\x0btext_fields\x18\x02 \x01(\x0b\x32\x37.truffle.os.AppInstallUserAction.SubmitTextFieldsActionH\x00\x12=\n\x05\x61\x62ort\x18\x03 \x01(\x0b\x32,.truffle.os.AppInstallUserAction.AbortActionH\x00\x12\x43\n\x05oauth\x18\x04 \x01(\x0b\x32\x32.truffle.os.AppInstallUserAction.SubmitOAuthActionH\x00\x1a\x0c\n\nNextAction\x1a\xb5\x01\n\x16SubmitTextFieldsAction\x12\x64\n\x0f\x66ield_responses\x18\x01 \x03(\x0b\x32K.truffle.os.AppInstallUserAction.SubmitTextFieldsAction.FieldResponsesEntry\x1a\x35\n\x13\x46ieldResponsesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x30\n\x11SubmitOAuthAction\x12\x0c\n\x04\x63ode\x18\x01 \x01(\t\x12\r\n\x05state\x18\x02 \x01(\t\x1a\r\n\x0b\x41\x62ortActionB\x08\n\x06\x61\x63tion\"\xde\x01\n\x11\x41ppInstallRequest\x12\x42\n\tstart_new\x18\x01 \x01(\x0b\x32-.truffle.os.AppInstallRequest.StartNewInstallH\x00\x12\x37\n\x0buser_action\x18\x03 \x01(\x0b\x32 .truffle.os.AppInstallUserActionH\x00\x1a?\n\x0fStartNewInstall\x12,\n\x06source\x18\x01 \x01(\x0b\x32\x1c.truffle.os.AppInstallSourceB\x0b\n\toperation\"\xb7\x02\n\x12\x41ppInstallResponse\x12\x34\n\rinstall_modal\x18\x01 \x01(\x0b\x32\x1b.truffle.os.AppInstallModalH\x00\x12\x34\n\rinstall_error\x18\x02 \x01(\x0b\x32\x1b.truffle.os.AppInstallErrorH\x00\x12\x38\n\x0finstall_loading\x18\x03 \x01(\x0b\x32\x1d.truffle.os.AppInstallLoadingH\x00\x12:\n\x10install_metadata\x18\x04 \x01(\x0b\x32\x1e.truffle.os.AppInstallMetadataH\x00\x12\x32\n\x0cinstall_hint\x18\x05 
\x01(\x0b\x32\x1a.truffle.os.AppInstallHintH\x00\x42\x0b\n\toperation*\xa3\x01\n\x14\x41ppInstallSourceType\x12\'\n#APP_INSTALL_SOURCE_TYPE_UNSPECIFIED\x10\x00\x12\x1f\n\x1b\x41PP_INSTALL_SOURCE_TYPE_URL\x10\x01\x12 \n\x1c\x41PP_INSTALL_SOURCE_TYPE_FILE\x10\x02\x12\x1f\n\x1b\x41PP_INSTALL_SOURCE_TYPE_GIT\x10\x03P\x00\x62\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -38,46 +37,54 @@ _globals['_APPINSTALLMODAL_TEXTFIELDSMODAL_FIELDSENTRY']._serialized_options = b'8\001' _globals['_APPINSTALLUSERACTION_SUBMITTEXTFIELDSACTION_FIELDRESPONSESENTRY']._loaded_options = None _globals['_APPINSTALLUSERACTION_SUBMITTEXTFIELDSACTION_FIELDRESPONSESENTRY']._serialized_options = b'8\001' - _globals['_APPINSTALLSOURCETYPE']._serialized_start=2487 - _globals['_APPINSTALLSOURCETYPE']._serialized_end=2650 - _globals['_APPINSTALLSOURCE']._serialized_start=131 - _globals['_APPINSTALLSOURCE']._serialized_end=266 - _globals['_APPINSTALLMODAL']._serialized_start=269 - _globals['_APPINSTALLMODAL']._serialized_end=1232 - _globals['_APPINSTALLMODAL_WELCOMEMODAL']._serialized_start=666 - _globals['_APPINSTALLMODAL_WELCOMEMODAL']._serialized_end=705 - _globals['_APPINSTALLMODAL_TEXTFIELDSMODAL']._serialized_start=708 - _globals['_APPINSTALLMODAL_TEXTFIELDSMODAL']._serialized_end=1038 - _globals['_APPINSTALLMODAL_TEXTFIELDSMODAL_TEXTFIELD']._serialized_start=822 - _globals['_APPINSTALLMODAL_TEXTFIELDSMODAL_TEXTFIELD']._serialized_end=936 - _globals['_APPINSTALLMODAL_TEXTFIELDSMODAL_FIELDSENTRY']._serialized_start=938 - _globals['_APPINSTALLMODAL_TEXTFIELDSMODAL_FIELDSENTRY']._serialized_end=1038 - _globals['_APPINSTALLMODAL_VNCMODAL']._serialized_start=1040 - _globals['_APPINSTALLMODAL_VNCMODAL']._serialized_end=1122 - _globals['_APPINSTALLMODAL_UPLOADFILEMODAL']._serialized_start=1124 - _globals['_APPINSTALLMODAL_UPLOADFILEMODAL']._serialized_end=1166 - _globals['_APPINSTALLMODAL_FINISHMODAL']._serialized_start=1168 - 
_globals['_APPINSTALLMODAL_FINISHMODAL']._serialized_end=1223 - _globals['_APPINSTALLERROR']._serialized_start=1234 - _globals['_APPINSTALLERROR']._serialized_end=1274 - _globals['_APPINSTALLLOADING']._serialized_start=1276 - _globals['_APPINSTALLLOADING']._serialized_end=1320 - _globals['_APPINSTALLMETADATA']._serialized_start=1323 - _globals['_APPINSTALLMETADATA']._serialized_end=1545 - _globals['_APPINSTALLUSERACTION']._serialized_start=1548 - _globals['_APPINSTALLUSERACTION']._serialized_end=1997 - _globals['_APPINSTALLUSERACTION_NEXTACTION']._serialized_start=1776 - _globals['_APPINSTALLUSERACTION_NEXTACTION']._serialized_end=1788 - _globals['_APPINSTALLUSERACTION_SUBMITTEXTFIELDSACTION']._serialized_start=1791 - _globals['_APPINSTALLUSERACTION_SUBMITTEXTFIELDSACTION']._serialized_end=1972 - _globals['_APPINSTALLUSERACTION_SUBMITTEXTFIELDSACTION_FIELDRESPONSESENTRY']._serialized_start=1919 - _globals['_APPINSTALLUSERACTION_SUBMITTEXTFIELDSACTION_FIELDRESPONSESENTRY']._serialized_end=1972 - _globals['_APPINSTALLUSERACTION_ABORTACTION']._serialized_start=1974 - _globals['_APPINSTALLUSERACTION_ABORTACTION']._serialized_end=1987 - _globals['_APPINSTALLREQUEST']._serialized_start=2000 - _globals['_APPINSTALLREQUEST']._serialized_end=2222 - _globals['_APPINSTALLREQUEST_STARTNEWINSTALL']._serialized_start=2146 - _globals['_APPINSTALLREQUEST_STARTNEWINSTALL']._serialized_end=2209 - _globals['_APPINSTALLRESPONSE']._serialized_start=2225 - _globals['_APPINSTALLRESPONSE']._serialized_end=2484 + _globals['_APPINSTALLSOURCETYPE']._serialized_start=2826 + _globals['_APPINSTALLSOURCETYPE']._serialized_end=2989 + _globals['_APPINSTALLSOURCE']._serialized_start=66 + _globals['_APPINSTALLSOURCE']._serialized_end=201 + _globals['_APPINSTALLMODAL']._serialized_start=204 + _globals['_APPINSTALLMODAL']._serialized_end=1317 + _globals['_APPINSTALLMODAL_WELCOMEMODAL']._serialized_start=664 + _globals['_APPINSTALLMODAL_WELCOMEMODAL']._serialized_end=703 + 
_globals['_APPINSTALLMODAL_TEXTFIELDSMODAL']._serialized_start=706 + _globals['_APPINSTALLMODAL_TEXTFIELDSMODAL']._serialized_end=1036 + _globals['_APPINSTALLMODAL_TEXTFIELDSMODAL_TEXTFIELD']._serialized_start=820 + _globals['_APPINSTALLMODAL_TEXTFIELDSMODAL_TEXTFIELD']._serialized_end=934 + _globals['_APPINSTALLMODAL_TEXTFIELDSMODAL_FIELDSENTRY']._serialized_start=936 + _globals['_APPINSTALLMODAL_TEXTFIELDSMODAL_FIELDSENTRY']._serialized_end=1036 + _globals['_APPINSTALLMODAL_VNCMODAL']._serialized_start=1038 + _globals['_APPINSTALLMODAL_VNCMODAL']._serialized_end=1120 + _globals['_APPINSTALLMODAL_OAUTHMODAL']._serialized_start=1122 + _globals['_APPINSTALLMODAL_OAUTHMODAL']._serialized_end=1207 + _globals['_APPINSTALLMODAL_UPLOADFILEMODAL']._serialized_start=1209 + _globals['_APPINSTALLMODAL_UPLOADFILEMODAL']._serialized_end=1251 + _globals['_APPINSTALLMODAL_FINISHMODAL']._serialized_start=1253 + _globals['_APPINSTALLMODAL_FINISHMODAL']._serialized_end=1308 + _globals['_APPINSTALLERROR']._serialized_start=1319 + _globals['_APPINSTALLERROR']._serialized_end=1359 + _globals['_APPINSTALLLOADING']._serialized_start=1361 + _globals['_APPINSTALLLOADING']._serialized_end=1405 + _globals['_APPINSTALLHINT']._serialized_start=1408 + _globals['_APPINSTALLHINT']._serialized_end=1585 + _globals['_APPINSTALLHINT_UISTATE']._serialized_start=1480 + _globals['_APPINSTALLHINT_UISTATE']._serialized_end=1585 + _globals['_APPINSTALLMETADATA']._serialized_start=1587 + _globals['_APPINSTALLMETADATA']._serialized_end=1713 + _globals['_APPINSTALLUSERACTION']._serialized_start=1716 + _globals['_APPINSTALLUSERACTION']._serialized_end=2284 + _globals['_APPINSTALLUSERACTION_NEXTACTION']._serialized_start=2013 + _globals['_APPINSTALLUSERACTION_NEXTACTION']._serialized_end=2025 + _globals['_APPINSTALLUSERACTION_SUBMITTEXTFIELDSACTION']._serialized_start=2028 + _globals['_APPINSTALLUSERACTION_SUBMITTEXTFIELDSACTION']._serialized_end=2209 + 
_globals['_APPINSTALLUSERACTION_SUBMITTEXTFIELDSACTION_FIELDRESPONSESENTRY']._serialized_start=2156 + _globals['_APPINSTALLUSERACTION_SUBMITTEXTFIELDSACTION_FIELDRESPONSESENTRY']._serialized_end=2209 + _globals['_APPINSTALLUSERACTION_SUBMITOAUTHACTION']._serialized_start=2211 + _globals['_APPINSTALLUSERACTION_SUBMITOAUTHACTION']._serialized_end=2259 + _globals['_APPINSTALLUSERACTION_ABORTACTION']._serialized_start=2261 + _globals['_APPINSTALLUSERACTION_ABORTACTION']._serialized_end=2274 + _globals['_APPINSTALLREQUEST']._serialized_start=2287 + _globals['_APPINSTALLREQUEST']._serialized_end=2509 + _globals['_APPINSTALLREQUEST_STARTNEWINSTALL']._serialized_start=2433 + _globals['_APPINSTALLREQUEST_STARTNEWINSTALL']._serialized_end=2496 + _globals['_APPINSTALLRESPONSE']._serialized_start=2512 + _globals['_APPINSTALLRESPONSE']._serialized_end=2823 # @@protoc_insertion_point(module_scope) diff --git a/truffle/os/installer_pb2.pyi b/truffle/os/installer_pb2.pyi index 78b5080..21ac0b4 100644 --- a/truffle/os/installer_pb2.pyi +++ b/truffle/os/installer_pb2.pyi @@ -1,12 +1,14 @@ -from truffle.app import foreground_pb2 as _foreground_pb2 -from truffle.app import background_pb2 as _background_pb2 -from truffle.app import app_type_pb2 as _app_type_pb2 +from truffle.app import app_pb2 as _app_pb2 from google.protobuf.internal import containers as _containers from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from collections.abc import Mapping as _Mapping from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union +from truffle.app.app_pb2 import AppMetadata as AppMetadata +from truffle.app.app_pb2 import AppConfig as AppConfig +from truffle.app.app_pb2 import App as App +from truffle.app.app_pb2 import AppError as AppError DESCRIPTOR: _descriptor.FileDescriptor @@ -32,7 +34,7 @@ class AppInstallSource(_message.Message): def 
__init__(self, source_type: _Optional[_Union[AppInstallSourceType, str]] = ..., url: _Optional[str] = ..., git_hash: _Optional[str] = ...) -> None: ... class AppInstallModal(_message.Message): - __slots__ = ("step_index", "step_name", "welcome_modal", "text_fields_modal", "vnc_modal", "finish_modal", "upload_file_modal") + __slots__ = ("step_index", "step_name", "welcome_modal", "text_fields_modal", "vnc_modal", "finish_modal", "upload_file_modal", "oauth_modal") class WelcomeModal(_message.Message): __slots__ = ("welcome_message",) WELCOME_MESSAGE_FIELD_NUMBER: _ClassVar[int] @@ -72,6 +74,17 @@ class AppInstallModal(_message.Message): vnc_uri_path: str closes_on_complete: bool def __init__(self, instructions: _Optional[str] = ..., vnc_uri_path: _Optional[str] = ..., closes_on_complete: bool = ...) -> None: ... + class OAuthModal(_message.Message): + __slots__ = ("instructions", "provider", "auth_url", "state") + INSTRUCTIONS_FIELD_NUMBER: _ClassVar[int] + PROVIDER_FIELD_NUMBER: _ClassVar[int] + AUTH_URL_FIELD_NUMBER: _ClassVar[int] + STATE_FIELD_NUMBER: _ClassVar[int] + instructions: str + provider: str + auth_url: str + state: str + def __init__(self, instructions: _Optional[str] = ..., provider: _Optional[str] = ..., auth_url: _Optional[str] = ..., state: _Optional[str] = ...) -> None: ... 
class UploadFileModal(_message.Message): __slots__ = ("upload_uri_path",) UPLOAD_URI_PATH_FIELD_NUMBER: _ClassVar[int] @@ -91,6 +104,7 @@ class AppInstallModal(_message.Message): VNC_MODAL_FIELD_NUMBER: _ClassVar[int] FINISH_MODAL_FIELD_NUMBER: _ClassVar[int] UPLOAD_FILE_MODAL_FIELD_NUMBER: _ClassVar[int] + OAUTH_MODAL_FIELD_NUMBER: _ClassVar[int] step_index: int step_name: str welcome_modal: AppInstallModal.WelcomeModal @@ -98,7 +112,8 @@ class AppInstallModal(_message.Message): vnc_modal: AppInstallModal.VNCModal finish_modal: AppInstallModal.FinishModal upload_file_modal: AppInstallModal.UploadFileModal - def __init__(self, step_index: _Optional[int] = ..., step_name: _Optional[str] = ..., welcome_modal: _Optional[_Union[AppInstallModal.WelcomeModal, _Mapping]] = ..., text_fields_modal: _Optional[_Union[AppInstallModal.TextFieldsModal, _Mapping]] = ..., vnc_modal: _Optional[_Union[AppInstallModal.VNCModal, _Mapping]] = ..., finish_modal: _Optional[_Union[AppInstallModal.FinishModal, _Mapping]] = ..., upload_file_modal: _Optional[_Union[AppInstallModal.UploadFileModal, _Mapping]] = ...) -> None: ... + oauth_modal: AppInstallModal.OAuthModal + def __init__(self, step_index: _Optional[int] = ..., step_name: _Optional[str] = ..., welcome_modal: _Optional[_Union[AppInstallModal.WelcomeModal, _Mapping]] = ..., text_fields_modal: _Optional[_Union[AppInstallModal.TextFieldsModal, _Mapping]] = ..., vnc_modal: _Optional[_Union[AppInstallModal.VNCModal, _Mapping]] = ..., finish_modal: _Optional[_Union[AppInstallModal.FinishModal, _Mapping]] = ..., upload_file_modal: _Optional[_Union[AppInstallModal.UploadFileModal, _Mapping]] = ..., oauth_modal: _Optional[_Union[AppInstallModal.OAuthModal, _Mapping]] = ...) -> None: ... class AppInstallError(_message.Message): __slots__ = ("error_message",) @@ -112,20 +127,34 @@ class AppInstallLoading(_message.Message): loading_message: str def __init__(self, loading_message: _Optional[str] = ...) -> None: ... 
+class AppInstallHint(_message.Message): + __slots__ = ("ui_state",) + class UiState(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + UI_STATE_UNSPECIFIED: _ClassVar[AppInstallHint.UiState] + UI_STATE_USER_INTERACTION_READY: _ClassVar[AppInstallHint.UiState] + UI_STATE_MOVE_TO_BACKGROUND: _ClassVar[AppInstallHint.UiState] + UI_STATE_UNSPECIFIED: AppInstallHint.UiState + UI_STATE_USER_INTERACTION_READY: AppInstallHint.UiState + UI_STATE_MOVE_TO_BACKGROUND: AppInstallHint.UiState + UI_STATE_FIELD_NUMBER: _ClassVar[int] + ui_state: AppInstallHint.UiState + def __init__(self, ui_state: _Optional[_Union[AppInstallHint.UiState, str]] = ...) -> None: ... + class AppInstallMetadata(_message.Message): - __slots__ = ("uuid", "app_type", "foreground_metadata", "background_metadata") + __slots__ = ("uuid", "metadata", "has_foreground", "has_background") UUID_FIELD_NUMBER: _ClassVar[int] - APP_TYPE_FIELD_NUMBER: _ClassVar[int] - FOREGROUND_METADATA_FIELD_NUMBER: _ClassVar[int] - BACKGROUND_METADATA_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + HAS_FOREGROUND_FIELD_NUMBER: _ClassVar[int] + HAS_BACKGROUND_FIELD_NUMBER: _ClassVar[int] uuid: str - app_type: _app_type_pb2.AppType - foreground_metadata: _foreground_pb2.ForegroundApp.Metadata - background_metadata: _background_pb2.BackgroundApp.Metadata - def __init__(self, uuid: _Optional[str] = ..., app_type: _Optional[_Union[_app_type_pb2.AppType, str]] = ..., foreground_metadata: _Optional[_Union[_foreground_pb2.ForegroundApp.Metadata, _Mapping]] = ..., background_metadata: _Optional[_Union[_background_pb2.BackgroundApp.Metadata, _Mapping]] = ...) -> None: ... + metadata: _app_pb2.AppMetadata + has_foreground: bool + has_background: bool + def __init__(self, uuid: _Optional[str] = ..., metadata: _Optional[_Union[_app_pb2.AppMetadata, _Mapping]] = ..., has_foreground: bool = ..., has_background: bool = ...) -> None: ... 
class AppInstallUserAction(_message.Message): - __slots__ = ("next", "text_fields", "abort") + __slots__ = ("next", "text_fields", "abort", "oauth") class NextAction(_message.Message): __slots__ = () def __init__(self) -> None: ... @@ -141,16 +170,25 @@ class AppInstallUserAction(_message.Message): FIELD_RESPONSES_FIELD_NUMBER: _ClassVar[int] field_responses: _containers.ScalarMap[str, str] def __init__(self, field_responses: _Optional[_Mapping[str, str]] = ...) -> None: ... + class SubmitOAuthAction(_message.Message): + __slots__ = ("code", "state") + CODE_FIELD_NUMBER: _ClassVar[int] + STATE_FIELD_NUMBER: _ClassVar[int] + code: str + state: str + def __init__(self, code: _Optional[str] = ..., state: _Optional[str] = ...) -> None: ... class AbortAction(_message.Message): __slots__ = () def __init__(self) -> None: ... NEXT_FIELD_NUMBER: _ClassVar[int] TEXT_FIELDS_FIELD_NUMBER: _ClassVar[int] ABORT_FIELD_NUMBER: _ClassVar[int] + OAUTH_FIELD_NUMBER: _ClassVar[int] next: AppInstallUserAction.NextAction text_fields: AppInstallUserAction.SubmitTextFieldsAction abort: AppInstallUserAction.AbortAction - def __init__(self, next: _Optional[_Union[AppInstallUserAction.NextAction, _Mapping]] = ..., text_fields: _Optional[_Union[AppInstallUserAction.SubmitTextFieldsAction, _Mapping]] = ..., abort: _Optional[_Union[AppInstallUserAction.AbortAction, _Mapping]] = ...) -> None: ... + oauth: AppInstallUserAction.SubmitOAuthAction + def __init__(self, next: _Optional[_Union[AppInstallUserAction.NextAction, _Mapping]] = ..., text_fields: _Optional[_Union[AppInstallUserAction.SubmitTextFieldsAction, _Mapping]] = ..., abort: _Optional[_Union[AppInstallUserAction.AbortAction, _Mapping]] = ..., oauth: _Optional[_Union[AppInstallUserAction.SubmitOAuthAction, _Mapping]] = ...) -> None: ... 
class AppInstallRequest(_message.Message): __slots__ = ("start_new", "user_action") @@ -166,13 +204,15 @@ class AppInstallRequest(_message.Message): def __init__(self, start_new: _Optional[_Union[AppInstallRequest.StartNewInstall, _Mapping]] = ..., user_action: _Optional[_Union[AppInstallUserAction, _Mapping]] = ...) -> None: ... class AppInstallResponse(_message.Message): - __slots__ = ("install_modal", "install_error", "install_loading", "install_metadata") + __slots__ = ("install_modal", "install_error", "install_loading", "install_metadata", "install_hint") INSTALL_MODAL_FIELD_NUMBER: _ClassVar[int] INSTALL_ERROR_FIELD_NUMBER: _ClassVar[int] INSTALL_LOADING_FIELD_NUMBER: _ClassVar[int] INSTALL_METADATA_FIELD_NUMBER: _ClassVar[int] + INSTALL_HINT_FIELD_NUMBER: _ClassVar[int] install_modal: AppInstallModal install_error: AppInstallError install_loading: AppInstallLoading install_metadata: AppInstallMetadata - def __init__(self, install_modal: _Optional[_Union[AppInstallModal, _Mapping]] = ..., install_error: _Optional[_Union[AppInstallError, _Mapping]] = ..., install_loading: _Optional[_Union[AppInstallLoading, _Mapping]] = ..., install_metadata: _Optional[_Union[AppInstallMetadata, _Mapping]] = ...) -> None: ... + install_hint: AppInstallHint + def __init__(self, install_modal: _Optional[_Union[AppInstallModal, _Mapping]] = ..., install_error: _Optional[_Union[AppInstallError, _Mapping]] = ..., install_loading: _Optional[_Union[AppInstallLoading, _Mapping]] = ..., install_metadata: _Optional[_Union[AppInstallMetadata, _Mapping]] = ..., install_hint: _Optional[_Union[AppInstallHint, _Mapping]] = ...) -> None: ... 
diff --git a/truffle/os/installer_pb2_grpc.py b/truffle/os/installer_pb2_grpc.py index c1eb7ce..9e4d9a2 100644 --- a/truffle/os/installer_pb2_grpc.py +++ b/truffle/os/installer_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/installer_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/installer_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/notification_pb2.py b/truffle/os/notification_pb2.py index c66af35..198341b 100644 --- a/truffle/os/notification_pb2.py +++ b/truffle/os/notification_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/notification.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/notification.proto' ) @@ -26,20 +26,20 @@ from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from truffle.os import hardware_stats_pb2 as truffle_dot_os_dot_hardware__stats__pb2 from truffle.os import client_session_pb2 as truffle_dot_os_dot_client__session__pb2 -from truffle.app import background_pb2 as truffle_dot_app_dot_background__pb2 +from truffle.os import background_feed_pb2 as truffle_dot_os_dot_background__feed__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1dtruffle/os/notification.proto\x12\ntruffle.os\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1ftruffle/os/hardware_stats.proto\x1a\x1ftruffle/os/client_session.proto\x1a\x1ctruffle/app/background.proto\"!\n\x1fSubscribeToNotificationsRequest\"\xa6\x05\n\x0cNotification\x12\x37\n\x04type\x18\x01 \x01(\x0e\x32).truffle.os.Notification.NotificationType\x12\x15\n\rassociated_id\x18\x02 \x01(\t\x12&\n\x04none\x18\x03 \x01(\x0b\x32\x16.google.protobuf.EmptyH\x00\x12*\n\x07payload\x18\x04 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x46\n\x18new_session_verification\x18\x07 \x01(\x0b\x32\".truffle.os.NewSessionVerificationH\x00\x12M\n\x1b\x62\x61\x63kground_app_notification\x18\n \x01(\x0b\x32&.truffle.app.BackgroundAppNotificationH\x00\x12\x10\n\x08is_error\x18\x08 
\x01(\x08\"\xc0\x02\n\x10NotificationType\x12\x1d\n\x19NOTIFICATION_TYPE_INVALID\x10\x00\x12\x12\n\x0e\x42G_FEED_UPDATE\x10\x01\x12\x13\n\x0fTASK_HAS_RESULT\x10\x02\x12\x1e\n\x1a\x42G_FEED_FEEDBACK_PROCESSED\x10\x03\x12\x15\n\x11\x42G_APP_LIST_DIRTY\x10\x0e\x12\x15\n\x11\x46G_APP_LIST_DIRTY\x10\x0f\x12\x13\n\x0fTASK_LIST_DIRTY\x10\x10\x12\x11\n\rSESSION_READY\x10\x14\x12 \n\x1cSESSION_VERIFICATION_REQUEST\x10\x15\x12\x11\n\rSESSION_ADDED\x10\x16\x12\x12\n\x0eSESSION_DENIED\x10\x17\x12\x12\n\x0eSERVER_CLOSING\x10\x1f\x12\x11\n\rDISPLAY_TOAST\x10 B\x06\n\x04\x64\x61tab\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1dtruffle/os/notification.proto\x12\ntruffle.os\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1ftruffle/os/hardware_stats.proto\x1a\x1ftruffle/os/client_session.proto\x1a truffle/os/background_feed.proto\"!\n\x1fSubscribeToNotificationsRequest\"\xb7\x04\n\x0cNotification\x12\x37\n\x04type\x18\x01 \x01(\x0e\x32).truffle.os.Notification.NotificationType\x12\x15\n\rassociated_id\x18\x02 \x01(\t\x12&\n\x04none\x18\x03 \x01(\x0b\x32\x16.google.protobuf.EmptyH\x00\x12\x46\n\x18new_session_verification\x18\x07 \x01(\x0b\x32\".truffle.os.NewSessionVerificationH\x00\x12\x44\n\x17\x66\x65\x65\x64_entry_notification\x18\n \x01(\x0b\x32!.truffle.os.FeedEntryNotificationH\x00\x12\x10\n\x08is_error\x18\x08 \x01(\x08\"\x86\x02\n\x10NotificationType\x12\x1d\n\x19NOTIFICATION_TYPE_INVALID\x10\x00\x12\x12\n\x0e\x42G_FEED_UPDATE\x10\x01\x12\x13\n\x0fTASK_HAS_RESULT\x10\x02\x12\x12\n\x0e\x41PP_LIST_DIRTY\x10\x0e\x12\x13\n\x0fTASK_LIST_DIRTY\x10\x10\x12\x11\n\rSESSION_READY\x10\x14\x12 \n\x1cSESSION_VERIFICATION_REQUEST\x10\x15\x12\x11\n\rSESSION_ADDED\x10\x16\x12\x12\n\x0eSESSION_DENIED\x10\x17\x12\x12\n\x0eSERVER_CLOSING\x10\x1f\x12\x11\n\rDISPLAY_TOAST\x10 B\x06\n\x04\x64\x61tab\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.os.notification_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: DESCRIPTOR._loaded_options = None - _globals['_SUBSCRIBETONOTIFICATIONSREQUEST']._serialized_start=200 - _globals['_SUBSCRIBETONOTIFICATIONSREQUEST']._serialized_end=233 - _globals['_NOTIFICATION']._serialized_start=236 - _globals['_NOTIFICATION']._serialized_end=914 - _globals['_NOTIFICATION_NOTIFICATIONTYPE']._serialized_start=586 - _globals['_NOTIFICATION_NOTIFICATIONTYPE']._serialized_end=906 + _globals['_SUBSCRIBETONOTIFICATIONSREQUEST']._serialized_start=204 + _globals['_SUBSCRIBETONOTIFICATIONSREQUEST']._serialized_end=237 + _globals['_NOTIFICATION']._serialized_start=240 + _globals['_NOTIFICATION']._serialized_end=807 + _globals['_NOTIFICATION_NOTIFICATIONTYPE']._serialized_start=537 + _globals['_NOTIFICATION_NOTIFICATIONTYPE']._serialized_end=799 # @@protoc_insertion_point(module_scope) diff --git a/truffle/os/notification_pb2.pyi b/truffle/os/notification_pb2.pyi index 1a521b0..dee6108 100644 --- a/truffle/os/notification_pb2.pyi +++ b/truffle/os/notification_pb2.pyi @@ -2,7 +2,7 @@ from google.protobuf import struct_pb2 as _struct_pb2 from google.protobuf import empty_pb2 as _empty_pb2 from truffle.os import hardware_stats_pb2 as _hardware_stats_pb2 from truffle.os import client_session_pb2 as _client_session_pb2 -from truffle.app import background_pb2 as _background_pb2 +from truffle.os import background_feed_pb2 as _background_feed_pb2 from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message @@ -16,15 +16,13 @@ class SubscribeToNotificationsRequest(_message.Message): def __init__(self) -> None: ... 
class Notification(_message.Message): - __slots__ = ("type", "associated_id", "none", "payload", "new_session_verification", "background_app_notification", "is_error") + __slots__ = ("type", "associated_id", "none", "new_session_verification", "feed_entry_notification", "is_error") class NotificationType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): __slots__ = () NOTIFICATION_TYPE_INVALID: _ClassVar[Notification.NotificationType] BG_FEED_UPDATE: _ClassVar[Notification.NotificationType] TASK_HAS_RESULT: _ClassVar[Notification.NotificationType] - BG_FEED_FEEDBACK_PROCESSED: _ClassVar[Notification.NotificationType] - BG_APP_LIST_DIRTY: _ClassVar[Notification.NotificationType] - FG_APP_LIST_DIRTY: _ClassVar[Notification.NotificationType] + APP_LIST_DIRTY: _ClassVar[Notification.NotificationType] TASK_LIST_DIRTY: _ClassVar[Notification.NotificationType] SESSION_READY: _ClassVar[Notification.NotificationType] SESSION_VERIFICATION_REQUEST: _ClassVar[Notification.NotificationType] @@ -35,9 +33,7 @@ class Notification(_message.Message): NOTIFICATION_TYPE_INVALID: Notification.NotificationType BG_FEED_UPDATE: Notification.NotificationType TASK_HAS_RESULT: Notification.NotificationType - BG_FEED_FEEDBACK_PROCESSED: Notification.NotificationType - BG_APP_LIST_DIRTY: Notification.NotificationType - FG_APP_LIST_DIRTY: Notification.NotificationType + APP_LIST_DIRTY: Notification.NotificationType TASK_LIST_DIRTY: Notification.NotificationType SESSION_READY: Notification.NotificationType SESSION_VERIFICATION_REQUEST: Notification.NotificationType @@ -48,15 +44,13 @@ class Notification(_message.Message): TYPE_FIELD_NUMBER: _ClassVar[int] ASSOCIATED_ID_FIELD_NUMBER: _ClassVar[int] NONE_FIELD_NUMBER: _ClassVar[int] - PAYLOAD_FIELD_NUMBER: _ClassVar[int] NEW_SESSION_VERIFICATION_FIELD_NUMBER: _ClassVar[int] - BACKGROUND_APP_NOTIFICATION_FIELD_NUMBER: _ClassVar[int] + FEED_ENTRY_NOTIFICATION_FIELD_NUMBER: _ClassVar[int] IS_ERROR_FIELD_NUMBER: _ClassVar[int] type: 
Notification.NotificationType associated_id: str none: _empty_pb2.Empty - payload: _struct_pb2.Struct new_session_verification: _client_session_pb2.NewSessionVerification - background_app_notification: _background_pb2.BackgroundAppNotification + feed_entry_notification: _background_feed_pb2.FeedEntryNotification is_error: bool - def __init__(self, type: _Optional[_Union[Notification.NotificationType, str]] = ..., associated_id: _Optional[str] = ..., none: _Optional[_Union[_empty_pb2.Empty, _Mapping]] = ..., payload: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ..., new_session_verification: _Optional[_Union[_client_session_pb2.NewSessionVerification, _Mapping]] = ..., background_app_notification: _Optional[_Union[_background_pb2.BackgroundAppNotification, _Mapping]] = ..., is_error: bool = ...) -> None: ... + def __init__(self, type: _Optional[_Union[Notification.NotificationType, str]] = ..., associated_id: _Optional[str] = ..., none: _Optional[_Union[_empty_pb2.Empty, _Mapping]] = ..., new_session_verification: _Optional[_Union[_client_session_pb2.NewSessionVerification, _Mapping]] = ..., feed_entry_notification: _Optional[_Union[_background_feed_pb2.FeedEntryNotification, _Mapping]] = ..., is_error: bool = ...) -> None: ... diff --git a/truffle/os/notification_pb2_grpc.py b/truffle/os/notification_pb2_grpc.py index 1b8bc2c..504a3bc 100644 --- a/truffle/os/notification_pb2_grpc.py +++ b/truffle/os/notification_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/notification_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/notification_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' 
+ f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/proactivity_pb2.py b/truffle/os/proactivity_pb2.py new file mode 100644 index 0000000..b1c9d3b --- /dev/null +++ b/truffle/os/proactivity_pb2.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: truffle/os/proactivity.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'truffle/os/proactivity.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1ctruffle/os/proactivity.proto\x12\ntruffle.os\x1a\x1fgoogle/protobuf/timestamp.proto\"\xe9\x04\n\x0fProactiveAction\x12\r\n\x05title\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12:\n\nactionable\x18\x04 \x01(\x0b\x32&.truffle.os.ProactiveAction.Actionable\x12\x32\n\x06status\x18\x05 \x01(\x0e\x32\".truffle.os.ProactiveAction.Status\x12.\n\ncreated_at\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nupdated_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x11\n\tapp_uuids\x18\x08 \x03(\t\x12\x1b\n\x13prompt_for_subagent\x18\t \x01(\t\x1a\x9c\x01\n\nActionable\x12J\n\x0c\x62oolean_text\x18\x01 
\x01(\x0b\x32\x32.truffle.os.ProactiveAction.Actionable.BooleanTextH\x00\x1a:\n\x0b\x42ooleanText\x12\x0f\n\x07\x61pprove\x18\x01 \x01(\x08\x12\x11\n\x04text\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x07\n\x05_textB\x06\n\x04type\"\x92\x01\n\x06Status\x12\x18\n\x14\x41\x43TION_STATE_INVALID\x10\x00\x12\x18\n\x14\x41\x43TION_STATE_PENDING\x10\x01\x12\x1c\n\x18\x41\x43TION_STATE_IN_PROGRESS\x10\x02\x12\x1a\n\x16\x41\x43TION_STATE_CANCELLED\x10\x03\x12\x1a\n\x16\x41\x43TION_STATE_COMPLETED\x10\x04\"n\n\x1d\x41pproveProactiveActionRequest\x12\x10\n\x08\x65ntry_id\x18\x01 \x01(\x04\x12;\n\x0buser_action\x18\x02 \x01(\x0b\x32&.truffle.os.ProactiveAction.Actionable\"U\n\x1e\x41pproveProactiveActionResponse\x12\x33\n\x0eupdated_action\x18\x01 \x01(\x0b\x32\x1b.truffle.os.ProactiveAction\"0\n\x1c\x43\x61ncelProactiveActionRequest\x12\x10\n\x08\x65ntry_id\x18\x01 \x01(\x04\"\x1f\n\x1d\x43\x61ncelProactiveActionResponseb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.os.proactivity_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals['_PROACTIVEACTION']._serialized_start=78 + _globals['_PROACTIVEACTION']._serialized_end=695 + _globals['_PROACTIVEACTION_ACTIONABLE']._serialized_start=390 + _globals['_PROACTIVEACTION_ACTIONABLE']._serialized_end=546 + _globals['_PROACTIVEACTION_ACTIONABLE_BOOLEANTEXT']._serialized_start=480 + _globals['_PROACTIVEACTION_ACTIONABLE_BOOLEANTEXT']._serialized_end=538 + _globals['_PROACTIVEACTION_STATUS']._serialized_start=549 + _globals['_PROACTIVEACTION_STATUS']._serialized_end=695 + _globals['_APPROVEPROACTIVEACTIONREQUEST']._serialized_start=697 + _globals['_APPROVEPROACTIVEACTIONREQUEST']._serialized_end=807 + _globals['_APPROVEPROACTIVEACTIONRESPONSE']._serialized_start=809 + _globals['_APPROVEPROACTIVEACTIONRESPONSE']._serialized_end=894 + 
_globals['_CANCELPROACTIVEACTIONREQUEST']._serialized_start=896 + _globals['_CANCELPROACTIVEACTIONREQUEST']._serialized_end=944 + _globals['_CANCELPROACTIVEACTIONRESPONSE']._serialized_start=946 + _globals['_CANCELPROACTIVEACTIONRESPONSE']._serialized_end=977 +# @@protoc_insertion_point(module_scope) diff --git a/truffle/os/proactivity_pb2.pyi b/truffle/os/proactivity_pb2.pyi new file mode 100644 index 0000000..c2e952e --- /dev/null +++ b/truffle/os/proactivity_pb2.pyi @@ -0,0 +1,79 @@ +import datetime + +from google.protobuf import timestamp_pb2 as _timestamp_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Iterable as _Iterable, Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class ProactiveAction(_message.Message): + __slots__ = ("title", "description", "actionable", "status", "created_at", "updated_at", "app_uuids", "prompt_for_subagent") + class Status(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + ACTION_STATE_INVALID: _ClassVar[ProactiveAction.Status] + ACTION_STATE_PENDING: _ClassVar[ProactiveAction.Status] + ACTION_STATE_IN_PROGRESS: _ClassVar[ProactiveAction.Status] + ACTION_STATE_CANCELLED: _ClassVar[ProactiveAction.Status] + ACTION_STATE_COMPLETED: _ClassVar[ProactiveAction.Status] + ACTION_STATE_INVALID: ProactiveAction.Status + ACTION_STATE_PENDING: ProactiveAction.Status + ACTION_STATE_IN_PROGRESS: ProactiveAction.Status + ACTION_STATE_CANCELLED: ProactiveAction.Status + ACTION_STATE_COMPLETED: ProactiveAction.Status + class Actionable(_message.Message): + __slots__ = ("boolean_text",) + class BooleanText(_message.Message): + __slots__ = ("approve", "text") + APPROVE_FIELD_NUMBER: _ClassVar[int] + 
TEXT_FIELD_NUMBER: _ClassVar[int] + approve: bool + text: str + def __init__(self, approve: bool = ..., text: _Optional[str] = ...) -> None: ... + BOOLEAN_TEXT_FIELD_NUMBER: _ClassVar[int] + boolean_text: ProactiveAction.Actionable.BooleanText + def __init__(self, boolean_text: _Optional[_Union[ProactiveAction.Actionable.BooleanText, _Mapping]] = ...) -> None: ... + TITLE_FIELD_NUMBER: _ClassVar[int] + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + ACTIONABLE_FIELD_NUMBER: _ClassVar[int] + STATUS_FIELD_NUMBER: _ClassVar[int] + CREATED_AT_FIELD_NUMBER: _ClassVar[int] + UPDATED_AT_FIELD_NUMBER: _ClassVar[int] + APP_UUIDS_FIELD_NUMBER: _ClassVar[int] + PROMPT_FOR_SUBAGENT_FIELD_NUMBER: _ClassVar[int] + title: str + description: str + actionable: ProactiveAction.Actionable + status: ProactiveAction.Status + created_at: _timestamp_pb2.Timestamp + updated_at: _timestamp_pb2.Timestamp + app_uuids: _containers.RepeatedScalarFieldContainer[str] + prompt_for_subagent: str + def __init__(self, title: _Optional[str] = ..., description: _Optional[str] = ..., actionable: _Optional[_Union[ProactiveAction.Actionable, _Mapping]] = ..., status: _Optional[_Union[ProactiveAction.Status, str]] = ..., created_at: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., updated_at: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., app_uuids: _Optional[_Iterable[str]] = ..., prompt_for_subagent: _Optional[str] = ...) -> None: ... + +class ApproveProactiveActionRequest(_message.Message): + __slots__ = ("entry_id", "user_action") + ENTRY_ID_FIELD_NUMBER: _ClassVar[int] + USER_ACTION_FIELD_NUMBER: _ClassVar[int] + entry_id: int + user_action: ProactiveAction.Actionable + def __init__(self, entry_id: _Optional[int] = ..., user_action: _Optional[_Union[ProactiveAction.Actionable, _Mapping]] = ...) -> None: ... 
+ +class ApproveProactiveActionResponse(_message.Message): + __slots__ = ("updated_action",) + UPDATED_ACTION_FIELD_NUMBER: _ClassVar[int] + updated_action: ProactiveAction + def __init__(self, updated_action: _Optional[_Union[ProactiveAction, _Mapping]] = ...) -> None: ... + +class CancelProactiveActionRequest(_message.Message): + __slots__ = ("entry_id",) + ENTRY_ID_FIELD_NUMBER: _ClassVar[int] + entry_id: int + def __init__(self, entry_id: _Optional[int] = ...) -> None: ... + +class CancelProactiveActionResponse(_message.Message): + __slots__ = () + def __init__(self) -> None: ... diff --git a/truffle/infer/gencfg_pb2_grpc.py b/truffle/os/proactivity_pb2_grpc.py similarity index 86% rename from truffle/infer/gencfg_pb2_grpc.py rename to truffle/os/proactivity_pb2_grpc.py index 15da2a6..ebb049d 100644 --- a/truffle/infer/gencfg_pb2_grpc.py +++ b/truffle/os/proactivity_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/infer/gencfg_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/proactivity_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/system_info_pb2.py b/truffle/os/system_info_pb2.py index 2968f0c..7a91170 100644 --- a/truffle/os/system_info_pb2.py +++ b/truffle/os/system_info_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/system_info.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/system_info.proto' ) diff --git a/truffle/os/system_info_pb2_grpc.py b/truffle/os/system_info_pb2_grpc.py index b4a31ff..1ff7692 100644 --- a/truffle/os/system_info_pb2_grpc.py +++ b/truffle/os/system_info_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/system_info_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/system_info_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/system_settings_pb2.py b/truffle/os/system_settings_pb2.py index 93de2d3..2f372f5 100644 --- a/truffle/os/system_settings_pb2.py +++ b/truffle/os/system_settings_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/system_settings.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/system_settings.proto' ) @@ -26,7 +26,7 @@ from truffle.os.hardware_settings_pb2 import * -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n truffle/os/system_settings.proto\x12\ntruffle.os\x1a\"truffle/os/hardware_settings.proto\"\xac\x01\n\x0eSystemSettings\x12<\n\x11hardware_settings\x18\x01 \x01(\x0b\x32\x1c.truffle.os.HardwareSettingsH\x00\x88\x01\x01\x12\x34\n\rtask_settings\x18\x02 \x01(\x0b\x32\x18.truffle.os.TaskSettingsH\x01\x88\x01\x01\x42\x14\n\x12_hardware_settingsB\x10\n\x0e_task_settings\"0\n\x0cTaskSettings\x12\x1a\n\x12\x64\x65\x66\x61ult_model_uuid\x18\x02 \x01(\tJ\x04\x08\x01\x10\x02P\x00\x62\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n truffle/os/system_settings.proto\x12\ntruffle.os\x1a\"truffle/os/hardware_settings.proto\"\xac\x01\n\x0eSystemSettings\x12<\n\x11hardware_settings\x18\x01 \x01(\x0b\x32\x1c.truffle.os.HardwareSettingsH\x00\x88\x01\x01\x12\x34\n\rtask_settings\x18\x02 \x01(\x0b\x32\x18.truffle.os.TaskSettingsH\x01\x88\x01\x01\x42\x14\n\x12_hardware_settingsB\x10\n\x0e_task_settings\"\x0e\n\x0cTaskSettingsP\x00\x62\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -36,5 +36,5 @@ _globals['_SYSTEMSETTINGS']._serialized_start=85 _globals['_SYSTEMSETTINGS']._serialized_end=257 _globals['_TASKSETTINGS']._serialized_start=259 - _globals['_TASKSETTINGS']._serialized_end=307 + _globals['_TASKSETTINGS']._serialized_end=273 # @@protoc_insertion_point(module_scope) diff --git a/truffle/os/system_settings_pb2.pyi 
b/truffle/os/system_settings_pb2.pyi index 16864f1..870865a 100644 --- a/truffle/os/system_settings_pb2.pyi +++ b/truffle/os/system_settings_pb2.pyi @@ -16,7 +16,5 @@ class SystemSettings(_message.Message): def __init__(self, hardware_settings: _Optional[_Union[_hardware_settings_pb2.HardwareSettings, _Mapping]] = ..., task_settings: _Optional[_Union[TaskSettings, _Mapping]] = ...) -> None: ... class TaskSettings(_message.Message): - __slots__ = ("default_model_uuid",) - DEFAULT_MODEL_UUID_FIELD_NUMBER: _ClassVar[int] - default_model_uuid: str - def __init__(self, default_model_uuid: _Optional[str] = ...) -> None: ... + __slots__ = () + def __init__(self) -> None: ... diff --git a/truffle/os/system_settings_pb2_grpc.py b/truffle/os/system_settings_pb2_grpc.py index b192150..45fd07e 100644 --- a/truffle/os/system_settings_pb2_grpc.py +++ b/truffle/os/system_settings_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/system_settings_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/system_settings_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/task_actions_pb2.py b/truffle/os/task_actions_pb2.py index 6e57ed2..a78e2db 100644 --- a/truffle/os/task_actions_pb2.py +++ b/truffle/os/task_actions_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/task_actions.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/task_actions.proto' ) diff --git a/truffle/os/task_actions_pb2.pyi b/truffle/os/task_actions_pb2.pyi index 11ff9e6..775b279 100644 --- a/truffle/os/task_actions_pb2.pyi +++ b/truffle/os/task_actions_pb2.pyi @@ -15,6 +15,7 @@ from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union from truffle.os.task_pb2 import Task as Task from truffle.os.task_pb2 import TasksList as TasksList from truffle.os.task_pb2 import TaskNode as TaskNode +from truffle.os.task_pb2 import StreamingTaskStepResult as StreamingTaskStepResult from truffle.os.task_pb2 import TaskStreamUpdate as TaskStreamUpdate from truffle.os.task_target_pb2 import TargetTask as TargetTask from truffle.os.task_options_pb2 import TaskOptions as TaskOptions diff --git a/truffle/os/task_actions_pb2_grpc.py b/truffle/os/task_actions_pb2_grpc.py index 1b4d731..1f22a7b 100644 --- a/truffle/os/task_actions_pb2_grpc.py +++ b/truffle/os/task_actions_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/task_actions_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/task_actions_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' 
+ f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/task_error_pb2.py b/truffle/os/task_error_pb2.py index a78d4c0..3a4e0d8 100644 --- a/truffle/os/task_error_pb2.py +++ b/truffle/os/task_error_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! # NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/task_error.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/task_error.proto' ) diff --git a/truffle/os/task_error_pb2_grpc.py b/truffle/os/task_error_pb2_grpc.py index 7876bc4..2443ad2 100644 --- a/truffle/os/task_error_pb2_grpc.py +++ b/truffle/os/task_error_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/task_error_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/task_error_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/task_info_pb2.py b/truffle/os/task_info_pb2.py index 0d6aa9b..4de7973 100644 --- a/truffle/os/task_info_pb2.py +++ b/truffle/os/task_info_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/task_info.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/task_info.proto' ) diff --git a/truffle/os/task_info_pb2.pyi b/truffle/os/task_info_pb2.pyi index 00f2f90..b457df9 100644 --- a/truffle/os/task_info_pb2.pyi +++ b/truffle/os/task_info_pb2.pyi @@ -1,3 +1,5 @@ +import datetime + from google.protobuf import timestamp_pb2 as _timestamp_pb2 from truffle.os import task_options_pb2 as _task_options_pb2 from google.protobuf.internal import containers as _containers @@ -48,4 +50,4 @@ class TaskInfo(_message.Message): created: _timestamp_pb2.Timestamp last_updated: _timestamp_pb2.Timestamp access_uri: str - def __init__(self, run_state: _Optional[_Union[TaskInfo.TaskRunState, str]] = ..., app_uuids: _Optional[_Iterable[str]] = ..., task_title: _Optional[str] = ..., options: _Optional[_Union[_task_options_pb2.TaskOptions, _Mapping]] = ..., created: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., last_updated: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., access_uri: _Optional[str] = ...) -> None: ... + def __init__(self, run_state: _Optional[_Union[TaskInfo.TaskRunState, str]] = ..., app_uuids: _Optional[_Iterable[str]] = ..., task_title: _Optional[str] = ..., options: _Optional[_Union[_task_options_pb2.TaskOptions, _Mapping]] = ..., created: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., last_updated: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., access_uri: _Optional[str] = ...) -> None: ... 
diff --git a/truffle/os/task_info_pb2_grpc.py b/truffle/os/task_info_pb2_grpc.py index b2a4954..58c324e 100644 --- a/truffle/os/task_info_pb2_grpc.py +++ b/truffle/os/task_info_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/task_info_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/task_info_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/task_options_pb2.py b/truffle/os/task_options_pb2.py index f809068..099f5c7 100644 --- a/truffle/os/task_options_pb2.py +++ b/truffle/os/task_options_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/task_options.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/task_options.proto' ) diff --git a/truffle/os/task_options_pb2_grpc.py b/truffle/os/task_options_pb2_grpc.py index 11e3f5b..8930d65 100644 --- a/truffle/os/task_options_pb2_grpc.py +++ b/truffle/os/task_options_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/task_options_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/task_options_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/task_pb2.py b/truffle/os/task_pb2.py index b50c38c..c612b37 100644 --- a/truffle/os/task_pb2.py +++ b/truffle/os/task_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/task.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/task.proto' ) @@ -37,7 +37,7 @@ from truffle.os.task_user_response_pb2 import * from truffle.os.task_step_pb2 import * -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x15truffle/os/task.proto\x12\ntruffle.os\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x19truffle/common/file.proto\x1a\x1atruffle/os/task_info.proto\x1a#truffle/os/task_user_response.proto\x1a\x1atruffle/os/task_step.proto\x1a\x1btruffle/os/task_error.proto\"t\n\x04Task\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\"\n\x04info\x18\x02 \x01(\x0b\x32\x14.truffle.os.TaskInfo\x12\x12\n\ntask_flags\x18\x03 \x01(\r\x12#\n\x05nodes\x18\x05 \x03(\x0b\x32\x14.truffle.os.TaskNode\",\n\tTasksList\x12\x1f\n\x05tasks\x18\x01 \x03(\x0b\x32\x10.truffle.os.Task\"\xc0\x01\n\x08TaskNode\x12\n\n\x02id\x18\x01 \x01(\r\x12\x11\n\tparent_id\x18\x02 \x01(\r\x12\x11\n\tchild_ids\x18\x03 \x03(\r\x12+\n\x05\x66iles\x18\x08 \x03(\x0b\x32\x1c.truffle.common.AttachedFile\x12 \n\x04step\x18\t \x01(\x0b\x32\x10.truffle.os.StepH\x00\x12+\n\x08user_msg\x18\n \x01(\x0b\x32\x17.truffle.os.UserMessageH\x00\x42\x06\n\x04item\"\xaf\x01\n\x10TaskStreamUpdate\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\'\n\x04info\x18\x03 \x01(\x0b\x32\x14.truffle.os.TaskInfoH\x00\x88\x01\x01\x12#\n\x05nodes\x18\x02 \x03(\x0b\x32\x14.truffle.os.TaskNode\x12)\n\x05\x65rror\x18\x05 \x01(\x0b\x32\x15.truffle.os.TaskErrorH\x01\x88\x01\x01\x42\x07\n\x05_infoB\x08\n\x06_errorP\x02P\x03P\x04\x62\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x15truffle/os/task.proto\x12\ntruffle.os\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x19truffle/common/file.proto\x1a\x1atruffle/os/task_info.proto\x1a#truffle/os/task_user_response.proto\x1a\x1atruffle/os/task_step.proto\x1a\x1btruffle/os/task_error.proto\"t\n\x04Task\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\"\n\x04info\x18\x02 \x01(\x0b\x32\x14.truffle.os.TaskInfo\x12\x12\n\ntask_flags\x18\x03 \x01(\r\x12#\n\x05nodes\x18\x05 \x03(\x0b\x32\x14.truffle.os.TaskNode\",\n\tTasksList\x12\x1f\n\x05tasks\x18\x01 \x03(\x0b\x32\x10.truffle.os.Task\"\xf0\x01\n\x08TaskNode\x12\n\n\x02id\x18\x01 \x01(\r\x12\x11\n\tparent_id\x18\x02 \x01(\r\x12\x11\n\tchild_ids\x18\x03 \x03(\r\x12.\n\ncreated_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x05\x66iles\x18\x08 \x03(\x0b\x32\x1c.truffle.common.AttachedFile\x12 \n\x04step\x18\t \x01(\x0b\x32\x10.truffle.os.StepH\x00\x12+\n\x08user_msg\x18\n \x01(\x0b\x32\x17.truffle.os.UserMessageH\x00\x42\x06\n\x04item\"C\n\x17StreamingTaskStepResult\x12\x0f\n\x07node_id\x18\x01 \x01(\r\x12\x17\n\x0fpartial_content\x18\x02 \x01(\t\"\x92\x02\n\x10TaskStreamUpdate\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\'\n\x04info\x18\x03 \x01(\x0b\x32\x14.truffle.os.TaskInfoH\x00\x88\x01\x01\x12#\n\x05nodes\x18\x02 \x03(\x0b\x32\x14.truffle.os.TaskNode\x12)\n\x05\x65rror\x18\x05 \x01(\x0b\x32\x15.truffle.os.TaskErrorH\x01\x88\x01\x01\x12G\n\x15streaming_step_result\x18\x06 \x01(\x0b\x32#.truffle.os.StreamingTaskStepResultH\x02\x88\x01\x01\x42\x07\n\x05_infoB\x08\n\x06_errorB\x18\n\x16_streaming_step_resultP\x02P\x03P\x04\x62\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -49,7 +49,9 @@ _globals['_TASKSLIST']._serialized_start=337 _globals['_TASKSLIST']._serialized_end=381 _globals['_TASKNODE']._serialized_start=384 - _globals['_TASKNODE']._serialized_end=576 - _globals['_TASKSTREAMUPDATE']._serialized_start=579 - 
_globals['_TASKSTREAMUPDATE']._serialized_end=754 + _globals['_TASKNODE']._serialized_end=624 + _globals['_STREAMINGTASKSTEPRESULT']._serialized_start=626 + _globals['_STREAMINGTASKSTEPRESULT']._serialized_end=693 + _globals['_TASKSTREAMUPDATE']._serialized_start=696 + _globals['_TASKSTREAMUPDATE']._serialized_end=970 # @@protoc_insertion_point(module_scope) diff --git a/truffle/os/task_pb2.pyi b/truffle/os/task_pb2.pyi index 2778deb..322c076 100644 --- a/truffle/os/task_pb2.pyi +++ b/truffle/os/task_pb2.pyi @@ -1,3 +1,5 @@ +import datetime + from google.protobuf import timestamp_pb2 as _timestamp_pb2 from truffle.common import file_pb2 as _file_pb2 from truffle.os import task_info_pb2 as _task_info_pb2 @@ -42,29 +44,41 @@ class TasksList(_message.Message): def __init__(self, tasks: _Optional[_Iterable[_Union[Task, _Mapping]]] = ...) -> None: ... class TaskNode(_message.Message): - __slots__ = ("id", "parent_id", "child_ids", "files", "step", "user_msg") + __slots__ = ("id", "parent_id", "child_ids", "created_at", "files", "step", "user_msg") ID_FIELD_NUMBER: _ClassVar[int] PARENT_ID_FIELD_NUMBER: _ClassVar[int] CHILD_IDS_FIELD_NUMBER: _ClassVar[int] + CREATED_AT_FIELD_NUMBER: _ClassVar[int] FILES_FIELD_NUMBER: _ClassVar[int] STEP_FIELD_NUMBER: _ClassVar[int] USER_MSG_FIELD_NUMBER: _ClassVar[int] id: int parent_id: int child_ids: _containers.RepeatedScalarFieldContainer[int] + created_at: _timestamp_pb2.Timestamp files: _containers.RepeatedCompositeFieldContainer[_file_pb2.AttachedFile] step: _task_step_pb2.Step user_msg: _task_user_response_pb2.UserMessage - def __init__(self, id: _Optional[int] = ..., parent_id: _Optional[int] = ..., child_ids: _Optional[_Iterable[int]] = ..., files: _Optional[_Iterable[_Union[_file_pb2.AttachedFile, _Mapping]]] = ..., step: _Optional[_Union[_task_step_pb2.Step, _Mapping]] = ..., user_msg: _Optional[_Union[_task_user_response_pb2.UserMessage, _Mapping]] = ...) -> None: ... 
+ def __init__(self, id: _Optional[int] = ..., parent_id: _Optional[int] = ..., child_ids: _Optional[_Iterable[int]] = ..., created_at: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., files: _Optional[_Iterable[_Union[_file_pb2.AttachedFile, _Mapping]]] = ..., step: _Optional[_Union[_task_step_pb2.Step, _Mapping]] = ..., user_msg: _Optional[_Union[_task_user_response_pb2.UserMessage, _Mapping]] = ...) -> None: ... + +class StreamingTaskStepResult(_message.Message): + __slots__ = ("node_id", "partial_content") + NODE_ID_FIELD_NUMBER: _ClassVar[int] + PARTIAL_CONTENT_FIELD_NUMBER: _ClassVar[int] + node_id: int + partial_content: str + def __init__(self, node_id: _Optional[int] = ..., partial_content: _Optional[str] = ...) -> None: ... class TaskStreamUpdate(_message.Message): - __slots__ = ("task_id", "info", "nodes", "error") + __slots__ = ("task_id", "info", "nodes", "error", "streaming_step_result") TASK_ID_FIELD_NUMBER: _ClassVar[int] INFO_FIELD_NUMBER: _ClassVar[int] NODES_FIELD_NUMBER: _ClassVar[int] ERROR_FIELD_NUMBER: _ClassVar[int] + STREAMING_STEP_RESULT_FIELD_NUMBER: _ClassVar[int] task_id: str info: _task_info_pb2.TaskInfo nodes: _containers.RepeatedCompositeFieldContainer[TaskNode] error: _task_error_pb2.TaskError - def __init__(self, task_id: _Optional[str] = ..., info: _Optional[_Union[_task_info_pb2.TaskInfo, _Mapping]] = ..., nodes: _Optional[_Iterable[_Union[TaskNode, _Mapping]]] = ..., error: _Optional[_Union[_task_error_pb2.TaskError, _Mapping]] = ...) -> None: ... + streaming_step_result: StreamingTaskStepResult + def __init__(self, task_id: _Optional[str] = ..., info: _Optional[_Union[_task_info_pb2.TaskInfo, _Mapping]] = ..., nodes: _Optional[_Iterable[_Union[TaskNode, _Mapping]]] = ..., error: _Optional[_Union[_task_error_pb2.TaskError, _Mapping]] = ..., streaming_step_result: _Optional[_Union[StreamingTaskStepResult, _Mapping]] = ...) -> None: ... 
diff --git a/truffle/os/task_pb2_grpc.py b/truffle/os/task_pb2_grpc.py index 563c6fb..61b5ff6 100644 --- a/truffle/os/task_pb2_grpc.py +++ b/truffle/os/task_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/task_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/task_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/task_queries_pb2.py b/truffle/os/task_queries_pb2.py index 8c23762..8a6eb9e 100644 --- a/truffle/os/task_queries_pb2.py +++ b/truffle/os/task_queries_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/task_queries.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/task_queries.proto' ) diff --git a/truffle/os/task_queries_pb2_grpc.py b/truffle/os/task_queries_pb2_grpc.py index bd31e7f..2d4e661 100644 --- a/truffle/os/task_queries_pb2_grpc.py +++ b/truffle/os/task_queries_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/task_queries_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/task_queries_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/task_search_pb2.py b/truffle/os/task_search_pb2.py index 2fcf123..5af5440 100644 --- a/truffle/os/task_search_pb2.py +++ b/truffle/os/task_search_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/task_search.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/task_search.proto' ) diff --git a/truffle/os/task_search_pb2.pyi b/truffle/os/task_search_pb2.pyi index 789a436..4c64b2f 100644 --- a/truffle/os/task_search_pb2.pyi +++ b/truffle/os/task_search_pb2.pyi @@ -1,3 +1,5 @@ +import datetime + from google.protobuf import timestamp_pb2 as _timestamp_pb2 from truffle.os import task_info_pb2 as _task_info_pb2 from google.protobuf.internal import containers as _containers @@ -33,7 +35,7 @@ class TaskSearchResult(_message.Message): task_info: _task_info_pb2.TaskInfo timestamp: _timestamp_pb2.Timestamp content: TaskSearchResult.TaskSearchContent - def __init__(self, task_id: _Optional[str] = ..., task_info: _Optional[_Union[_task_info_pb2.TaskInfo, _Mapping]] = ..., timestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., content: _Optional[_Union[TaskSearchResult.TaskSearchContent, _Mapping]] = ...) -> None: ... + def __init__(self, task_id: _Optional[str] = ..., task_info: _Optional[_Union[_task_info_pb2.TaskInfo, _Mapping]] = ..., timestamp: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., content: _Optional[_Union[TaskSearchResult.TaskSearchContent, _Mapping]] = ...) -> None: ... 
class SearchTasksResponse(_message.Message): __slots__ = ("total_results", "current_offset", "results") diff --git a/truffle/os/task_search_pb2_grpc.py b/truffle/os/task_search_pb2_grpc.py index 022f753..15dc29f 100644 --- a/truffle/os/task_search_pb2_grpc.py +++ b/truffle/os/task_search_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/task_search_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/task_search_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/task_step_pb2.py b/truffle/os/task_step_pb2.py index 3f55d2e..b1cf1da 100644 --- a/truffle/os/task_step_pb2.py +++ b/truffle/os/task_step_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/task_step.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/task_step.proto' ) @@ -24,29 +24,27 @@ from truffle.os import task_user_response_pb2 as truffle_dot_os_dot_task__user__response__pb2 from truffle.common import content_pb2 as truffle_dot_common_dot_content__pb2 -from truffle.infer import usage_pb2 as truffle_dot_infer_dot_usage__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from truffle.common.content_pb2 import * -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1atruffle/os/task_step.proto\x12\ntruffle.os\x1a#truffle/os/task_user_response.proto\x1a\x1ctruffle/common/content.proto\x1a\x19truffle/infer/usage.proto\"\x8d\x08\n\x04Step\x12.\n\x05state\x18\x01 \x01(\x0e\x32\x1a.truffle.os.Step.StepStateH\x00\x88\x01\x01\x12;\n\ruser_response\x18\n \x01(\x0b\x32\x1f.truffle.os.PendingUserResponseH\x01\x88\x01\x01\x12+\n\x08thinking\x18\x02 \x01(\x0b\x32\x19.truffle.os.Step.Thinking\x12-\n\ntool_calls\x18\x03 \x03(\x0b\x32\x19.truffle.os.Step.ToolCall\x12+\n\texecution\x18\x04 \x01(\x0b\x32\x18.truffle.os.Step.Execute\x12)\n\x07results\x18\x05 \x01(\x0b\x32\x18.truffle.os.Step.Results\x12)\n\x07metrics\x18\x06 \x01(\x0b\x32\x18.truffle.os.Step.Metrics\x12\x17\n\nmodel_uuid\x18\x07 \x01(\tH\x02\x88\x01\x01\x1a\x93\x01\n\x08Thinking\x12\x12\n\ncot_chunks\x18\x01 \x03(\t\x12\x15\n\rcot_summaries\x18\x02 \x03(\t\x12\x17\n\nraw_output\x18\x03 \x01(\tH\x00\x88\x01\x01\x12\x1e\n\x11thinking_finished\x18\x04 \x01(\x08H\x01\x88\x01\x01\x42\r\n\x0b_raw_outputB\x14\n\x12_thinking_finished\x1aR\n\x08ToolCall\x12\x16\n\ttool_name\x18\x01 
\x01(\tH\x00\x88\x01\x01\x12\x14\n\x07summary\x18\x02 \x01(\tH\x01\x88\x01\x01\x42\x0c\n\n_tool_nameB\n\n\x08_summary\x1a\x1f\n\x07\x45xecute\x12\x14\n\x0ctool_updates\x18\x01 \x03(\t\x1a\xbd\x01\n\x07Results\x12\x14\n\x07summary\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x14\n\x07\x63ontent\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x1f\n\x12\x63ontent_incomplete\x18\x03 \x01(\x08H\x02\x88\x01\x01\x12.\n\x03web\x18\x04 \x01(\x0b\x32\x1c.truffle.common.WebComponentH\x03\x88\x01\x01\x42\n\n\x08_summaryB\n\n\x08_contentB\x15\n\x13_content_incompleteB\x06\n\x04_web\x1aQ\n\x07Metrics\x12\x32\n\x0finference_usage\x18\x01 \x01(\x0b\x32\x14.truffle.infer.UsageH\x00\x88\x01\x01\x42\x12\n\x10_inference_usage\"W\n\tStepState\x12\x10\n\x0cSTEP_INVALID\x10\x00\x12\x13\n\x0fSTEP_GENERATING\x10\x01\x12\x12\n\x0eSTEP_EXECUTING\x10\x02\x12\x0f\n\x0bSTEP_RESULT\x10\x03\x42\x08\n\x06_stateB\x10\n\x0e_user_responseB\r\n\x0b_model_uuidP\x01\x62\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1atruffle/os/task_step.proto\x12\ntruffle.os\x1a#truffle/os/task_user_response.proto\x1a\x1ctruffle/common/content.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xc5\x05\n\x04Step\x12.\n\x05state\x18\x01 \x01(\x0e\x32\x1a.truffle.os.Step.StepStateH\x00\x88\x01\x01\x12;\n\ruser_response\x18\n \x01(\x0b\x32\x1f.truffle.os.PendingUserResponseH\x01\x88\x01\x01\x12+\n\x08thinking\x18\x02 \x01(\x0b\x32\x19.truffle.os.Step.Thinking\x12-\n\ntool_calls\x18\x03 \x03(\x0b\x32\x19.truffle.os.Step.ToolCall\x12+\n\texecution\x18\x04 \x01(\x0b\x32\x18.truffle.os.Step.Execute\x12)\n\x07results\x18\x05 \x01(\x0b\x32\x18.truffle.os.Step.Results\x12\x17\n\nmodel_uuid\x18\x07 \x01(\tH\x02\x88\x01\x01\x1a\x35\n\x08Thinking\x12\x12\n\ncot_chunks\x18\x01 \x03(\t\x12\x15\n\rcot_summaries\x18\x02 \x03(\t\x1an\n\x08ToolCall\x12\x16\n\ttool_name\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x14\n\x07summary\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04\x61rgs\x18\x03 
\x01(\tH\x02\x88\x01\x01\x42\x0c\n\n_tool_nameB\n\n\x08_summaryB\x07\n\x05_args\x1a\t\n\x07\x45xecute\x1aM\n\x07Results\x12\x14\n\x07summary\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x14\n\x07\x63ontent\x18\x02 \x01(\tH\x01\x88\x01\x01\x42\n\n\x08_summaryB\n\n\x08_content\"W\n\tStepState\x12\x10\n\x0cSTEP_INVALID\x10\x00\x12\x13\n\x0fSTEP_GENERATING\x10\x01\x12\x12\n\x0eSTEP_EXECUTING\x10\x02\x12\x0f\n\x0bSTEP_RESULT\x10\x03\x42\x08\n\x06_stateB\x10\n\x0e_user_responseB\r\n\x0b_model_uuidP\x01\x62\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'truffle.os.task_step_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: DESCRIPTOR._loaded_options = None - _globals['_STEP']._serialized_start=137 - _globals['_STEP']._serialized_end=1174 - _globals['_STEP_THINKING']._serialized_start=503 - _globals['_STEP_THINKING']._serialized_end=650 - _globals['_STEP_TOOLCALL']._serialized_start=652 - _globals['_STEP_TOOLCALL']._serialized_end=734 - _globals['_STEP_EXECUTE']._serialized_start=736 - _globals['_STEP_EXECUTE']._serialized_end=767 - _globals['_STEP_RESULTS']._serialized_start=770 - _globals['_STEP_RESULTS']._serialized_end=959 - _globals['_STEP_METRICS']._serialized_start=961 - _globals['_STEP_METRICS']._serialized_end=1042 - _globals['_STEP_STEPSTATE']._serialized_start=1044 - _globals['_STEP_STEPSTATE']._serialized_end=1131 + _globals['_STEP']._serialized_start=143 + _globals['_STEP']._serialized_end=852 + _globals['_STEP_THINKING']._serialized_start=465 + _globals['_STEP_THINKING']._serialized_end=518 + _globals['_STEP_TOOLCALL']._serialized_start=520 + _globals['_STEP_TOOLCALL']._serialized_end=630 + _globals['_STEP_EXECUTE']._serialized_start=632 + _globals['_STEP_EXECUTE']._serialized_end=641 + _globals['_STEP_RESULTS']._serialized_start=643 + _globals['_STEP_RESULTS']._serialized_end=720 + _globals['_STEP_STEPSTATE']._serialized_start=722 + 
_globals['_STEP_STEPSTATE']._serialized_end=809 # @@protoc_insertion_point(module_scope) diff --git a/truffle/os/task_step_pb2.pyi b/truffle/os/task_step_pb2.pyi index 5b4164a..9f4c8da 100644 --- a/truffle/os/task_step_pb2.pyi +++ b/truffle/os/task_step_pb2.pyi @@ -1,6 +1,6 @@ from truffle.os import task_user_response_pb2 as _task_user_response_pb2 from truffle.common import content_pb2 as _content_pb2 -from truffle.infer import usage_pb2 as _usage_pb2 +from google.protobuf import timestamp_pb2 as _timestamp_pb2 from google.protobuf.internal import containers as _containers from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper from google.protobuf import descriptor as _descriptor @@ -13,7 +13,7 @@ from truffle.common.content_pb2 import WebComponent as WebComponent DESCRIPTOR: _descriptor.FileDescriptor class Step(_message.Message): - __slots__ = ("state", "user_response", "thinking", "tool_calls", "execution", "results", "metrics", "model_uuid") + __slots__ = ("state", "user_response", "thinking", "tool_calls", "execution", "results", "model_uuid") class StepState(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): __slots__ = () STEP_INVALID: _ClassVar[Step.StepState] @@ -25,51 +25,37 @@ class Step(_message.Message): STEP_EXECUTING: Step.StepState STEP_RESULT: Step.StepState class Thinking(_message.Message): - __slots__ = ("cot_chunks", "cot_summaries", "raw_output", "thinking_finished") + __slots__ = ("cot_chunks", "cot_summaries") COT_CHUNKS_FIELD_NUMBER: _ClassVar[int] COT_SUMMARIES_FIELD_NUMBER: _ClassVar[int] - RAW_OUTPUT_FIELD_NUMBER: _ClassVar[int] - THINKING_FINISHED_FIELD_NUMBER: _ClassVar[int] cot_chunks: _containers.RepeatedScalarFieldContainer[str] cot_summaries: _containers.RepeatedScalarFieldContainer[str] - raw_output: str - thinking_finished: bool - def __init__(self, cot_chunks: _Optional[_Iterable[str]] = ..., cot_summaries: _Optional[_Iterable[str]] = ..., raw_output: _Optional[str] = ..., thinking_finished: bool = ...) 
-> None: ... + def __init__(self, cot_chunks: _Optional[_Iterable[str]] = ..., cot_summaries: _Optional[_Iterable[str]] = ...) -> None: ... class ToolCall(_message.Message): - __slots__ = ("tool_name", "summary") + __slots__ = ("tool_name", "summary", "args") TOOL_NAME_FIELD_NUMBER: _ClassVar[int] SUMMARY_FIELD_NUMBER: _ClassVar[int] + ARGS_FIELD_NUMBER: _ClassVar[int] tool_name: str summary: str - def __init__(self, tool_name: _Optional[str] = ..., summary: _Optional[str] = ...) -> None: ... + args: str + def __init__(self, tool_name: _Optional[str] = ..., summary: _Optional[str] = ..., args: _Optional[str] = ...) -> None: ... class Execute(_message.Message): - __slots__ = ("tool_updates",) - TOOL_UPDATES_FIELD_NUMBER: _ClassVar[int] - tool_updates: _containers.RepeatedScalarFieldContainer[str] - def __init__(self, tool_updates: _Optional[_Iterable[str]] = ...) -> None: ... + __slots__ = () + def __init__(self) -> None: ... class Results(_message.Message): - __slots__ = ("summary", "content", "content_incomplete", "web") + __slots__ = ("summary", "content") SUMMARY_FIELD_NUMBER: _ClassVar[int] CONTENT_FIELD_NUMBER: _ClassVar[int] - CONTENT_INCOMPLETE_FIELD_NUMBER: _ClassVar[int] - WEB_FIELD_NUMBER: _ClassVar[int] summary: str content: str - content_incomplete: bool - web: _content_pb2.WebComponent - def __init__(self, summary: _Optional[str] = ..., content: _Optional[str] = ..., content_incomplete: bool = ..., web: _Optional[_Union[_content_pb2.WebComponent, _Mapping]] = ...) -> None: ... - class Metrics(_message.Message): - __slots__ = ("inference_usage",) - INFERENCE_USAGE_FIELD_NUMBER: _ClassVar[int] - inference_usage: _usage_pb2.Usage - def __init__(self, inference_usage: _Optional[_Union[_usage_pb2.Usage, _Mapping]] = ...) -> None: ... + def __init__(self, summary: _Optional[str] = ..., content: _Optional[str] = ...) -> None: ... 
STATE_FIELD_NUMBER: _ClassVar[int] USER_RESPONSE_FIELD_NUMBER: _ClassVar[int] THINKING_FIELD_NUMBER: _ClassVar[int] TOOL_CALLS_FIELD_NUMBER: _ClassVar[int] EXECUTION_FIELD_NUMBER: _ClassVar[int] RESULTS_FIELD_NUMBER: _ClassVar[int] - METRICS_FIELD_NUMBER: _ClassVar[int] MODEL_UUID_FIELD_NUMBER: _ClassVar[int] state: Step.StepState user_response: _task_user_response_pb2.PendingUserResponse @@ -77,6 +63,5 @@ class Step(_message.Message): tool_calls: _containers.RepeatedCompositeFieldContainer[Step.ToolCall] execution: Step.Execute results: Step.Results - metrics: Step.Metrics model_uuid: str - def __init__(self, state: _Optional[_Union[Step.StepState, str]] = ..., user_response: _Optional[_Union[_task_user_response_pb2.PendingUserResponse, _Mapping]] = ..., thinking: _Optional[_Union[Step.Thinking, _Mapping]] = ..., tool_calls: _Optional[_Iterable[_Union[Step.ToolCall, _Mapping]]] = ..., execution: _Optional[_Union[Step.Execute, _Mapping]] = ..., results: _Optional[_Union[Step.Results, _Mapping]] = ..., metrics: _Optional[_Union[Step.Metrics, _Mapping]] = ..., model_uuid: _Optional[str] = ...) -> None: ... + def __init__(self, state: _Optional[_Union[Step.StepState, str]] = ..., user_response: _Optional[_Union[_task_user_response_pb2.PendingUserResponse, _Mapping]] = ..., thinking: _Optional[_Union[Step.Thinking, _Mapping]] = ..., tool_calls: _Optional[_Iterable[_Union[Step.ToolCall, _Mapping]]] = ..., execution: _Optional[_Union[Step.Execute, _Mapping]] = ..., results: _Optional[_Union[Step.Results, _Mapping]] = ..., model_uuid: _Optional[str] = ...) -> None: ... 
diff --git a/truffle/os/task_step_pb2_grpc.py b/truffle/os/task_step_pb2_grpc.py index 1937b34..65fe0d4 100644 --- a/truffle/os/task_step_pb2_grpc.py +++ b/truffle/os/task_step_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/task_step_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/task_step_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/task_target_pb2.py b/truffle/os/task_target_pb2.py index 6596839..2d85791 100644 --- a/truffle/os/task_target_pb2.py +++ b/truffle/os/task_target_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/task_target.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/task_target.proto' ) diff --git a/truffle/os/task_target_pb2_grpc.py b/truffle/os/task_target_pb2_grpc.py index 400a908..daa929f 100644 --- a/truffle/os/task_target_pb2_grpc.py +++ b/truffle/os/task_target_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/task_target_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/task_target_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/task_user_response_pb2.py b/truffle/os/task_user_response_pb2.py index 181a0e5..238fc45 100644 --- a/truffle/os/task_user_response_pb2.py +++ b/truffle/os/task_user_response_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/task_user_response.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/task_user_response.proto' ) diff --git a/truffle/os/task_user_response_pb2_grpc.py b/truffle/os/task_user_response_pb2_grpc.py index f23ce5a..5df8dd7 100644 --- a/truffle/os/task_user_response_pb2_grpc.py +++ b/truffle/os/task_user_response_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -17,7 +17,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/task_user_response_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/task_user_response_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' diff --git a/truffle/os/truffleos_pb2.py b/truffle/os/truffleos_pb2.py index 936eea4..5d300c4 100644 --- a/truffle/os/truffleos_pb2.py +++ b/truffle/os/truffleos_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# NO CHECKED-IN PROTOBUF GENCODE # source: truffle/os/truffleos.proto -# Protobuf Python Version: 6.30.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -12,8 +12,8 @@ _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, - 30, - 0, + 31, + 1, '', 'truffle/os/truffleos.proto' ) @@ -98,8 +98,13 @@ from truffle.os import task_search_pb2 as truffle_dot_os_dot_task__search__pb2 from truffle.os import builder_pb2 as truffle_dot_os_dot_builder__pb2 from truffle.os import background_feed_queries_pb2 as truffle_dot_os_dot_background__feed__queries__pb2 +from truffle.os import proactivity_pb2 as truffle_dot_os_dot_proactivity__pb2 from truffle.os import app_queries_pb2 as truffle_dot_os_dot_app__queries__pb2 from truffle.os import installer_pb2 as truffle_dot_os_dot_installer__pb2 +try: + truffle_dot_app_dot_app__pb2 = truffle_dot_os_dot_installer__pb2.truffle_dot_app_dot_app__pb2 +except AttributeError: + truffle_dot_app_dot_app__pb2 = truffle_dot_os_dot_installer__pb2.truffle.app.app_pb2 from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from truffle.os.hardware_stats_pb2 import * @@ -117,9 +122,10 @@ from truffle.os.task_search_pb2 import * from truffle.os.builder_pb2 import * from truffle.os.background_feed_queries_pb2 import * +from truffle.os.proactivity_pb2 import * from truffle.os.app_queries_pb2 import * -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1atruffle/os/truffleos.proto\x12\ntruffle.os\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1ftruffle/os/hardware_stats.proto\x1a!truffle/os/hardware_control.proto\x1a 
truffle/os/system_settings.proto\x1a\x1ctruffle/os/system_info.proto\x1a\x1dtruffle/os/notification.proto\x1a\x1ftruffle/os/client_session.proto\x1a\x1ctruffle/os/client_user.proto\x1a\x1dtruffle/os/client_state.proto\x1a\x15truffle/os/task.proto\x1a\x1dtruffle/os/task_queries.proto\x1a\x1dtruffle/os/task_actions.proto\x1a#truffle/os/task_user_response.proto\x1a\x1ctruffle/os/task_search.proto\x1a\x18truffle/os/builder.proto\x1a(truffle/os/background_feed_queries.proto\x1a\x1ctruffle/os/app_queries.proto\x1a\x1atruffle/os/installer.proto\x1a\x1cgoogle/api/annotations.proto2\xc9$\n\tTruffleOS\x12m\n\x0e\x41pps_DeleteApp\x12\x1c.truffle.os.DeleteAppRequest\x1a\x1d.truffle.os.DeleteAppResponse\"\x1e\x82\xd3\xe4\x93\x02\x18*\x16/v1/os/apps/{app_uuid}\x12\x81\x01\n\x12\x41pps_GetBackground\x12$.truffle.os.GetBackgroundAppsRequest\x1a%.truffle.os.GetBackgroundAppsResponse\"\x1e\x82\xd3\xe4\x93\x02\x18\x12\x16/v1/os/apps/background\x12\x81\x01\n\x12\x41pps_GetForeground\x12$.truffle.os.GetForegroundAppsRequest\x1a%.truffle.os.GetForegroundAppsResponse\"\x1e\x82\xd3\xe4\x93\x02\x18\x12\x16/v1/os/apps/foreground\x12\x65\n\x0b\x41pps_GetAll\x12\x1d.truffle.os.GetAllAppsRequest\x1a\x1e.truffle.os.GetAllAppsResponse\"\x17\x82\xd3\xe4\x93\x02\x11\x12\x0f/v1/os/apps/all\x12T\n\x0f\x41pps_InstallApp\x12\x1d.truffle.os.AppInstallRequest\x1a\x1e.truffle.os.AppInstallResponse(\x01\x30\x01\x12\x8d\x01\n\x12\x42\x61\x63kground_GetFeed\x12$.truffle.os.GetBackgroundFeedRequest\x1a%.truffle.os.GetBackgroundFeedResponse\"*\x82\xd3\xe4\x93\x02$\"\x1f/v1/os/apps/background/feed:get:\x01*\x12\xa8\x01\n\x1f\x42\x61\x63kground_GetLatestFeedEntryID\x12\'.truffle.os.GetLatestFeedEntryIDRequest\x1a(.truffle.os.GetLatestFeedEntryIDResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1/os/apps/background/latestfeedid:get:\x01*\x12\xa6\x01\n\x18\x42\x61\x63kground_LikeFeedEntry\x12*.truffle.os.LikeBackgroundFeedEntryRequest\x1a+.truffle.os.LikeBackgroundFeedEntryResponse\"1\x82\xd3\xe4\x93\x02+\"&/v1/os/apps/backgr
ound/feed/entry:like:\x01*\x12\xa7\x01\n\x1d\x42\x61\x63kground_SubmitFeedFeedback\x12).truffle.os.BackgroundFeedFeedbackRequest\x1a*.truffle.os.BackgroundFeedFeedbackResponse\"/\x82\xd3\xe4\x93\x02)\"$/v1/os/apps/background/feed:feedback:\x01*\x12\x90\x01\n\x19\x42uilder_StartBuildSession\x12$.truffle.os.StartBuildSessionRequest\x1a%.truffle.os.StartBuildSessionResponse\"&\x82\xd3\xe4\x93\x02 \"\x1b/v1/os/builder/builds:start:\x01*\x12\x94\x01\n\x1a\x42uilder_FinishBuildSession\x12%.truffle.os.FinishBuildSessionRequest\x1a&.truffle.os.FinishBuildSessionResponse\"\'\x82\xd3\xe4\x93\x02!\"\x1c/v1/os/builder/builds:finish:\x01*\x12\x83\x01\n\x16\x43lient_RegisterNewUser\x12\".truffle.os.RegisterNewUserRequest\x1a#.truffle.os.RegisterNewUserResponse\" \x82\xd3\xe4\x93\x02\x1a\"\x15/v1/os/users:register:\x01*\x12\x8f\x01\n\x19\x43lient_RegisterNewSession\x12%.truffle.os.RegisterNewSessionRequest\x1a&.truffle.os.RegisterNewSessionResponse\"#\x82\xd3\xe4\x93\x02\x1d\"\x18/v1/os/sessions:register:\x01*\x12\x82\x01\n\x15\x43lient_UserIDForToken\x12!.truffle.os.UserIDForTokenRequest\x1a\".truffle.os.UserIDForTokenResponse\"\"\x82\xd3\xe4\x93\x02\x1c\x12\x1a/v1/os/tokens/{token}/user\x12\x8b\x01\n#Client_VerifyNewSessionRegistration\x12#.truffle.os.VerifyNewSessionRequest\x1a\x1c.truffle.os.NewSessionStatus\"!\x82\xd3\xe4\x93\x02\x1b\"\x16/v1/os/sessions:verify:\x01*\x12x\n\x1b\x43lient_GetUserRecoveryCodes\x12\x16.google.protobuf.Empty\x1a\x1d.truffle.os.UserRecoveryCodes\"\"\x82\xd3\xe4\x93\x02\x1c\x12\x1a/v1/os/users/recoveryCodes\x12\x87\x01\n\x18\x43lient_UpdateClientState\x12$.truffle.os.UpdateClientStateRequest\x1a%.truffle.os.UpdateClientStateResponse\"\x1e\x82\xd3\xe4\x93\x02\x18\"\x13/v1/os/client/state:\x01*\x12\x81\x01\n\x15\x43lient_GetClientState\x12!.truffle.os.GetClientStateRequest\x1a\".truffle.os.GetClientStateResponse\"!\x82\xd3\xe4\x93\x02\x1b\x12\x19/v1/os/client/state/{key}\x12\x88\x01\n\x19\x43lient_GetAllClientStates\x12%.truffle.os.GetAllClientStatesR
equest\x1a&.truffle.os.GetAllClientStatesResponse\"\x1c\x82\xd3\xe4\x93\x02\x16\x12\x14/v1/os/client/states\x12\x8b\x01\n\x18SubscribeToNotifications\x12+.truffle.os.SubscribeToNotificationsRequest\x1a\x18.truffle.os.Notification\"&\x82\xd3\xe4\x93\x02 \x12\x1e/v1/os/notifications:subscribe0\x01\x12o\n\x11Hardware_GetStats\x12 .truffle.os.HardwareStatsRequest\x1a\x19.truffle.os.HardwareStats\"\x1d\x82\xd3\xe4\x93\x02\x17\x12\x15/v1/os/hardware/stats\x12|\n\x15Hardware_StatsUpdates\x12 .truffle.os.HardwareStatsRequest\x1a\x19.truffle.os.HardwareStats\"$\x82\xd3\xe4\x93\x02\x1e\x12\x1c/v1/os/hardware/stats:stream0\x01\x12\x93\x01\n\x15Hardware_PowerControl\x12\'.truffle.os.HardwarePowerControlRequest\x1a(.truffle.os.HardwarePowerControlResponse\"\'\x82\xd3\xe4\x93\x02!\"\x1c/v1/os/hardware/powerControl:\x01*\x12i\n\x0cSystem_GetID\x12\x1e.truffle.os.SystemGetIDRequest\x1a\x1f.truffle.os.SystemGetIDResponse\"\x18\x82\xd3\xe4\x93\x02\x12\x12\x10/v1/os/system/id\x12h\n\x12System_GetSettings\x12\x16.google.protobuf.Empty\x1a\x1a.truffle.os.SystemSettings\"\x1e\x82\xd3\xe4\x93\x02\x18\x12\x16/v1/os/system/settings\x12o\n\x12System_SetSettings\x12\x1a.truffle.os.SystemSettings\x1a\x1a.truffle.os.SystemSettings\"!\x82\xd3\xe4\x93\x02\x1b\x32\x16/v1/os/system/settings:\x01*\x12\\\n\x0eSystem_GetInfo\x12\x16.google.protobuf.Empty\x1a\x16.truffle.os.SystemInfo\"\x1a\x82\xd3\xe4\x93\x02\x14\x12\x12/v1/os/system/info\x12\x8e\x01\n\x15System_CheckForUpdate\x12\'.truffle.os.SystemCheckForUpdateRequest\x1a(.truffle.os.SystemCheckForUpdateResponse\"\"\x82\xd3\xe4\x93\x02\x1c\x12\x1a/v1/os/system/check-update\x12j\n\rTask_OpenTask\x12\x1b.truffle.os.OpenTaskRequest\x1a\x1c.truffle.os.TaskStreamUpdate\"\x1c\x82\xd3\xe4\x93\x02\x16\"\x11/v1/os/tasks:open:\x01*0\x01\x12\x83\x01\n\x12Task_InterruptTask\x12 .truffle.os.InterruptTaskRequest\x1a\x1e.truffle.os.TaskActionResponse\"+\x82\xd3\xe4\x93\x02%\" /v1/os/tasks/{task_id}:interrupt:\x01*\x12\x81\x01\n\x12Task_RespondToTask\x12 
.truffle.os.RespondToTaskRequest\x1a\x1e.truffle.os.TaskActionResponse\")\x82\xd3\xe4\x93\x02#\"\x1e/v1/os/tasks/{task_id}:respond:\x01*\x12\x9e\x01\n\x15Task_SetAvailableApps\x12\'.truffle.os.TaskSetAvailableAppsRequest\x1a(.truffle.os.TaskSetAvailableAppsResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1/os/tasks/{task_id}:setAvailableApps:\x01*\x12s\n\x10Task_SearchTasks\x12\x1e.truffle.os.SearchTasksRequest\x1a\x1f.truffle.os.SearchTasksResponse\"\x1e\x82\xd3\xe4\x93\x02\x18\"\x13/v1/os/tasks:search:\x01*\x12\x63\n\rTask_GetTasks\x12\x1b.truffle.os.GetTasksRequest\x1a\x10.truffle.os.Task\"!\x82\xd3\xe4\x93\x02\x1b\"\x16/v1/os/tasks:streamGet:\x01*0\x01\x12\x62\n\x0fTask_GetOneTask\x12\x1d.truffle.os.GetOneTaskRequest\x1a\x10.truffle.os.Task\"\x1e\x82\xd3\xe4\x93\x02\x18\x12\x16/v1/os/tasks/{task_id}\x12u\n\x11Task_GetTaskInfos\x12\x1f.truffle.os.GetTaskInfosRequest\x1a .truffle.os.GetTaskInfosResponse\"\x1d\x82\xd3\xe4\x93\x02\x17\"\x12/v1/os/tasks/infos:\x01*P\x01P\x02P\x03P\x04P\x05P\x06P\x07P\x08P\tP\nP\x0bP\x0cP\rP\x0eP\x0fP\x10\x62\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1atruffle/os/truffleos.proto\x12\ntruffle.os\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1ftruffle/os/hardware_stats.proto\x1a!truffle/os/hardware_control.proto\x1a 
truffle/os/system_settings.proto\x1a\x1ctruffle/os/system_info.proto\x1a\x1dtruffle/os/notification.proto\x1a\x1ftruffle/os/client_session.proto\x1a\x1ctruffle/os/client_user.proto\x1a\x1dtruffle/os/client_state.proto\x1a\x15truffle/os/task.proto\x1a\x1dtruffle/os/task_queries.proto\x1a\x1dtruffle/os/task_actions.proto\x1a#truffle/os/task_user_response.proto\x1a\x1ctruffle/os/task_search.proto\x1a\x18truffle/os/builder.proto\x1a(truffle/os/background_feed_queries.proto\x1a\x1ctruffle/os/proactivity.proto\x1a\x1ctruffle/os/app_queries.proto\x1a\x1atruffle/os/installer.proto\x1a\x1cgoogle/api/annotations.proto2\xfd#\n\tTruffleOS\x12m\n\x0e\x41pps_DeleteApp\x12\x1c.truffle.os.DeleteAppRequest\x1a\x1d.truffle.os.DeleteAppResponse\"\x1e\x82\xd3\xe4\x93\x02\x18*\x16/v1/os/apps/{app_uuid}\x12\x65\n\x0b\x41pps_GetAll\x12\x1d.truffle.os.GetAllAppsRequest\x1a\x1e.truffle.os.GetAllAppsResponse\"\x17\x82\xd3\xe4\x93\x02\x11\x12\x0f/v1/os/apps/all\x12T\n\x0f\x41pps_InstallApp\x12\x1d.truffle.os.AppInstallRequest\x1a\x1e.truffle.os.AppInstallResponse(\x01\x30\x01\x12\x8d\x01\n\x12\x42\x61\x63kground_GetFeed\x12$.truffle.os.GetBackgroundFeedRequest\x1a%.truffle.os.GetBackgroundFeedResponse\"*\x82\xd3\xe4\x93\x02$\"\x1f/v1/os/apps/background/feed:get:\x01*\x12\xa8\x01\n\x1f\x42\x61\x63kground_GetLatestFeedEntryID\x12\'.truffle.os.GetLatestFeedEntryIDRequest\x1a(.truffle.os.GetLatestFeedEntryIDResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1/os/apps/background/latestfeedid:get:\x01*\x12\xab\x01\n!Background_ApproveProactiveAction\x12).truffle.os.ApproveProactiveActionRequest\x1a*.truffle.os.ApproveProactiveActionResponse\"/\x82\xd3\xe4\x93\x02)\"$/v1/os/apps/background/approveAction:\x01*\x12\xa7\x01\n 
Background_CancelProactiveAction\x12(.truffle.os.CancelProactiveActionRequest\x1a).truffle.os.CancelProactiveActionResponse\".\x82\xd3\xe4\x93\x02(\"#/v1/os/apps/background/cancelAction:\x01*\x12\x90\x01\n\x19\x42uilder_StartBuildSession\x12$.truffle.os.StartBuildSessionRequest\x1a%.truffle.os.StartBuildSessionResponse\"&\x82\xd3\xe4\x93\x02 \"\x1b/v1/os/builder/builds:start:\x01*\x12\x94\x01\n\x1a\x42uilder_FinishBuildSession\x12%.truffle.os.FinishBuildSessionRequest\x1a&.truffle.os.FinishBuildSessionResponse\"\'\x82\xd3\xe4\x93\x02!\"\x1c/v1/os/builder/builds:finish:\x01*\x12\x83\x01\n\x16\x43lient_RegisterNewUser\x12\".truffle.os.RegisterNewUserRequest\x1a#.truffle.os.RegisterNewUserResponse\" \x82\xd3\xe4\x93\x02\x1a\"\x15/v1/os/users:register:\x01*\x12\x8f\x01\n\x19\x43lient_RegisterNewSession\x12%.truffle.os.RegisterNewSessionRequest\x1a&.truffle.os.RegisterNewSessionResponse\"#\x82\xd3\xe4\x93\x02\x1d\"\x18/v1/os/sessions:register:\x01*\x12\x82\x01\n\x15\x43lient_UserIDForToken\x12!.truffle.os.UserIDForTokenRequest\x1a\".truffle.os.UserIDForTokenResponse\"\"\x82\xd3\xe4\x93\x02\x1c\x12\x1a/v1/os/tokens/{token}/user\x12\x8b\x01\n#Client_VerifyNewSessionRegistration\x12#.truffle.os.VerifyNewSessionRequest\x1a\x1c.truffle.os.NewSessionStatus\"!\x82\xd3\xe4\x93\x02\x1b\"\x16/v1/os/sessions:verify:\x01*\x12x\n\x1b\x43lient_GetUserRecoveryCodes\x12\x16.google.protobuf.Empty\x1a\x1d.truffle.os.UserRecoveryCodes\"\"\x82\xd3\xe4\x93\x02\x1c\x12\x1a/v1/os/users/recoveryCodes\x12\x87\x01\n\x18\x43lient_UpdateClientState\x12$.truffle.os.UpdateClientStateRequest\x1a%.truffle.os.UpdateClientStateResponse\"\x1e\x82\xd3\xe4\x93\x02\x18\"\x13/v1/os/client/state:\x01*\x12\x81\x01\n\x15\x43lient_GetClientState\x12!.truffle.os.GetClientStateRequest\x1a\".truffle.os.GetClientStateResponse\"!\x82\xd3\xe4\x93\x02\x1b\x12\x19/v1/os/client/state/{key}\x12\x88\x01\n\x19\x43lient_GetAllClientStates\x12%.truffle.os.GetAllClientStatesRequest\x1a&.truffle.os.GetAllClientStatesResponse\"\x
1c\x82\xd3\xe4\x93\x02\x16\x12\x14/v1/os/client/states\x12\x8b\x01\n\x18SubscribeToNotifications\x12+.truffle.os.SubscribeToNotificationsRequest\x1a\x18.truffle.os.Notification\"&\x82\xd3\xe4\x93\x02 \x12\x1e/v1/os/notifications:subscribe0\x01\x12o\n\x11Hardware_GetStats\x12 .truffle.os.HardwareStatsRequest\x1a\x19.truffle.os.HardwareStats\"\x1d\x82\xd3\xe4\x93\x02\x17\x12\x15/v1/os/hardware/stats\x12|\n\x15Hardware_StatsUpdates\x12 .truffle.os.HardwareStatsRequest\x1a\x19.truffle.os.HardwareStats\"$\x82\xd3\xe4\x93\x02\x1e\x12\x1c/v1/os/hardware/stats:stream0\x01\x12\x93\x01\n\x15Hardware_PowerControl\x12\'.truffle.os.HardwarePowerControlRequest\x1a(.truffle.os.HardwarePowerControlResponse\"\'\x82\xd3\xe4\x93\x02!\"\x1c/v1/os/hardware/powerControl:\x01*\x12i\n\x0cSystem_GetID\x12\x1e.truffle.os.SystemGetIDRequest\x1a\x1f.truffle.os.SystemGetIDResponse\"\x18\x82\xd3\xe4\x93\x02\x12\x12\x10/v1/os/system/id\x12h\n\x12System_GetSettings\x12\x16.google.protobuf.Empty\x1a\x1a.truffle.os.SystemSettings\"\x1e\x82\xd3\xe4\x93\x02\x18\x12\x16/v1/os/system/settings\x12o\n\x12System_SetSettings\x12\x1a.truffle.os.SystemSettings\x1a\x1a.truffle.os.SystemSettings\"!\x82\xd3\xe4\x93\x02\x1b\x32\x16/v1/os/system/settings:\x01*\x12\\\n\x0eSystem_GetInfo\x12\x16.google.protobuf.Empty\x1a\x16.truffle.os.SystemInfo\"\x1a\x82\xd3\xe4\x93\x02\x14\x12\x12/v1/os/system/info\x12\x8e\x01\n\x15System_CheckForUpdate\x12\'.truffle.os.SystemCheckForUpdateRequest\x1a(.truffle.os.SystemCheckForUpdateResponse\"\"\x82\xd3\xe4\x93\x02\x1c\x12\x1a/v1/os/system/check-update\x12\xb4\x01\n\x1dTask_TestExternalToolProvider\x12/.truffle.os.TaskTestExternalToolProviderRequest\x1a\x30.truffle.os.TaskTestExternalToolProviderResponse\"0\x82\xd3\xe4\x93\x02*\"%/v1/os/tasks:testExternalToolProvider:\x01*\x12j\n\rTask_OpenTask\x12\x1b.truffle.os.OpenTaskRequest\x1a\x1c.truffle.os.TaskStreamUpdate\"\x1c\x82\xd3\xe4\x93\x02\x16\"\x11/v1/os/tasks:open:\x01*0\x01\x12\x83\x01\n\x12Task_InterruptTask\x12 
.truffle.os.InterruptTaskRequest\x1a\x1e.truffle.os.TaskActionResponse\"+\x82\xd3\xe4\x93\x02%\" /v1/os/tasks/{task_id}:interrupt:\x01*\x12\x81\x01\n\x12Task_RespondToTask\x12 .truffle.os.RespondToTaskRequest\x1a\x1e.truffle.os.TaskActionResponse\")\x82\xd3\xe4\x93\x02#\"\x1e/v1/os/tasks/{task_id}:respond:\x01*\x12\x9e\x01\n\x15Task_SetAvailableApps\x12\'.truffle.os.TaskSetAvailableAppsRequest\x1a(.truffle.os.TaskSetAvailableAppsResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1/os/tasks/{task_id}:setAvailableApps:\x01*\x12s\n\x10Task_SearchTasks\x12\x1e.truffle.os.SearchTasksRequest\x1a\x1f.truffle.os.SearchTasksResponse\"\x1e\x82\xd3\xe4\x93\x02\x18\"\x13/v1/os/tasks:search:\x01*\x12\x63\n\rTask_GetTasks\x12\x1b.truffle.os.GetTasksRequest\x1a\x10.truffle.os.Task\"!\x82\xd3\xe4\x93\x02\x1b\"\x16/v1/os/tasks:streamGet:\x01*0\x01\x12\x62\n\x0fTask_GetOneTask\x12\x1d.truffle.os.GetOneTaskRequest\x1a\x10.truffle.os.Task\"\x1e\x82\xd3\xe4\x93\x02\x18\x12\x16/v1/os/tasks/{task_id}\x12u\n\x11Task_GetTaskInfos\x12\x1f.truffle.os.GetTaskInfosRequest\x1a .truffle.os.GetTaskInfosResponse\"\x1d\x82\xd3\xe4\x93\x02\x17\"\x12/v1/os/tasks/infos:\x01*P\x01P\x02P\x03P\x04P\x05P\x06P\x07P\x08P\tP\nP\x0bP\x0cP\rP\x0eP\x0fP\x10P\x11\x62\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -128,20 +134,16 @@ DESCRIPTOR._loaded_options = None _globals['_TRUFFLEOS'].methods_by_name['Apps_DeleteApp']._loaded_options = None _globals['_TRUFFLEOS'].methods_by_name['Apps_DeleteApp']._serialized_options = b'\202\323\344\223\002\030*\026/v1/os/apps/{app_uuid}' - _globals['_TRUFFLEOS'].methods_by_name['Apps_GetBackground']._loaded_options = None - _globals['_TRUFFLEOS'].methods_by_name['Apps_GetBackground']._serialized_options = b'\202\323\344\223\002\030\022\026/v1/os/apps/background' - _globals['_TRUFFLEOS'].methods_by_name['Apps_GetForeground']._loaded_options = None - _globals['_TRUFFLEOS'].methods_by_name['Apps_GetForeground']._serialized_options = 
b'\202\323\344\223\002\030\022\026/v1/os/apps/foreground' _globals['_TRUFFLEOS'].methods_by_name['Apps_GetAll']._loaded_options = None _globals['_TRUFFLEOS'].methods_by_name['Apps_GetAll']._serialized_options = b'\202\323\344\223\002\021\022\017/v1/os/apps/all' _globals['_TRUFFLEOS'].methods_by_name['Background_GetFeed']._loaded_options = None _globals['_TRUFFLEOS'].methods_by_name['Background_GetFeed']._serialized_options = b'\202\323\344\223\002$\"\037/v1/os/apps/background/feed:get:\001*' _globals['_TRUFFLEOS'].methods_by_name['Background_GetLatestFeedEntryID']._loaded_options = None _globals['_TRUFFLEOS'].methods_by_name['Background_GetLatestFeedEntryID']._serialized_options = b'\202\323\344\223\002,\"\'/v1/os/apps/background/latestfeedid:get:\001*' - _globals['_TRUFFLEOS'].methods_by_name['Background_LikeFeedEntry']._loaded_options = None - _globals['_TRUFFLEOS'].methods_by_name['Background_LikeFeedEntry']._serialized_options = b'\202\323\344\223\002+\"&/v1/os/apps/background/feed/entry:like:\001*' - _globals['_TRUFFLEOS'].methods_by_name['Background_SubmitFeedFeedback']._loaded_options = None - _globals['_TRUFFLEOS'].methods_by_name['Background_SubmitFeedFeedback']._serialized_options = b'\202\323\344\223\002)\"$/v1/os/apps/background/feed:feedback:\001*' + _globals['_TRUFFLEOS'].methods_by_name['Background_ApproveProactiveAction']._loaded_options = None + _globals['_TRUFFLEOS'].methods_by_name['Background_ApproveProactiveAction']._serialized_options = b'\202\323\344\223\002)\"$/v1/os/apps/background/approveAction:\001*' + _globals['_TRUFFLEOS'].methods_by_name['Background_CancelProactiveAction']._loaded_options = None + _globals['_TRUFFLEOS'].methods_by_name['Background_CancelProactiveAction']._serialized_options = b'\202\323\344\223\002(\"#/v1/os/apps/background/cancelAction:\001*' _globals['_TRUFFLEOS'].methods_by_name['Builder_StartBuildSession']._loaded_options = None _globals['_TRUFFLEOS'].methods_by_name['Builder_StartBuildSession']._serialized_options 
= b'\202\323\344\223\002 \"\033/v1/os/builder/builds:start:\001*' _globals['_TRUFFLEOS'].methods_by_name['Builder_FinishBuildSession']._loaded_options = None @@ -180,6 +182,8 @@ _globals['_TRUFFLEOS'].methods_by_name['System_GetInfo']._serialized_options = b'\202\323\344\223\002\024\022\022/v1/os/system/info' _globals['_TRUFFLEOS'].methods_by_name['System_CheckForUpdate']._loaded_options = None _globals['_TRUFFLEOS'].methods_by_name['System_CheckForUpdate']._serialized_options = b'\202\323\344\223\002\034\022\032/v1/os/system/check-update' + _globals['_TRUFFLEOS'].methods_by_name['Task_TestExternalToolProvider']._loaded_options = None + _globals['_TRUFFLEOS'].methods_by_name['Task_TestExternalToolProvider']._serialized_options = b'\202\323\344\223\002*\"%/v1/os/tasks:testExternalToolProvider:\001*' _globals['_TRUFFLEOS'].methods_by_name['Task_OpenTask']._loaded_options = None _globals['_TRUFFLEOS'].methods_by_name['Task_OpenTask']._serialized_options = b'\202\323\344\223\002\026\"\021/v1/os/tasks:open:\001*' _globals['_TRUFFLEOS'].methods_by_name['Task_InterruptTask']._loaded_options = None @@ -196,6 +200,6 @@ _globals['_TRUFFLEOS'].methods_by_name['Task_GetOneTask']._serialized_options = b'\202\323\344\223\002\030\022\026/v1/os/tasks/{task_id}' _globals['_TRUFFLEOS'].methods_by_name['Task_GetTaskInfos']._loaded_options = None _globals['_TRUFFLEOS'].methods_by_name['Task_GetTaskInfos']._serialized_options = b'\202\323\344\223\002\027\"\022/v1/os/tasks/infos:\001*' - _globals['_TRUFFLEOS']._serialized_start=637 - _globals['_TRUFFLEOS']._serialized_end=5318 + _globals['_TRUFFLEOS']._serialized_start=667 + _globals['_TRUFFLEOS']._serialized_end=5272 # @@protoc_insertion_point(module_scope) diff --git a/truffle/os/truffleos_pb2.pyi b/truffle/os/truffleos_pb2.pyi index da187f1..b3e4a9a 100644 --- a/truffle/os/truffleos_pb2.pyi +++ b/truffle/os/truffleos_pb2.pyi @@ -24,8 +24,10 @@ from truffle.os import task_user_response_pb2 as _task_user_response_pb2_1_1 from 
truffle.os import task_search_pb2 as _task_search_pb2 from truffle.os import builder_pb2 as _builder_pb2 from truffle.os import background_feed_queries_pb2 as _background_feed_queries_pb2 +from truffle.os import proactivity_pb2 as _proactivity_pb2 from truffle.os import app_queries_pb2 as _app_queries_pb2 from truffle.os import installer_pb2 as _installer_pb2 +from truffle.app import app_pb2 as _app_pb2 from google.api import annotations_pb2 as _annotations_pb2 from google.protobuf import descriptor as _descriptor from typing import ClassVar as _ClassVar @@ -63,6 +65,7 @@ from truffle.os.client_state_pb2 import GetAllClientStatesResponse as GetAllClie from truffle.os.task_pb2 import Task as Task from truffle.os.task_pb2 import TasksList as TasksList from truffle.os.task_pb2 import TaskNode as TaskNode +from truffle.os.task_pb2 import StreamingTaskStepResult as StreamingTaskStepResult from truffle.os.task_pb2 import TaskStreamUpdate as TaskStreamUpdate from truffle.os.task_queries_pb2 import GetTasksRequest as GetTasksRequest from truffle.os.task_queries_pb2 import GetOneTaskRequest as GetOneTaskRequest @@ -89,16 +92,13 @@ from truffle.os.builder_pb2 import BuildSessionError as BuildSessionError from truffle.os.builder_pb2 import FinishBuildSessionResponse as FinishBuildSessionResponse from truffle.os.background_feed_queries_pb2 import GetBackgroundFeedRequest as GetBackgroundFeedRequest from truffle.os.background_feed_queries_pb2 import GetBackgroundFeedResponse as GetBackgroundFeedResponse -from truffle.os.background_feed_queries_pb2 import LikeBackgroundFeedEntryRequest as LikeBackgroundFeedEntryRequest -from truffle.os.background_feed_queries_pb2 import LikeBackgroundFeedEntryResponse as LikeBackgroundFeedEntryResponse from truffle.os.background_feed_queries_pb2 import GetLatestFeedEntryIDRequest as GetLatestFeedEntryIDRequest from truffle.os.background_feed_queries_pb2 import GetLatestFeedEntryIDResponse as GetLatestFeedEntryIDResponse -from 
truffle.os.background_feed_queries_pb2 import BackgroundFeedFeedbackRequest as BackgroundFeedFeedbackRequest -from truffle.os.background_feed_queries_pb2 import BackgroundFeedFeedbackResponse as BackgroundFeedFeedbackResponse -from truffle.os.app_queries_pb2 import GetForegroundAppsRequest as GetForegroundAppsRequest -from truffle.os.app_queries_pb2 import GetForegroundAppsResponse as GetForegroundAppsResponse -from truffle.os.app_queries_pb2 import GetBackgroundAppsRequest as GetBackgroundAppsRequest -from truffle.os.app_queries_pb2 import GetBackgroundAppsResponse as GetBackgroundAppsResponse +from truffle.os.proactivity_pb2 import ProactiveAction as ProactiveAction +from truffle.os.proactivity_pb2 import ApproveProactiveActionRequest as ApproveProactiveActionRequest +from truffle.os.proactivity_pb2 import ApproveProactiveActionResponse as ApproveProactiveActionResponse +from truffle.os.proactivity_pb2 import CancelProactiveActionRequest as CancelProactiveActionRequest +from truffle.os.proactivity_pb2 import CancelProactiveActionResponse as CancelProactiveActionResponse from truffle.os.app_queries_pb2 import GetAllAppsRequest as GetAllAppsRequest from truffle.os.app_queries_pb2 import GetAllAppsResponse as GetAllAppsResponse from truffle.os.app_queries_pb2 import DeleteAppRequest as DeleteAppRequest diff --git a/truffle/os/truffleos_pb2_grpc.py b/truffle/os/truffleos_pb2_grpc.py index ac95a7b..86946ec 100644 --- a/truffle/os/truffleos_pb2_grpc.py +++ b/truffle/os/truffleos_pb2_grpc.py @@ -14,6 +14,7 @@ from truffle.os import hardware_stats_pb2 as truffle_dot_os_dot_hardware__stats__pb2 from truffle.os import installer_pb2 as truffle_dot_os_dot_installer__pb2 from truffle.os import notification_pb2 as truffle_dot_os_dot_notification__pb2 +from truffle.os import proactivity_pb2 as truffle_dot_os_dot_proactivity__pb2 from truffle.os import system_info_pb2 as truffle_dot_os_dot_system__info__pb2 from truffle.os import system_settings_pb2 as 
truffle_dot_os_dot_system__settings__pb2 from truffle.os import task_actions_pb2 as truffle_dot_os_dot_task__actions__pb2 @@ -22,7 +23,7 @@ from truffle.os import task_search_pb2 as truffle_dot_os_dot_task__search__pb2 from truffle.os import task_user_response_pb2 as truffle_dot_os_dot_task__user__response__pb2 -GRPC_GENERATED_VERSION = '1.72.0' +GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -35,7 +36,7 @@ if _version_not_supported: raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in truffle/os/truffleos_pb2_grpc.py depends on' + + ' but the generated code in truffle/os/truffleos_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' @@ -56,16 +57,6 @@ def __init__(self, channel): request_serializer=truffle_dot_os_dot_app__queries__pb2.DeleteAppRequest.SerializeToString, response_deserializer=truffle_dot_os_dot_app__queries__pb2.DeleteAppResponse.FromString, _registered_method=True) - self.Apps_GetBackground = channel.unary_unary( - '/truffle.os.TruffleOS/Apps_GetBackground', - request_serializer=truffle_dot_os_dot_app__queries__pb2.GetBackgroundAppsRequest.SerializeToString, - response_deserializer=truffle_dot_os_dot_app__queries__pb2.GetBackgroundAppsResponse.FromString, - _registered_method=True) - self.Apps_GetForeground = channel.unary_unary( - '/truffle.os.TruffleOS/Apps_GetForeground', - request_serializer=truffle_dot_os_dot_app__queries__pb2.GetForegroundAppsRequest.SerializeToString, - response_deserializer=truffle_dot_os_dot_app__queries__pb2.GetForegroundAppsResponse.FromString, - _registered_method=True) self.Apps_GetAll = channel.unary_unary( '/truffle.os.TruffleOS/Apps_GetAll', request_serializer=truffle_dot_os_dot_app__queries__pb2.GetAllAppsRequest.SerializeToString, @@ -86,15 
+77,15 @@ def __init__(self, channel): request_serializer=truffle_dot_os_dot_background__feed__queries__pb2.GetLatestFeedEntryIDRequest.SerializeToString, response_deserializer=truffle_dot_os_dot_background__feed__queries__pb2.GetLatestFeedEntryIDResponse.FromString, _registered_method=True) - self.Background_LikeFeedEntry = channel.unary_unary( - '/truffle.os.TruffleOS/Background_LikeFeedEntry', - request_serializer=truffle_dot_os_dot_background__feed__queries__pb2.LikeBackgroundFeedEntryRequest.SerializeToString, - response_deserializer=truffle_dot_os_dot_background__feed__queries__pb2.LikeBackgroundFeedEntryResponse.FromString, + self.Background_ApproveProactiveAction = channel.unary_unary( + '/truffle.os.TruffleOS/Background_ApproveProactiveAction', + request_serializer=truffle_dot_os_dot_proactivity__pb2.ApproveProactiveActionRequest.SerializeToString, + response_deserializer=truffle_dot_os_dot_proactivity__pb2.ApproveProactiveActionResponse.FromString, _registered_method=True) - self.Background_SubmitFeedFeedback = channel.unary_unary( - '/truffle.os.TruffleOS/Background_SubmitFeedFeedback', - request_serializer=truffle_dot_os_dot_background__feed__queries__pb2.BackgroundFeedFeedbackRequest.SerializeToString, - response_deserializer=truffle_dot_os_dot_background__feed__queries__pb2.BackgroundFeedFeedbackResponse.FromString, + self.Background_CancelProactiveAction = channel.unary_unary( + '/truffle.os.TruffleOS/Background_CancelProactiveAction', + request_serializer=truffle_dot_os_dot_proactivity__pb2.CancelProactiveActionRequest.SerializeToString, + response_deserializer=truffle_dot_os_dot_proactivity__pb2.CancelProactiveActionResponse.FromString, _registered_method=True) self.Builder_StartBuildSession = channel.unary_unary( '/truffle.os.TruffleOS/Builder_StartBuildSession', @@ -191,6 +182,11 @@ def __init__(self, channel): request_serializer=truffle_dot_os_dot_system__info__pb2.SystemCheckForUpdateRequest.SerializeToString, 
response_deserializer=truffle_dot_os_dot_system__info__pb2.SystemCheckForUpdateResponse.FromString, _registered_method=True) + self.Task_TestExternalToolProvider = channel.unary_unary( + '/truffle.os.TruffleOS/Task_TestExternalToolProvider', + request_serializer=truffle_dot_os_dot_task__actions__pb2.TaskTestExternalToolProviderRequest.SerializeToString, + response_deserializer=truffle_dot_os_dot_task__actions__pb2.TaskTestExternalToolProviderResponse.FromString, + _registered_method=True) self.Task_OpenTask = channel.unary_stream( '/truffle.os.TruffleOS/Task_OpenTask', request_serializer=truffle_dot_os_dot_task__actions__pb2.OpenTaskRequest.SerializeToString, @@ -242,20 +238,6 @@ def Apps_DeleteApp(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') - def Apps_GetBackground(self, request, context): - """apps that can contribute to bg feed (ambients) - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def Apps_GetForeground(self, request, context): - """apps used in tasks (focuses) - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - def Apps_GetAll(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) @@ -280,13 +262,13 @@ def Background_GetLatestFeedEntryID(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') - def Background_LikeFeedEntry(self, request, context): + def Background_ApproveProactiveAction(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not 
implemented!') - def Background_SubmitFeedFeedback(self, request, context): + def Background_CancelProactiveAction(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') @@ -418,7 +400,7 @@ def System_CheckForUpdate(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') - def Task_OpenTask(self, request, context): + def Task_TestExternalToolProvider(self, request, context): """Task == Focus == Foreground App create or open an existing task and its update stream """ @@ -426,6 +408,12 @@ def Task_OpenTask(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def Task_OpenTask(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def Task_InterruptTask(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) @@ -476,16 +464,6 @@ def add_TruffleOSServicer_to_server(servicer, server): request_deserializer=truffle_dot_os_dot_app__queries__pb2.DeleteAppRequest.FromString, response_serializer=truffle_dot_os_dot_app__queries__pb2.DeleteAppResponse.SerializeToString, ), - 'Apps_GetBackground': grpc.unary_unary_rpc_method_handler( - servicer.Apps_GetBackground, - request_deserializer=truffle_dot_os_dot_app__queries__pb2.GetBackgroundAppsRequest.FromString, - response_serializer=truffle_dot_os_dot_app__queries__pb2.GetBackgroundAppsResponse.SerializeToString, - ), - 'Apps_GetForeground': grpc.unary_unary_rpc_method_handler( - servicer.Apps_GetForeground, - 
request_deserializer=truffle_dot_os_dot_app__queries__pb2.GetForegroundAppsRequest.FromString, - response_serializer=truffle_dot_os_dot_app__queries__pb2.GetForegroundAppsResponse.SerializeToString, - ), 'Apps_GetAll': grpc.unary_unary_rpc_method_handler( servicer.Apps_GetAll, request_deserializer=truffle_dot_os_dot_app__queries__pb2.GetAllAppsRequest.FromString, @@ -506,15 +484,15 @@ def add_TruffleOSServicer_to_server(servicer, server): request_deserializer=truffle_dot_os_dot_background__feed__queries__pb2.GetLatestFeedEntryIDRequest.FromString, response_serializer=truffle_dot_os_dot_background__feed__queries__pb2.GetLatestFeedEntryIDResponse.SerializeToString, ), - 'Background_LikeFeedEntry': grpc.unary_unary_rpc_method_handler( - servicer.Background_LikeFeedEntry, - request_deserializer=truffle_dot_os_dot_background__feed__queries__pb2.LikeBackgroundFeedEntryRequest.FromString, - response_serializer=truffle_dot_os_dot_background__feed__queries__pb2.LikeBackgroundFeedEntryResponse.SerializeToString, + 'Background_ApproveProactiveAction': grpc.unary_unary_rpc_method_handler( + servicer.Background_ApproveProactiveAction, + request_deserializer=truffle_dot_os_dot_proactivity__pb2.ApproveProactiveActionRequest.FromString, + response_serializer=truffle_dot_os_dot_proactivity__pb2.ApproveProactiveActionResponse.SerializeToString, ), - 'Background_SubmitFeedFeedback': grpc.unary_unary_rpc_method_handler( - servicer.Background_SubmitFeedFeedback, - request_deserializer=truffle_dot_os_dot_background__feed__queries__pb2.BackgroundFeedFeedbackRequest.FromString, - response_serializer=truffle_dot_os_dot_background__feed__queries__pb2.BackgroundFeedFeedbackResponse.SerializeToString, + 'Background_CancelProactiveAction': grpc.unary_unary_rpc_method_handler( + servicer.Background_CancelProactiveAction, + request_deserializer=truffle_dot_os_dot_proactivity__pb2.CancelProactiveActionRequest.FromString, + 
response_serializer=truffle_dot_os_dot_proactivity__pb2.CancelProactiveActionResponse.SerializeToString, ), 'Builder_StartBuildSession': grpc.unary_unary_rpc_method_handler( servicer.Builder_StartBuildSession, @@ -611,6 +589,11 @@ def add_TruffleOSServicer_to_server(servicer, server): request_deserializer=truffle_dot_os_dot_system__info__pb2.SystemCheckForUpdateRequest.FromString, response_serializer=truffle_dot_os_dot_system__info__pb2.SystemCheckForUpdateResponse.SerializeToString, ), + 'Task_TestExternalToolProvider': grpc.unary_unary_rpc_method_handler( + servicer.Task_TestExternalToolProvider, + request_deserializer=truffle_dot_os_dot_task__actions__pb2.TaskTestExternalToolProviderRequest.FromString, + response_serializer=truffle_dot_os_dot_task__actions__pb2.TaskTestExternalToolProviderResponse.SerializeToString, + ), 'Task_OpenTask': grpc.unary_stream_rpc_method_handler( servicer.Task_OpenTask, request_deserializer=truffle_dot_os_dot_task__actions__pb2.OpenTaskRequest.FromString, @@ -689,60 +672,6 @@ def Apps_DeleteApp(request, metadata, _registered_method=True) - @staticmethod - def Apps_GetBackground(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/truffle.os.TruffleOS/Apps_GetBackground', - truffle_dot_os_dot_app__queries__pb2.GetBackgroundAppsRequest.SerializeToString, - truffle_dot_os_dot_app__queries__pb2.GetBackgroundAppsResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def Apps_GetForeground(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, 
- target, - '/truffle.os.TruffleOS/Apps_GetForeground', - truffle_dot_os_dot_app__queries__pb2.GetForegroundAppsRequest.SerializeToString, - truffle_dot_os_dot_app__queries__pb2.GetForegroundAppsResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - @staticmethod def Apps_GetAll(request, target, @@ -852,7 +781,7 @@ def Background_GetLatestFeedEntryID(request, _registered_method=True) @staticmethod - def Background_LikeFeedEntry(request, + def Background_ApproveProactiveAction(request, target, options=(), channel_credentials=None, @@ -865,9 +794,9 @@ def Background_LikeFeedEntry(request, return grpc.experimental.unary_unary( request, target, - '/truffle.os.TruffleOS/Background_LikeFeedEntry', - truffle_dot_os_dot_background__feed__queries__pb2.LikeBackgroundFeedEntryRequest.SerializeToString, - truffle_dot_os_dot_background__feed__queries__pb2.LikeBackgroundFeedEntryResponse.FromString, + '/truffle.os.TruffleOS/Background_ApproveProactiveAction', + truffle_dot_os_dot_proactivity__pb2.ApproveProactiveActionRequest.SerializeToString, + truffle_dot_os_dot_proactivity__pb2.ApproveProactiveActionResponse.FromString, options, channel_credentials, insecure, @@ -879,7 +808,7 @@ def Background_LikeFeedEntry(request, _registered_method=True) @staticmethod - def Background_SubmitFeedFeedback(request, + def Background_CancelProactiveAction(request, target, options=(), channel_credentials=None, @@ -892,9 +821,9 @@ def Background_SubmitFeedFeedback(request, return grpc.experimental.unary_unary( request, target, - '/truffle.os.TruffleOS/Background_SubmitFeedFeedback', - truffle_dot_os_dot_background__feed__queries__pb2.BackgroundFeedFeedbackRequest.SerializeToString, - truffle_dot_os_dot_background__feed__queries__pb2.BackgroundFeedFeedbackResponse.FromString, + '/truffle.os.TruffleOS/Background_CancelProactiveAction', + 
truffle_dot_os_dot_proactivity__pb2.CancelProactiveActionRequest.SerializeToString, + truffle_dot_os_dot_proactivity__pb2.CancelProactiveActionResponse.FromString, options, channel_credentials, insecure, @@ -1418,6 +1347,33 @@ def System_CheckForUpdate(request, metadata, _registered_method=True) + @staticmethod + def Task_TestExternalToolProvider(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/truffle.os.TruffleOS/Task_TestExternalToolProvider', + truffle_dot_os_dot_task__actions__pb2.TaskTestExternalToolProviderRequest.SerializeToString, + truffle_dot_os_dot_task__actions__pb2.TaskTestExternalToolProviderResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + @staticmethod def Task_OpenTask(request, target,