Skip to content

Commit

Permalink
init
Browse files Browse the repository at this point in the history
  • Loading branch information
VeyDlin committed Jan 15, 2025
0 parents commit aea0fce
Show file tree
Hide file tree
Showing 61 changed files with 2,074 additions and 0 deletions.
36 changes: 36 additions & 0 deletions .github/workflows/publish.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
name: Python Package Publish

on:
  push:
    tags:
      - 'v*'

jobs:
  test-build-publish:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          # Quoted: an unquoted 3.10 is parsed as the YAML float 3.1,
          # which setup-python would resolve to Python 3.1 (or fail).
          python-version: "3.10"

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install build twine

      - name: Build package
        run: python -m build

      #- name: Test installation
      #  run: pip install dist/*.whl

      - name: Publish to PyPI
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
        run: python -m twine upload dist/*
87 changes: 87 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,87 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*.pyo
*.pyd

# Virtual environment
venv/
env/

# Distribution / Packaging
.Python
build/
dist/
*.egg-info/
.eggs/

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# PyInstaller
*.spec

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
*.cover
*.py,cover
.hypothesis/
.cache
pytest_cache/

# Jupyter Notebook
.ipynb_checkpoints

# pyenv
.python-version

# Editor / IDE
.vscode/
.idea/
*.swp
*.swo
*.swn
*.bak
*.tmp
*.orig

# macOS
.DS_Store
.AppleDouble
.LSOverride

# Windows
Thumbs.db
ehthumbs.db
Desktop.ini

# Logs and temporary files
*.log
*.tmp
*.bak
*.old

# Python build artifacts
*.manifest
*.spec

# MyPy
.mypy_cache/

# PyCharm
*.iml
.idea/

# Local configuration files
.env
.env.*

# TestPyPI artifacts
testpypi-*

# GitHub Actions
.github/workflows/*.log
19 changes: 19 additions & 0 deletions LICENSE
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
MIT License

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
48 changes: 48 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
# invokeai-python

`invokeai-python` is a powerful library designed for working with graphs, models, and API integrations, aimed at simplifying the creation and management of tasks related to generative AI.

## Installation

1. Ensure you have Python version 3.7 or higher installed (the examples below use `asyncio.run`, which was added in Python 3.7).
2. Install the library using pip:

```bash
pip install invokeai-python
```

## Usage

### Importing Key Components

```python
from invoke import Invoke
from invoke.api import BaseModels, ModelType
```

### Example 1: Querying Models

```python
import asyncio
from invoke import Invoke
from invoke.api import BaseModels, ModelType

async def main():
    invoke = Invoke()

    print("Waiting for invoke...")
    version = await invoke.wait_invoke()
    print(f"Version: {version}")

    models = await invoke.models.list(base_models=[BaseModels.SDXL], model_type=[ModelType.Main])
    print(models)

if __name__ == "__main__":
    asyncio.run(main())
```

## Contributing
If you would like to contribute, feel free to submit an issue or a pull request on the [GitHub repository](/~https://github.com/veydlin/invokeai-python).

## License
MIT License
10 changes: 10 additions & 0 deletions invoke/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# Path: invoke\__init__.py
from .api import *
from .graph_builder import *
from .presets import TextToImageSD1
from .invoke import Invoke

__all__ = [
"TextToImageSD1",
"Invoke",
]
24 changes: 24 additions & 0 deletions invoke/api/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# Path: invoke\api\__init__.py
from .api import Api
from .app import AppApi
from .boards import BoardsApi
from .download_queue import DownloadQueueApi
from .images import ImagesApi, ImageOrigin, Categories
from .models import ModelsApi, BaseModels, ModelType
from .queue import QueueApi
from .utilities import UtilitiesApi

__all__ = [
"Api",
"AppApi",
"BoardsApi",
"DownloadQueueApi",
"ImagesApi",
"ImageOrigin",
"Categories",
"ModelsApi",
"BaseModels",
"ModelType",
"QueueApi",
"UtilitiesApi",
]
98 changes: 98 additions & 0 deletions invoke/api/api.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
# Path: invoke\api\api.py
from typing import Optional, List, Tuple, Any, Callable
from enum import Enum
from urllib.parse import quote
import aiohttp
import json


QueryParams = List[Tuple[str, Optional[str]]]

class ResponseType(Enum):
JSON = "json",
TEXT = "json",
RAW = "raw"
RESPONSE = "response"


class Api:
host: str

def __init__(self, client: aiohttp.ClientSession, host: str):
self.host = host
self.client = client


async def get_async(self, api_path: str, version: int, prams: Optional[QueryParams] = None, type: ResponseType = ResponseType.JSON) -> Any:
url = self.query_string(api_path, version, prams)
async with self.client.get(url) as response:
return await self.from_response_async(response, type)


async def delete_async(self, api_path: str, version: int, prams: Optional[QueryParams] = None, type: ResponseType = ResponseType.JSON) -> Any:
url = self.query_string(api_path, version, prams)
async with self.client.delete(url) as response:
return await self.from_response_async(response, type)


async def upload_async(self, api_path: str, version: int, name: str, file_bytes: bytes, prams: Optional[QueryParams] = None, type: ResponseType = ResponseType.JSON) -> Any:
url = self.query_string(api_path, version, prams)
data = aiohttp.FormData()
data.add_field(name, file_bytes, filename=name, content_type="application/octet-stream")
async with self.client.post(url, data=data) as response:
return await self.from_response_async(response, type)


async def post_async(self, api_path: str, version: int, data: Optional[Any] = None, prams: Optional[QueryParams] = None, type: ResponseType = ResponseType.JSON) -> Any:
url = self.query_string(api_path, version, prams)
async with self.client.post(url, json=self.prepare_data(data)) as response:
return await self.from_response_async(response, type)


async def put_async(self, api_path: str, version: int, data: Optional[Any] = None, prams: Optional[QueryParams] = None, type: ResponseType = ResponseType.JSON) -> Any:
url = self.query_string(api_path, version, prams)
async with self.client.put(url, json=self.prepare_data(data)) as response:
return await self.from_response_async(response, type)


def query_string(self, api_path: str, version: int, prams: Optional[QueryParams] = None) -> str:
base_url = f"{self.host}/api/v{version}/{api_path}"
if not prams:
return base_url
query_string = "&".join(f"{quote(key)}={quote(value)}" for key, value in prams if value is not None)
return f"{base_url}?{query_string}"


def prepare_data(self, data: Optional[Any]) -> Optional[Any]:
if data is None:
return None
if isinstance(data, str):
return data
if isinstance(data, dict):
return data
try:
return json.loads(json.dumps(data, default=lambda o: o.__dict__))
except TypeError as e:
raise ValueError(f"Cannot serialize data: {data}. Error: {e}")


async def from_response_async(self, response: aiohttp.ClientResponse, type: ResponseType) -> Any:
if type == ResponseType.RESPONSE:
return response

if response.status >= 400:
info = await response.text()
raise Exception(f"Server status: {response.status}; Info: {info}")

if type == ResponseType.JSON:
return await response.json()
if type == ResponseType.TEXT:
return await response.text()
if type == ResponseType.RAW:
return await response.content


def add_params(self, prams: QueryParams, items: Optional[List[Any]], param_name: str, converter: Optional[Callable[[Any], str]] = None) -> None:
if items is not None:
for item in items:
prams.append((param_name, converter(item) if converter else str(item)))
12 changes: 12 additions & 0 deletions invoke/api/app/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Path: invoke\api\app\__init__.py
from .app_api import AppApi
from .schema import AppVersion, AppDeps, AppConfig, LogLevel, CacheStatus

__all__ = [
"AppApi",
"AppVersion",
"AppDeps",
"AppConfig",
"LogLevel",
"CacheStatus",
]
53 changes: 53 additions & 0 deletions invoke/api/app/app_api.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
# Path: invoke\api\app\app_api.py
import aiohttp
from .schema import *
from ..api import Api


class AppApi(Api):
def __init__(self, client: aiohttp.ClientSession, host: str):
super().__init__(client, host)

async def version(self) -> AppVersion:
json_data = await self.get_async("app/version", 1)
return AppVersion(**json_data)


async def app_deps(self) -> AppDeps:
json_data = await self.get_async("app/app_deps", 1)
return AppDeps(**json_data)


async def config(self) -> AppConfig:
json_data = await self.get_async("app/config", 1)
return AppConfig(**json_data)


async def get_log_level(self) -> LogLevel:
json_data = await self.get_async("app/logging", 1)
return LogLevel(**json_data)


async def set_log_level(self, log_level: str) -> LogLevel:
data = {"log_level": log_level}
json_data = await self.post_async("app/logging", 1, data=data)
return LogLevel(**json_data)


async def clear_invocation_cache(self) -> None:
await self.delete_async("app/invocation_cache", 1)


async def enable_invocation_cache(self) -> CacheStatus:
json_data = await self.put_async("app/invocation_cache/enable", 1)
return CacheStatus(**json_data)


async def disable_invocation_cache(self) -> CacheStatus:
json_data = await self.put_async("app/invocation_cache/disable", 1)
return CacheStatus(**json_data)


async def get_invocation_cache_status(self) -> CacheStatus:
json_data = await self.get_async("app/invocation_cache/status", 1)
return CacheStatus(**json_data)
Loading

0 comments on commit aea0fce

Please sign in to comment.