From 1bb878848fb9edf793c7f6be676ffe2b8ff68154 Mon Sep 17 00:00:00 2001
From: Henri Sellis
Date: Sat, 18 Oct 2025 16:44:38 +0300
Subject: [PATCH] combined pipeline running function

---
 .../{ => notebooks}/cloudflare_API_test.ipynb | 20 -----
 .../{ => notebooks}/local_model_test.ipynb    |  0
 .../notebooks/subprocess_test.ipynb           | 82 +++++++++++++++++++
 3d-generation-pipeline/start_pipeline.py      | 75 +++++++++++++++++
 4 files changed, 157 insertions(+), 20 deletions(-)
 rename 3d-generation-pipeline/{ => notebooks}/cloudflare_API_test.ipynb (88%)
 rename 3d-generation-pipeline/{ => notebooks}/local_model_test.ipynb (100%)
 create mode 100644 3d-generation-pipeline/notebooks/subprocess_test.ipynb
 create mode 100644 3d-generation-pipeline/start_pipeline.py

diff --git a/3d-generation-pipeline/cloudflare_API_test.ipynb b/3d-generation-pipeline/notebooks/cloudflare_API_test.ipynb
similarity index 88%
rename from 3d-generation-pipeline/cloudflare_API_test.ipynb
rename to 3d-generation-pipeline/notebooks/cloudflare_API_test.ipynb
index 0d32480c..c3533b0a 100644
--- a/3d-generation-pipeline/cloudflare_API_test.ipynb
+++ b/3d-generation-pipeline/notebooks/cloudflare_API_test.ipynb
@@ -76,18 +76,6 @@
     "with open(out_path, \"wb\") as f:\n",
     "    f.write(img_bytes)\n",
     "\n",
-    "print(f\"Saved: {out_path} ({len(img_bytes)} bytes)\")\n",
-    "\n",
-    "b64 = data[\"result\"][\"image\"]\n",
-    "if not b64:\n",
-    "    raise RuntimeError(f\"Unexpected response structure: {data}\")\n",
-    "\n",
-    "img_bytes = base64.b64decode(b64)\n",
-    "\n",
-    "out_path = \"image9.jpg\"\n",
-    "with open(out_path, \"wb\") as f:\n",
-    "    f.write(img_bytes)\n",
-    "\n",
     "print(f\"Saved: {out_path} ({len(img_bytes)} bytes)\")"
    ]
   },
@@ -138,14 +126,6 @@
     "result_text = data[\"result\"][\"response\"]\n",
     "print(result_text)"
    ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "76fa21f0",
-   "metadata": {},
-   "outputs": [],
-   "source": []
   }
  ],
  "metadata": {
diff --git a/3d-generation-pipeline/local_model_test.ipynb b/3d-generation-pipeline/notebooks/local_model_test.ipynb
similarity index 100%
rename from 3d-generation-pipeline/local_model_test.ipynb
rename to 3d-generation-pipeline/notebooks/local_model_test.ipynb
diff --git a/3d-generation-pipeline/notebooks/subprocess_test.ipynb b/3d-generation-pipeline/notebooks/subprocess_test.ipynb
new file mode 100644
index 00000000..f9cc173a
--- /dev/null
+++ b/3d-generation-pipeline/notebooks/subprocess_test.ipynb
@@ -0,0 +1,82 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "id": "4826c91d",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "'2025-10-18-16-35-47'"
+      ]
+     },
+     "execution_count": 1,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "from datetime import datetime\n",
+    "\n",
+    "datetime.now().strftime(\"%Y-%m-%d-%H-%M-%S\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "9419e692",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import subprocess\n",
+    "\n",
+    "# Path to the Python interpreter in the other virtual environment\n",
+    "venv_python = r\"/path/to/other/venv/bin/python\" # On Windows: r\"C:\\path\\to\\other\\venv\\Scripts\\python.exe\"\n",
+    "\n",
+    "# Path to the .py file you want to run\n",
+    "script_path = r\"/path/to/your_script.py\"\n",
+    "\n",
+    "# Optional: arguments to pass to the script\n",
+    "args = [\"arg1\", \"arg2\"]\n",
+    "\n",
+    "# Build the command\n",
+    "command = [venv_python, script_path] + args\n",
+    "\n",
+    "try:\n",
+    "    # Run the subprocess\n",
+    "    result = subprocess.run(command, capture_output=True, text=True)\n",
+    "\n",
+    "    # Print output and errors\n",
+    "    print(\"STDOUT:\\n\", result.stdout)\n",
+    "    print(\"STDERR:\\n\", result.stderr)\n",
+    "    print(\"Return Code:\", result.returncode)\n",
+    "\n",
+    "except Exception as e:\n",
+    "    print(f\"Error occurred: {e}\")\n"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": ".venv",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.10.0"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/3d-generation-pipeline/start_pipeline.py b/3d-generation-pipeline/start_pipeline.py
new file mode 100644
index 00000000..c3468779
--- /dev/null
+++ b/3d-generation-pipeline/start_pipeline.py
@@ -0,0 +1,75 @@
+import os
+import base64
+import requests
+from datetime import datetime
+from dotenv import load_dotenv
+
+load_dotenv()
+
+ACCOUNT_ID = os.environ["CF_ACCOUNT_ID"]
+API_TOKEN = os.environ["CF_API_TOKEN"]
+
+
+def get_timestamp():
+    return datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
+
+
+def text_to_image(prompt, output_name):
+    MODEL = "@cf/black-forest-labs/flux-1-schnell"
+    URL = f"https://api.cloudflare.com/client/v4/accounts/{ACCOUNT_ID}/ai/run/{MODEL}"
+
+    payload = {
+        "prompt": prompt,
+    }
+
+    headers = {
+        "Authorization": f"Bearer {API_TOKEN}",
+        "Content-Type": "application/json",
+    }
+
+    resp = requests.post(URL, json=payload, headers=headers, timeout=60)
+    resp.raise_for_status()
+
+    data = resp.json()
+    b64 = data["result"]["image"]
+    if not b64:
+        raise RuntimeError(f"Unexpected response structure: {data}")
+
+    img_bytes = base64.b64decode(b64)
+
+    out_path = f"images/{output_name}.jpg"
+    with open(out_path, "wb") as f:
+        f.write(img_bytes)
+
+    return out_path
+
+def refine_text_prompt(prompt):
+    MODEL = "@cf/meta/llama-3.2-3b-instruct"
+    URL = f"https://api.cloudflare.com/client/v4/accounts/{ACCOUNT_ID}/ai/run/{MODEL}"
+
+    instructions = """
+    User is talking about some object. Your task is to generate a short and concise description of it. Use only user's own words, keep it as short as possible.
+    Example:
+    User: 'Umm, okay, I would like a really cool sword, with for example a bright orange crossguard. And also it should be slightly curved.'
+    You: 'a slightly curved sword with bright orange crossguard'
+    """
+
+    response = requests.post(URL,
+        headers={"Authorization": f"Bearer {API_TOKEN}"},
+        json={
+            "messages": [
+                {"role": "system", "content": instructions},
+                {"role": "user", "content": prompt}
+            ]
+        }
+    )
+    data = response.json()
+    return data["result"]["response"]
+
+def main():
+    user_prompt = "Give epic sword"
+    print(f"User prompt: {user_prompt}")
+    refined_prompt = refine_text_prompt(user_prompt)
+    print(f"Refined prompt: {refined_prompt}")
+    image_path = text_to_image(refined_prompt, get_timestamp())
+    print(f"Generated image file: {image_path}")
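
Note on start_pipeline.py as added above: main() is defined but never invoked, so running the module directly does nothing, and text_to_image() writes into an images/ directory that the script never creates. A minimal sketch of the missing entry point, not part of this patch (os is already imported at the top of the file):

if __name__ == "__main__":
    # Assumed follow-up, not in this commit: ensure the output directory exists,
    # then run the prompt-refinement -> image-generation pipeline once.
    os.makedirs("images", exist_ok=True)  # text_to_image() saves to images/<timestamp>.jpg
    main()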
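subprocess_test.ipynb experiments with launching a script through another virtual environment's interpreter, presumably so a later image-to-3D step can run in its own venv while start_pipeline.py drives it. A hedged sketch of such a hook, assuming a hypothetical local script and interpreter path that this patch does not add:

import subprocess

def image_to_3d(image_path):
    # Hypothetical hook: both paths below are placeholders, not files in this repository.
    venv_python = "/path/to/3d-venv/bin/python"
    script_path = "/path/to/image_to_3d.py"
    # Run the image-to-3D script under its own interpreter and capture its output.
    result = subprocess.run(
        [venv_python, script_path, image_path],
        capture_output=True,
        text=True,
    )
    if result.returncode != 0:
        raise RuntimeError(f"3D generation failed: {result.stderr}")
    return result.stdout.strip()  # e.g. the path of the generated asset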