forked from cgvr/DeltaVR
combined pipeline running function
152
3d-generation-pipeline/notebooks/cloudflare_API_test.ipynb
Normal file
@@ -0,0 +1,152 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "1dc6faae",
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "import base64\n",
    "import requests\n",
    "from dotenv import load_dotenv"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b3107275",
   "metadata": {},
   "outputs": [],
   "source": [
    "load_dotenv()\n",
    "\n",
    "ACCOUNT_ID = os.environ[\"CF_ACCOUNT_ID\"]\n",
    "API_TOKEN = os.environ[\"CF_API_TOKEN\"]"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "999adf95",
   "metadata": {},
   "source": [
    "## Text to image"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "40b35163",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Saved: output.jpg (263282 bytes)\n",
      "Saved: image9.jpg (263282 bytes)\n"
     ]
    }
   ],
   "source": [
    "MODEL = \"@cf/black-forest-labs/flux-1-schnell\"\n",
    "URL = f\"https://api.cloudflare.com/client/v4/accounts/{ACCOUNT_ID}/ai/run/{MODEL}\"\n",
    "\n",
    "payload = {\n",
    "    \"prompt\": \"a slightly curved broadsword with a fancy golden crossguard\",\n",
    "}\n",
    "\n",
    "headers = {\n",
    "    \"Authorization\": f\"Bearer {API_TOKEN}\",\n",
    "    \"Content-Type\": \"application/json\",\n",
    "}\n",
    "\n",
    "resp = requests.post(URL, json=payload, headers=headers, timeout=60)\n",
    "resp.raise_for_status()\n",
    "\n",
    "data = resp.json()\n",
    "b64 = data.get(\"result\", {}).get(\"image\")\n",
    "if not b64:\n",
    "    raise RuntimeError(f\"Unexpected response structure: {data}\")\n",
    "\n",
    "img_bytes = base64.b64decode(b64)\n",
    "\n",
    "out_path = \"output.jpg\"\n",
    "with open(out_path, \"wb\") as f:\n",
    "    f.write(img_bytes)\n",
    "\n",
    "print(f\"Saved: {out_path} ({len(img_bytes)} bytes)\")"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "14a874c4",
   "metadata": {},
   "source": [
    "## Text prompt refinement"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "485f6f46",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\"dark wooden battleaxe with bronze blade\"\n"
     ]
    }
   ],
   "source": [
    "MODEL = \"@cf/meta/llama-3.2-3b-instruct\"\n",
    "URL = f\"https://api.cloudflare.com/client/v4/accounts/{ACCOUNT_ID}/ai/run/{MODEL}\"\n",
    "\n",
    "instructions = \"\"\"\n",
    "The user is talking about some object. Your task is to generate a short, concise description of it. Use only the user's own words and keep it as short as possible.\n",
    "Example:\n",
    "User: 'Umm, okay, I would like a really cool sword, with for example a bright orange crossguard. And also it should be slightly curved.'\n",
    "You: 'a slightly curved sword with bright orange crossguard'\n",
    "\"\"\"\n",
    "prompt = \"Umm, alright, can you please give me an epic battleaxe? It should have a dark wooden shaft and bronze blade.\"\n",
    "\n",
    "response = requests.post(URL,\n",
    "    headers={\"Authorization\": f\"Bearer {API_TOKEN}\"},\n",
    "    json={\n",
    "        \"messages\": [\n",
    "            {\"role\": \"system\", \"content\": instructions},\n",
    "            {\"role\": \"user\", \"content\": prompt}\n",
    "        ]\n",
    "    }\n",
    ")\n",
    "data = response.json()\n",
    "result_text = data[\"result\"][\"response\"]\n",
    "print(result_text)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": ".venv",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.0"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
105
3d-generation-pipeline/notebooks/local_model_test.ipynb
Normal file
@@ -0,0 +1,105 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "2c0da293",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "c:\\Users\\henrisel.DOMENIS\\DeltaVR3DModelGeneration\\3d-generation-pipeline\\.venv\\lib\\site-packages\\tqdm\\auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
      " from .autonotebook import tqdm as notebook_tqdm\n"
     ]
    }
   ],
   "source": [
    "import torch\n",
    "from diffusers import FluxPipeline"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "51879ff1",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Fetching 23 files: 0%| | 0/23 [00:00<?, ?it/s]"
     ]
    }
   ],
   "source": [
    "model_name = \"black-forest-labs/FLUX.1-schnell\"\n",
    "\n",
    "pipe = FluxPipeline.from_pretrained(model_name, torch_dtype=torch.bfloat16)\n",
    "# pipe.enable_model_cpu_offload()  # uncomment to save VRAM by offloading the model to the CPU; keep it disabled if the GPU has enough memory\n",
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "50a38bf4",
   "metadata": {},
   "outputs": [],
   "source": [
    "prompt = \"slightly curved sword, one side blue and other side green\"\n",
    "pipe_result = pipe(\n",
    "    prompt,\n",
    "    guidance_scale=0.0,\n",
    "    num_inference_steps=4,\n",
    "    max_sequence_length=256,\n",
    "    generator=torch.Generator(\"cuda\")  # \"gpu\" is not a valid torch device; use \"cuda\" (or \"cpu\")\n",
    ")\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b692177a",
   "metadata": {},
   "outputs": [],
   "source": [
    "pipe_result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d55eb3ce",
   "metadata": {},
   "outputs": [],
   "source": [
    "image = pipe_result[\"images\"][0]\n",
    "image.save(\"flux-schnell.png\")"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": ".venv",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.11"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
82
3d-generation-pipeline/notebooks/subprocess_test.ipynb
Normal file
@@ -0,0 +1,82 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "4826c91d",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'2025-10-18-16-35-47'"
      ]
     },
     "execution_count": 1,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from datetime import datetime\n",
    "\n",
    "datetime.now().strftime(\"%Y-%m-%d-%H-%M-%S\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "9419e692",
   "metadata": {},
   "outputs": [],
   "source": [
    "import subprocess\n",
    "\n",
    "# Path to the Python interpreter in the other virtual environment\n",
    "venv_python = r\"/path/to/other/venv/bin/python\" # On Windows: r\"C:\\path\\to\\other\\venv\\Scripts\\python.exe\"\n",
    "\n",
    "# Path to the .py file you want to run\n",
    "script_path = r\"/path/to/your_script.py\"\n",
    "\n",
    "# Optional: arguments to pass to the script\n",
    "args = [\"arg1\", \"arg2\"]\n",
    "\n",
    "# Build the command\n",
    "command = [venv_python, script_path] + args\n",
    "\n",
    "try:\n",
    "    # Run the subprocess\n",
    "    result = subprocess.run(command, capture_output=True, text=True)\n",
    "\n",
    "    # Print output and errors\n",
    "    print(\"STDOUT:\\n\", result.stdout)\n",
    "    print(\"STDERR:\\n\", result.stderr)\n",
    "    print(\"Return Code:\", result.returncode)\n",
    "\n",
    "except Exception as e:\n",
    "    print(f\"Error occurred: {e}\")\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": ".venv",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.0"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}