From 1ed33f9bbe624834c186ecf9bb265f86aa9de6a2 Mon Sep 17 00:00:00 2001
From: iHeyTang
Date: Tue, 12 Aug 2025 15:33:45 +0800
Subject: [PATCH] REFINE Rework the 302ai image generation logic; optimize task
 status querying and result handling
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 nodes/image_modal_nodes.py | 63 ++++++++++++++------------------
 1 file changed, 23 insertions(+), 40 deletions(-)

diff --git a/nodes/image_modal_nodes.py b/nodes/image_modal_nodes.py
index c9c605d..7afed8a 100644
--- a/nodes/image_modal_nodes.py
+++ b/nodes/image_modal_nodes.py
@@ -235,56 +235,39 @@ class ModalMidJourneyGenerateImage:
                                                   f'image/{format.lower()}')}
             else:
                 files = None
+
             if provider == "302ai":
-                job_resp = send_request("post", f"https://{endpoint}/api/union/img/sync/generate/image",
-                                        headers={'Authorization': 'Bearer bowong7777'},
-                                        data={"prompt": prompt},
+                interval = 3
+                job_resp = send_request("post", f"https://{endpoint}/api/custom/image/submit/task",
+                                        data={"model_name":"302ai/mj", "prompt": prompt, "mode": "turbo"},
                                         files=files,
                                         timeout=timeout)
                 job_resp.raise_for_status()
                 job_resp = job_resp.json()
-                if not job_resp["status"]:
-                    raise Exception("生成失败, 可能因为风控")
-                result_url = job_resp["data"]
-                if isinstance(result_url, list):
-                    result_list = []
-                    for url in result_url:
-                        logger.success("img_url: " + url)
-                        result_list.append(url_to_tensor(url).squeeze(0))
-                    result_list = torch.stack(result_list, dim=0)
-                    return (result_list,)
-                logger.success("img_url: " + result_url)
-                return (url_to_tensor(result_url),)
-            elif provider == "ttapi":
-                interval = 3
-                job_resp = send_request("post", f"https://{endpoint}/api/mj/async/generate/image?prompt={prompt}",
-                                        headers={'Authorization': 'Bearer bowong7777'},
-                                        files=files,
-                                        timeout=150)
-                job_resp.raise_for_status()
-                job_resp = job_resp.json()
                 if not job_resp["status"]:
                     raise Exception("生成失败, 可能因为风控")
                 job_id = job_resp["data"]
                 for _ in range(0, timeout // interval, interval):
-                    logger.info("查询结果")
-                    resp = send_request("get", f"https://{endpoint}/api/mj/async/query/status?task_id={job_id}",
-                                        headers={'Authorization': 'Bearer bowong7777'}, timeout=30)
-                    resp.raise_for_status()
-                    if resp.json()["status"]:
-                        if "fail" in resp.json()["msg"]:
-                            raise Exception("生成失败，可能因为风控")
-                        result_url = resp.json()["data"]
-                        if isinstance(result_url, list):
-                            result_list = []
-                            for url in result_url:
-                                logger.success("img_url: " + url)
-                                result_list.append(url_to_tensor(url).squeeze(0))
-                            result_list = torch.stack(result_list, dim=0)
-                            return (result_list,)
-                        logger.success("img_url: " + result_url)
-                        return (url_to_tensor(result_url),)
+                    logger.info("等待" + str(interval) + "秒")
                     sleep(interval)
+                    logger.info("查询结果")
+                    resp = send_request("get", f"https://{endpoint}/api/custom/task/status?task_id={job_id}", timeout=30)
+                    resp.raise_for_status()
+                    if resp.json()["status"] == "running":
+                        logger.info("任务正在运行")
+                        continue
+                    if resp.json()["status"] == "failed":
+                        raise Exception(f"生成失败: {resp.json()['msg']}")
+                    if resp.json()["status"] == "success":
+                        result_url = resp.json()["data"]
+                        if not isinstance(result_url, list):
+                            raise Exception("生成失败，返回结果为空")
+                        result_list = []
+                        for url in result_url:
+                            logger.success("img_url: " + url)
+                            result_list.append(url_to_tensor(url).squeeze(0))
+                        result_list = torch.stack(result_list, dim=0)
+                        return (result_list,)
                 raise Exception("等待超时")
         except Exception as e:
             raise e
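
Note on the new flow: the patch replaces the synchronous /api/union/img/sync/generate/image call with a submit-then-poll pattern against /api/custom/image/submit/task and /api/custom/task/status. The sketch below illustrates that pattern in isolation. It is a minimal sketch, not the node's actual code: it uses plain requests instead of the repo's send_request helper; the request payload, the response fields ("status", "data", "msg"), and the task states ("running", "failed", "success") are taken from the diff, while the base URL handling, function name, and deadline logic are assumptions for illustration.

import time
import requests

def generate_via_302ai(endpoint: str, prompt: str, timeout: int = 300, interval: int = 3) -> list[str]:
    # 1. Submit the generation task (form-encoded payload, as in the diff).
    submit = requests.post(
        f"https://{endpoint}/api/custom/image/submit/task",
        data={"model_name": "302ai/mj", "prompt": prompt, "mode": "turbo"},
        timeout=timeout,
    )
    submit.raise_for_status()
    body = submit.json()
    if not body["status"]:
        raise RuntimeError("task submission rejected")
    task_id = body["data"]

    # 2. Poll the task status until it succeeds, fails, or the deadline passes.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        time.sleep(interval)
        status = requests.get(
            f"https://{endpoint}/api/custom/task/status",
            params={"task_id": task_id},
            timeout=30,
        )
        status.raise_for_status()
        payload = status.json()
        if payload["status"] == "running":
            continue
        if payload["status"] == "failed":
            raise RuntimeError(f"generation failed: {payload['msg']}")
        if payload["status"] == "success":
            urls = payload["data"]
            if not isinstance(urls, list):
                raise RuntimeError("generation returned no result list")
            return urls
    raise TimeoutError("polling timed out")

In the node itself the returned URLs are then converted with url_to_tensor and stacked with torch.stack, as shown in the diff. The sketch bounds polling with a wall-clock deadline so the effective wait tracks timeout regardless of the polling interval; the patch instead bounds it by loop count.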