aboutsummaryrefslogtreecommitdiff
path: root/.venv/lib/python3.12/site-packages/huggingface_hub/inference/_providers/nebius.py
diff options
context:
space:
mode:
authorS. Solomon Darnell2025-03-28 21:52:21 -0500
committerS. Solomon Darnell2025-03-28 21:52:21 -0500
commit4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
treeee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/huggingface_hub/inference/_providers/nebius.py
parentcc961e04ba734dd72309fb548a2f97d67d578813 (diff)
downloadgn-ai-master.tar.gz
two versions of R2R are hereHEADmaster
Diffstat (limited to '.venv/lib/python3.12/site-packages/huggingface_hub/inference/_providers/nebius.py')
-rw-r--r--.venv/lib/python3.12/site-packages/huggingface_hub/inference/_providers/nebius.py41
1 file changed, 41 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/huggingface_hub/inference/_providers/nebius.py b/.venv/lib/python3.12/site-packages/huggingface_hub/inference/_providers/nebius.py
new file mode 100644
index 00000000..d6b37356
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/huggingface_hub/inference/_providers/nebius.py
@@ -0,0 +1,41 @@
+import base64
+from typing import Any, Dict, Optional, Union
+
+from huggingface_hub.inference._common import _as_dict
+from huggingface_hub.inference._providers._common import (
+ BaseConversationalTask,
+ BaseTextGenerationTask,
+ TaskProviderHelper,
+ filter_none,
+)
+
+
class NebiusTextGenerationTask(BaseTextGenerationTask):
    """Completion-style text-generation helper routed to the Nebius provider."""

    def __init__(self):
        # All Nebius tasks are served from the same AI Studio API root.
        api_root = "https://api.studio.nebius.ai"
        super().__init__(provider="nebius", base_url=api_root)
+
+
class NebiusConversationalTask(BaseConversationalTask):
    """Chat-completion (conversational) helper routed to the Nebius provider."""

    def __init__(self):
        # Shares the same AI Studio API root as the other Nebius tasks.
        api_root = "https://api.studio.nebius.ai"
        super().__init__(provider="nebius", base_url=api_root)
+
+
class NebiusTextToImageTask(TaskProviderHelper):
    """Text-to-image task helper for the Nebius provider.

    Builds the request payload for Nebius' OpenAI-compatible image-generation
    endpoint and decodes the base64 image returned in the response.
    """

    def __init__(self):
        super().__init__(task="text-to-image", provider="nebius", base_url="https://api.studio.nebius.ai")

    def _prepare_route(self, mapped_model: str) -> str:
        # Nebius exposes an OpenAI-compatible image-generation route; the
        # model is passed in the payload, not the URL, so mapped_model is unused.
        return "/v1/images/generations"

    def _prepare_payload_as_dict(self, inputs: Any, parameters: Dict, mapped_model: str) -> Optional[Dict]:
        """Return the JSON payload: prompt, filtered parameters and model id."""
        parameters = filter_none(parameters)
        # The Nebius API does not accept guidance_scale; drop it if present.
        # (pop with a default replaces the original check-then-pop.)
        parameters.pop("guidance_scale", None)
        # Default to base64 output so get_response() can always decode bytes.
        # NOTE(review): if a caller passes response_format="url", get_response()
        # below will still look for "b64_json" and fail — confirm intended.
        if parameters.get("response_format") not in ("b64_json", "url"):
            parameters["response_format"] = "b64_json"

        return {"prompt": inputs, **parameters, "model": mapped_model}

    def get_response(self, response: Union[bytes, Dict]) -> Any:
        """Decode the first base64-encoded image in the response into raw bytes."""
        response_dict = _as_dict(response)
        return base64.b64decode(response_dict["data"][0]["b64_json"])