about summary refs log tree commit diff
path: root/.venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info
diff options
context:
space:
mode:
author    S. Solomon Darnell    2025-03-28 21:52:21 -0500
committer S. Solomon Darnell    2025-03-28 21:52:21 -0500
commit   4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
tree     ee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info
parent   cc961e04ba734dd72309fb548a2f97d67d578813 (diff)
download gn-ai-master.tar.gz
two versions of R2R are here HEAD master
Diffstat (limited to '.venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info')
-rw-r--r--  .venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info/INSTALLER    1
-rw-r--r--  .venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info/LICENSE     21
-rw-r--r--  .venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info/METADATA   195
-rw-r--r--  .venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info/RECORD      12
-rw-r--r--  .venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info/WHEEL        4
5 files changed, 233 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info/INSTALLER b/.venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/.venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info/LICENSE b/.venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info/LICENSE
new file mode 100644
index 00000000..8e3dc978
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) Ollama
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/.venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info/METADATA b/.venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info/METADATA
new file mode 100644
index 00000000..62376c17
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info/METADATA
@@ -0,0 +1,195 @@
+Metadata-Version: 2.1
+Name: ollama
+Version: 0.3.3
+Summary: The official Python client for Ollama.
+Home-page: https://ollama.ai
+License: MIT
+Author: Ollama
+Author-email: hello@ollama.com
+Requires-Python: >=3.8,<4.0
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Requires-Dist: httpx (>=0.27.0,<0.28.0)
+Project-URL: Repository, https://github.com/jmorganca/ollama-python
+Description-Content-Type: text/markdown
+
+# Ollama Python Library
+
+The Ollama Python library provides the easiest way to integrate Python 3.8+ projects with [Ollama](https://github.com/ollama/ollama).
+
+## Install
+
+```sh
+pip install ollama
+```
+
+## Usage
+
+```python
+import ollama
+response = ollama.chat(model='llama3.1', messages=[
+  {
+    'role': 'user',
+    'content': 'Why is the sky blue?',
+  },
+])
+print(response['message']['content'])
+```
+
+## Streaming responses
+
+Response streaming can be enabled by setting `stream=True`, modifying function calls to return a Python generator where each part is an object in the stream.
+
+```python
+import ollama
+
+stream = ollama.chat(
+    model='llama3.1',
+    messages=[{'role': 'user', 'content': 'Why is the sky blue?'}],
+    stream=True,
+)
+
+for chunk in stream:
+  print(chunk['message']['content'], end='', flush=True)
+```
+
+## API
+
+The Ollama Python library's API is designed around the [Ollama REST API](https://github.com/ollama/ollama/blob/main/docs/api.md)
+
+### Chat
+
+```python
+ollama.chat(model='llama3.1', messages=[{'role': 'user', 'content': 'Why is the sky blue?'}])
+```
+
+### Generate
+
+```python
+ollama.generate(model='llama3.1', prompt='Why is the sky blue?')
+```
+
+### List
+
+```python
+ollama.list()
+```
+
+### Show
+
+```python
+ollama.show('llama3.1')
+```
+
+### Create
+
+```python
+modelfile='''
+FROM llama3.1
+SYSTEM You are mario from super mario bros.
+'''
+
+ollama.create(model='example', modelfile=modelfile)
+```
+
+### Copy
+
+```python
+ollama.copy('llama3.1', 'user/llama3.1')
+```
+
+### Delete
+
+```python
+ollama.delete('llama3.1')
+```
+
+### Pull
+
+```python
+ollama.pull('llama3.1')
+```
+
+### Push
+
+```python
+ollama.push('user/llama3.1')
+```
+
+### Embeddings
+
+```python
+ollama.embeddings(model='llama3.1', prompt='The sky is blue because of rayleigh scattering')
+```
+
+### Ps
+
+```python
+ollama.ps()
+```
+
+## Custom client
+
+A custom client can be created with the following fields:
+
+- `host`: The Ollama host to connect to
+- `timeout`: The timeout for requests
+
+```python
+from ollama import Client
+client = Client(host='http://localhost:11434')
+response = client.chat(model='llama3.1', messages=[
+  {
+    'role': 'user',
+    'content': 'Why is the sky blue?',
+  },
+])
+```
+
+## Async client
+
+```python
+import asyncio
+from ollama import AsyncClient
+
+async def chat():
+  message = {'role': 'user', 'content': 'Why is the sky blue?'}
+  response = await AsyncClient().chat(model='llama3.1', messages=[message])
+
+asyncio.run(chat())
+```
+
+Setting `stream=True` modifies functions to return a Python asynchronous generator:
+
+```python
+import asyncio
+from ollama import AsyncClient
+
+async def chat():
+  message = {'role': 'user', 'content': 'Why is the sky blue?'}
+  async for part in await AsyncClient().chat(model='llama3.1', messages=[message], stream=True):
+    print(part['message']['content'], end='', flush=True)
+
+asyncio.run(chat())
+```
+
+## Errors
+
+Errors are raised if requests return an error status or if an error is detected while streaming.
+
+```python
+model = 'does-not-yet-exist'
+
+try:
+  ollama.chat(model)
+except ollama.ResponseError as e:
+  print('Error:', e.error)
+  if e.status_code == 404:
+    ollama.pull(model)
+```
+
diff --git a/.venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info/RECORD b/.venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info/RECORD
new file mode 100644
index 00000000..866867e0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info/RECORD
@@ -0,0 +1,12 @@
+ollama-0.3.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4

+ollama-0.3.3.dist-info/LICENSE,sha256=WTTtLODRUVS825yFIDIQq6wNpDFK80CB4230WZ-QsiY,1058

+ollama-0.3.3.dist-info/METADATA,sha256=vSmAA4DG4ohgsyVM1YaY_M90Oju2eKnt1IBnRdmBVaM,3837

+ollama-0.3.3.dist-info/RECORD,,

+ollama-0.3.3.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88

+ollama/__init__.py,sha256=KQNRzNBhOHbJhOPiudl73pw8jKlVuvK4FASySV3SGEg,777

+ollama/__pycache__/__init__.cpython-312.pyc,,

+ollama/__pycache__/_client.cpython-312.pyc,,

+ollama/__pycache__/_types.cpython-312.pyc,,

+ollama/_client.py,sha256=y78e5KmynzPUzMuy2_AtWxac8XuyTPkZN7Q2lshpSyg,32001

+ollama/_types.py,sha256=llhZ99FmnGkCJxWAz0CKphC06t_mBocK2m_r-Zlhz7g,4249

+ollama/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

diff --git a/.venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info/WHEEL b/.venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info/WHEEL
new file mode 100644
index 00000000..d73ccaae
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/ollama-0.3.3.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: poetry-core 1.9.0
+Root-Is-Purelib: true
+Tag: py3-none-any