inputs (string, 312-52k chars) | targets (string, 1-3.1k chars, ⌀ = null) | block_type (string, 11 classes) | scenario (string, 7 classes) |
---|---|---|---|
<filename>microagents/agents/agent_similarity.py<fim_prefix>import logging
import numpy as np
from typing import List, Tuple, Optional
from sklearn.metrics.pairwise import cosine_similarity
from integrations.openaiwrapper import OpenAIAPIWrapper
logger = logging.getLogger()
class Agent:
def __init__(self, purpose: str):
self.purpose = purpose
self.purpose_embedding=None
class AgentSimilarity:
def __init__(self, openai_wrapper: OpenAIAPIWrapper, agents: List[Agent]):
"""
Initializes the AgentSimilarity object.
:param openai_wrapper: Instance of OpenAIAPIWrapper to interact with OpenAI API.
:param agents: List of Agent objects.
"""
self.openai_wrapper = openai_wrapper
self.agents = agents
def get_embedding(self, text: str) -> np.ndarray:
"""
Retrieves the embedding for a given text.
:param text: Text to get embedding for.
:return: Embedding as a numpy array.
"""
try:
response = self.openai_wrapper.get_embedding(text)
if 'data' in response and len(response['data']) > 0 and 'embedding' in response['data'][0]:
return np.array(response['data'][0]['embedding'])
else:
logger.exception("Invalid response format")
raise ValueError("Invalid response format")
except<fim_suffix><fim_middle> Exception as e:
logger.exception(f"Error retrieving embedding: {e}")
raise ValueError(f"Error retrieving embedding: {e}")
|
Exception as e:
logger.exception(f"Error retrieving embedding: {e}")
raise ValueError(f"Error retrieving embedding: {e}")
|
CATCH
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microagents/agents/agent_persistence_manager.py<fim_prefix>from agents.agent_serializer import AgentSerializer
from integrations.memoize import memoize_to_sqlite
from integrations.sqlite_agent_persistence import SQLiteAgentPersistence
class AgentPersistenceManager:
def __init__(self, db_filename="agents.db"):
self.persistence = SQLiteAgentPersistence(db_filename)
def remove_agent(self, agent):
"""
Remove an agent from the database.
"""
self.persistence.remove_agent(agent.id)
def save_agent(self, agent):
"""
Serialize and save the agent state if it is a working agent and not a prime agent.
"""
if agent.is_working_agent() and not agent.is_prime_agent():
serialized_agent = AgentSerializer.serialize(agent)
self.persistence.save_agent(serialized_agent)
def load_agent(self, purpose, agent_lifecycle, openai_wrapper):
"""
Load an agent with the given purpose from the database.
"""
serialized_agent = self.persistence.fetch_agent(purpose)
if serialized_agent:
return AgentSerializer.from_dict(serialized_agent, agent_lifecycle, openai_wrapper)
return None
def load_all_agents(self, agent_lifecycle, openai_wrapper):
"""
Load all agents from the database.
"""
purposes = self.persistence.load_all_purposes()
agents = []
for<fim_suffix><fim_middle> purpose in purposes:
agent = self.load_agent(purpose, agent_lifecycle, openai_wrapper)
if agent:
agents.append(agent)
|
purpose in purposes:
agent = self.load_agent(purpose, agent_lifecycle, openai_wrapper)
if agent:
agents.append(agent)
|
FOR
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
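The persistence row above covers the full save/load lifecycle. As orientation, a minimal usage sketch; agent_lifecycle and openai_wrapper stand in for the application's real collaborators and are passed as None placeholders here:

from agents.agent_persistence_manager import AgentPersistenceManager

manager = AgentPersistenceManager(db_filename="agents.db")
# Placeholder collaborators: the real application wires in an agent lifecycle
# and an OpenAI wrapper; None suffices for a sketch against an empty database.
agents = manager.load_all_agents(agent_lifecycle=None, openai_wrapper=None)
for agent in agents:
    manager.save_agent(agent)  # persisted only if working and not a prime agent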
<filename>microagents/agents/agent_similarity.py<fim_prefix>import logging
import numpy as np
from typing import List, Tuple, Optional
from sklearn.metrics.pairwise import cosine_similarity
from integrations.openaiwrapper import OpenAIAPIWrapper
logger = logging.getLogger()
class Agent:
def __init__(self, purpose: str):
self.purpose = purpose
self.purpose_embedding=None
class AgentSimilarity:
def __init__(self, openai_wrapper: OpenAIAPIWrapper, agents: List[Agent]):
"""
Initializes the AgentSimilarity object.
:param openai_wrapper: Instance of OpenAIAPIWrapper to interact with OpenAI API.
:param agents: List of Agent objects.
"""
self.openai_wrapper = openai_wrapper
self.agents = agents
def get_embedding(self, text: str) -> np.ndarray:
"""
Retrieves the embedding for a given text.
:param text: Text to get embedding for.
:return: Embedding as a numpy array.
"""
try:
response = self.openai_wrapper.get_embedding(text)
if 'data' in response and len(response['data']) > 0 and 'embedding' in response['data'][0]:
return np.array(response['data'][0]['embedding'])
else:
logger.exception("Invalid response format")
raise ValueError("Invalid response format")
except Exception as e:
logger.exception(f"Error retrieving embedding: {e}")
raise ValueError(f"Error retrieving embedding: {e}")
def calculate_similarity_threshold(self) -> float:
"""
Calculates the 98th percentile of pairwise purpose-embedding similarities across all agents and uses it as the similarity threshold.
:return: 98th percentile of pairwise similarities, or 0.999 when fewer than 250 embeddings are available.
"""
try:
embeddings=[]
for agent in self.agents:
if agent.purpose_embedding is None:
agent.purpose_embedding = self.get_embedding(agent.purpose)
embeddings.append(agent.purpose_embedding)
if len(embeddings) < 250:
return 0.999
similarities = [cosine_similarity([e1], [e2])[0][0] for i, e1 in enumerate(embeddings) for e2 in embeddings[i+1:]]
return np.percentile(similarities, 98) if similarities else 0.999
except Exception as e:
logger.exception(f"Error calculating similarity threshold: {e}")
raise ValueError(f"Error calculating similarity threshold: {e}")
def find_closest_agent(self, purpose_embedding: np.ndarray) -> Tuple[Optional[Agent], float]:
"""
Finds the closest agent based on the given purpose embedding.
:param purpose_embedding: The embedding of the purpose to find the closest agent for.
:return: Tuple of the closest agent and the highest similarity score.
"""
closest_agent: Optional[Agent] = None
highest_similarity: float = -np.inf
try:
for<fim_suffix><fim_middle> agent in self.agents:
if agent.purpose_embedding is None:
agent.purpose_embedding = self.get_embedding(agent.purpose)
similarity = cosine_similarity([agent.purpose_embedding], [purpose_embedding])[0][0]
if similarity > highest_similarity:
highest_similarity = similarity
closest_agent = agent
|
agent in self.agents:
if agent.purpose_embedding is None:
agent.purpose_embedding = self.get_embedding(agent.purpose)
similarity = cosine_similarity([agent.purpose_embedding], [purpose_embedding])[0][0]
if similarity > highest_similarity:
highest_similarity = similarity
closest_agent = agent
|
FOR
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
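Both similarity rows reduce to one operation: scoring stored purpose embeddings against a query embedding with cosine similarity and taking the argmax. A self-contained sketch of that core step, using toy vectors in place of real OpenAI embeddings:

import numpy as np
from sklearn.metrics.pairwise import cosine_similarity

# Toy purpose embeddings standing in for real OpenAI vectors.
purpose_embeddings = np.array([[1.0, 0.0], [0.8, 0.6], [0.0, 1.0]])
query = np.array([[0.9, 0.1]])

scores = cosine_similarity(purpose_embeddings, query).ravel()
closest = int(np.argmax(scores))
print(closest, scores[closest])  # index and similarity of the closest agent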
<filename>microagents/agents/microagent.py<fim_prefix>import logging
import uuid
from integrations.openaiwrapper import OpenAIAPIWrapper
from agents.agent_evaluation import AgentEvaluator
from agents.agent_response import AgentResponse
from agents.agent_similarity import AgentSimilarity
from agents.response_extraction import ResponseExtraction
from agents.agent_stopped_exception import AgentStoppedException
from agents.response_handler import ResponseHandler
from runtime.code_execution import CodeExecution
from prompt_management.prompt_evolution import PromptEvolution
from utils.utility import get_env_variable, time_function, log_exception
logger = logging.getLogger()
class MicroAgent:
"""
The MicroAgent class encapsulates the behavior of a small, purpose-driven agent
that interacts with the OpenAI API.
"""
def __init__(self, initial_prompt, purpose, depth, agent_lifecycle, openai_wrapper, max_depth=3, bootstrap_agent=False, is_prime=False, purpose_embedding=None, parent=None, parent_id=None, id=None) :
self.dynamic_prompt = initial_prompt
self.purpose = purpose
self.purpose_embedding = purpose_embedding
self.depth = depth
self.max_depth = max_depth
self.usage_count = 0
self.working_agent = bootstrap_agent
self.agent_lifecycle = agent_lifecycle
self.openai_wrapper = openai_wrapper
self.evolve_count = 0
self.number_of_code_executions = 0
self.current_status = None
self.active_agents = {}
self.last_input = ""
self.last_output = ""
self.last_conversation = ""
self.stopped = False
self.is_prime = is_prime
self.stop_execution = False
if parent:
self.parent_id = parent.id if parent else None
else:
self.parent_id = None
if parent_id:
self.parent_id = parent_id
if<fim_suffix><fim_middle> is_prime:
self.id = "2a5e6fe9-1bb1-426c-9521-145caa2cf66b"
else:
if id:
self.id = id
else:
self.id = str(uuid.uuid4())
|
is_prime:
self.id = "2a5e6fe9-1bb1-426c-9521-145caa2cf66b"
else:
if id:
self.id = id
else:
self.id = str(uuid.uuid4())
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microagents/agents/microagent.py<fim_prefix>import logging
import uuid
from integrations.openaiwrapper import OpenAIAPIWrapper
from agents.agent_evaluation import AgentEvaluator
from agents.agent_response import AgentResponse
from agents.agent_similarity import AgentSimilarity
from agents.response_extraction import ResponseExtraction
from agents.agent_stopped_exception import AgentStoppedException
from agents.response_handler import ResponseHandler
from runtime.code_execution import CodeExecution
from prompt_management.prompt_evolution import PromptEvolution
from utils.utility import get_env_variable, time_function, log_exception
logger = logging.getLogger()
class MicroAgent:
"""
The MicroAgent class encapsulates the behavior of a small, purpose-driven agent
that interacts with the OpenAI API.
"""
def __init__(self, initial_prompt, purpose, depth, agent_lifecycle, openai_wrapper, max_depth=3, bootstrap_agent=False, is_prime=False, purpose_embedding=None, parent=None, parent_id=None, id=None) :
self.dynamic_prompt = initial_prompt
self.purpose = purpose
self.purpose_embedding = purpose_embedding
self.depth = depth
self.max_depth = max_depth
self.usage_count = 0
self.working_agent = bootstrap_agent
self.agent_lifecycle = agent_lifecycle
self.openai_wrapper = openai_wrapper
self.evolve_count = 0
self.number_of_code_executions = 0
self.current_status = None
self.active_agents = {}
self.last_input = ""
self.last_output = ""
self.last_conversation = ""
self.stopped = False
self.is_prime = is_prime
self.stop_execution = False
if parent:
self.parent_id = parent.id if parent else None
else:
self.parent_id = None
if parent_id:
self.parent_id = parent_id
if is_prime:
self.id = "2a5e6fe9-1bb1-426c-9521-145caa2cf66b"
else:
if<fim_suffix><fim_middle> id:
self.id = id
else:
self.id = str(uuid.uuid4())
|
id:
self.id = id
else:
self.id = str(uuid.uuid4())
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
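The id assignment in this row reads as three nested branches; an equivalent condensed form (assign_agent_id and PRIME_AGENT_ID are names invented for this sketch):

import uuid

PRIME_AGENT_ID = "2a5e6fe9-1bb1-426c-9521-145caa2cf66b"

def assign_agent_id(is_prime: bool, id: str | None = None) -> str:
    # Behaves like the nested if/else above: prime agents get the fixed UUID,
    # otherwise an explicit id wins, otherwise a fresh uuid4 is generated.
    return PRIME_AGENT_ID if is_prime else (id or str(uuid.uuid4()))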
<filename>microagents/agents/agent_similarity.py<fim_prefix>import logging
import numpy as np
from typing import List, Tuple, Optional
from sklearn.metrics.pairwise import cosine_similarity
from integrations.openaiwrapper import OpenAIAPIWrapper
logger = logging.getLogger()
class Agent:
def __init__(self, purpose: str):
self.purpose = purpose
self.purpose_embedding=None
class AgentSimilarity:
def __init__(self, openai_wrapper: OpenAIAPIWrapper, agents: List[Agent]):
"""
Initializes the AgentSimilarity object.
:param openai_wrapper: Instance of OpenAIAPIWrapper to interact with OpenAI API.
:param agents: List of Agent objects.
"""
self.openai_wrapper = openai_wrapper
self.agents = agents
def get_embedding(self, text: str) -> np.ndarray:
"""
Retrieves the embedding for a given text.
:param text: Text to get embedding for.
:return: Embedding as a numpy array.
"""
try:
response = self.openai_wrapper.get_embedding(text)
if 'data' in response and len(response['data']) > 0 and 'embedding' in response['data'][0]:
return np.array(response['data'][0]['embedding'])
else:
logger.exception("Invalid response format")
raise ValueError("Invalid response format")
except Exception as e:
logger.exception(f"Error retrieving embedding: {e}")
raise ValueError(f"Error retrieving embedding: {e}")
def calculate_similarity_threshold(self) -> float:
"""
Calculates the 98th percentile of pairwise purpose-embedding similarities across all agents and uses it as the similarity threshold.
:return: 98th percentile of pairwise similarities, or 0.999 when fewer than 250 embeddings are available.
"""
try:
embeddings=[]
for agent in self.agents:
if agent.purpose_embedding is None:
agent.purpose_embedding = self.get_embedding(agent.purpose)
embeddings.append(agent.purpose_embedding)
if len(embeddings) < 250:
return 0.999
similarities = [cosine_similarity([e1], [e2])[0][0] for i, e1 in enumerate(embeddings) for e2 in embeddings[i+1:]]
return np.percentile(similarities, 98) if similarities else 0.999
except Exception as e:
logger.exception(f"Error calculating similarity threshold: {e}")
raise ValueError(f"Error calculating similarity threshold: {e}")
def find_closest_agent(self, purpose_embedding: np.ndarray) -> Tuple[Optional[Agent], float]:
"""
Finds the closest agent based on the given purpose embedding.
:param purpose_embedding: The embedding of the purpose to find the closest agent for.
:return: Tuple of the closest agent and the highest similarity score.
"""
closest_agent: Optional[Agent] = None
highest_similarity: float = -np.inf
try:
for agent in self.agents:
if agent.purpose_embedding is None:
agent.purpose_embedding = self.get_embedding(agent.purpose)
similarity = cosine_similarity([agent.purpose_embedding], [purpose_embedding])[0][0]
if<fim_suffix><fim_middle> similarity > highest_similarity:
highest_similarity = similarity
closest_agent = agent
|
similarity > highest_similarity:
highest_similarity = similarity
closest_agent = agent
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microagents/agents/agent_similarity.py<fim_prefix>import logging
import numpy as np
from typing import List, Tuple, Optional
from sklearn.metrics.pairwise import cosine_similarity
from integrations.openaiwrapper import OpenAIAPIWrapper
logger = logging.getLogger()
class Agent:
def __init__(self, purpose: str):
self.purpose = purpose
self.purpose_embedding=None
class AgentSimilarity:
def __init__(self, openai_wrapper: OpenAIAPIWrapper, agents: List[Agent]):
"""
Initializes the AgentSimilarity object.
:param openai_wrapper: Instance of OpenAIAPIWrapper to interact with OpenAI API.
:param agents: List of Agent objects.
"""
self.openai_wrapper = openai_wrapper
self.agents = agents
def get_embedding(self, text: str) -> np.ndarray:
"""
Retrieves the embedding for a given text.
:param text: Text to get embedding for.
:return: Embedding as a numpy array.
"""
try:
response = self.openai_wrapper.get_embedding(text)
if 'data' in response and len(response['data']) > 0 and 'embedding' in response['data'][0]:
return np.array(response['data'][0]['embedding'])
else:
logger.exception("Invalid response format")
raise ValueError("Invalid response format")
except Exception as e:
logger.exception(f"Error retrieving embedding: {e}")
raise ValueError(f"Error retrieving embedding: {e}")
def calculate_similarity_threshold(self) -> float:
"""
Calculates the 98th percentile of pairwise purpose-embedding similarities across all agents and uses it as the similarity threshold.
:return: 98th percentile of pairwise similarities, or 0.999 when fewer than 250 embeddings are available.
"""
try:
embeddings=[]
for agent in self.agents:
if agent.purpose_embedding is None:
agent.purpose_embedding = self.get_embedding(agent.purpose)
embeddings.append(agent.purpose_embedding)
if len(embeddings) < 250:
return 0.999
similarities = [cosine_similarity([e1], [e2])[0][0] for i, e1 in enumerate(embeddings) for e2 in embeddings[i+1:]]
return np.percentile(similarities, 98) if similarities else 0.999
except Exception as e:
logger.exception(f"Error calculating similarity threshold: {e}")
raise ValueError(f"Error calculating similarity threshold: {e}")
def find_closest_agent(self, purpose_embedding: np.ndarray) -> Tuple[Optional[Agent], float]:
"""
Finds the closest agent based on the given purpose embedding.
:param purpose_embedding: The embedding of the purpose to find the closest agent for.
:return: Tuple of the closest agent and the highest similarity score.
"""
closest_agent: Optional[Agent] = None
highest_similarity: float = -np.inf
try:
for agent in self.agents:
if<fim_suffix><fim_middle> agent.purpose_embedding is None:
agent.purpose_embedding = self.get_embedding(agent.purpose)
|
agent.purpose_embedding is None:
agent.purpose_embedding = self.get_embedding(agent.purpose)
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microagents/agents/agent_similarity.py<fim_prefix>import logging
import numpy as np
from typing import List, Tuple, Optional
from sklearn.metrics.pairwise import cosine_similarity
from integrations.openaiwrapper import OpenAIAPIWrapper
logger = logging.getLogger()
class Agent:
def __init__(self, purpose: str):
self.purpose = purpose
self.purpose_embedding=None
class AgentSimilarity:
def __init__(self, openai_wrapper: OpenAIAPIWrapper, agents: List[Agent]):
"""
Initializes the AgentSimilarity object.
:param openai_wrapper: Instance of OpenAIAPIWrapper to interact with OpenAI API.
:param agents: List of Agent objects.
"""
self.openai_wrapper = openai_wrapper
self.agents = agents
def get_embedding(self, text: str) -> np.ndarray:
"""
Retrieves the embedding for a given text.
:param text: Text to get embedding for.
:return: Embedding as a numpy array.
"""
try:
response = self.openai_wrapper.get_embedding(text)
if<fim_suffix><fim_middle> 'data' in response and len(response['data']) > 0 and 'embedding' in response['data'][0]:
return np.array(response['data'][0]['embedding'])
else:
logger.exception("Invalid response format")
raise ValueError("Invalid response format")
|
'data' in response and len(response['data']) > 0 and 'embedding' in response['data'][0]:
return np.array(response['data'][0]['embedding'])
else:
logger.exception("Invalid response format")
raise ValueError("Invalid response format")
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microagents/integrations/memoize.py<fim_prefix>import sqlite3
import hashlib
import json
import functools
## Originally from https://www.kevinkatz.io/posts/memoize-to-sqlite
def memoize_to_sqlite(func_name: str, filename: str = "cache.db"):
"""
Memoization decorator that caches the output of a method in a SQLite
database.
"""
def decorator(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
with SQLiteMemoization(filename) as memoizer:
return memoizer.fetch_or_compute(func, func_name, *args, **kwargs)
return wrapped
return decorator
class SQLiteMemoization:
def __init__(self, filename):
self.filename = filename
self.connection = None
def __enter__(self):
self.connection = sqlite3.connect(self.filename)
self._initialize_database()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.connection.close()
self.connection = None
def _initialize_database(self):
self.connection.execute(
"CREATE TABLE IF NOT EXISTS cache (hash TEXT PRIMARY KEY, result TEXT)"
)
self.connection.execute(
"CREATE INDEX IF NOT EXISTS cache_ndx ON cache(hash)"
)
def fetch_or_compute(self, func, func_name, *args, **kwargs):
arg_hash = self._compute_hash(func_name, *args, **kwargs)
result = self._fetch_from_cache(arg_hash)
if<fim_suffix><fim_middle> result is not None:
return result
|
result is not None:
return result
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
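fetch_or_compute hinges on a stable hash of the call. The file's imports (hashlib, json) suggest an implementation along these lines, but _compute_hash's body is not shown in this row, so this is only an assumed sketch:

import hashlib
import json

def compute_hash(func_name: str, *args, **kwargs) -> str:
    # Assumed implementation, not the source's: serialize the call
    # deterministically and hash it to form the cache key.
    payload = json.dumps([func_name, args, sorted(kwargs.items())], default=str)
    return hashlib.sha256(payload.encode("utf-8")).hexdigest()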
<filename>microagents/agents/agent_persistence_manager.py<fim_prefix>from agents.agent_serializer import AgentSerializer
from integrations.memoize import memoize_to_sqlite
from integrations.sqlite_agent_persistence import SQLiteAgentPersistence
class AgentPersistenceManager:
def __init__(self, db_filename="agents.db"):
self.persistence = SQLiteAgentPersistence(db_filename)
def remove_agent(self, agent):
"""
Remove an agent from the database.
"""
self.persistence.remove_agent(agent.id)
def save_agent(self, agent):
"""
Serialize and save the agent state if it is a working agent and not a prime agent.
"""
if agent.is_working_agent() and not agent.is_prime_agent():
serialized_agent = AgentSerializer.serialize(agent)
self.persistence.save_agent(serialized_agent)
def load_agent(self, purpose, agent_lifecycle, openai_wrapper):
"""
Load an agent with the given purpose from the database.
"""
serialized_agent = self.persistence.fetch_agent(purpose)
if serialized_agent:
return AgentSerializer.from_dict(serialized_agent, agent_lifecycle, openai_wrapper)
return None
def load_all_agents(self, agent_lifecycle, openai_wrapper):
"""
Load all agents from the database.
"""
purposes = self.persistence.load_all_purposes()
agents = []
for purpose in purposes:
agent = self.load_agent(purpose, agent_lifecycle, openai_wrapper)
if<fim_suffix><fim_middle> agent:
agents.append(agent)
|
agent:
agents.append(agent)
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microagents/agents/microagent.py<fim_prefix>import logging
import uuid
from integrations.openaiwrapper import OpenAIAPIWrapper
from agents.agent_evaluation import AgentEvaluator
from agents.agent_response import AgentResponse
from agents.agent_similarity import AgentSimilarity
from agents.response_extraction import ResponseExtraction
from agents.agent_stopped_exception import AgentStoppedException
from agents.response_handler import ResponseHandler
from runtime.code_execution import CodeExecution
from prompt_management.prompt_evolution import PromptEvolution
from utils.utility import get_env_variable, time_function, log_exception
logger = logging.getLogger()
class MicroAgent:
"""
The MicroAgent class encapsulates the behavior of a small, purpose-driven agent
that interacts with the OpenAI API.
"""
def __init__(self, initial_prompt, purpose, depth, agent_lifecycle, openai_wrapper, max_depth=3, bootstrap_agent=False, is_prime=False, purpose_embedding=None, parent=None, parent_id=None, id=None) :
self.dynamic_prompt = initial_prompt
self.purpose = purpose
self.purpose_embedding = purpose_embedding
self.depth = depth
self.max_depth = max_depth
self.usage_count = 0
self.working_agent = bootstrap_agent
self.agent_lifecycle = agent_lifecycle
self.openai_wrapper = openai_wrapper
self.evolve_count = 0
self.number_of_code_executions = 0
self.current_status = None
self.active_agents = {}
self.last_input = ""
self.last_output = ""
self.last_conversation = ""
self.stopped = False
self.is_prime = is_prime
self.stop_execution = False
if<fim_suffix><fim_middle> parent:
self.parent_id = parent.id if parent else None
else:
self.parent_id = None
|
parent:
self.parent_id = parent.id if parent else None
else:
self.parent_id = None
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microagents/integrations/memoize.py<fim_prefix>import sqlite3
import hashlib
import json
import functools
## Originally from https://www.kevinkatz.io/posts/memoize-to-sqlite
def memoize_to_sqlite(func_name: str, filename: str = "cache.db"):
"""
Memoization decorator that caches the output of a method in a SQLite
database.
"""
def<fim_suffix><fim_middle> decorator(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
with SQLiteMemoization(filename) as memoizer:
return memoizer.fetch_or_compute(func, func_name, *args, **kwargs)
return wrapped
|
decorator(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
with SQLiteMemoization(filename) as memoizer:
return memoizer.fetch_or_compute(func, func_name, *args, **kwargs)
return wrapped
|
METHOD
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
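Applied to a function, the decorator routes every call through a SQLiteMemoization context. A hypothetical usage (slow_square is an invented example function):

from integrations.memoize import memoize_to_sqlite

@memoize_to_sqlite(func_name="slow_square", filename="cache.db")
def slow_square(n: int) -> int:
    # First call computes and caches; repeated calls with the same
    # arguments are served from the SQLite cache table.
    return n * n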
<filename>microagents/integrations/memoize.py<fim_prefix>import sqlite3
import hashlib
import json
import functools
## Originally from https://www.kevinkatz.io/posts/memoize-to-sqlite
def memoize_to_sqlite(func_name: str, filename: str = "cache.db"):
"""
Memoization decorator that caches the output of a method in a SQLite
database.
"""
def decorator(func):
@functools.wraps(func)
def<fim_suffix><fim_middle> wrapped(*args, **kwargs):
with SQLiteMemoization(filename) as memoizer:
return memoizer.fetch_or_compute(func, func_name, *args, **kwargs)
|
wrapped(*args, **kwargs):
with SQLiteMemoization(filename) as memoizer:
return memoizer.fetch_or_compute(func, func_name, *args, **kwargs)
|
METHOD
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microagents/integrations/memoize.py<fim_prefix>import sqlite3
import hashlib
import json
import functools
## Originally from https://www.kevinkatz.io/posts/memoize-to-sqlite
def memoize_to_sqlite(func_name: str, filename: str = "cache.db"):
"""
Memoization decorator that caches the output of a method in a SQLite
database.
"""
def decorator(func):
@functools.wraps(func)
def<fim_suffix><fim_middle> wrapped(*args, **kwargs):
with SQLiteMemoization(filename) as memoizer:
return memoizer.fetch_or_compute(func, func_name, *args, **kwargs)
|
wrapped(*args, **kwargs):
with SQLiteMemoization(filename) as memoizer:
return memoizer.fetch_or_compute(func, func_name, *args, **kwargs)
|
METHOD
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microagents/integrations/sqlite_agent_persistence.py<fim_prefix>import sqlite3
import json
from integrations.agent_persistence import AbstractAgentPersistence
class SQLiteAgentPersistence(AbstractAgentPersistence):
def __init__(self, filename="agents.db"):
self.filename = filename
self._initialize_database()
def _initialize_database(self):
"""
Initialize the SQLite database with the required schema.
"""
with sqlite3.connect(self.filename) as conn:
conn.execute("""
CREATE TABLE IF NOT EXISTS agents (
id TEXT PRIMARY KEY,
purpose TEXT,
data TEXT
)
""")
def remove_agent(self, purpose):
"""
Remove an agent from the SQLite database.
"""
with sqlite3.connect(self.filename) as conn:
conn.execute("DELETE FROM agents WHERE id = ?", (purpose,))
def save_agent(self, agent_dict):
"""
Save the serialized agent to an SQLite database.
"""
with sqlite3.connect(self.filename) as conn:
conn.execute(
#<fim_suffix><fim_middle> add id field
|
add id field
|
LINE_COMMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
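The target here is only the trailing comment; the schema earlier in the row (id, purpose, data) suggests a completion along these lines, though the actual SQL is not shown, so treat it as an assumption:

import json
import sqlite3

def save_agent(filename: str, agent_dict: dict) -> None:
    # Assumed completion of the row's save_agent: upsert keyed on id,
    # matching the CREATE TABLE schema shown above.
    with sqlite3.connect(filename) as conn:
        conn.execute(
            "REPLACE INTO agents (id, purpose, data) VALUES (?, ?, ?)",  # add id field
            (agent_dict["id"], agent_dict["purpose"], json.dumps(agent_dict)),
        )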
<filename>microagents/agents/microagent.py<fim_prefix>import logging
import uuid
from integrations.openaiwrapper import OpenAIAPIWrapper
from agents.agent_evaluation import AgentEvaluator
from agents.agent_response import AgentResponse
from agents.agent_similarity import AgentSimilarity
from agents.response_extraction import ResponseExtraction
from agents.agent_stopped_exception import AgentStoppedException
from agents.response_handler import ResponseHandler
from runtime.code_execution import CodeExecution
from prompt_management.prompt_evolution import PromptEvolution
from utils.utility import get_env_variable, time_function, log_exception
logger = logging.getLogger()
class MicroAgent:
"""
The MicroAgent class encapsulates the behavior of a small, purpose-driven agent
that interacts with the OpenAI API.
"""
def __init__(self, initial_prompt, purpose, depth, agent_lifecycle, openai_wrapper, max_depth=3, bootstrap_agent=False, is_prime=False, purpose_embedding=None, parent=None, parent_id=None, id=None) :
self.dynamic_prompt = initial_prompt
self.purpose = purpose
self.purpose_embedding = purpose_embedding
self.depth = depth
self.max_depth = max_depth
self.usage_count = 0
self.working_agent = bootstrap_agent
self.agent_lifecycle = agent_lifecycle
self.openai_wrapper = openai_wrapper
self.evolve_count = 0
self.number_of_code_executions = 0
self.current_status = None
self.active_agents = {}
self.last_input = ""
self.last_output = ""
self.last_conversation = ""
self.stopped = False
self.is_prime = is_prime
self.stop_execution = False
if parent:
self.parent_id = parent.id if parent else None
else:
self.parent_id = None
if parent_id:
self.parent_id = parent_id
if is_prime:
self.id = "2a5e6fe9-1bb1-426c-9521-145caa2cf66b"
else:
if id:
self.id = id
else:
self.id = str(uuid.uuid4())
#<fim_suffix><fim_middle> Initialize components used by the agent
|
Initialize components used by the agent
|
LINE_COMMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microsearch/src/microsearch/engine.py<fim_prefix>from collections import defaultdict
from math import log
import string
def update_url_scores(old: dict[str, float], new: dict[str, float]):
for url, score in new.items():
if url in old:
old[url] += score
else:
old[url] = score
return old
def normalize_string(input_string: str) -> str:
translation_table = str.maketrans(string.punctuation, " " * len(string.punctuation))
string_without_punc = input_string.translate(translation_table)
string_without_double_spaces = " ".join(string_without_punc.split())
return string_without_double_spaces.lower()
class SearchEngine:
def __init__(self, k1: float = 1.5, b: float = 0.75):
self._index: dict[str, dict[str, int]] = defaultdict(lambda: defaultdict(int))
self._documents: dict[str, str] = {}
self.k1 = k1
self.b = b
@property
def posts(self) -> list[str]:
return list(self._documents.keys())
@property
def number_of_documents(self) -> int:
return len(self._documents)
@property
def avdl(self) -> float:
if not hasattr(self, "_avdl"):
self._avdl = sum(len(d) for d in self._documents.values()) / len(self._documents)
return self._avdl
def idf(self, kw: str) -> float:
N = self.number_of_documents
n_kw = len(self.get_urls(kw))
return log((N - n_kw + 0.5) / (n_kw + 0.5) + 1)
def bm25(self, kw: str) -> dict[str, float]:
result = {}
idf_score = self.idf(kw)
avdl = self.avdl
for url, freq in self.get_urls(kw).items():
numerator = freq * (self.k1 + 1)
denominator = freq + self.k1 * (
1 - self.b + self.b * len(self._documents[url]) / avdl
)
result[url]<fim_suffix><fim_middle> = idf_score * numerator / denominator
|
= idf_score * numerator / denominator
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
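The bm25 and idf methods in this row transcribe the standard Okapi BM25 formula. In the code's notation, with f(q, D) the per-document frequency freq, |D| the document length, avgdl the avdl property, N the number of documents, and n(q) the number of documents containing q:

$$\mathrm{score}(D, q) = \mathrm{IDF}(q)\cdot\frac{f(q, D)\,(k_1 + 1)}{f(q, D) + k_1\left(1 - b + b\,\frac{|D|}{\mathrm{avgdl}}\right)}, \qquad \mathrm{IDF}(q) = \ln\!\left(\frac{N - n(q) + 0.5}{n(q) + 0.5} + 1\right)$$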
<filename>microsearch/src/microsearch/engine.py<fim_prefix>from collections import defaultdict
from math import log
import string
def update_url_scores(old: dict[str, float], new: dict[str, float]):
for url, score in new.items():
if url in old:
old[url]<fim_suffix><fim_middle> += score
|
+= score
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microsearch/src/microsearch/engine.py<fim_prefix>from collections import defaultdict
from math import log
import string
def update_url_scores(old: dict[str, float], new: dict[str, float]):
for url, score in new.items():
if url in old:
old[url] += score
else:
old[url] = score
return old
def normalize_string(input_string: str) -> str:
translation_table = str.maketrans(string.punctuation, " " * len(string.punctuation))
string_without_punc = input_string.translate(translation_table)
string_without_double_spaces = " ".join(string_without_punc.split())
return string_without_double_spaces.lower()
class SearchEngine:
def __init__(self, k1: float = 1.5, b: float = 0.75):
self._index: dict[str, dict[str, int]] = defaultdict(lambda: defaultdict(int))
self._documents: dict[str, str] = {}
self.k1 = k1
self.b = b
@property
def posts(self) -> list[str]:
return list(self._documents.keys())
@property
def number_of_documents(self) -> int:
return len(self._documents)
@property
def avdl(self) -> float:
if not hasattr(self, "_avdl"):
self._avdl = sum(len(d) for d in self._documents.values()) / len(self._documents)
return self._avdl
def idf(self, kw: str) -> float:
N = self.number_of_documents
n_kw = len(self.get_urls(kw))
return log((N - n_kw + 0.5) / (n_kw + 0.5) + 1)
def bm25(self, kw: str) -> dict[str, float]:
result = {}
idf_score<fim_suffix><fim_middle> = self.idf(kw)
|
= self.idf(kw)
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microsearch/src/microsearch/engine.py<fim_prefix>from collections import defaultdict
from math import log
import string
def update_url_scores(old: dict[str, float], new: dict[str, float]):
for url, score in new.items():
if url in old:
old[url] += score
else:
old[url] = score
return old
def normalize_string(input_string: str) -> str:
translation_table = str.maketrans(string.punctuation, " " * len(string.punctuation))
string_without_punc = input_string.translate(translation_table)
string_without_double_spaces = " ".join(string_without_punc.split())
return string_without_double_spaces.lower()
class SearchEngine:
def __init__(self, k1: float = 1.5, b: float = 0.75):
self._index:<fim_suffix><fim_middle> dict[str, dict[str, int]] = defaultdict(lambda: defaultdict(int))
|
dict[str, dict[str, int]] = defaultdict(lambda: defaultdict(int))
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microsearch/src/microsearch/engine.py<fim_prefix>from collections import defaultdict
from math import log
import string
def update_url_scores(old: dict[str, float], new: dict[str, float]):
for url, score in new.items():
if url in old:
old[url] += score
else:
old[url] = score
return old
def normalize_string(input_string: str) -> str:
translation_table = str.maketrans(string.punctuation, " " * len(string.punctuation))
string_without_punc = input_string.translate(translation_table)
string_without_double_spaces = " ".join(string_without_punc.split())
return string_without_double_spaces.lower()
class SearchEngine:
def __init__(self, k1: float = 1.5, b: float = 0.75):
self._index: dict[str, dict[str, int]] = defaultdict(lambda: defaultdict(int))
self._documents: dict[str, str] = {}
self.k1 = k1
self.b = b
@property
def posts(self) -> list[str]:
return list(self._documents.keys())
@property
def number_of_documents(self) -> int:
return len(self._documents)
@property
def avdl(self) -> float:
if not hasattr(self, "_avdl"):
self._avdl = sum(len(d) for d in self._documents.values()) / len(self._documents)
return self._avdl
def idf(self, kw: str) -> float:
N = self.number_of_documents
n_kw = len(self.get_urls(kw))
return log((N - n_kw + 0.5) / (n_kw + 0.5) + 1)
def bm25(self, kw: str) -> dict[str, float]:
result = {}
idf_score = self.idf(kw)
avdl = self.avdl
for url, freq in self.get_urls(kw).items():
numerator = freq * (self.k1 + 1)
denominator = freq + self.k1 * (
1 - self.b + self.b * len(self._documents[url]) / avdl
)
result[url] = idf_score * numerator / denominator
return result
def search(self, query: str) -> dict[str, float]:
keywords = normalize_string(query).split(" ")
url_scores: dict[str, float] = {}
for kw in keywords:
kw_urls_score = self.bm25(kw)
url_scores = update_url_scores(url_scores, kw_urls_score)
return url_scores
def index(self, url: str, content: str) -> None:
self._documents[url]<fim_suffix><fim_middle> = content
|
= content
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microsearch/src/microsearch/engine.py<fim_prefix>from collections import defaultdict
from math import log
import string
def update_url_scores(old: dict[str, float], new: dict[str, float]):
for url, score in new.items():
if url in old:
old[url] += score
else:
old[url] = score
return old
def normalize_string(input_string: str) -> str:
translation_table = str.maketrans(string.punctuation, " " * len(string.punctuation))
string_without_punc = input_string.translate(translation_table)
string_without_double_spaces = " ".join(string_without_punc.split())
return string_without_double_spaces.lower()
class SearchEngine:
def __init__(self, k1: float = 1.5, b: float = 0.75):
self._index: dict[str, dict[str, int]] = defaultdict(lambda: defaultdict(int))
self._documents: dict[str, str] = {}
self.k1 = k1
self.b = b
@property
def posts(self) -> list[str]:
return list(self._documents.keys())
@property
def number_of_documents(self) -> int:
return len(self._documents)
@property
def avdl(self) -> float:
if not hasattr(self, "_avdl"):
self._avdl = sum(len(d) for d in self._documents.values()) / len(self._documents)
return self._avdl
def idf(self, kw: str) -> float:
N = self.number_of_documents
n_kw = len(self.get_urls(kw))
return log((N - n_kw + 0.5) / (n_kw + 0.5) + 1)
def bm25(self, kw: str) -> dict[str, float]:
result = {}
idf_score = self.idf(kw)
avdl = self.avdl
for url, freq in self.get_urls(kw).items():
numerator = freq * (self.k1 + 1)
denominator<fim_suffix><fim_middle> = freq + self.k1 * (
1 - self.b + self.b * len(self._documents[url]) / avdl
)
|
= freq + self.k1 * (
1 - self.b + self.b * len(self._documents[url]) / avdl
)
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microsearch/src/microsearch/engine.py<fim_prefix>from collections import defaultdict
from math import log
import string
def update_url_scores(old: dict[str, float], new: dict[str, float]):
for url, score in new.items():
if url in old:
old[url] += score
else:
old[url] = score
return old
def normalize_string(input_string: str) -> str:
translation_table = str.maketrans(string.punctuation, " " * len(string.punctuation))
string_without_punc = input_string.translate(translation_table)
string_without_double_spaces = " ".join(string_without_punc.split())
return string_without_double_spaces.lower()
class SearchEngine:
def __init__(self, k1: float = 1.5, b: float = 0.75):
self._index: dict[str, dict[str, int]] = defaultdict(lambda: defaultdict(int))
self._documents: dict[str, str] = {}
self.k1 = k1
self.b = b
@property
def posts(self) -> list[str]:
return list(self._documents.keys())
@property
def number_of_documents(self) -> int:
return len(self._documents)
@property
def avdl(self) -> float:
if not hasattr(self, "_avdl"):
self._avdl = sum(len(d) for d in self._documents.values()) / len(self._documents)
return self._avdl
def idf(self, kw: str) -> float:
N = self.number_of_documents
n_kw = len(self.get_urls(kw))
return log((N - n_kw + 0.5) / (n_kw + 0.5) + 1)
def bm25(self, kw: str) -> dict[str, float]:
result<fim_suffix><fim_middle> = {}
|
= {}
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microsearch/src/microsearch/engine.py<fim_prefix>from collections import defaultdict
from math import log
import string
def update_url_scores(old: dict[str, float], new: dict[str, float]):
for url, score in new.items():
if url in old:
old[url] += score
else:
old[url] = score
return old
def normalize_string(input_string: str) -> str:
translation_table = str.maketrans(string.punctuation, " " * len(string.punctuation))
string_without_punc = input_string.translate(translation_table)
string_without_double_spaces<fim_suffix><fim_middle> = " ".join(string_without_punc.split())
|
= " ".join(string_without_punc.split())
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
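normalize_string maps every punctuation character to a space, collapses runs of whitespace, and lowercases. A quick behavior check (the function body is copied from the row above; the input string is invented):

import string

def normalize_string(input_string: str) -> str:
    translation_table = str.maketrans(string.punctuation, " " * len(string.punctuation))
    string_without_punc = input_string.translate(translation_table)
    string_without_double_spaces = " ".join(string_without_punc.split())
    return string_without_double_spaces.lower()

print(normalize_string("BM25, in 2 parts -- scoring & ranking!"))
# -> "bm25 in 2 parts scoring ranking"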
<filename>microsearch/src/microsearch/engine.py<fim_prefix>from collections import defaultdict
from math import log
import string
def update_url_scores(old: dict[str, float], new: dict[str, float]):
for url, score in new.items():
if url in old:
old[url] += score
else:
old[url] = score
return old
def normalize_string(input_string: str) -> str:
translation_table = str.maketrans(string.punctuation, " " * len(string.punctuation))
string_without_punc = input_string.translate(translation_table)
string_without_double_spaces = " ".join(string_without_punc.split())
return string_without_double_spaces.lower()
class SearchEngine:
def __init__(self, k1: float = 1.5, b: float = 0.75):
self._index: dict[str, dict[str, int]] = defaultdict(lambda: defaultdict(int))
self._documents: dict[str, str] = {}
self.k1 = k1
self.b = b
@property
def posts(self) -> list[str]:
return list(self._documents.keys())
@property
def number_of_documents(self) -> int:
return len(self._documents)
@property
def avdl(self) -> float:
if not hasattr(self, "_avdl"):
self._avdl = sum(len(d) for d in self._documents.values()) / len(self._documents)
return self._avdl
def idf(self, kw: str) -> float:
N = self.number_of_documents
n_kw = len(self.get_urls(kw))
return log((N - n_kw + 0.5) / (n_kw + 0.5) + 1)
def bm25(self, kw: str) -> dict[str, float]:
result = {}
idf_score = self.idf(kw)
avdl = self.avdl
for url, freq in self.get_urls(kw).items():
numerator = freq * (self.k1 + 1)
denominator = freq + self.k1 * (
1 - self.b + self.b * len(self._documents[url]) / avdl
)
result[url] = idf_score * numerator / denominator
return result
def search(self, query: str) -> dict[str, float]:
keywords = normalize_string(query).split(" ")
url_scores: dict[str, float] = {}
for kw in keywords:
kw_urls_score = self.bm25(kw)
url_scores = update_url_scores(url_scores, kw_urls_score)
return<fim_suffix><fim_middle> url_scores
|
url_scores
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microsearch/src/microsearch/engine.py<fim_prefix>from collections import defaultdict
from math import log
import string
def update_url_scores(old: dict[str, float], new: dict[str, float]):
for url, score in new.items():
if url in old:
old[url] += score
else:
old[url] = score
return old
def normalize_string(input_string: str) -> str:
translation_table = str.maketrans(string.punctuation, " " * len(string.punctuation))
string_without_punc = input_string.translate(translation_table)
string_without_double_spaces = " ".join(string_without_punc.split())
return string_without_double_spaces.lower()
class SearchEngine:
def __init__(self, k1: float = 1.5, b: float = 0.75):
self._index: dict[str, dict[str, int]] = defaultdict(lambda: defaultdict(int))
self._documents: dict[str, str] = {}
self.k1 = k1
self.b = b
@property
def posts(self) -> list[str]:
return list(self._documents.keys())
@property
def number_of_documents(self) -> int:
return len(self._documents)
@property
def avdl(self) -> float:
if not hasattr(self, "_avdl"):
self._avdl = sum(len(d) for d in self._documents.values()) / len(self._documents)
return self._avdl
def idf(self, kw: str) -> float:
N = self.number_of_documents
n_kw<fim_suffix><fim_middle> = len(self.get_urls(kw))
|
= len(self.get_urls(kw))
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microsearch/src/microsearch/engine.py<fim_prefix>from collections import defaultdict
from math import log
import string
def update_url_scores(old: dict[str, float], new: dict[str, float]):
for url, score in new.items():
if url in old:
old[url] += score
else:
old[url] = score
return old
def normalize_string(input_string: str) -> str:
translation_table = str.maketrans(string.punctuation, " " * len(string.punctuation))
string_without_punc = input_string.translate(translation_table)
string_without_double_spaces = " ".join(string_without_punc.split())
return string_without_double_spaces.lower()
class SearchEngine:
def __init__(self, k1: float = 1.5, b: float = 0.75):
self._index: dict[str, dict[str, int]] = defaultdict(lambda: defaultdict(int))
self._documents: dict[str, str] = {}
self.k1 = k1
self.b = b
@property
def posts(self) -> list[str]:
return list(self._documents.keys())
@property
def number_of_documents(self) -> int:
return len(self._documents)
@property
def avdl(self) -> float:
if not hasattr(self, "_avdl"):
self._avdl = sum(len(d) for d in self._documents.values()) / len(self._documents)
return self._avdl
def idf(self, kw: str) -> float:
N = self.number_of_documents
n_kw = len(self.get_urls(kw))
return log((N - n_kw + 0.5) / (n_kw + 0.5) + 1)
def bm25(self, kw: str) -> dict[str, float]:
result = {}
idf_score = self.idf(kw)
avdl = self.avdl
for url, freq in self.get_urls(kw).items():
numerator = freq * (self.k1 + 1)
denominator = freq + self.k1 * (
1 - self.b + self.b * len(self._documents[url]) / avdl
)
result[url] = idf_score * numerator / denominator
return result
def search(self, query: str) -> dict[str, float]:
keywords = normalize_string(query).split(" ")
url_scores: dict[str, float] = {}
for kw in keywords:
kw_urls_score = self.bm25(kw)
url_scores = update_url_scores(url_scores, kw_urls_score)
return url_scores
def index(self, url: str, content: str) -> None:
self._documents[url] = content
words = normalize_string(content).split(" ")
for<fim_suffix><fim_middle> word in words:
self._index[word][url] += 1
|
word in words:
self._index[word][url] += 1
|
FOR
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microsearch/src/microsearch/engine.py<fim_prefix>from collections import defaultdict
from math import log
import string
def update_url_scores(old: dict[str, float], new: dict[str, float]):
for<fim_suffix><fim_middle> url, score in new.items():
if url in old:
old[url] += score
else:
old[url] = score
|
url, score in new.items():
if url in old:
old[url] += score
else:
old[url] = score
|
FOR
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microsearch/src/microsearch/engine.py<fim_prefix>from collections import defaultdict
from math import log
import string
def update_url_scores(old: dict[str, float], new: dict[str, float]):
for url, score in new.items():
if url in old:
old[url] += score
else:
old[url] = score
return old
def normalize_string(input_string: str) -> str:
translation_table = str.maketrans(string.punctuation, " " * len(string.punctuation))
string_without_punc = input_string.translate(translation_table)
string_without_double_spaces = " ".join(string_without_punc.split())
return string_without_double_spaces.lower()
class SearchEngine:
def __init__(self, k1: float = 1.5, b: float = 0.75):
self._index: dict[str, dict[str, int]] = defaultdict(lambda: defaultdict(int))
self._documents: dict[str, str] = {}
self.k1 = k1
self.b = b
@property
def posts(self) -> list[str]:
return list(self._documents.keys())
@property
def number_of_documents(self) -> int:
return len(self._documents)
@property
def avdl(self) -> float:
if not hasattr(self, "_avdl"):
self._avdl = sum(len(d) for d in self._documents.values()) / len(self._documents)
return self._avdl
def idf(self, kw: str) -> float:
N = self.number_of_documents
n_kw = len(self.get_urls(kw))
return log((N - n_kw + 0.5) / (n_kw + 0.5) + 1)
def bm25(self, kw: str) -> dict[str, float]:
result = {}
idf_score = self.idf(kw)
avdl = self.avdl
for url, freq in self.get_urls(kw).items():
numerator = freq * (self.k1 + 1)
denominator = freq + self.k1 * (
1 - self.b + self.b * len(self._documents[url]) / avdl
)
result[url] = idf_score * numerator / denominator
return result
def search(self, query: str) -> dict[str, float]:
keywords = normalize_string(query).split(" ")
url_scores: dict[str, float] = {}
for<fim_suffix><fim_middle> kw in keywords:
kw_urls_score = self.bm25(kw)
url_scores = update_url_scores(url_scores, kw_urls_score)
|
kw in keywords:
kw_urls_score = self.bm25(kw)
url_scores = update_url_scores(url_scores, kw_urls_score)
|
FOR
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microsearch/src/microsearch/engine.py<fim_prefix>from collections import defaultdict
from math import log
import string
def update_url_scores(old: dict[str, float], new: dict[str, float]):
for url, score in new.items():
if url in old:
old[url] += score
else:
old[url] = score
return old
def normalize_string(input_string: str) -> str:
translation_table = str.maketrans(string.punctuation, " " * len(string.punctuation))
string_without_punc = input_string.translate(translation_table)
string_without_double_spaces = " ".join(string_without_punc.split())
return string_without_double_spaces.lower()
class SearchEngine:
def __init__(self, k1: float = 1.5, b: float = 0.75):
self._index: dict[str, dict[str, int]] = defaultdict(lambda: defaultdict(int))
self._documents: dict[str, str] = {}
self.k1 = k1
self.b = b
@property
def posts(self) -> list[str]:
return list(self._documents.keys())
@property
def number_of_documents(self) -> int:
return len(self._documents)
@property
def avdl(self) -> float:
if not hasattr(self, "_avdl"):
self._avdl = sum(len(d) for d in self._documents.values()) / len(self._documents)
return self._avdl
def idf(self, kw: str) -> float:
N = self.number_of_documents
n_kw = len(self.get_urls(kw))
return log((N - n_kw + 0.5) / (n_kw + 0.5) + 1)
def bm25(self, kw: str) -> dict[str, float]:
result = {}
idf_score = self.idf(kw)
avdl = self.avdl
for<fim_suffix><fim_middle> url, freq in self.get_urls(kw).items():
numerator = freq * (self.k1 + 1)
denominator = freq + self.k1 * (
1 - self.b + self.b * len(self._documents[url]) / avdl
)
result[url] = idf_score * numerator / denominator
|
url, freq in self.get_urls(kw).items():
numerator = freq * (self.k1 + 1)
denominator = freq + self.k1 * (
1 - self.b + self.b * len(self._documents[url]) / avdl
)
result[url] = idf_score * numerator / denominator
|
FOR
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>microsearch/src/microsearch/engine.py<fim_prefix>from collections import defaultdict
from math import log
import string
def update_url_scores(old: dict[str, float], new: dict[str, float]):
for url, score in new.items():
if url in old:
old[url] += score
else:
old[url] = score
return old
def normalize_string(input_string: str) -> str:
translation_table = str.maketrans(string.punctuation, " " * len(string.punctuation))
string_without_punc = input_string.translate(translation_table)
string_without_double_spaces = " ".join(string_without_punc.split())
return string_without_double_spaces.lower()
class SearchEngine:
def __init__(self, k1: float = 1.5, b: float = 0.75):
self._index: dict[str, dict[str, int]] = defaultdict(lambda: defaultdict(int))
self._documents: dict[str, str] = {}
self.k1 = k1
self.b = b
@property
def posts(self) -> list[str]:
return list(self._documents.keys())
@property
def number_of_documents(self) -> int:
return len(self._documents)
@property
def avdl(self) -> float:
if not hasattr(self, "_avdl"):
self._avdl = sum(len(d) for d in self._documents.values()) / len(self._documents)
return self._avdl
def idf(self, kw: str) -> float:
N = self.number_of_documents
n_kw = len(self.get_urls(kw))
return log((N - n_kw + 0.5) / (n_kw + 0.5) + 1)
def bm25(self, kw: str) -> dict[str, float]:
result = {}
idf_score = self.idf(kw)
avdl = self.avdl
for url, freq in self.get_urls(kw).items():
numerator = freq * (self.k1 + 1)
denominator = freq + self.k1 * (
1 - self.b + self.b * len(self._documents[url]) / avdl
)
result[url] = idf_score * numerator / denominator
return result
def search(self, query: str) -> dict[str, float]:
keywords = normalize_string(query).split(" ")
url_scores: dict[str, float] = {}
for kw in keywords:
kw_urls_score = self.bm25(kw)
url_scores = update_url_scores(url_scores, kw_urls_score)
return url_scores
def index(self, url: str, content: str) -> None:
self._documents[url] = content
words = normalize_string(content).split(" ")
for word in words:
self._index[word][url] += 1
if hasattr(self, "_avdl"):
del self._avdl
def bulk_index(self, documents: list[tuple[str, str]]):
for<fim_suffix><fim_middle> url, content in documents:
self.index(url, content)
|
url, content in documents:
self.index(url, content)
|
FOR
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
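Putting the engine rows together, indexing and searching end to end looks roughly like this; the sample documents are invented, and the import assumes the microsearch package is installed as laid out in the filename:

from microsearch.engine import SearchEngine

engine = SearchEngine(k1=1.5, b=0.75)
engine.bulk_index([
    ("https://example.com/a", "rust is a systems programming language"),
    ("https://example.com/b", "python is a scripting language"),
])
print(engine.search("programming language"))  # url -> accumulated BM25 score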
<filename>microsearch/src/microsearch/engine.py<fim_prefix>from collections import defaultdict
from math import log
import string
def update_url_scores(old: dict[str, float], new: dict[str, float]):
for url, score in new.items():
if<fim_suffix><fim_middle> url in old:
old[url] += score
else:
old[url] = score
|
url in old:
old[url] += score
else:
old[url] = score
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>nlm-ingestor/nlm_ingestor/ingestor/line_parser.py<fim_prefix>import datetime
import logging
import math
import re
import string
from nltk.corpus import stopwords
from .patterns import abbreviations
from .patterns import states
from .patterns import states_abbreviations
from .styling_utils import mode_of_list
try:
stop_words = set(stopwords.words("english"))
except Exception as e:
logging.error(e)
import nltk
stopwords = nltk.download("stopwords")
stop_words = set(stopwords.words("english"))
stop_words.add("per")
continuing_chars = "!\"&'+,./:;<=?@\\]^_`|}~"
list_chars = [
"•",
"➢",
"*",
"ƒ",
"",
"",
"",
"",
"»",
"☐",
"·",
"�",
"▪",
"▪",
"○",
"",
"–",
]
list_types = {
"•": "circle",
"➢": "wide_symbol_arrow",
"*": "star",
"ƒ": "f",
"": "clock",
"": "small_square",
"": "narrow_symbol_arrow",
"": "large_square",
"»": "double_arrow",
"☐": "hollow_square",
"·": "circle",
"�": "special_char",
"▪": "very_small_square",
"▪": "very_small_square",
"○": "hollow_circle",
"": "hollow_squere",
"–": "dash",
"‒": "another-dash",
"̶": "underscore",
}
unicode_list_types = {
"\\uf0b7": "•",
"\\uf0fc": "",
}
footnote_types = {
"©"
}
ambiguous_list_chars = ["+", "-"]
units = ["acres", "miles", "-"] # - could represent a null value in a row
punctuations = string.punctuation + "“"
start_quotations = ["'", '"', "“"]
end_quotations = ["'", '"', "”"]
"""
Quote Pattern details:
\\W ==> Match non-alphanumeric characters. Helps in mitigating words like O'Reilly.
["“\'] ==> Quote patterns
(?!\\D\\s) ==> Negative Lookahead for single character following the quote.
Helps in removing words like Macy's, don't ...
(?!\\d+) ==> Negative Lookahead for one or more digits following the pattern.
Helps in removing words like '19, '2019
(.*?)[,;.]?[”"\'] ==> Match all other data.
"""
# Add / Modify Quotation pattern in ingestor_utils/utils.py also.
quote_pattern = re.compile(
r'(?:(?<=\W)|(?<=^))["“‘’\']+(?!\D\s)(?!\d+)(.*?)[,;.]?[”"‘’\']+',
) # (r'["“\'](.*?)[,;.]?[”"\']')
single_char_pattern = re.compile(r'[a-zA-Z]')
multi_char_pattern = re.compile(r'[a-zA-Z]+')
roman_number_pattern = re.compile(r'[ixvIXV]+$')
ends_with_sentence_delimiter_pattern = re.compile(r"(?<![.;:][a-zA-Z0-9])(?<!INC|inc|Inc)[.;:]+(?![\w])[\"“‘’”\'\s]*$")
conjunction_list = ["for", "and", "not", "but", "or", "yet", "so", "between"]
class Word:
def __init__(self, token):
self.text = token
self.is_percent = False
self.is_number = False
self.is_year = False # year does not count as a number
self.is_dollar = False
self.is_million = False
self.is_billion = False
self.is_thousand = False
self.is_date_entry = False
self.is_negative = False
self.length = len(self.text)
self.is_stop_word = self.text.lower() in stop_words
self.is_number_range = False
self.parts = []
text_without_punct = self.text
while (
len(text_without_punct) > 1 and
(text_without_punct[-1] in string.punctuation or text_without_punct[-1] in end_quotations)
):
text_without_punct = text_without_punct[0:-1]
# remove leading unbalanced punctuations
while (
len(text_without_punct) > 1 and
(text_without_punct[0] in string.punctuation or text_without_punct[0] in start_quotations)
):
text_without_punct = text_without_punct[1:]
self.text_without_punct = text_without_punct
self.is_noun = self.text_without_punct[0].isupper()
n = self.check_numeric()
self.check_date()
try:
if n:
n = round(float(n))
if n > 0:
digits = int(math.log10(n)) + 1
elif n == 0:
digits = 1
else:
digits = int(math.log10(-n)) + 2
self.num_digits = digits
if digits == 4 and self.text.replace(",", "") == self.text:
self.is_year = True
self.is_number = False
else:
self.num_digits = 0
except Exception as e:
logging.error(e)
self.num_digits = 0
def check_date(self):
if "/" in self.text or "-" in self.text:
text = self.text.replace("/", "-")
date_patterns = [
"%b-%d",
"%B-%d",
"%B-%d-%y",
"%B-%d-%Y",
"%b-%d-%Y",
"%b-%d-%y",
"%m-%d",
"%m-%d-%y",
"%m-%d-%Y",
]
for pat in date_patterns:
try:
datetime.datetime.strptime(text, pat)
self.is_date_entry = True
return
except ValueError:
pass
else:
self.is_date_entry = False
def check_numeric(self):
word = self.text.lower()
if not word.isalpha():
if word.isprintable():
if not word.isnumeric():
if word.startswith("(") and word.endswith(")"):
word = word[1:-1]
if word.startswith("-"):
self.is_negative = True
word = word[1:]
if word.startswith("$"):
self.is_dollar = True
word = word[1:]
elif word.endswith("$"):
self.is_dollar = True
word = word[0:-1]
elif word.endswith("%"):
self.is_percent = True
word = word[0:-1]
elif word.endswith("m"):
self.is_million = True
elif word.endswith("bn"):
self.is_billion = True
if word.startswith("(") and word.endswith(")"):
word = word[1:-1]
word = word.replace(",", "")
if word.isnumeric() or word.replace(".", "", 1).isnumeric():
self.is_number = True
parts = word.split("-")
if (
len(parts) == 2
and parts[0].isnumeric()
and parts[1].isnumeric()
):
self.is_number_range = True
self.parts = parts
else:
self.is_number = True
if self.is_number:
numeric_part = word
return numeric_part
class Line:
def __init__(
self,
line_str,
text_list=[],
style_dict={},
page_details={},
noun_chunk_ending_tokens=[],
):
self.text = line_str.strip()
self.visual_line = VisualLine(text_list, style_dict, page_details)
self.words = []
self.is_independent = False
self.is_header = False
self.is_header_without_comma = False
self.noun_chunks = []
self.quoted_words = quote_pattern.findall(self.text)
self.noun_chunk_ending_tokens = {x.lower() for x in noun_chunk_ending_tokens}
self.parse_line()
def check_header(self):
# Section X, Article Y, Note 1 etc.
first_word_header = self.first_word.lower() in ["section", "article", "note"]
# If there are a certain percentage of title words (first letter capitalize)
title_ratio = (
self.title_word_count / self.eff_word_count
if self.eff_word_count > 0
else 1.0
)
# print(self.title_word_count, self.eff_word_count, title_ratio)
# Section 1 is a header but Section 1: Hello 3 is not
has_enough_titles = title_ratio > 0.9 and self.eff_word_count < 10
has_header_structure = (
(first_word_header or has_enough_titles) and self.number_count == 1
) or self.numbered_line or self.text.isupper()
# has_header_structure = has_header_structure and self.eff_word_count <
last_word_number = (
self.last_word.lower() in units
or self.last_word_number
and not has_header_structure
)
last_word_date = self.last_word_date and not has_header_structure
# Find lines ending with sentence delimiter. But exclude text like "L.P."
ends_with_delim = ends_with_sentence_delimiter_pattern.search(self.text) is not None
sentence_structure = self.ends_with_period and not (
has_header_structure and title_ratio > 0.9
) and ends_with_delim
last_letter_is_punctuation = (
self.last_word[-1] in punctuations and self.last_word[-1] not in ":?.)]%" and
ends_with_delim
)
self.is_header_without_comma = (
not sentence_structure
and not self.has_list_char
and not self.first_char in footnote_types
and has_enough_titles
and not last_word_number
and (
self.number_count == 0
or (has_header_structure and self.number_count <= 1)
)
and not self.has_continuing_chars
and not last_word_date
and self.first_word_title
and not self.last_word_is_stop_word
and not self.is_zipcode_or_po
and not last_letter_is_punctuation
and not "://" in self.text # url pattern
)
self.is_header = self.is_header_without_comma and \
((not self.text.count(',') > 1) if not self.text.lower().startswith('section') else True)
def check_ends_with_period(self):
# punct_rule = self.last_char in string.punctuation and self.last_char not in [':', '.']
last_word_is_title = self.last_word in ["Mr.", "Dr.", "Mrs."]
self.ends_with_period = self.last_char in ["."] and not last_word_is_title
def check_table_row(self):
if not self.is_header:
value_count = (
self.number_count
+ self.dollar_count
+ self.pct_count
+ self.text.count(" - ")
)
word_symbols = self.word_count - self.dollar_sign_count
if word_symbols == 0:
word_symbols = 1
word_ratio = (
value_count + self.title_word_count + self.date_entry_count
) / word_symbols
self.is_table_row = (
(
(value_count > 0 or self.date_entry_count > 0)
and word_ratio > 0.7
and not self.ends_with_period
and not self.is_zipcode_or_po
)
and not self.last_word_is_stop_word
or ("...." in self.text)
)
else:
self.is_table_row = False
def check_list_item(self):
text = self.text.strip()
self.has_list_char = text[0] in list_types.keys()
# if not self.has_list_char and text[0] in ambiguous_list_chars:
# self.has_list_char = text[1:].strip()[0].isalpha()
self.is_list_item = self.has_list_char and self.first_word[-1] not in ":?.)]%$"
if self.is_list_item:
self.list_type<fim_suffix><fim_middle> = list_types[text[0]]
|
= list_types[text[0]]
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
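The completion above fills in check_list_item's bullet lookup. A minimal standalone sketch of that step, where list_types is a trimmed stand-in for the full mapping defined at the top of line_parser.py and detect_list_item is a hypothetical helper name:

# trimmed stand-in for the full list_types mapping
list_types = {"•": "circle", "*": "star", "»": "double_arrow"}

def detect_list_item(text):
    # mirrors check_list_item: a line is a list item when its first
    # character is a known bullet glyph
    text = text.strip()
    if text and text[0] in list_types:
        return list_types[text[0]]
    return None

assert detect_list_item("• first point") == "circle"
assert detect_list_item("Plain sentence.") is None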
<filename>nlm-ingestor/nlm_ingestor/ingestor/processors.py<fim_prefix>import logging
import re
from collections import Counter
from collections import defaultdict
from . import formatter
from . import line_parser
from . import patterns
from nlm_ingestor.ingestor_utils import spell_utils
from nlm_ingestor.ingestor_utils.utils import sent_tokenize
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
su = spell_utils.SpellUtil()
def stem(line):
line = line.replace("'s", "")
line = line.replace("’s", "")
return line
def check_parentheses(text):
count = 0
for i in text:
if i == "(":
count += 1
elif i == ")":
count -= 1
return count == 0
def nlm_tokenize(line):
# print(line)
tokens = []
if not line:
line = ""
line = line.lower()
trans_table = line.maketrans("-/", " ")
line = line.translate(trans_table)
line = line.translate(str.maketrans("", "", "�\\(*,.?•\\➢ƒ–\\)'\"—"))
# line = patterns.num_unit.sub(r"100 \1", line)
line = patterns.num_unit.sub(r"", line)
line = stem(line)
words = line.split()
for word in words:
if (
not word.isdigit()
and not word.endswith("%")
and not word.startswith("$")
and not word.endswith("$")
):
tokens.append(word)
if len(tokens) == 0:
tokens.append("unknown")
return tokens
# make sure that there is at least one word which is longer than two characters
def find_floating_chars(line):
words = line.split(" ")
for word in words:
if len(word) > 2:
return False
return True
def is_table_row(line):
line = line_parser.Line(line)
return line.is_table_row
def should_skip(line, xml=False):
return len(line) <= 2 if not xml else len(line) == 0
def clean_lines(lines, xml=False):
result = []
running_line = ""
line_buffer = []
line_type = "para"
header_block_idx = -1
block_idx = 0
line_set = set()
for line_str in lines:
# print(line_str)
line_str = clean_line(line_str)
if should_skip(line_str, xml=xml):
continue
line_without_numbers = re.sub(r"\d+", "", line_str)
if line_without_numbers in line_set:
continue
else:
line_set.add(line_without_numbers)<fim_suffix><fim_middle>
| null |
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
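The loop around this completion de-duplicates repeated headers and footers: digits are stripped before the membership test, so lines that differ only in a page number collapse to one entry. A self-contained sketch of that filter, with made-up sample lines:

import re

seen, kept = set(), []
for line in ["Annual Report Page 1", "Results", "Annual Report Page 2"]:
    key = re.sub(r"\d+", "", line)  # both page headers share one key
    if key in seen:
        continue
    seen.add(key)
    kept.append(line)
assert kept == ["Annual Report Page 1", "Results"]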
<filename>nlm-ingestor/nlm_ingestor/ingestor/processors.py<fim_prefix>import logging
import re
from collections import Counter
from collections import defaultdict
from . import formatter
from . import line_parser
from . import patterns
from nlm_ingestor.ingestor_utils import spell_utils
from nlm_ingestor.ingestor_utils.utils import sent_tokenize
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
su = spell_utils.SpellUtil()
def stem(line):
line = line.replace("'s", "")
line = line.replace("’s", "")
return line
def check_parentheses(text):
count = 0
for i in text:
if i == "(":
count += 1
elif i == ")":
count -= 1
return count == 0
def nlm_tokenize(line):
# print(line)
tokens = []
if not line:
line = ""
line = line.lower()
trans_table = line.maketrans("-/", " ")
line = line.translate(trans_table)
line = line.translate(str.maketrans("", "", "�\\(*,.?•\\➢ƒ–\\)'\"—"))
# line = patterns.num_unit.sub(r"100 \1", line)
line = patterns.num_unit.sub(r"", line)
line = stem(line)
words = line.split()
for word in words:
if (
not word.isdigit()
and not word.endswith("%")
and not word.startswith("$")
and not word.endswith("$")
):
tokens.append(word)
if len(tokens) == 0:
tokens.append("unknown")
return tokens
# make sure that there is at least one word which is longer than two characters
def find_floating_chars(line):
words = line.split(" ")
for word in words:
if len(word) > 2:
return False
return True
def is_table_row(line):
line = line_parser.Line(line)
return line.is_table_row
def should_skip(line, xml=False):
return len(line) <= 2 if not xml else len(line) == 0
def clean_lines(lines, xml=False):
result = []
running_line = ""
line_buffer = []
line_type = "para"
header_block_idx = -1
block_idx = 0
line_set = set()
for line_str in lines:
# print(line_str)
line_str = clean_line(line_str)
if should_skip(line_str, xml=xml):
continue
line_without_numbers = re.sub(r"\d+", "", line_str)
if line_without_numbers in line_set:
continue
else:
line_set.add(line_without_numbers)
curr_line = line_parser.Line(line_str)
# this converts strings like 'e x e c u t i v e summary' to 'executive summary'
if not xml and curr_line.has_spaced_characters:
line_str = fix_spaced_characters(line_str)
curr_line = line_parser.Line(line_str)
if len(line_buffer) > 0:
# find out if the previous line was a discontinuous line
prev_line = line_buffer[-1]
logger.debug("========")
logger.debug(f"{prev_line.incomplete_line} >> {prev_line.text} \n")
logger.debug(f"{curr_line.continuing_line} >> {curr_line.text} \n")
# keep connecting lines as long as they seem incomplete
is_incomplete = prev_line.incomplete_line or (
len(line_buffer) > 1 and not prev_line.ends_with_period
)
logger.debug(
f"incomplete: {is_incomplete}, is_list_or_row: {curr_line.is_list_or_row}, continuing_line: {curr_line.continuing_line}",
)
if (
is_incomplete
and not (curr_line.is_list_or_row or curr_line.line_type == "list_item")
) or curr_line.continuing_line:
logger.debug("connecting..")
running_line = formatter.connect(running_line, curr_line.text)
line_buffer.append(curr_line)
# if we are connecting lines, then this has to be a para unless it is a list_item, basically no headers
if not line_type == "list_item":
line_type = "para"
else: # commit the line and start a new line
# remove different types of bulleted list (for better formatting) but do not touch numbered line
logger.debug("starting new line..")
# if line_type == "list_item":
# running_line = running_line[1:].lstrip()
if line_type == "header":
header_block_idx = block_idx
block<fim_suffix><fim_middle> = {
"block_idx": block_idx,
"block_text": running_line,
"block_type": line_type,
"text_group_start_idx": -1,
"block_list": [],
"header_block_idx": header_block_idx,
"level": 0,
}
|
= {
"block_idx": block_idx,
"block_text": running_line,
"block_type": line_type,
"text_group_start_idx": -1,
"block_list": [],
"header_block_idx": header_block_idx,
"level": 0,
}
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
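The target here is the block dictionary that clean_lines commits for each finished line. One detail worth seeing in isolation is how header_block_idx ties every paragraph back to the most recent header; a reduced sketch using hypothetical sample lines and only the relevant dict fields:

blocks, header_block_idx = [], -1
sample = [("Overview", "header"), ("First paragraph.", "para"), ("Second paragraph.", "para")]
for block_idx, (text, line_type) in enumerate(sample):
    if line_type == "header":
        header_block_idx = block_idx  # later blocks point back here
    blocks.append({
        "block_idx": block_idx,
        "block_text": text,
        "block_type": line_type,
        "header_block_idx": header_block_idx,
    })
assert blocks[2]["header_block_idx"] == 0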
<filename>nlm-ingestor/nlm_ingestor/ingestor/line_parser.py<fim_prefix>import datetime
import logging
import math
import re
import string
from nltk.corpus import stopwords
from .patterns import abbreviations
from .patterns import states
from .patterns import states_abbreviations
from .styling_utils import mode_of_list
try:
stop_words = set(stopwords.words("english"))
except Exception as e:
logging.error(e)
import nltk
stopwords = nltk.download("stopwords")
stop_words = set(stopwords.words("english"))
stop_words.add("per")
continuing_chars = "!\"&'+,./:;<=?@\\]^_`|}~"
list_chars = [
"•",
"➢",
"*",
"ƒ",
"",
"",
"",
"",
"»",
"☐",
"·",
"�",
"▪",
"▪",
"○",
"",
"–",
]
list_types = {
"•": "circle",
"➢": "wide_symbol_arrow",
"*": "star",
"ƒ": "f",
"": "clock",
"": "small_square",
"": "narrow_symbol_arrow",
"": "large_square",
"»": "double_arrow",
"☐": "hollow_square",
"·": "circle",
"�": "special_char",
"▪": "very_small_square",
"▪": "very_small_square",
"○": "hollow_circle",
"": "hollow_squere",
"–": "dash",
"‒": "another-dash",
"̶": "underscore",
}
unicode_list_types = {
"\\uf0b7": "•",
"\\uf0fc": "",
}
footnote_types = {
"©"
}
ambiguous_list_chars = ["+", "-"]
units = ["acres", "miles", "-"] # - could represent a null value in a row
punctuations = string.punctuation + "“"
start_quotations = ["'", '"', "“"]
end_quotations = ["'", '"', "”"]
"""
Quote Pattern details:
\\W ==> Match non-alphanumeric characters. Helps in mitigating words like O'Reilly.
["“\'] ==> Quote patterns
(?!\\D\\s) ==> Negative Lookahead for single character following the quote.
Helps in removing words like Macy's, don't ...
(?!\\d+) ==> Negative Lookahead for one or more digits following the pattern.
Helps in removing words like '19, '2019
(.*?)[,;.]?[”"\'] ==> Match all other data.
"""
# Add / Modify Quotation pattern in ingestor_utils/utils.py also.
quote_pattern = re.compile(
r'(?:(?<=\W)|(?<=^))["“‘’\']+(?!\D\s)(?!\d+)(.*?)[,;.]?[”"‘’\']+',
) # (r'["“\'](.*?)[,;.]?[”"\']')
single_char_pattern = re.compile(r'[a-zA-Z]')
multi_char_pattern = re.compile(r'[a-zA-Z]+')
roman_number_pattern = re.compile(r'[ixvIXV]+$')
ends_with_sentence_delimiter_pattern = re.compile(r"(?<![.;:][a-zA-Z0-9])(?<!INC|inc|Inc)[.;:]+(?![\w])[\"“‘’”\'\s]*$")
conjunction_list = ["for", "and", "not", "but", "or", "yet", "so", "between"]
class Word:
def __init__(self, token):
self.text = token
self.is_percent = False
self.is_number = False
self.is_year = False # year does not count as a number
self.is_dollar = False
self.is_million = False
self.is_billion = False
self.is_thousand = False
self.is_date_entry = False
self.is_negative = False
self.length<fim_suffix><fim_middle> = len(self.text)
|
= len(self.text)
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
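Word.__init__ above trims unbalanced punctuation and quotes from both ends of a token while always keeping at least one character. The same two loops as a standalone helper (strip_punct is a hypothetical name; the quote lists are inlined):

import string

def strip_punct(text):
    # trailing punctuation/closing quotes first, then leading ones
    while len(text) > 1 and (text[-1] in string.punctuation or text[-1] in "”\"'"):
        text = text[:-1]
    while len(text) > 1 and (text[0] in string.punctuation or text[0] in "“\"'"):
        text = text[1:]
    return text

assert strip_punct("“Revenue,”") == "Revenue"
assert strip_punct("(loss)") == "loss"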
<filename>nlm-ingestor/nlm_ingestor/ingestor/line_parser.py<fim_prefix>import datetime
import logging
import math
import re
import string
from nltk.corpus import stopwords
from .patterns import abbreviations
from .patterns import states
from .patterns import states_abbreviations
from .styling_utils import mode_of_list
try:
stop_words = set(stopwords.words("english"))
except Exception as e:
logging.error(e)
import nltk
stopwords = nltk.download("stopwords")
stop_words = set(stopwords.words("english"))
stop_words.add("per")
continuing_chars = "!\"&'+,./:;<=?@\\]^_`|}~"
list_chars = [
"•",
"➢",
"*",
"ƒ",
"",
"",
"",
"",
"»",
"☐",
"·",
"�",
"▪",
"▪",
"○",
"",
"–",
]
list_types = {
"•": "circle",
"➢": "wide_symbol_arrow",
"*": "star",
"ƒ": "f",
"": "clock",
"": "small_square",
"": "narrow_symbol_arrow",
"": "large_square",
"»": "double_arrow",
"☐": "hollow_square",
"·": "circle",
"�": "special_char",
"▪": "very_small_square",
"▪": "very_small_square",
"○": "hollow_circle",
"": "hollow_squere",
"–": "dash",
"‒": "another-dash",
"̶": "underscore",
}
unicode_list_types = {
"\\uf0b7": "•",
"\\uf0fc": "",
}
footnote_types = {
"©"
}
ambiguous_list_chars = ["+", "-"]
units = ["acres", "miles", "-"] # - could represent a null value in a row
punctuations = string.punctuation + "“"
start_quotations = ["'", '"', "“"]
end_quotations = ["'", '"', "”"]
"""
Quote Pattern details:
\\W ==> Match non-alphanumeric characters. Helps in mitigating words like O'Reilly.
["“\'] ==> Quote patterns
(?!\\D\\s) ==> Negative Lookahead for single character following the quote.
Helps in removing words like Macy's, don't ...
(?!\\d+) ==> Negative Lookahead for one or more digits following the pattern.
Helps in removing words like '19, '2019
(.*?)[,;.]?[”"\'] ==> Match all other data.
"""
# Add / Modify Quotation pattern in ingestor_utils/utils.py also.
quote_pattern = re.compile(
r'(?:(?<=\W)|(?<=^))["“‘’\']+(?!\D\s)(?!\d+)(.*?)[,;.]?[”"‘’\']+',
) # (r'["“\'](.*?)[,;.]?[”"\']')
single_char_pattern = re.compile(r'[a-zA-Z]')
multi_char_pattern = re.compile(r'[a-zA-Z]+')
roman_number_pattern = re.compile(r'[ixvIXV]+$')
ends_with_sentence_delimiter_pattern = re.compile(r"(?<![.;:][a-zA-Z0-9])(?<!INC|inc|Inc)[.;:]+(?![\w])[\"“‘’”\'\s]*$")
conjunction_list = ["for", "and", "not", "but", "or", "yet", "so", "between"]
class Word:
def __init__(self, token):
self.text = token
self.is_percent = False
self.is_number = False
self.is_year = False # year does not count as a number
self.is_dollar = False
self.is_million = False
self.is_billion = False
self.is_thousand = False
self.is_date_entry = False
self.is_negative = False
self.length = len(self.text)
self.is_stop_word = self.text.lower() in stop_words
self.is_number_range = False
self.parts = []
text_without_punct = self.text
while (
len(text_without_punct) > 1 and
(text_without_punct[-1] in string.punctuation or text_without_punct[-1] in end_quotations)
):
text_without_punct = text_without_punct[0:-1]
# remove leading unbalanced punctuation
while (
len(text_without_punct) > 1 and
(text_without_punct[0] in string.punctuation or text_without_punct[0] in start_quotations)
):
text_without_punct = text_without_punct[1:]
self.text_without_punct = text_without_punct
self.is_noun = self.text_without_punct[0].isupper()
n<fim_suffix><fim_middle> = self.check_numeric()
|
= self.check_numeric()
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
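check_numeric, completed above, peels markers off a token in a fixed order before testing whether a number remains. A condensed version of that order (it omits the trailing-dollar, "m", and "bn" branches, and numeric_part is a hypothetical helper name):

def numeric_part(word):
    word = word.lower()
    if word.startswith("(") and word.endswith(")"):  # accounting-style negatives
        word = word[1:-1]
    if word.startswith("-"):
        word = word[1:]
    if word.startswith("$"):
        word = word[1:]
    elif word.endswith("%"):
        word = word[:-1]
    word = word.replace(",", "")
    if word.isnumeric() or word.replace(".", "", 1).isnumeric():
        return word
    return None

assert numeric_part("($1,234.50)") == "1234.50"
assert numeric_part("12.5%") == "12.5"
assert numeric_part("FY2020") is None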
<filename>nlm-ingestor/nlm_ingestor/ingestor/line_parser.py<fim_prefix>import datetime
import logging
import math
import re
import string
from nltk.corpus import stopwords
from .patterns import abbreviations
from .patterns import states
from .patterns import states_abbreviations
from .styling_utils import mode_of_list
try:
stop_words = set(stopwords.words("english"))
except Exception as e:
logging.error(e)
import nltk
stopwords = nltk.download("stopwords")
stop_words = set(stopwords.words("english"))
stop_words.add("per")
continuing_chars = "!\"&'+,./:;<=?@\\]^_`|}~"
list_chars = [
"•",
"➢",
"*",
"ƒ",
"",
"",
"",
"",
"»",
"☐",
"·",
"�",
"▪",
"▪",
"○",
"",
"–",
]
list_types = {
"•": "circle",
"➢": "wide_symbol_arrow",
"*": "star",
"ƒ": "f",
"": "clock",
"": "small_square",
"": "narrow_symbol_arrow",
"": "large_square",
"»": "double_arrow",
"☐": "hollow_square",
"·": "circle",
"�": "special_char",
"▪": "very_small_square",
"▪": "very_small_square",
"○": "hollow_circle",
"": "hollow_squere",
"–": "dash",
"‒": "another-dash",
"̶": "underscore",
}
unicode_list_types = {
"\\uf0b7": "•",
"\\uf0fc": "",
}
footnote_types = {
"©"
}
ambiguous_list_chars = ["+", "-"]
units = ["acres", "miles", "-"] # - could represent a null value in a row
punctuations = string.punctuation + "“"
start_quotations = ["'", '"', "“"]
end_quotations = ["'", '"', "”"]
"""
Quote Pattern details:
\\W ==> Match non-alphanumeric characters. Helps in mitigating words like O'Reilly.
["“\'] ==> Quote patterns
(?!\\D\\s) ==> Negative Lookahead for single character following the quote.
Helps in removing words like Macy's, don't ...
(?!\\d+) ==> Negative Lookahead for one or more digits following the pattern.
Helps in removing words like '19, '2019
(.*?)[,;.]?[”"\'] ==> Match all other data.
"""
# Add / Modify Quotation pattern in ingestor_utils/utils.py also.
quote_pattern = re.compile(
r'(?:(?<=\W)|(?<=^))["“‘’\']+(?!\D\s)(?!\d+)(.*?)[,;.]?[”"‘’\']+',
) # (r'["“\'](.*?)[,;.]?[”"\']')
single_char_pattern = re.compile(r'[a-zA-Z]')
multi_char_pattern = re.compile(r'[a-zA-Z]+')
roman_number_pattern = re.compile(r'[ixvIXV]+$')
ends_with_sentence_delimiter_pattern = re.compile(r"(?<![.;:][a-zA-Z0-9])(?<!INC|inc|Inc)[.;:]+(?![\w])[\"“‘’”\'\s]*$")
conjunction_list = ["for", "and", "not", "but", "or", "yet", "so", "between"]
class Word:
def __init__(self, token):
self.text<fim_suffix><fim_middle> = token
|
= token
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
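The quote_pattern documented in the docstring above is easiest to trust with a concrete probe. Its negative lookaheads drop possessives and abbreviated years while still capturing real quoted spans; the pattern below is copied verbatim from line_parser.py:

import re

quote_pattern = re.compile(
    r'(?:(?<=\W)|(?<=^))["“‘’\']+(?!\D\s)(?!\d+)(.*?)[,;.]?[”"‘’\']+',
)
# a real quotation is captured without its surrounding quotes
assert quote_pattern.findall('He said "material weakness" twice.') == ["material weakness"]
# possessives and abbreviated years are skipped by the lookaheads
assert quote_pattern.findall("Macy's results for '19") == []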
<filename>nlm-ingestor/nlm_ingestor/ingestor_utils/utils.py<fim_prefix>import json
import re
import numpy as np
from nltk import load
from nltk import PunktSentenceTokenizer
nltk_abbs = load("tokenizers/punkt/{}.pickle".format("english"))._params.abbrev_types
class NpEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
if isinstance(obj, np.floating):
return float(obj)
if isinstance(obj, np.ndarray):
return obj.tolist()
return super(NpEncoder, self).default(obj)
nlm_abbs = {
"u.s",
"u.s.a",
"n.w",
"p.o",
"po",
"st",
"ave",
"blvd",
"ctr",
"cir",
"ct",
"dr",
"mtn",
"apt",
"hwy",
"esq",
"fig",
"no",
"sec",
"n.a",
"s.a.b",
"non-u.s",
"cap",
'u.s.c',
"ste",
}
nlm_special_abbs = {
"inc",
}
abbs = nltk_abbs | nlm_abbs
nltk_tokenzier = PunktSentenceTokenizer()
rules = []
for abb in abbs:
# match start of the sentence
pattern = fr"^{abb}.\s"
replaced = f"{abb}_ "
# case insensitive replacement for synonyms
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match token in sentence
pattern = fr"\s{abb}.\s"
replaced = f" {abb}_ "
# case insensitive replacement for synonyms
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
for abb in nlm_special_abbs:
pattern = fr"{abb}\."
replaced = f"{abb}_"
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match content inside brackets
# (?<=\() ==> starts with "("
# ([^)]+) ==> repeat not ")"
# (?=\))") ==> ends with ")"
bracket_rule = re.compile(r"(?<=\()([^)]+)(?=\))")
space_rule = re.compile(r"\s([.'](?:\s|$|\D))", re.IGNORECASE) # Remove any space between punctuations (.')
quotation_pattern = re.compile(r'[”“"‘’\']')
def sent_tokenize(org_texts):
if not org_texts:
return org_texts
sents = []
# in case org_texts has \n, break it into multiple paragraph
# edge case for html and markdown
for org_text in org_texts.split("\n"):
org_text = space_rule.sub(r'\1', org_text)
modified_text = re.sub(r'^([.,?!]\s+)+', "", org_text) # To handle bug https://github.com/nltk/nltk/issues/2925
orig_offset = abs(len(org_text) - len(modified_text))
# do not break bracket
for span_group in bracket_rule.finditer(modified_text):
start_byte, end_byte = span_group.span()
span = modified_text[start_byte:end_byte]
# skip this logic when span is too big? disabled for now
# if len(span.split()) >= 10:
# continue
modified_text = modified_text.replace(
f"({span})", f"_{span.replace('.','_')}_",
)
for rule, replaced in rules:
modified_text = rule.sub(replaced, modified_text)
# Normalize all the quotation.
modified_text = quotation_pattern.sub("\"", modified_text)
modified_sents<fim_suffix><fim_middle> = nltk_tokenzier.tokenize(modified_text)
|
= nltk_tokenzier.tokenize(modified_text)
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
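The rules built above rewrite known abbreviations before Punkt sees the text, so "fig. 3" stops looking like a sentence boundary. One rule in isolation; note that the dot in the source pattern is unescaped, so it matches any single character, not just a period:

import re

rule = re.compile(r"\sfig.\s", re.IGNORECASE)  # one entry from the rules list
masked = rule.sub(" fig_ ", "See fig. 3 for details. Results follow.")
assert masked == "See fig_ 3 for details. Results follow."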
<filename>nlm-ingestor/nlm_ingestor/ingestor/line_parser.py<fim_prefix>import datetime
import logging
import math
import re
import string
from nltk.corpus import stopwords
from .patterns import abbreviations
from .patterns import states
from .patterns import states_abbreviations
from .styling_utils import mode_of_list
try:
stop_words = set(stopwords.words("english"))
except Exception as e:
logging.error(e)
import nltk
stopwords = nltk.download("stopwords")
stop_words = set(stopwords.words("english"))
stop_words.add("per")
continuing_chars = "!\"&'+,./:;<=?@\\]^_`|}~"
list_chars = [
"•",
"➢",
"*",
"ƒ",
"",
"",
"",
"",
"»",
"☐",
"·",
"�",
"▪",
"▪",
"○",
"",
"–",
]
list_types = {
"•": "circle",
"➢": "wide_symbol_arrow",
"*": "star",
"ƒ": "f",
"": "clock",
"": "small_square",
"": "narrow_symbol_arrow",
"": "large_square",
"»": "double_arrow",
"☐": "hollow_square",
"·": "circle",
"�": "special_char",
"▪": "very_small_square",
"▪": "very_small_square",
"○": "hollow_circle",
"": "hollow_squere",
"–": "dash",
"‒": "another-dash",
"̶": "underscore",
}
unicode_list_types = {
"\\uf0b7": "•",
"\\uf0fc": "",
}
footnote_types = {
"©"
}
ambiguous_list_chars = ["+", "-"]
units = ["acres", "miles", "-"] # - could represent a null value in a row
punctuations = string.punctuation + "“"
start_quotations = ["'", '"', "“"]
end_quotations = ["'", '"', "”"]
"""
Quote Pattern details:
\\W ==> Match non-alphanumeric characters. Helps in mitigating words like O'Reilly.
["“\'] ==> Quote patterns
(?!\\D\\s) ==> Negative Lookahead for single character following the quote.
Helps in removing words like Macy's, don't ...
(?!\\d+) ==> Negative Lookahead for one or more digits following the pattern.
Helps in removing words like '19, '2019
(.*?)[,;.]?[”"\'] ==> Match all other data.
"""
# Add / Modify Quotation pattern in ingestor_utils/utils.py also.
quote_pattern = re.compile(
r'(?:(?<=\W)|(?<=^))["“‘’\']+(?!\D\s)(?!\d+)(.*?)[,;.]?[”"‘’\']+',
) # (r'["“\'](.*?)[,;.]?[”"\']')
single_char_pattern = re.compile(r'[a-zA-Z]')
multi_char_pattern = re.compile(r'[a-zA-Z]+')
roman_number_pattern = re.compile(r'[ixvIXV]+$')
ends_with_sentence_delimiter_pattern = re.compile(r"(?<![.;:][a-zA-Z0-9])(?<!INC|inc|Inc)[.;:]+(?![\w])[\"“‘’”\'\s]*$")
conjunction_list = ["for", "and", "not", "but", "or", "yet", "so", "between"]
class Word:
def __init__(self, token):
self.text = token
self.is_percent = False
self.is_number = False
self.is_year = False # year does not count as a number
self.is_dollar = False
self.is_million = False
self.is_billion = False
self.is_thousand = False
self.is_date_entry = False
self.is_negative = False
self.length = len(self.text)
self.is_stop_word = self.text.lower() in stop_words
self.is_number_range = False
self.parts = []
text_without_punct = self.text
while (
len(text_without_punct) > 1 and
(text_without_punct[-1] in string.punctuation or text_without_punct[-1] in end_quotations)
):
text_without_punct = text_without_punct[0:-1]
# remove leading unbalanced punctuation
while (
len(text_without_punct) > 1 and
(text_without_punct[0] in string.punctuation or text_without_punct[0] in start_quotations)
):
text_without_punct = text_without_punct[1:]
self.text_without_punct = text_without_punct
self.is_noun = self.text_without_punct[0].isupper()
n = self.check_numeric()
self.check_date()
try:
if n:
n = round(float(n))
if n > 0:
digits = int(math.log10(n)) + 1
elif n == 0:
digits = 1
else:
digits = int(math.log10(-n)) + 2
self.num_digits = digits
if digits == 4 and self.text.replace(",", "") == self.text:
self.is_year = True
self.is_number = False
else:
self.num_digits = 0
except Exception as e:
logging.error(e)
self.num_digits = 0
def check_date(self):
if "/" in self.text or "-" in self.text:
text = self.text.replace("/", "-")
date_patterns = [
"%b-%d",
"%B-%d",
"%B-%d-%y",
"%B-%d-%Y",
"%b-%d-%Y",
"%b-%d-%y",
"%m-%d",
"%m-%d-%y",
"%m-%d-%Y",
]
for pat in date_patterns:
try:
datetime.datetime.strptime(text, pat)
self.is_date_entry = True
return
except ValueError:
pass
else:
self.is_date_entry = False
def check_numeric(self):
word = self.text.lower()
if not word.isalpha():
if word.isprintable():
if not word.isnumeric():
if word.startswith("(") and word.endswith(")"):
word = word[1:-1]
if word.startswith("-"):
self.is_negative = True
word = word[1:]
if word.startswith("$"):
self.is_dollar = True
word = word[1:]
elif word.endswith("$"):
self.is_dollar = True
word = word[0:-1]
elif word.endswith("%"):
self.is_percent = True
word = word[0:-1]
elif word.endswith("m"):
self.is_million = True
elif word.endswith("bn"):
self.is_billion = True
if word.startswith("(") and word.endswith(")"):
word = word[1:-1]
word = word.replace(",", "")
if word.isnumeric() or word.replace(".", "", 1).isnumeric():
self.is_number = True
parts = word.split("-")
if (
len(parts) == 2
and parts[0].isnumeric()
and parts[1].isnumeric()
):
self.is_number_range = True
self.parts = parts
else:
self.is_number = True
if self.is_number:
numeric_part = word
return numeric_part
class Line:
def __init__(
self,
line_str,
text_list=[],
style_dict={},
page_details={},
noun_chunk_ending_tokens=[],
):
self.text = line_str.strip()
self.visual_line = VisualLine(text_list, style_dict, page_details)
self.words = []
self.is_independent = False
self.is_header = False
self.is_header_without_comma = False
self.noun_chunks = []
self.quoted_words = quote_pattern.findall(self.text)
self.noun_chunk_ending_tokens = {x.lower() for x in noun_chunk_ending_tokens}
self.parse_line()
def check_header(self):
# Section X, Article Y, Note 1 etc.
first_word_header = self.first_word.lower() in ["section", "article", "note"]
# If a certain percentage of the words are title words (first letter capitalized)
title_ratio = (
self.title_word_count / self.eff_word_count
if self.eff_word_count > 0
else 1.0
)
# print(self.title_word_count, self.eff_word_count, title_ratio)
# Section 1 is a header but Section 1: Hello 3 is not
has_enough_titles = title_ratio > 0.9 and self.eff_word_count < 10
has_header_structure = (
(first_word_header or has_enough_titles) and self.number_count == 1
) or self.numbered_line or self.text.isupper()
# has_header_structure = has_header_structure and self.eff_word_count <
last_word_number = (
self.last_word.lower() in units
or self.last_word_number
and not has_header_structure
)
last_word_date = self.last_word_date and not has_header_structure
# Find lines ending with sentence delimiter. But exclude text like "L.P."
ends_with_delim = ends_with_sentence_delimiter_pattern.search(self.text) is not None
sentence_structure = self.ends_with_period and not (
has_header_structure and title_ratio > 0.9
) and ends_with_delim
last_letter_is_punctuation = (
self.last_word[-1] in punctuations and self.last_word[-1] not in ":?.)]%" and
ends_with_delim
)
self.is_header_without_comma = (
not sentence_structure
and not self.has_list_char
and not self.first_char in footnote_types
and has_enough_titles
and not last_word_number
and (
self.number_count == 0
or (has_header_structure and self.number_count <= 1)
)
and not self.has_continuing_chars
and not last_word_date
and self.first_word_title
and not self.last_word_is_stop_word
and not self.is_zipcode_or_po
and not last_letter_is_punctuation
and not "://" in self.text # url pattern
)
self.is_header = self.is_header_without_comma and \
((not self.text.count(',') > 1) if not self.text.lower().startswith('section') else True)
def check_ends_with_period(self):
# punct_rule = self.last_char in string.punctuation and self.last_char not in [':', '.']
last_word_is_title = self.last_word in ["Mr.", "Dr.", "Mrs."]
self.ends_with_period = self.last_char in ["."] and not last_word_is_title
def check_table_row(self):
if not self.is_header:
value_count = (
self.number_count
+ self.dollar_count
+ self.pct_count
+ self.text.count(" - ")
)
word_symbols = self.word_count - self.dollar_sign_count
if word_symbols == 0:
word_symbols = 1
word_ratio = (
value_count + self.title_word_count + self.date_entry_count
) / word_symbols
self.is_table_row = (
(
(value_count > 0 or self.date_entry_count > 0)
and word_ratio > 0.7
and not self.ends_with_period
and not self.is_zipcode_or_po
)
and not self.last_word_is_stop_word
or ("...." in self.text)
)
else:
self.is_table_row<fim_suffix><fim_middle> = False
|
= False
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
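check_table_row, finished above, decides a line is tabular when value-like tokens dominate words. A heavily reduced form of that ratio test on plain token lists (the real code counts dollars, percents, dates, and " - " separately; is_row_like is a hypothetical name):

def is_row_like(tokens):
    value_count = sum(
        t.replace(",", "").replace("$", "").replace("%", "").replace(".", "", 1).isdigit()
        for t in tokens
    )
    return value_count > 0 and value_count / len(tokens) > 0.7

assert is_row_like(["$1,200", "3.5%", "2019"])
assert not is_row_like(["Revenue", "grew", "in", "2019."])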
<filename>nlm-ingestor/nlm_ingestor/ingestor_utils/spell_utils.py<fim_prefix>import logging
import os
import string
from symspellpy.symspellpy import SymSpell
from symspellpy.symspellpy import Verbosity
import nlm_ingestor.ingestor as ingestor
from nlm_ingestor.ingestor import patterns
logger = logging.getLogger(__name__)
class SpellUtil:
def __init__(self):
self.sym_spell = SymSpell(2, 7)
dictionary_path = os.path.join(
os.path.dirname(os.path.abspath(ingestor.__file__)),
"../ingestor_models/symspell/frequency_dictionary_en_82_765.txt",
)
bigram_path = os.path.join(
os.path.dirname(os.path.abspath(ingestor.__file__)),
"../ingestor_models/symspell/frequency_dictionary_en_82_765.txt",
)
if not self.sym_spell.load_dictionary(
dictionary_path, term_index=0, count_index=1,
):
logging.error(f"Dictionary file not found: {dictionary_path}")
return
if not self.sym_spell.load_bigram_dictionary(
bigram_path, term_index=0, count_index=2,
):
logger.error(f"Bigram dictionary file not found: {bigram_path}")
return
def lookup_word(self, input_term):
max_edit_distance_lookup = 2
suggestion_verbosity = Verbosity.CLOSEST
# ignore_token = None
ignore_token = "|".join(patterns.spell_check)
suggestions = self.sym_spell.lookup(
input_term,
suggestion_verbosity,
max_edit_distance_lookup,
transfer_casing=False,
ignore_token=ignore_token,
)
# print(suggestions)
# for suggestion in suggestions:
# print("{}, {}, {}".format(suggestion.term, suggestion.distance,
# suggestion.count))
if len(suggestions) > 0:
return suggestions[0].term
else:
return input_term
# def lookup_sentence(self, input_term):
def lookup_compound(self, input_term):
max_edit_distance_lookup = 2
suggestions = self.sym_spell.lookup_compound(
input_term,
max_edit_distance_lookup,
transfer_casing=True,
ignore_non_words=True,
)
# for suggestion in suggestions:
# print("{}, {}, {}".format(suggestion.term, suggestion.distance,
# suggestion.count))
if len(suggestions) > 0:
return suggestions[0].term
else:
return input_term
def segment(self, input_term):
is_mixed_case_term = not input_term.islower()
if is_mixed_case_term:
input_term = input_term.lower()
suggestion<fim_suffix><fim_middle> = self.sym_spell.word_segmentation(input_term)
|
= self.sym_spell.word_segmentation(input_term)
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
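segment() above lowercases mixed-case input before handing it to SymSpell's word_segmentation, then restores capitalization afterwards. The restore step is just string.capwords:

import string

# what segment() does to a mixed-case result after segmentation
assert string.capwords("executive summary") == "Executive Summary"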
<filename>nlm-ingestor/nlm_ingestor/ingestor_utils/utils.py<fim_prefix>import json
import re
import numpy as np
from nltk import load
from nltk import PunktSentenceTokenizer
nltk_abbs = load("tokenizers/punkt/{}.pickle".format("english"))._params.abbrev_types
class NpEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
if isinstance(obj, np.floating):
return float(obj)
if isinstance(obj, np.ndarray):
return obj.tolist()
return super(NpEncoder, self).default(obj)
nlm_abbs = {
"u.s",
"u.s.a",
"n.w",
"p.o",
"po",
"st",
"ave",
"blvd",
"ctr",
"cir",
"ct",
"dr",
"mtn",
"apt",
"hwy",
"esq",
"fig",
"no",
"sec",
"n.a",
"s.a.b",
"non-u.s",
"cap",
'u.s.c',
"ste",
}
nlm_special_abbs = {
"inc",
}
abbs = nltk_abbs | nlm_abbs
nltk_tokenzier = PunktSentenceTokenizer()
rules = []
for abb in abbs:
# match start of the sentence
pattern = fr"^{abb}.\s"
replaced = f"{abb}_ "
# case insensitive replacement for synonyms
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match token in sentence
pattern = fr"\s{abb}.\s"
replaced = f" {abb}_ "
# case insensitive replacement for synonyms
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
for abb in nlm_special_abbs:
pattern = fr"{abb}\."
replaced = f"{abb}_"
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match content inside brackets
# (?<=\() ==> starts with "("
# ([^)]+) ==> repeat not ")"
# (?=\))") ==> ends with ")"
bracket_rule = re.compile(r"(?<=\()([^)]+)(?=\))")
space_rule = re.compile(r"\s([.'](?:\s|$|\D))", re.IGNORECASE) # Remove any space between punctuations (.')
quotation_pattern = re.compile(r'[”“"‘’\']')
def sent_tokenize(org_texts):
if not org_texts:
return org_texts
sents = []
# in case org_texts has \n, break it into multiple paragraph
# edge case for html and markdown
for org_text in org_texts.split("\n"):
org_text = space_rule.sub(r'\1', org_text)
modified_text = re.sub(r'^([.,?!]\s+)+', "", org_text) # To handle bug https://github.com/nltk/nltk/issues/2925
orig_offset = abs(len(org_text) - len(modified_text))
# do not break bracket
for span_group in bracket_rule.finditer(modified_text):
start_byte, end_byte = span_group.span()
span = modified_text[start_byte:end_byte]
# skip this logic when span is too big? disabled for now
# if len(span.split()) >= 10:
# continue
modified_text = modified_text.replace(
f"({span})", f"_{span.replace('.','_')}_",
)
for rule, replaced in rules:
modified_text = rule.sub(replaced, modified_text)
# Normalize all the quotation.
modified_text = quotation_pattern.sub("\"", modified_text)
modified_sents = nltk_tokenzier.tokenize(modified_text)
offset = orig_offset
sent_idx = 0
while offset < len(modified_text) and sent_idx < len(modified_sents):
if modified_text[offset] == " ":
offset += 1
continue
# cut org_text based on lengths of modified_sent
modified_sent = modified_sents[sent_idx]
sents.append(org_text[offset: offset + len(modified_sent)])
offset += len(modified_sent)
sent_idx += 1
if len(sents) >= 2 and re.match(r"^.\.$", sents[0]):
sents[1] = sents[0] + " " + sents[1]
sents = sents[1:]
return<fim_suffix><fim_middle> sents
|
sents
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
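The tail of sent_tokenize, returned above, never hands back the rewritten text: each sentence found in the modified string is cut out of the original by length, so masked abbreviations and normalized quotes are undone for free. The offset walk in miniature, with a hypothetical two-sentence input:

org_text = "First sentence. Second one."
modified_sents = ["First sentence.", "Second one."]  # as if from the tokenizer
sents, offset = [], 0
for ms in modified_sents:
    while offset < len(org_text) and org_text[offset] == " ":
        offset += 1  # skip the gap between sentences
    sents.append(org_text[offset: offset + len(ms)])
    offset += len(ms)
assert sents == ["First sentence.", "Second one."]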
<filename>nlm-ingestor/nlm_ingestor/ingestor/processors.py<fim_prefix>import logging
import re
from collections import Counter
from collections import defaultdict
from . import formatter
from . import line_parser
from . import patterns
from nlm_ingestor.ingestor_utils import spell_utils
from nlm_ingestor.ingestor_utils.utils import sent_tokenize
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
su = spell_utils.SpellUtil()
def stem(line):
line = line.replace("'s", "")
line = line.replace("’s", "")
return line
def check_parentheses(text):
count = 0
for i in text:
if i == "(":
count += 1
elif i == ")":
count -= 1
return count == 0
def nlm_tokenize(line):
# print(line)
tokens = []
if not line:
line = ""
line = line.lower()
trans_table = line.maketrans("-/", " ")
line = line.translate(trans_table)
line = line.translate(str.maketrans("", "", "�\\(*,.?•\\➢ƒ–\\)'\"—"))
# line = patterns.num_unit.sub(r"100 \1", line)
line = patterns.num_unit.sub(r"", line)
line = stem(line)
words = line.split()
for word in words:
if (
not word.isdigit()
and not word.endswith("%")
and not word.startswith("$")
and not word.endswith("$")
):
tokens.append(word)
if len(tokens) == 0:
tokens.append("unknown")
return tokens
# make sure that there is at least one word which is longer than two characters
def find_floating_chars(line):
words = line.split(" ")
for word in words:
if len(word) > 2:
return False
return True
def is_table_row(line):
line = line_parser.Line(line)
return line.is_table_row
def should_skip(line, xml=False):
return len(line) <= 2 if not xml else len(line) == 0
def clean_lines(lines, xml=False):
result = []
running_line = ""
line_buffer = []
line_type = "para"
header_block_idx = -1
block_idx = 0
line_set = set()
for line_str in lines:
# print(line_str)
line_str = clean_line(line_str)
if should_skip(line_str, xml=xml):
continue
line_without_numbers = re.sub(r"\d+", "", line_str)
if line_without_numbers in line_set:
continue
else:
line_set.add(line_without_numbers)
curr_line = line_parser.Line(line_str)
# this converts strings like 'e x e c u t i v e summary' to 'executive summary'
if not xml and curr_line.has_spaced_characters:
line_str = fix_spaced_characters(line_str)
curr_line = line_parser.Line(line_str)
if<fim_suffix><fim_middle> len(line_buffer) > 0:
# find out if the previous line was a discontinuous line
prev_line = line_buffer[-1]
logger.debug("========")
logger.debug(f"{prev_line.incomplete_line} >> {prev_line.text} \n")
logger.debug(f"{curr_line.continuing_line} >> {curr_line.text} \n")
# keep connecting lines as long as they seem incomplete
is_incomplete = prev_line.incomplete_line or (
len(line_buffer) > 1 and not prev_line.ends_with_period
)
logger.debug(
f"incomplete: {is_incomplete}, is_list_or_row: {curr_line.is_list_or_row}, continuing_line: {curr_line.continuing_line}",
)
if (
is_incomplete
and not (curr_line.is_list_or_row or curr_line.line_type == "list_item")
) or curr_line.continuing_line:
logger.debug("connecting..")
running_line = formatter.connect(running_line, curr_line.text)
line_buffer.append(curr_line)
# if we are connecting lines, then this has to be a para unless it is a list_item, basically no headers
if not line_type == "list_item":
line_type = "para"
else: # commit the line and start a new line
# remove different types of bulleted list (for better formatting) but do not touch numbered line
logger.debug("starting new line..")
# if line_type == "list_item":
# running_line = running_line[1:].lstrip()
if line_type == "header":
header_block_idx = block_idx
block = {
"block_idx": block_idx,
"block_text": running_line,
"block_type": line_type,
"text_group_start_idx": -1,
"block_list": [],
"header_block_idx": header_block_idx,
"level": 0,
}
result.append(block)
block_idx = block_idx + 1
running_line = curr_line.text
line_buffer = [curr_line]
line_type = curr_line.line_type
logger.debug("========")
else:
running_line = curr_line.text
line_type = curr_line.line_type
line_buffer = [curr_line]
|
len(line_buffer) > 0:
# find out if the previous line was a discontinuous line
prev_line = line_buffer[-1]
logger.debug("========")
logger.debug(f"{prev_line.incomplete_line} >> {prev_line.text} \n")
logger.debug(f"{curr_line.continuing_line} >> {curr_line.text} \n")
# keep connecting lines as long as they seem incomplete
is_incomplete = prev_line.incomplete_line or (
len(line_buffer) > 1 and not prev_line.ends_with_period
)
logger.debug(
f"incomplete: {is_incomplete}, is_list_or_row: {curr_line.is_list_or_row}, continuing_line: {curr_line.continuing_line}",
)
if (
is_incomplete
and not (curr_line.is_list_or_row or curr_line.line_type == "list_item")
) or curr_line.continuing_line:
logger.debug("connecting..")
running_line = formatter.connect(running_line, curr_line.text)
line_buffer.append(curr_line)
# if we are connecting lines, then this has to be a para unless it is a list_item, basically no headers
if not line_type == "list_item":
line_type = "para"
else: # commit the line and start a new line
# remove different types of bulleted list (for better formatting) but do not touch numbered line
logger.debug("starting new line..")
# if line_type == "list_item":
# running_line = running_line[1:].lstrip()
if line_type == "header":
header_block_idx = block_idx
block = {
"block_idx": block_idx,
"block_text": running_line,
"block_type": line_type,
"text_group_start_idx": -1,
"block_list": [],
"header_block_idx": header_block_idx,
"level": 0,
}
result.append(block)
block_idx = block_idx + 1
running_line = curr_line.text
line_buffer = [curr_line]
line_type = curr_line.line_type
logger.debug("========")
else:
running_line = curr_line.text
line_type = curr_line.line_type
line_buffer = [curr_line]
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
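The IF completed above is the heart of clean_lines: keep absorbing lines into the buffer while the running text still looks unfinished, unless the new line clearly starts something of its own. The condition factored out on its own, with the list/row and line_type tests collapsed into one flag (should_connect is a hypothetical name):

def should_connect(prev_incomplete, buffer_len, prev_ends_with_period,
                   curr_is_list_or_row, curr_continuing):
    is_incomplete = prev_incomplete or (buffer_len > 1 and not prev_ends_with_period)
    return (is_incomplete and not curr_is_list_or_row) or curr_continuing

# a wrapped paragraph line gets merged into the buffer
assert should_connect(True, 1, False, False, False)
# a list item breaks the merge even after an incomplete line
assert not should_connect(True, 1, False, True, False)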
<filename>nlm-ingestor/nlm_ingestor/ingestor/processors.py<fim_prefix>import logging
import re
from collections import Counter
from collections import defaultdict
from . import formatter
from . import line_parser
from . import patterns
from nlm_ingestor.ingestor_utils import spell_utils
from nlm_ingestor.ingestor_utils.utils import sent_tokenize
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
su = spell_utils.SpellUtil()
def stem(line):
line = line.replace("'s", "")
line = line.replace("’s", "")
return line
def check_parentheses(text):
count = 0
for i in text:
if i == "(":
count += 1
elif i == ")":
count -= 1
return count == 0
def nlm_tokenize(line):
# print(line)
tokens = []
if not line:
line = ""
line = line.lower()
trans_table = line.maketrans("-/", " ")
line = line.translate(trans_table)
line = line.translate(str.maketrans("", "", "�\\(*,.?•\\➢ƒ–\\)'\"—"))
# line = patterns.num_unit.sub(r"100 \1", line)
line = patterns.num_unit.sub(r"", line)
line = stem(line)
words = line.split()
for word in words:
if (
not word.isdigit()
and not word.endswith("%")
and not word.startswith("$")
and not word.endswith("$")
):
tokens.append(word)
if len(tokens) == 0:
tokens.append("unknown")
return tokens
# make sure that there is at least one word which is longer than two characters
def find_floating_chars(line):
words = line.split(" ")
for word in words:
if len(word) > 2:
return False
return True
def is_table_row(line):
line = line_parser.Line(line)
return line.is_table_row
def should_skip(line, xml=False):
return len(line) <= 2 if not xml else len(line) == 0
def clean_lines(lines, xml=False):
result = []
running_line = ""
line_buffer = []
line_type = "para"
header_block_idx = -1
block_idx = 0
line_set = set()
for line_str in lines:
# print(line_str)
line_str = clean_line(line_str)
if should_skip(line_str, xml=xml):
continue
line_without_numbers = re.sub(r"\d+", "", line_str)
if line_without_numbers in line_set:
continue
else:
line_set.add(line_without_numbers)
curr_line = line_parser.Line(line_str)
# this converts strings like 'e x e c u t i v e summary' to 'executive summary'
if not xml and curr_line.has_spaced_characters:
line_str = fix_spaced_characters(line_str)
curr_line = line_parser.Line(line_str)
if len(line_buffer) > 0:
# find out if the previous line was a discontinuous line
prev_line = line_buffer[-1]
logger.debug("========")
logger.debug(f"{prev_line.incomplete_line} >> {prev_line.text} \n")
logger.debug(f"{curr_line.continuing_line} >> {curr_line.text} \n")
# keep connecting lines as long as they seem incomplete
is_incomplete = prev_line.incomplete_line or (
len(line_buffer) > 1 and not prev_line.ends_with_period
)
logger.debug(
f"incomplete: {is_incomplete}, is_list_or_row: {curr_line.is_list_or_row}, continuing_line: {curr_line.continuing_line}",
)
if (
is_incomplete
and not (curr_line.is_list_or_row or curr_line.line_type == "list_item")
) or curr_line.continuing_line:
logger.debug("connecting..")
running_line = formatter.connect(running_line, curr_line.text)
line_buffer.append(curr_line)
# if we are connecting lines, then this has to be a para unless it is a list_item, basically no headers
if<fim_suffix><fim_middle> not line_type == "list_item":
line_type = "para"
|
not line_type == "list_item":
line_type = "para"
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>nlm-ingestor/nlm_ingestor/ingestor_utils/spell_utils.py<fim_prefix>import logging
import os
import string
from symspellpy.symspellpy import SymSpell
from symspellpy.symspellpy import Verbosity
import nlm_ingestor.ingestor as ingestor
from nlm_ingestor.ingestor import patterns
logger = logging.getLogger(__name__)
class SpellUtil:
def __init__(self):
self.sym_spell = SymSpell(2, 7)
dictionary_path = os.path.join(
os.path.dirname(os.path.abspath(ingestor.__file__)),
"../ingestor_models/symspell/frequency_dictionary_en_82_765.txt",
)
bigram_path = os.path.join(
os.path.dirname(os.path.abspath(ingestor.__file__)),
"../ingestor_models/symspell/frequency_dictionary_en_82_765.txt",
)
if not self.sym_spell.load_dictionary(
dictionary_path, term_index=0, count_index=1,
):
logging.error(f"Dictionary file not found: {dictionary_path}")
return
if not self.sym_spell.load_bigram_dictionary(
bigram_path, term_index=0, count_index=2,
):
logger.error(f"Bigram dictionary file not found: {bigram_path}")
return
def lookup_word(self, input_term):
max_edit_distance_lookup = 2
suggestion_verbosity = Verbosity.CLOSEST
# ignore_token = None
ignore_token = "|".join(patterns.spell_check)
suggestions = self.sym_spell.lookup(
input_term,
suggestion_verbosity,
max_edit_distance_lookup,
transfer_casing=False,
ignore_token=ignore_token,
)
# print(suggestions)
# for suggestion in suggestions:
# print("{}, {}, {}".format(suggestion.term, suggestion.distance,
# suggestion.count))
if len(suggestions) > 0:
return suggestions[0].term
else:
return input_term
# def lookup_sentence(self, input_term):
def lookup_compound(self, input_term):
max_edit_distance_lookup = 2
suggestions = self.sym_spell.lookup_compound(
input_term,
max_edit_distance_lookup,
transfer_casing=True,
ignore_non_words=True,
)
# for suggestion in suggestions:
# print("{}, {}, {}".format(suggestion.term, suggestion.distance,
# suggestion.count))
if len(suggestions) > 0:
return suggestions[0].term
else:
return input_term
def segment(self, input_term):
is_mixed_case_term = not input_term.islower()
if is_mixed_case_term:
input_term = input_term.lower()
suggestion = self.sym_spell.word_segmentation(input_term)
corrected_string = suggestion.corrected_string
if<fim_suffix><fim_middle> is_mixed_case_term:
corrected_string = string.capwords(corrected_string)
|
is_mixed_case_term:
corrected_string = string.capwords(corrected_string)
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>nlm-ingestor/nlm_ingestor/ingestor/line_parser.py<fim_prefix>import datetime
import logging
import math
import re
import string
from nltk.corpus import stopwords
from .patterns import abbreviations
from .patterns import states
from .patterns import states_abbreviations
from .styling_utils import mode_of_list
try:
stop_words = set(stopwords.words("english"))
except Exception as e:
logging.error(e)
import nltk
stopwords = nltk.download("stopwords")
stop_words = set(stopwords.words("english"))
stop_words.add("per")
continuing_chars = "!\"&'+,./:;<=?@\\]^_`|}~"
list_chars = [
"•",
"➢",
"*",
"ƒ",
"",
"",
"",
"",
"»",
"☐",
"·",
"�",
"▪",
"▪",
"○",
"",
"–",
]
list_types = {
"•": "circle",
"➢": "wide_symbol_arrow",
"*": "star",
"ƒ": "f",
"": "clock",
"": "small_square",
"": "narrow_symbol_arrow",
"": "large_square",
"»": "double_arrow",
"☐": "hollow_square",
"·": "circle",
"�": "special_char",
"▪": "very_small_square",
"▪": "very_small_square",
"○": "hollow_circle",
"": "hollow_squere",
"–": "dash",
"‒": "another-dash",
"̶": "underscore",
}
unicode_list_types = {
"\\uf0b7": "•",
"\\uf0fc": "",
}
footnote_types = {
"©"
}
ambiguous_list_chars = ["+", "-"]
units = ["acres", "miles", "-"] # - could represent a null value in a row
punctuations = string.punctuation + "“"
start_quotations = ["'", '"', "“"]
end_quotations = ["'", '"', "”"]
"""
Quote Pattern details:
\\W ==> Match non-alphanumeric characters. Helps in mitigating words like O'Reilly.
["“\'] ==> Quote patterns
(?!\\D\\s) ==> Negative Lookahead for single character following the quote.
Helps in removing words like Macy's, don't ...
(?!\\d+) ==> Negative Lookahead for one or more digits following the pattern.
Helps in removing words like '19, '2019
(.*?)[,;.]?[”"\'] ==> Match all other data.
"""
# Add / Modify Quotation pattern in ingestor_utils/utils.py also.
quote_pattern = re.compile(
r'(?:(?<=\W)|(?<=^))["“‘’\']+(?!\D\s)(?!\d+)(.*?)[,;.]?[”"‘’\']+',
) # (r'["“\'](.*?)[,;.]?[”"\']')
single_char_pattern = re.compile(r'[a-zA-Z]')
multi_char_pattern = re.compile(r'[a-zA-Z]+')
roman_number_pattern = re.compile(r'[ixvIXV]+$')
ends_with_sentence_delimiter_pattern = re.compile(r"(?<![.;:][a-zA-Z0-9])(?<!INC|inc|Inc)[.;:]+(?![\w])[\"“‘’”\'\s]*$")
conjunction_list = ["for", "and", "not", "but", "or", "yet", "so", "between"]
class Word:
def __init__(self, token):
self.text = token
self.is_percent = False
self.is_number = False
self.is_year = False # year does not count as a number
self.is_dollar = False
self.is_million = False
self.is_billion = False
self.is_thousand = False
self.is_date_entry = False
self.is_negative = False
self.length = len(self.text)
self.is_stop_word = self.text.lower() in stop_words
self.is_number_range = False
self.parts = []
text_without_punct = self.text
while (
len(text_without_punct) > 1 and
(text_without_punct[-1] in string.punctuation or text_without_punct[-1] in end_quotations)
):
text_without_punct = text_without_punct[0:-1]
# remove leading unbalanced punctuation
while (
len(text_without_punct) > 1 and
(text_without_punct[0] in string.punctuation or text_without_punct[0] in start_quotations)
):
text_without_punct = text_without_punct[1:]
self.text_without_punct = text_without_punct
self.is_noun = self.text_without_punct[0].isupper()
n = self.check_numeric()
self.check_date()
try:
if<fim_suffix><fim_middle> n:
n = round(float(n))
if n > 0:
digits = int(math.log10(n)) + 1
elif n == 0:
digits = 1
else:
digits = int(math.log10(-n)) + 2
self.num_digits = digits
if digits == 4 and self.text.replace(",", "") == self.text:
self.is_year = True
self.is_number = False
else:
self.num_digits = 0
|
n:
n = round(float(n))
if n > 0:
digits = int(math.log10(n)) + 1
elif n == 0:
digits = 1
else:
digits = int(math.log10(-n)) + 2
self.num_digits = digits
if digits == 4 and self.text.replace(",", "") == self.text:
self.is_year = True
self.is_number = False
else:
self.num_digits = 0
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
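The completion above derives a digit count from log10 rather than str(); negatives get one extra character for the sign, and a four-digit, comma-free token is then reclassified as a year. The arithmetic on its own (num_digits is a hypothetical name):

import math

def num_digits(n):
    if n > 0:
        return int(math.log10(n)) + 1
    if n == 0:
        return 1
    return int(math.log10(-n)) + 2  # +2 counts the minus sign

assert num_digits(2019) == 4  # candidate year when written without commas
assert num_digits(-50) == 3   # "-50" is three characters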
<filename>nlm-ingestor/nlm_ingestor/ingestor/processors.py<fim_prefix>import logging
import re
from collections import Counter
from collections import defaultdict
from . import formatter
from . import line_parser
from . import patterns
from nlm_ingestor.ingestor_utils import spell_utils
from nlm_ingestor.ingestor_utils.utils import sent_tokenize
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
su = spell_utils.SpellUtil()
def stem(line):
line = line.replace("'s", "")
line = line.replace("’s", "")
return line
def check_parentheses(text):
count = 0
for i in text:
if i == "(":
count += 1
elif i == ")":
count -= 1
return count == 0
def nlm_tokenize(line):
# print(line)
tokens = []
if not line:
line = ""
line = line.lower()
trans_table = line.maketrans("-/", " ")
line = line.translate(trans_table)
line = line.translate(str.maketrans("", "", "�\\(*,.?•\\➢ƒ–\\)'\"—"))
# line = patterns.num_unit.sub(r"100 \1", line)
line = patterns.num_unit.sub(r"", line)
line = stem(line)
words = line.split()
for word in words:
if (
not word.isdigit()
and not word.endswith("%")
and not word.startswith("$")
and not word.endswith("$")
):
tokens.append(word)
if len(tokens) == 0:
tokens.append("unknown")
return tokens
# make sure that there is at least one word which is longer than two characters
def find_floating_chars(line):
words = line.split(" ")
for word in words:
if len(word) > 2:
return False
return True
def is_table_row(line):
line = line_parser.Line(line)
return line.is_table_row
def should_skip(line, xml=False):
return len(line) <= 2 if not xml else len(line) == 0
def clean_lines(lines, xml=False):
result = []
running_line = ""
line_buffer = []
line_type = "para"
header_block_idx = -1
block_idx = 0
line_set = set()
for line_str in lines:
# print(line_str)
line_str = clean_line(line_str)
if should_skip(line_str, xml=xml):
continue
line_without_numbers = re.sub(r"\d+", "", line_str)
if line_without_numbers in line_set:
continue
else:
line_set.add(line_without_numbers)
curr_line = line_parser.Line(line_str)
# this converts strings like 'e x e c u t i v e summary' to 'executive summary'
if not xml and curr_line.has_spaced_characters:
line_str = fix_spaced_characters(line_str)
curr_line = line_parser.Line(line_str)
if len(line_buffer) > 0:
# find out if the previous line was a discontinuous line
prev_line = line_buffer[-1]
logger.debug("========")
logger.debug(f"{prev_line.incomplete_line} >> {prev_line.text} \n")
logger.debug(f"{curr_line.continuing_line} >> {curr_line.text} \n")
# keep connecting lines as long as they seem incomplete
is_incomplete = prev_line.incomplete_line or (
len(line_buffer) > 1 and not prev_line.ends_with_period
)
logger.debug(
f"incomplete: {is_incomplete}, is_list_or_row: {curr_line.is_list_or_row}, continuing_line: {curr_line.continuing_line}",
)
if<fim_suffix><fim_middle> (
is_incomplete
and not (curr_line.is_list_or_row or curr_line.line_type == "list_item")
) or curr_line.continuing_line:
logger.debug("connecting..")
running_line = formatter.connect(running_line, curr_line.text)
line_buffer.append(curr_line)
# if we are connecting lines, then this has to be a para unless it is a list_item, basically no headers
if not line_type == "list_item":
line_type = "para"
else: # commit the line and start a new line
# remove different types of bulleted list (for better formatting) but do not touch numbered line
logger.debug("starting new line..")
# if line_type == "list_item":
# running_line = running_line[1:].lstrip()
if line_type == "header":
header_block_idx = block_idx
block = {
"block_idx": block_idx,
"block_text": running_line,
"block_type": line_type,
"text_group_start_idx": -1,
"block_list": [],
"header_block_idx": header_block_idx,
"level": 0,
}
result.append(block)
block_idx = block_idx + 1
running_line = curr_line.text
line_buffer = [curr_line]
line_type = curr_line.line_type
|
(
is_incomplete
and not (curr_line.is_list_or_row or curr_line.line_type == "list_item")
) or curr_line.continuing_line:
logger.debug("connecting..")
running_line = formatter.connect(running_line, curr_line.text)
line_buffer.append(curr_line)
# if we are connecting lines, then this has to be a para unless it is a list_item, basically no headers
if not line_type == "list_item":
line_type = "para"
else: # commit the line and start a new line
# remove different types of bulleted list (for better formatting) but do not touch numbered line
logger.debug("starting new line..")
# if line_type == "list_item":
# running_line = running_line[1:].lstrip()
if line_type == "header":
header_block_idx = block_idx
block = {
"block_idx": block_idx,
"block_text": running_line,
"block_type": line_type,
"text_group_start_idx": -1,
"block_list": [],
"header_block_idx": header_block_idx,
"level": 0,
}
result.append(block)
block_idx = block_idx + 1
running_line = curr_line.text
line_buffer = [curr_line]
line_type = curr_line.line_type
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>nlm-ingestor/nlm_ingestor/ingestor/line_parser.py<fim_prefix>import datetime
import logging
import math
import re
import string
from nltk.corpus import stopwords
from .patterns import abbreviations
from .patterns import states
from .patterns import states_abbreviations
from .styling_utils import mode_of_list
try:
stop_words = set(stopwords.words("english"))
except Exception as e:
logging.error(e)
import nltk
stopwords = nltk.download("stopwords")
stop_words = set(stopwords.words("english"))
stop_words.add("per")
continuing_chars = "!\"&'+,./:;<=?@\\]^_`|}~"
list_chars = [
"•",
"➢",
"*",
"ƒ",
"",
"",
"",
"",
"»",
"☐",
"·",
"�",
"▪",
"▪",
"○",
"",
"–",
]
list_types = {
"•": "circle",
"➢": "wide_symbol_arrow",
"*": "star",
"ƒ": "f",
"": "clock",
"": "small_square",
"": "narrow_symbol_arrow",
"": "large_square",
"»": "double_arrow",
"☐": "hollow_square",
"·": "circle",
"�": "special_char",
"▪": "very_small_square",
"▪": "very_small_square",
"○": "hollow_circle",
"": "hollow_squere",
"–": "dash",
"‒": "another-dash",
"̶": "underscore",
}
unicode_list_types = {
"\\uf0b7": "•",
"\\uf0fc": "",
}
footnote_types = {
"©"
}
ambiguous_list_chars = ["+", "-"]
units = ["acres", "miles", "-"] # - could represent a null value in a row
punctuations = string.punctuation + "“"
start_quotations = ["'", '"', "“"]
end_quotations = ["'", '"', "”"]
"""
Quote Pattern details:
\\W ==> Match non-alphanumeric characters. Helps in mitigating words like O'Reilly.
["“\'] ==> Quote patterns
(?!\\D\\s) ==> Negative Lookahead for single character following the quote.
Helps in removing words like Macy's, don't ...
(?!\\d+) ==> Negative Lookahead for one or more digits following the pattern.
Helps in removing words like '19, '2019
(.*?)[,;.]?[”"\'] ==> Match all other data.
"""
# Add / Modify Quotation pattern in ingestor_utils/utils.py also.
quote_pattern = re.compile(
r'(?:(?<=\W)|(?<=^))["“‘’\']+(?!\D\s)(?!\d+)(.*?)[,;.]?[”"‘’\']+',
) # (r'["“\'](.*?)[,;.]?[”"\']')
single_char_pattern = re.compile(r'[a-zA-Z]')
multi_char_pattern = re.compile(r'[a-zA-Z]+')
roman_number_pattern = re.compile(r'[ixvIXV]+$')
ends_with_sentence_delimiter_pattern = re.compile(r"(?<![.;:][a-zA-Z0-9])(?<!INC|inc|Inc)[.;:]+(?![\w])[\"“‘’”\'\s]*$")
conjunction_list = ["for", "and", "not", "but", "or", "yet", "so", "between"]
class Word:
def __init__(self, token):
self.text = token
self.is_percent = False
self.is_number = False
self.is_year = False # year does not count as a number
self.is_dollar = False
self.is_million = False
self.is_billion = False
self.is_thousand = False
self.is_date_entry = False
self.is_negative = False
self.length = len(self.text)
self.is_stop_word = self.text.lower() in stop_words
self.is_number_range = False
self.parts = []
text_without_punct = self.text
while (
len(text_without_punct) > 1 and
(text_without_punct[-1] in string.punctuation or text_without_punct[-1] in end_quotations)
):
text_without_punct = text_without_punct[0:-1]
# remove leading unbalanced punctuation
while (
len(text_without_punct) > 1 and
(text_without_punct[0] in string.punctuation or text_without_punct[0] in start_quotations)
):
text_without_punct = text_without_punct[1:]
self.text_without_punct = text_without_punct
self.is_noun = self.text_without_punct[0].isupper()
n = self.check_numeric()
self.check_date()
try:
if n:
n = round(float(n))
if<fim_suffix><fim_middle> n > 0:
digits = int(math.log10(n)) + 1
elif n == 0:
digits = 1
else:
digits = int(math.log10(-n)) + 2
|
n > 0:
digits = int(math.log10(n)) + 1
elif n == 0:
digits = 1
else:
digits = int(math.log10(-n)) + 2
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
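The digit count completed in the row above comes straight from a base-10 logarithm; a quick standalone check of the same arithmetic:

import math

def count_digits(n: int) -> int:
    # 0 has one digit; negative numbers get one extra position for the sign
    if n > 0:
        return int(math.log10(n)) + 1
    if n == 0:
        return 1
    return int(math.log10(-n)) + 2

assert count_digits(2023) == 4
assert count_digits(0) == 1
assert count_digits(-75) == 3  # "-75" occupies three positions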
<filename>nlm-ingestor/nlm_ingestor/ingestor/line_parser.py<fim_prefix>import datetime
import logging
import math
import re
import string
from nltk.corpus import stopwords
from .patterns import abbreviations
from .patterns import states
from .patterns import states_abbreviations
from .styling_utils import mode_of_list
try:
stop_words = set(stopwords.words("english"))
except Exception as e:
logging.error(e)
import nltk
stopwords = nltk.download("stopwords")
stop_words = set(stopwords.words("english"))
stop_words.add("per")
continuing_chars = "!\"&'+,./:;<=?@\\]^_`|}~"
list_chars = [
"•",
"➢",
"*",
"ƒ",
"",
"",
"",
"",
"»",
"☐",
"·",
"�",
"▪",
"▪",
"○",
"",
"–",
]
list_types = {
"•": "circle",
"➢": "wide_symbol_arrow",
"*": "star",
"ƒ": "f",
"": "clock",
"": "small_square",
"": "narrow_symbol_arrow",
"": "large_square",
"»": "double_arrow",
"☐": "hollow_square",
"·": "circle",
"�": "special_char",
"▪": "very_small_square",
"▪": "very_small_square",
"○": "hollow_circle",
"": "hollow_squere",
"–": "dash",
"‒": "another-dash",
"̶": "underscore",
}
unicode_list_types = {
"\\uf0b7": "•",
"\\uf0fc": "",
}
footnote_types = {
"©"
}
ambiguous_list_chars = ["+", "-"]
units = ["acres", "miles", "-"] # - could represent a null value in a row
punctuations = string.punctuation + "“"
start_quotations = ["'", '"', "“"]
end_quotations = ["'", '"', "”"]
"""
Quote Pattern details:
\\W ==> Match non-alphanumeric characters. Helps in mitigating words like O'Reilly.
["“\'] ==> Quote patterns
(?!\\D\\s) ==> Negative Lookahead for single character following the quote.
Helps in removing words like Macy's, don't ...
(?!\\d+) ==> Negative Lookahead for one or more digits following the pattern.
Helps in removing words like '19, '2019
(.*?)[,;.]?[”"\'] ==> Match all other data.
"""
# Add / Modify Quotation pattern in ingestor_utils/utils.py also.
quote_pattern = re.compile(
r'(?:(?<=\W)|(?<=^))["“‘’\']+(?!\D\s)(?!\d+)(.*?)[,;.]?[”"‘’\']+',
) # (r'["“\'](.*?)[,;.]?[”"\']')
single_char_pattern = re.compile(r'[a-zA-Z]')
multi_char_pattern = re.compile(r'[a-zA-Z]+')
roman_number_pattern = re.compile(r'[ixvIXV]+$')
ends_with_sentence_delimiter_pattern = re.compile(r"(?<![.;:][a-zA-Z0-9])(?<!INC|inc|Inc)[.;:]+(?![\w])[\"“‘’”\'\s]*$")
conjunction_list = ["for", "and", "not", "but", "or", "yet", "so", "between"]
class Word:
def __init__(self, token):
self.text = token
self.is_percent = False
self.is_number = False
self.is_year = False # year does not count as a number
self.is_dollar = False
self.is_million = False
self.is_billion = False
self.is_thousand = False
self.is_date_entry = False
self.is_negative = False
self.length = len(self.text)
self.is_stop_word = self.text.lower() in stop_words
self.is_number_range = False
self.parts = []
text_without_punct = self.text
while (
len(text_without_punct) > 1 and
(text_without_punct[-1] in string.punctuation or text_without_punct[-1] in end_quotations)
):
text_without_punct = text_without_punct[0:-1]
# remove leading unbalanced punctuation
while (
len(text_without_punct) > 1 and
(text_without_punct[0] in string.punctuation or text_without_punct[0] in start_quotations)
):
text_without_punct = text_without_punct[1:]
self.text_without_punct = text_without_punct
self.is_noun = self.text_without_punct[0].isupper()
n = self.check_numeric()
self.check_date()
try:
if n:
n = round(float(n))
if n > 0:
digits = int(math.log10(n)) + 1
elif n == 0:
digits = 1
else:
digits = int(math.log10(-n)) + 2
self.num_digits = digits
if digits == 4 and self.text.replace(",", "") == self.text:
self.is_year = True
self.is_number = False
else:
self.num_digits = 0
except Exception as e:
logging.error(e)
self.num_digits = 0
def check_date(self):
if "/" in self.text or "-" in self.text:
text = self.text.replace("/", "-")
date_patterns = [
"%b-%d",
"%B-%d",
"%B-%d-%y",
"%B-%d-%Y",
"%b-%d-%Y",
"%b-%d-%y",
"%m-%d",
"%m-%d-%y",
"%m-%d-%Y",
]
for pat in date_patterns:
try:
datetime.datetime.strptime(text, pat)
self.is_date_entry = True
return
except ValueError:
pass
else:
self.is_date_entry = False
def check_numeric(self):
word = self.text.lower()
if not word.isalpha():
if word.isprintable():
if not word.isnumeric():
if word.startswith("(") and word.endswith(")"):
word = word[1:-1]
if word.startswith("-"):
self.is_negative = True
word = word[1:]
if word.startswith("$"):
self.is_dollar = True
word = word[1:]
elif word.endswith("$"):
self.is_dollar = True
word = word[0:-1]
elif word.endswith("%"):
self.is_percent = True
word = word[0:-1]
elif word.endswith("m"):
self.is_million = True
elif word.endswith("bn"):
self.is_billion = True
if word.startswith("(") and word.endswith(")"):
word = word[1:-1]
word = word.replace(",", "")
if word.isnumeric() or word.replace(".", "", 1).isnumeric():
self.is_number = True
parts = word.split("-")
if (
len(parts) == 2
and parts[0].isnumeric()
and parts[1].isnumeric()
):
self.is_number_range = True
self.parts = parts
else:
self.is_number = True
if self.is_number:
numeric_part = word
return numeric_part
class Line:
def __init__(
self,
line_str,
text_list=[],
style_dict={},
page_details={},
noun_chunk_ending_tokens=[],
):
self.text = line_str.strip()
self.visual_line = VisualLine(text_list, style_dict, page_details)
self.words = []
self.is_independent = False
self.is_header = False
self.is_header_without_comma = False
self.noun_chunks = []
self.quoted_words = quote_pattern.findall(self.text)
self.noun_chunk_ending_tokens = {x.lower() for x in noun_chunk_ending_tokens}
self.parse_line()
def check_header(self):
# Section X, Article Y, Note 1 etc.
first_word_header = self.first_word.lower() in ["section", "article", "note"]
# If there are a certain percentage of title words (first letter capitalize)
title_ratio = (
self.title_word_count / self.eff_word_count
if self.eff_word_count > 0
else 1.0
)
# print(self.title_word_count, self.eff_word_count, title_ratio)
# Section 1 is a header but Section 1: Hello 3 is not
has_enough_titles = title_ratio > 0.9 and self.eff_word_count < 10
has_header_structure = (
(first_word_header or has_enough_titles) and self.number_count == 1
) or self.numbered_line or self.text.isupper()
# has_header_structure = has_header_structure and self.eff_word_count <
last_word_number = (
self.last_word.lower() in units
or self.last_word_number
and not has_header_structure
)
last_word_date = self.last_word_date and not has_header_structure
# Find lines ending with sentence delimiter. But exclude text like "L.P."
ends_with_delim = ends_with_sentence_delimiter_pattern.search(self.text) is not None
sentence_structure = self.ends_with_period and not (
has_header_structure and title_ratio > 0.9
) and ends_with_delim
last_letter_is_punctuation = (
self.last_word[-1] in punctuations and self.last_word[-1] not in ":?.)]%" and
ends_with_delim
)
self.is_header_without_comma = (
not sentence_structure
and not self.has_list_char
and not self.first_char in footnote_types
and has_enough_titles
and not last_word_number
and (
self.number_count == 0
or (has_header_structure and self.number_count <= 1)
)
and not self.has_continuing_chars
and not last_word_date
and self.first_word_title
and not self.last_word_is_stop_word
and not self.is_zipcode_or_po
and not last_letter_is_punctuation
and not "://" in self.text # url pattern
)
self.is_header = self.is_header_without_comma and \
((not self.text.count(',') > 1) if not self.text.lower().startswith('section') else True)
def check_ends_with_period(self):
# punct_rule = self.last_char in string.punctuation and self.last_char not in [':', '.']
last_word_is_title = self.last_word in ["Mr.", "Dr.", "Mrs."]
self.ends_with_period = self.last_char in ["."] and not last_word_is_title
def check_table_row(self):
if<fim_suffix><fim_middle> not self.is_header:
value_count = (
self.number_count
+ self.dollar_count
+ self.pct_count
+ self.text.count(" - ")
)
word_symbols = self.word_count - self.dollar_sign_count
if word_symbols == 0:
word_symbols = 1
word_ratio = (
value_count + self.title_word_count + self.date_entry_count
) / word_symbols
self.is_table_row = (
(
(value_count > 0 or self.date_entry_count > 0)
and word_ratio > 0.7
and not self.ends_with_period
and not self.is_zipcode_or_po
)
and not self.last_word_is_stop_word
or ("...." in self.text)
)
else:
self.is_table_row = False
|
not self.is_header:
value_count = (
self.number_count
+ self.dollar_count
+ self.pct_count
+ self.text.count(" - ")
)
word_symbols = self.word_count - self.dollar_sign_count
if word_symbols == 0:
word_symbols = 1
word_ratio = (
value_count + self.title_word_count + self.date_entry_count
) / word_symbols
self.is_table_row = (
(
(value_count > 0 or self.date_entry_count > 0)
and word_ratio > 0.7
and not self.ends_with_period
and not self.is_zipcode_or_po
)
and not self.last_word_is_stop_word
or ("...." in self.text)
)
else:
self.is_table_row = False
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
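The table-row test completed in the row above boils down to a density ratio of values over words. A worked example with hypothetical counts (the line and its counts are illustrative; the formula is the one in the source):

# imagined counts for a line like "Revenue  $1,200  15%  Jan-21"
number_count, dollar_count, pct_count = 1, 1, 1
title_word_count, date_entry_count = 1, 1
word_count, dollar_sign_count = 4, 1

value_count = number_count + dollar_count + pct_count  # " - " separators omitted
word_symbols = max(word_count - dollar_sign_count, 1)
word_ratio = (value_count + title_word_count + date_entry_count) / word_symbols
print(word_ratio > 0.7)  # True: 5/3 ~ 1.67, so the line reads as a table row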
<filename>nlm-ingestor/nlm_ingestor/ingestor_utils/utils.py<fim_prefix>import json
import re
import numpy as np
from nltk import load
from nltk import PunktSentenceTokenizer
nltk_abbs = load("tokenizers/punkt/{}.pickle".format("english"))._params.abbrev_types
class NpEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
if isinstance(obj, np.floating):
return float(obj)
if isinstance(obj, np.ndarray):
return obj.tolist()
return super(NpEncoder, self).default(obj)
nlm_abbs = {
"u.s",
"u.s.a",
"n.w",
"p.o",
"po",
"st",
"ave",
"blvd",
"ctr",
"cir",
"ct",
"dr",
"mtn",
"apt",
"hwy",
"esq",
"fig",
"no",
"sec",
"n.a",
"s.a.b",
"non-u.s",
"cap",
'u.s.c',
"ste",
}
nlm_special_abbs = {
"inc",
}
abbs = nltk_abbs | nlm_abbs
nltk_tokenzier = PunktSentenceTokenizer()
rules = []
for abb in abbs:
# match start of the sentence
pattern = fr"^{abb}.\s"
replaced = f"{abb}_ "
# case insensitive replacement for synonyms
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match token in sentence
pattern = fr"\s{abb}.\s"
replaced = f" {abb}_ "
# case insensitive replacement for synonyms
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
for abb in nlm_special_abbs:
pattern = fr"{abb}\."
replaced = f"{abb}_"
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match content inside brackets
# (?<=\() ==> starts with "("
# ([^)]+) ==> repeat not ")"
# (?=\))") ==> ends with ")"
bracket_rule = re.compile(r"(?<=\()([^)]+)(?=\))")
space_rule = re.compile(r"\s([.'](?:\s|$|\D))", re.IGNORECASE) # Remove any space between punctuations (.')
quotation_pattern = re.compile(r'[”“"‘’\']')
def sent_tokenize(org_texts):
if not org_texts:
return org_texts
sents = []
# in case org_texts has \n, break it into multiple paragraph
# edge case for html and markdown
for<fim_suffix><fim_middle> org_text in org_texts.split("\n"):
org_text = space_rule.sub(r'\1', org_text)
modified_text = re.sub(r'^([.,?!]\s+)+', "", org_text) # To handle bug https://github.com/nltk/nltk/issues/2925
orig_offset = abs(len(org_text) - len(modified_text))
# do not break bracket
for span_group in bracket_rule.finditer(modified_text):
start_byte, end_byte = span_group.span()
span = modified_text[start_byte:end_byte]
# skip this logic when span is too big? disabled for now
# if len(span.split()) >= 10:
# continue
modified_text = modified_text.replace(
f"({span})", f"_{span.replace('.','_')}_",
)
for rule, replaced in rules:
modified_text = rule.sub(replaced, modified_text)
# Normalize all the quotation.
modified_text = quotation_pattern.sub("\"", modified_text)
modified_sents = nltk_tokenzier.tokenize(modified_text)
offset = orig_offset
sent_idx = 0
while offset < len(modified_text) and sent_idx < len(modified_sents):
if modified_text[offset] == " ":
offset += 1
continue
# cut org_text based on lengths of modified_sent
modified_sent = modified_sents[sent_idx]
sents.append(org_text[offset: offset + len(modified_sent)])
offset += len(modified_sent)
sent_idx += 1
|
org_text in org_texts.split("\n"):
org_text = space_rule.sub(r'\1', org_text)
modified_text = re.sub(r'^([.,?!]\s+)+', "", org_text) # To handle bug https://github.com/nltk/nltk/issues/2925
orig_offset = abs(len(org_text) - len(modified_text))
# do not break bracket
for span_group in bracket_rule.finditer(modified_text):
start_byte, end_byte = span_group.span()
span = modified_text[start_byte:end_byte]
# skip this logic when span is too big? disabled for now
# if len(span.split()) >= 10:
# continue
modified_text = modified_text.replace(
f"({span})", f"_{span.replace('.','_')}_",
)
for rule, replaced in rules:
modified_text = rule.sub(replaced, modified_text)
# Normalize all the quotation.
modified_text = quotation_pattern.sub("\"", modified_text)
modified_sents = nltk_tokenzier.tokenize(modified_text)
offset = orig_offset
sent_idx = 0
while offset < len(modified_text) and sent_idx < len(modified_sents):
if modified_text[offset] == " ":
offset += 1
continue
# cut org_text based on lengths of modified_sent
modified_sent = modified_sents[sent_idx]
sents.append(org_text[offset: offset + len(modified_sent)])
offset += len(modified_sent)
sent_idx += 1
|
FOR
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
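The while loop in the row above relies on the modified text keeping the same length as the original, so each tokenized sentence can be cut back out of the untouched input by length alone. A minimal sketch of that alignment, assuming only in-place same-width substitutions:

def slice_by_lengths(original, modified_sents):
    # walk the original, skip inter-sentence spaces, cut equal-length spans
    out, offset = [], 0
    for sent in modified_sents:
        while offset < len(original) and original[offset] == " ":
            offset += 1
        out.append(original[offset: offset + len(sent)])
        offset += len(sent)
    return out

print(slice_by_lengths("Dr. Smith arrived. He left.",
                       ["dr_ Smith arrived.", "He left."]))
# ['Dr. Smith arrived.', 'He left.']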
<filename>nlm-ingestor/nlm_ingestor/ingestor_utils/utils.py<fim_prefix>import json
import re
import numpy as np
from nltk import load
from nltk import PunktSentenceTokenizer
nltk_abbs = load("tokenizers/punkt/{}.pickle".format("english"))._params.abbrev_types
class NpEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
if isinstance(obj, np.floating):
return float(obj)
if isinstance(obj, np.ndarray):
return obj.tolist()
return super(NpEncoder, self).default(obj)
nlm_abbs = {
"u.s",
"u.s.a",
"n.w",
"p.o",
"po",
"st",
"ave",
"blvd",
"ctr",
"cir",
"ct",
"dr",
"mtn",
"apt",
"hwy",
"esq",
"fig",
"no",
"sec",
"n.a",
"s.a.b",
"non-u.s",
"cap",
'u.s.c',
"ste",
}
nlm_special_abbs = {
"inc",
}
abbs = nltk_abbs | nlm_abbs
nltk_tokenzier = PunktSentenceTokenizer()
rules = []
for abb in abbs:
# match start of the sentence
pattern = fr"^{abb}.\s"
replaced = f"{abb}_ "
# case insensitive replacement for synonyms
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match token in sentence
pattern = fr"\s{abb}.\s"
replaced = f" {abb}_ "
# case insensitive replacement for synonyms
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
for abb in nlm_special_abbs:
pattern = fr"{abb}\."
replaced = f"{abb}_"
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match content inside brackets
# (?<=\() ==> starts with "("
# ([^)]+) ==> repeat not ")"
# (?=\))") ==> ends with ")"
bracket_rule = re.compile(r"(?<=\()([^)]+)(?=\))")
space_rule = re.compile(r"\s([.'](?:\s|$|\D))", re.IGNORECASE) # Remove any space between punctuations (.')
quotation_pattern = re.compile(r'[”“"‘’\']')
def sent_tokenize(org_texts):
if not org_texts:
return org_texts
sents = []
# in case org_texts has \n, break it into multiple paragraph
# edge case for html and markdown
for org_text in org_texts.split("\n"):
org_text = space_rule.sub(r'\1', org_text)
modified_text = re.sub(r'^([.,?!]\s+)+', "", org_text) # To handle bug https://github.com/nltk/nltk/issues/2925
orig_offset = abs(len(org_text) - len(modified_text))
# do not break bracket
for span_group in bracket_rule.finditer(modified_text):
start_byte, end_byte = span_group.span()
span = modified_text[start_byte:end_byte]
# skip this logic when span is too big? disabled for now
# if len(span.split()) >= 10:
# continue
modified_text = modified_text.replace(
f"({span})", f"_{span.replace('.','_')}_",
)
for<fim_suffix><fim_middle> rule, replaced in rules:
modified_text = rule.sub(replaced, modified_text)
|
rule, replaced in rules:
modified_text = rule.sub(replaced, modified_text)
|
FOR
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
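Each rule in the row above is just a (compiled pattern, replacement) pair applied in order. A self-contained sketch for a single abbreviation (the dot is escaped here for strictness; the source matches it loosely):

import re

abb = "fig"
rules = [
    (re.compile(fr"^{abb}\.\s", re.IGNORECASE), f"{abb}_ "),    # sentence start
    (re.compile(fr"\s{abb}\.\s", re.IGNORECASE), f" {abb}_ "),  # mid-sentence
]

text = "See Fig. 3 for details."
for rule, replaced in rules:
    text = rule.sub(replaced, text)
print(text)  # "See fig_ 3 for details." -- the fake period no longer ends a sentence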
<filename>nlm-ingestor/nlm_ingestor/ingestor/processors.py<fim_prefix>import logging
import re
from collections import Counter
from collections import defaultdict
from . import formatter
from . import line_parser
from . import patterns
from nlm_ingestor.ingestor_utils import spell_utils
from nlm_ingestor.ingestor_utils.utils import sent_tokenize
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
su = spell_utils.SpellUtil()
def stem(line):
line = line.replace("'s", "")
line = line.replace("’s", "")
return line
def check_parentheses(text):
count = 0
for i in text:
if i == "(":
count += 1
elif i == ")":
count -= 1
return count == 0
def nlm_tokenize(line):
# print(line)
tokens = []
if not line:
line = ""
line = line.lower()
trans_table = line.maketrans("-/", " ")
line = line.translate(trans_table)
line = line.translate(str.maketrans("", "", "�\\(*,.?•\\➢ƒ–\\)'\"—"))
# line = patterns.num_unit.sub(r"100 \1", line)
line = patterns.num_unit.sub(r"", line)
line = stem(line)
words = line.split()
for word in words:
if (
not word.isdigit()
and not word.endswith("%")
and not word.startswith("$")
and not word.endswith("$")
):
tokens.append(word)
if len(tokens) == 0:
tokens.append("unknown")
return tokens
# make sure that there is at least one word longer than two characters
def find_floating_chars(line):
words = line.split(" ")
for word in words:
if len(word) > 2:
return False
return True
def is_table_row(line):
line = line_parser.Line(line)
return line.is_table_row
def should_skip(line, xml=False):
return len(line) <= 2 if not xml else len(line) == 0
def clean_lines(lines, xml=False):
result = []
running_line = ""
line_buffer = []
line_type = "para"
header_block_idx = -1
block_idx = 0
line_set = set()
for<fim_suffix><fim_middle> line_str in lines:
# print(line_str)
line_str = clean_line(line_str)
if should_skip(line_str, xml=xml):
continue
line_without_numbers = re.sub(r"\d+", "", line_str)
if line_without_numbers in line_set:
continue
else:
line_set.add(line_without_numbers)
curr_line = line_parser.Line(line_str)
# this converts strings like 'e x e c u t i v e summary' to 'executive summary'
if not xml and curr_line.has_spaced_characters:
line_str = fix_spaced_characters(line_str)
curr_line = line_parser.Line(line_str)
if len(line_buffer) > 0:
# find out if previous line was a discontinuous line
prev_line = line_buffer[-1]
logger.debug("========")
logger.debug(f"{prev_line.incomplete_line} >> {prev_line.text} \n")
logger.debug(f"{curr_line.continuing_line} >> {curr_line.text} \n")
# keep connecting lines as long as they seem incomplete
is_incomplete = prev_line.incomplete_line or (
len(line_buffer) > 1 and not prev_line.ends_with_period
)
logger.debug(
f"incomplete: {is_incomplete}, is_list_or_row: {curr_line.is_list_or_row}, continuing_line: {curr_line.continuing_line}",
)
if (
is_incomplete
and not (curr_line.is_list_or_row or curr_line.line_type == "list_item")
) or curr_line.continuing_line:
logger.debug("connecting..")
running_line = formatter.connect(running_line, curr_line.text)
line_buffer.append(curr_line)
# if we are connecting lines, then this has to be a para unless it is a list_item, basically no headers
if not line_type == "list_item":
line_type = "para"
else: # commit the line and start a new line
# remove different types of bulleted list (for better formatting) but do not touch numbered line
logger.debug("starting new line..")
# if line_type == "list_item":
# running_line = running_line[1:].lstrip()
if line_type == "header":
header_block_idx = block_idx
block = {
"block_idx": block_idx,
"block_text": running_line,
"block_type": line_type,
"text_group_start_idx": -1,
"block_list": [],
"header_block_idx": header_block_idx,
"level": 0,
}
result.append(block)
block_idx = block_idx + 1
running_line = curr_line.text
line_buffer = [curr_line]
line_type = curr_line.line_type
logger.debug("========")
else:
running_line = curr_line.text
line_type = curr_line.line_type
line_buffer = [curr_line]
|
line_str in lines:
# print(line_str)
line_str = clean_line(line_str)
if should_skip(line_str, xml=xml):
continue
line_without_numbers = re.sub(r"\d+", "", line_str)
if line_without_numbers in line_set:
continue
else:
line_set.add(line_without_numbers)
curr_line = line_parser.Line(line_str)
# this converts strings like 'e x e c u t i v e summary' to 'executive summary'
if not xml and curr_line.has_spaced_characters:
line_str = fix_spaced_characters(line_str)
curr_line = line_parser.Line(line_str)
if len(line_buffer) > 0:
# find out if previous line was a discontinuous line
prev_line = line_buffer[-1]
logger.debug("========")
logger.debug(f"{prev_line.incomplete_line} >> {prev_line.text} \n")
logger.debug(f"{curr_line.continuing_line} >> {curr_line.text} \n")
# keep connecting lines as long as they seem incomplete
is_incomplete = prev_line.incomplete_line or (
len(line_buffer) > 1 and not prev_line.ends_with_period
)
logger.debug(
f"incomplete: {is_incomplete}, is_list_or_row: {curr_line.is_list_or_row}, continuing_line: {curr_line.continuing_line}",
)
if (
is_incomplete
and not (curr_line.is_list_or_row or curr_line.line_type == "list_item")
) or curr_line.continuing_line:
logger.debug("connecting..")
running_line = formatter.connect(running_line, curr_line.text)
line_buffer.append(curr_line)
# if we are connecting lines, then this has to be a para unless it is a list_item, basically no headers
if not line_type == "list_item":
line_type = "para"
else: # commit the line and start a new line
# remove different types of bulleted list (for better formatting) but do not touch numbered line
logger.debug("starting new line..")
# if line_type == "list_item":
# running_line = running_line[1:].lstrip()
if line_type == "header":
header_block_idx = block_idx
block = {
"block_idx": block_idx,
"block_text": running_line,
"block_type": line_type,
"text_group_start_idx": -1,
"block_list": [],
"header_block_idx": header_block_idx,
"level": 0,
}
result.append(block)
block_idx = block_idx + 1
running_line = curr_line.text
line_buffer = [curr_line]
line_type = curr_line.line_type
logger.debug("========")
else:
running_line = curr_line.text
line_type = curr_line.line_type
line_buffer = [curr_line]
|
FOR
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
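The digit-stripped dedup in the row above collapses lines that differ only in their numbers, which is how repeated page footers get dropped. A minimal sketch:

import re

seen, kept = set(), []
for line in ["Page 1", "Summary", "Page 2", "Summary 2020"]:
    key = re.sub(r"\d+", "", line)  # "Page 1" and "Page 2" share the key "Page "
    if key in seen:
        continue
    seen.add(key)
    kept.append(line)
print(kept)  # ['Page 1', 'Summary', 'Summary 2020']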
<filename>nlm-ingestor/nlm_ingestor/ingestor/processors.py<fim_prefix>import logging
import re
from collections import Counter
from collections import defaultdict
from . import formatter
from . import line_parser
from . import patterns
from nlm_ingestor.ingestor_utils import spell_utils
from nlm_ingestor.ingestor_utils.utils import sent_tokenize
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
su = spell_utils.SpellUtil()
def stem(line):
line = line.replace("'s", "")
line = line.replace("’s", "")
return line
def check_parentheses(text):
count = 0
for i in text:
if i == "(":
count += 1
elif i == ")":
count -= 1
return count == 0
def nlm_tokenize(line):
# print(line)
tokens = []
if not line:
line = ""
line = line.lower()
trans_table = line.maketrans("-/", " ")
line = line.translate(trans_table)
line = line.translate(str.maketrans("", "", "�\\(*,.?•\\➢ƒ–\\)'\"—"))
# line = patterns.num_unit.sub(r"100 \1", line)
line = patterns.num_unit.sub(r"", line)
line = stem(line)
words = line.split()
for word in words:
if (
not word.isdigit()
and not word.endswith("%")
and not word.startswith("$")
and not word.endswith("$")
):
tokens.append(word)
if len(tokens) == 0:
tokens.append("unknown")
return tokens
# make sure that there is at least one word longer than two characters
def find_floating_chars(line):
words = line.split(" ")
for word in words:
if len(word) > 2:
return False
return True
def is_table_row(line):
line = line_parser.Line(line)
return line.is_table_row
def should_skip(line, xml=False):
return len(line) <= 2 if not xml else len(line) == 0
def clean_lines(lines, xml=False):
result = []
running_line = ""
line_buffer = []
line_type = "para"
header_block_idx = -1
block_idx = 0
line_set = set()
for line_str in lines:
# print(line_str)
line_str = clean_line(line_str)
if should_skip(line_str, xml=xml):
continue
line_without_numbers = re.sub(r"\d+", "", line_str)
if line_without_numbers in line_set:
continue
else:
line_set.add(line_without_numbers)
curr_line = line_parser.Line(line_str)
# this converts strings like 'e x e c u t i v e summary' to 'executive summary'
if not xml and curr_line.has_spaced_characters:
line_str = fix_spaced_characters(line_str)
curr_line = line_parser.Line(line_str)
if len(line_buffer) > 0:
# find out if previous line was a discontinuous line
prev_line = line_buffer[-1]
logger.debug("========")
logger.debug(f"{prev_line.incomplete_line} >> {prev_line.text} \n")
logger.debug(f"{curr_line.continuing_line} >> {curr_line.text} \n")
# keep connecting lines as long as they seem incomplete
is_incomplete = prev_line.incomplete_line or (
len(line_buffer) > 1 and not prev_line.ends_with_period
)
logger.debug(
f"incomplete: {is_incomplete}, is_list_or_row: {curr_line.is_list_or_row}, continuing_line: {curr_line.continuing_line}",
)
if (
is_incomplete
and not (curr_line.is_list_or_row or curr_line.line_type == "list_item")
) or curr_line.continuing_line:
logger.debug("connecting..")
running_line = formatter.connect(running_line, curr_line.text)
line_buffer.append(curr_line)
# if we are connecting lines, then this has to be a para unless it is a list_item, basically no headers
if not line_type == "list_item":
line_type = "para"
else: # commit the line and start a new line
# remove different types of bulleted list (for better formatting) but do not touch numbered line
logger.debug("starting new line..")
#<fim_suffix><fim_middle> if line_type == "list_item":
|
if line_type == "list_item":
|
LINE_COMMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
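The commented-out bullet-stripping step completed in the row above would, if re-enabled, drop the leading glyph and its padding from list items. A sketch of its effect (the glyph set shown is a small sample of the source's list_chars table):

running_line = "• First quarter results"
if running_line and running_line[0] in "•➢*·▪○–":
    running_line = running_line[1:].lstrip()
print(running_line)  # "First quarter results"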
<filename>nlm-ingestor/nlm_ingestor/ingestor_utils/utils.py<fim_prefix>import json
import re
import numpy as np
from nltk import load
from nltk import PunktSentenceTokenizer
nltk_abbs = load("tokenizers/punkt/{}.pickle".format("english"))._params.abbrev_types
class NpEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
if isinstance(obj, np.floating):
return float(obj)
if isinstance(obj, np.ndarray):
return obj.tolist()
return super(NpEncoder, self).default(obj)
nlm_abbs = {
"u.s",
"u.s.a",
"n.w",
"p.o",
"po",
"st",
"ave",
"blvd",
"ctr",
"cir",
"ct",
"dr",
"mtn",
"apt",
"hwy",
"esq",
"fig",
"no",
"sec",
"n.a",
"s.a.b",
"non-u.s",
"cap",
'u.s.c',
"ste",
}
nlm_special_abbs = {
"inc",
}
abbs = nltk_abbs | nlm_abbs
nltk_tokenzier = PunktSentenceTokenizer()
rules = []
for abb in abbs:
# match start of the sentence
pattern = fr"^{abb}.\s"
replaced = f"{abb}_ "
# case insensitive replacement for synonyms
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match token in sentence
pattern = fr"\s{abb}.\s"
replaced = f" {abb}_ "
# case insensitive replacement for synonyms
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
for abb in nlm_special_abbs:
pattern = fr"{abb}\."
replaced = f"{abb}_"
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match content inside brackets
# (?<=\() ==> starts with "("
# ([^)]+) ==> repeat not ")"
# (?=\))") ==> ends with ")"
bracket_rule = re.compile(r"(?<=\()([^)]+)(?=\))")
space_rule = re.compile(r"\s([.'](?:\s|$|\D))", re.IGNORECASE) # Remove any space between punctuations (.')
quotation_pattern = re.compile(r'[”“"‘’\']')
def sent_tokenize(org_texts):
if not org_texts:
return org_texts
sents = []
# in case org_texts has \n, break it into multiple paragraph
# edge case for html and markdown
for org_text in org_texts.split("\n"):
org_text = space_rule.sub(r'\1', org_text)
modified_text = re.sub(r'^([.,?!]\s+)+', "", org_text) # To handle bug https://github.com/nltk/nltk/issues/2925
orig_offset = abs(len(org_text) - len(modified_text))
# do not break bracket
for span_group in bracket_rule.finditer(modified_text):
start_byte, end_byte = span_group.span()
span = modified_text[start_byte:end_byte]
# skip this logic when span is too big? disabled for now
#<fim_suffix><fim_middle> if len(span.split()) >= 10:
|
if len(span.split()) >= 10:
|
LINE_COMMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
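The bracket pass in the row above hides periods inside parentheses so the sentence tokenizer cannot split there; because parentheses and periods map to same-width underscores, offsets into the text are preserved. A compact sketch:

import re

bracket_rule = re.compile(r"(?<=\()([^)]+)(?=\))")
text = "Revenue grew (see Fig. 4) last year."
for m in bracket_rule.finditer(text):
    span = m.group(0)
    # "(...)" -> "_..._" and "." -> "_", keeping the string length unchanged
    text = text.replace(f"({span})", f"_{span.replace('.', '_')}_")
print(text)  # "Revenue grew _see Fig_ 4_ last year."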
<filename>nlm-ingestor/nlm_ingestor/ingestor/line_parser.py<fim_prefix>import datetime
import logging
import math
import re
import string
from nltk.corpus import stopwords
from .patterns import abbreviations
from .patterns import states
from .patterns import states_abbreviations
from .styling_utils import mode_of_list
try:
stop_words = set(stopwords.words("english"))
except Exception as e:
logging.error(e)
import nltk
stopwords = nltk.download("stopwords")
stop_words = set(stopwords.words("english"))
stop_words.add("per")
continuing_chars = "!\"&'+,./:;<=?@\\]^_`|}~"
list_chars = [
"•",
"➢",
"*",
"ƒ",
"",
"",
"",
"",
"»",
"☐",
"·",
"�",
"▪",
"▪",
"○",
"",
"–",
]
list_types = {
"•": "circle",
"➢": "wide_symbol_arrow",
"*": "star",
"ƒ": "f",
"": "clock",
"": "small_square",
"": "narrow_symbol_arrow",
"": "large_square",
"»": "double_arrow",
"☐": "hollow_square",
"·": "circle",
"�": "special_char",
"▪": "very_small_square",
"▪": "very_small_square",
"○": "hollow_circle",
"": "hollow_squere",
"–": "dash",
"‒": "another-dash",
"̶": "underscore",
}
unicode_list_types = {
"\\uf0b7": "•",
"\\uf0fc": "",
}
footnote_types = {
"©"
}
ambiguous_list_chars = ["+", "-"]
units = ["acres", "miles", "-"] # - could represent a null value in a row
punctuations = string.punctuation + "“"
start_quotations = ["'", '"', "“"]
end_quotations = ["'", '"', "”"]
"""
Quote Pattern details:
\\W ==> Match non-alphanumeric characters. Helps in mitigating words like O'Reilly.
["“\'] ==> Quote patterns
(?!\\D\\s) ==> Negative Lookahead for single character following the quote.
Helps in removing words like Macy's, don't ...
(?!\\d+) ==> Negative Lookahead for one or more digits following the pattern.
Helps in removing words like '19, '2019
(.*?)[,;.]?[”"\'] ==> Match all other data.
"""
# Add / Modify Quotation pattern in ingestor_utils/utils.py also.
quote_pattern = re.compile(
r'(?:(?<=\W)|(?<=^))["“‘’\']+(?!\D\s)(?!\d+)(.*?)[,;.]?[”"‘’\']+',
) # (r'["“\'](.*?)[,;.]?[”"\']')
single_char_pattern = re.compile(r'[a-zA-Z]')
multi_char_pattern = re.compile(r'[a-zA-Z]+')
roman_number_pattern = re.compile(r'[ixvIXV]+$')
ends_with_sentence_delimiter_pattern = re.compile(r"(?<![.;:][a-zA-Z0-9])(?<!INC|inc|Inc)[.;:]+(?![\w])[\"“‘’”\'\s]*$")
conjunction_list = ["for", "and", "not", "but", "or", "yet", "so", "between"]
class Word:
def __init__(self, token):
self.text = token
self.is_percent = False
self.is_number = False
self.is_year = False #<fim_suffix><fim_middle> year does not count as a number
|
year does not count as a number
|
LINE_COMMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
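For the quote pattern documented in the row above, a quick check of what it does and does not capture (the sample strings are illustrative):

import re

quote_pattern = re.compile(
    r'(?:(?<=\W)|(?<=^))["“‘’\']+(?!\D\s)(?!\d+)(.*?)[,;.]?[”"‘’\']+',
)
print(quote_pattern.findall('He said "hello there." and left'))
# ['hello there'] -- the trailing period is folded into the match
print(quote_pattern.findall("O'Reilly met Macy's staff in '19"))
# [] -- apostrophes inside words and year shorthands are left alone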
<filename>nlm-ingestor/nlm_ingestor/ingestor_utils/utils.py<fim_prefix>import json
import re
import numpy as np
from nltk import load
from nltk import PunktSentenceTokenizer
nltk_abbs = load("tokenizers/punkt/{}.pickle".format("english"))._params.abbrev_types
class NpEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
if isinstance(obj, np.floating):
return float(obj)
if isinstance(obj, np.ndarray):
return obj.tolist()
return super(NpEncoder, self).default(obj)
nlm_abbs = {
"u.s",
"u.s.a",
"n.w",
"p.o",
"po",
"st",
"ave",
"blvd",
"ctr",
"cir",
"ct",
"dr",
"mtn",
"apt",
"hwy",
"esq",
"fig",
"no",
"sec",
"n.a",
"s.a.b",
"non-u.s",
"cap",
'u.s.c',
"ste",
}
nlm_special_abbs = {
"inc",
}
abbs = nltk_abbs | nlm_abbs
nltk_tokenzier = PunktSentenceTokenizer()
rules = []
for abb in abbs:
# match start of the sentence
pattern = fr"^{abb}.\s"
replaced = f"{abb}_ "
# case insensitive replacement for synonyms
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match token in sentence
pattern = fr"\s{abb}.\s"
replaced = f" {abb}_ "
# case insensitive replacement for synonyms
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
for abb in nlm_special_abbs:
pattern = fr"{abb}\."
replaced = f"{abb}_"
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match content inside brackets
# (?<=\() ==> starts with "("
# ([^)]+) ==> repeat not ")"
# (?=\))") ==> ends with ")"
bracket_rule = re.compile(r"(?<=\()([^)]+)(?=\))")
space_rule = re.compile(r"\s([.'](?:\s|$|\D))", re.IGNORECASE) # Remove any space between punctuations (.')
quotation_pattern = re.compile(r'[”“"‘’\']')
def sent_tokenize(org_texts):
if not org_texts:
return org_texts
sents = []
# in case org_texts has \n, break it into multiple paragraph
#<fim_suffix><fim_middle> edge case for html and markdown
|
edge case for html and markdown
|
LINE_COMMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>nlm-ingestor/nlm_ingestor/ingestor_utils/utils.py<fim_prefix>import json
import re
import numpy as np
from nltk import load
from nltk import PunktSentenceTokenizer
nltk_abbs = load("tokenizers/punkt/{}.pickle".format("english"))._params.abbrev_types
class NpEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
if isinstance(obj, np.floating):
return float(obj)
if isinstance(obj, np.ndarray):
return obj.tolist()
return super(NpEncoder, self).default(obj)
nlm_abbs = {
"u.s",
"u.s.a",
"n.w",
"p.o",
"po",
"st",
"ave",
"blvd",
"ctr",
"cir",
"ct",
"dr",
"mtn",
"apt",
"hwy",
"esq",
"fig",
"no",
"sec",
"n.a",
"s.a.b",
"non-u.s",
"cap",
'u.s.c',
"ste",
}
nlm_special_abbs = {
"inc",
}
abbs = nltk_abbs | nlm_abbs
nltk_tokenzier = PunktSentenceTokenizer()
rules = []
for abb in abbs:
# match start of the sentence
pattern = fr"^{abb}.\s"
replaced = f"{abb}_ "
# case insensitive replacement for synonyms
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match token in sentence
pattern = fr"\s{abb}.\s"
replaced = f" {abb}_ "
# case insensitive replacement for synonyms
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
for abb in nlm_special_abbs:
pattern = fr"{abb}\."
replaced = f"{abb}_"
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match content inside brackets
# (?<=\() ==> starts with "("
# ([^)]+) ==> repeat not ")"
# (?=\))") ==> ends with ")"
bracket_rule = re.compile(r"(?<=\()([^)]+)(?=\))")
space_rule = re.compile(r"\s([.'](?:\s|$|\D))", re.IGNORECASE) # Remove any space between punctuations (.')
quotation_pattern = re.compile(r'[”“"‘’\']')
def sent_tokenize(org_texts):
if not org_texts:
return org_texts
sents = []
# in case org_texts has \n, break it into multiple paragraph
# edge case for html and markdown
for org_text in org_texts.split("\n"):
org_text = space_rule.sub(r'\1', org_text)
modified_text = re.sub(r'^([.,?!]\s+)+', "", org_text) # To handle bug https://github.com/nltk/nltk/issues/2925
orig_offset = abs(len(org_text) - len(modified_text))
# do not break bracket
for span_group in bracket_rule.finditer(modified_text):
start_byte, end_byte = span_group.span()
span = modified_text[start_byte:end_byte]
# skip this logic when span is too big? disabled for now
# if len(span.split()) >= 10:
# continue
modified_text = modified_text.replace(
f"({span})", f"_{span.replace('.','_')}_",
)
for rule, replaced in rules:
modified_text = rule.sub(replaced, modified_text)
# Normalize all the quotation.
modified_text = quotation_pattern.sub("\"", modified_text)
modified_sents = nltk_tokenzier.tokenize(modified_text)
offset = orig_offset
sent_idx = 0
while offset < len(modified_text) and sent_idx < len(modified_sents):
if modified_text[offset] == " ":
offset += 1
continue
#<fim_suffix><fim_middle> cut org_text based on lengths of modified_sent
|
cut org_text based on lengths of modified_sent
|
LINE_COMMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>nlm-ingestor/nlm_ingestor/ingestor/processors.py<fim_prefix>import logging
import re
from collections import Counter
from collections import defaultdict
from . import formatter
from . import line_parser
from . import patterns
from nlm_ingestor.ingestor_utils import spell_utils
from nlm_ingestor.ingestor_utils.utils import sent_tokenize
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
su = spell_utils.SpellUtil()
def stem(line):
line = line.replace("'s", "")
line = line.replace("’s", "")
return line
def check_parentheses(text):
count = 0
for i in text:
if i == "(":
count += 1
elif i == ")":
count -= 1
return count == 0
def nlm_tokenize(line):
# print(line)
tokens = []
if not line:
line = ""
line = line.lower()
trans_table = line.maketrans("-/", " ")
line = line.translate(trans_table)
line = line.translate(str.maketrans("", "", "�\\(*,.?•\\➢ƒ–\\)'\"—"))
# line = patterns.num_unit.sub(r"100 \1", line)
line = patterns.num_unit.sub(r"", line)
line = stem(line)
words = line.split()
for word in words:
if (
not word.isdigit()
and not word.endswith("%")
and not word.startswith("$")
and not word.endswith("$")
):
tokens.append(word)
if len(tokens) == 0:
tokens.append("unknown")
return tokens
# make sure that there is at least one word longer than two characters
def find_floating_chars(line):
words = line.split(" ")
for word in words:
if len(word) > 2:
return False
return True
def is_table_row(line):
line = line_parser.Line(line)
return line.is_table_row
def should_skip(line, xml=False):
return len(line) <= 2 if not xml else len(line) == 0
def clean_lines(lines, xml=False):
result = []
running_line = ""
line_buffer = []
line_type = "para"
header_block_idx = -1
block_idx = 0
line_set = set()
for line_str in lines:
# print(line_str)
line_str = clean_line(line_str)
if should_skip(line_str, xml=xml):
continue
line_without_numbers = re.sub(r"\d+", "", line_str)
if line_without_numbers in line_set:
continue
else:
line_set.add(line_without_numbers)
curr_line = line_parser.Line(line_str)
# this converts strings like 'e x e c u t i v e summary' to 'executive summary'
if not xml and curr_line.has_spaced_characters:
line_str = fix_spaced_characters(line_str)
curr_line = line_parser.Line(line_str)
if len(line_buffer) > 0:
# find out if previous line was a discontinuous line
prev_line = line_buffer[-1]
logger.debug("========")
logger.debug(f"{prev_line.incomplete_line} >> {prev_line.text} \n")
logger.debug(f"{curr_line.continuing_line} >> {curr_line.text} \n")
# keep connecting lines as long as they seem incomplete
is_incomplete = prev_line.incomplete_line or (
len(line_buffer) > 1 and not prev_line.ends_with_period
)
logger.debug(
f"incomplete: {is_incomplete}, is_list_or_row: {curr_line.is_list_or_row}, continuing_line: {curr_line.continuing_line}",
)
if (
is_incomplete
and not (curr_line.is_list_or_row or curr_line.line_type == "list_item")
) or curr_line.continuing_line:
logger.debug("connecting..")
running_line = formatter.connect(running_line, curr_line.text)
line_buffer.append(curr_line)
# if we are connecting lines, then this has to be a para unless it is a list_item, basically no headers
if not line_type == "list_item":
line_type = "para"
else: #<fim_suffix><fim_middle> commit the line and start a new line
|
commit the line and start a new line
|
LINE_COMMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>nlm-ingestor/nlm_ingestor/ingestor_utils/utils.py<fim_prefix>import json
import re
import numpy as np
from nltk import load
from nltk import PunktSentenceTokenizer
nltk_abbs = load("tokenizers/punkt/{}.pickle".format("english"))._params.abbrev_types
class NpEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
if isinstance(obj, np.floating):
return float(obj)
if isinstance(obj, np.ndarray):
return obj.tolist()
return super(NpEncoder, self).default(obj)
nlm_abbs = {
"u.s",
"u.s.a",
"n.w",
"p.o",
"po",
"st",
"ave",
"blvd",
"ctr",
"cir",
"ct",
"dr",
"mtn",
"apt",
"hwy",
"esq",
"fig",
"no",
"sec",
"n.a",
"s.a.b",
"non-u.s",
"cap",
'u.s.c',
"ste",
}
nlm_special_abbs = {
"inc",
}
abbs = nltk_abbs | nlm_abbs
nltk_tokenzier = PunktSentenceTokenizer()
rules = []
for abb in abbs:
# match start of the sentence
pattern = fr"^{abb}.\s"
replaced = f"{abb}_ "
# case insensitive replacement for synonyms
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match token in sentence
pattern = fr"\s{abb}.\s"
replaced = f" {abb}_ "
# case insensitive replacement for synonyms
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
for abb in nlm_special_abbs:
pattern = fr"{abb}\."
replaced = f"{abb}_"
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match content inside brackets
# (?<=\() ==> starts with "("
# ([^)]+) ==> repeat not ")"
# (?=\))") ==> ends with ")"
bracket_rule = re.compile(r"(?<=\()([^)]+)(?=\))")
space_rule = re.compile(r"\s([.'](?:\s|$|\D))", re.IGNORECASE) # Remove any space between punctuations (.')
quotation_pattern = re.compile(r'[”“"‘’\']')
def sent_tokenize(org_texts):
if not org_texts:
return org_texts
sents = []
# in case org_texts has \n, break it into multiple paragraph
# edge case for html and markdown
for org_text in org_texts.split("\n"):
org_text = space_rule.sub(r'\1', org_text)
modified_text = re.sub(r'^([.,?!]\s+)+', "", org_text) # To handle bug https://github.com/nltk/nltk/issues/2925
orig_offset = abs(len(org_text) - len(modified_text))
# do not break bracket
for span_group in bracket_rule.finditer(modified_text):
start_byte, end_byte = span_group.span()
span = modified_text[start_byte:end_byte]
#<fim_suffix><fim_middle> skip this logic when span is too big? disabled for now
|
skip this logic when span is too big? disabled for now
|
LINE_COMMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>nlm-ingestor/nlm_ingestor/ingestor/processors.py<fim_prefix>import logging
import re
from collections import Counter
from collections import defaultdict
from . import formatter
from . import line_parser
from . import patterns
from nlm_ingestor.ingestor_utils import spell_utils
from nlm_ingestor.ingestor_utils.utils import sent_tokenize
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
su = spell_utils.SpellUtil()
def stem(line):
line = line.replace("'s", "")
line = line.replace("’s", "")
return line
def check_parentheses(text):
count = 0
for i in text:
if i == "(":
count += 1
elif i == ")":
count -= 1
return count == 0
def nlm_tokenize(line):
# print(line)
tokens = []
if not line:
line = ""
line = line.lower()
trans_table = line.maketrans("-/", " ")
line = line.translate(trans_table)
line = line.translate(str.maketrans("", "", "�\\(*,.?•\\➢ƒ–\\)'\"—"))
# line = patterns.num_unit.sub(r"100 \1", line)
line = patterns.num_unit.sub(r"", line)
line = stem(line)
words = line.split()
for word in words:
if (
not word.isdigit()
and not word.endswith("%")
and not word.startswith("$")
and not word.endswith("$")
):
tokens.append(word)
if len(tokens) == 0:
tokens.append("unknown")
return tokens
# make sure that there is at least one word longer than two characters
def find_floating_chars(line):
words = line.split(" ")
for word in words:
if len(word) > 2:
return False
return True
def is_table_row(line):
line = line_parser.Line(line)
return line.is_table_row
def should_skip(line, xml=False):
return len(line) <= 2 if not xml else len(line) == 0
def clean_lines(lines, xml=False):
result = []
running_line = ""
line_buffer = []
line_type = "para"
header_block_idx = -1
block_idx = 0
line_set = set()
for line_str in lines:
# print(line_str)
line_str = clean_line(line_str)
if should_skip(line_str, xml=xml):
continue
line_without_numbers = re.sub(r"\d+", "", line_str)
if line_without_numbers in line_set:
continue
else:
line_set.add(line_without_numbers)
curr_line = line_parser.Line(line_str)
# this converts strings like 'e x e c u t i v e summary' to 'executive summary'
if not xml and curr_line.has_spaced_characters:
line_str = fix_spaced_characters(line_str)
curr_line = line_parser.Line(line_str)
if len(line_buffer) > 0:
# find out if previous line was a discontinuous line
prev_line = line_buffer[-1]
logger.debug("========")
logger.debug(f"{prev_line.incomplete_line} >> {prev_line.text} \n")
logger.debug(f"{curr_line.continuing_line} >> {curr_line.text} \n")
# keep connecting lines as long as they seem incomplete
is_incomplete = prev_line.incomplete_line or (
len(line_buffer) > 1 and not prev_line.ends_with_period
)
logger.debug(
f"incomplete: {is_incomplete}, is_list_or_row: {curr_line.is_list_or_row}, continuing_line: {curr_line.continuing_line}",
)
if (
is_incomplete
and not (curr_line.is_list_or_row or curr_line.line_type == "list_item")
) or curr_line.continuing_line:
logger.debug("connecting..")
running_line = formatter.connect(running_line, curr_line.text)
line_buffer.append(curr_line)
#<fim_suffix><fim_middle> if we are connecting lines, then this has to be a para unless it is a list_item, basically no headers
|
if we are connecting lines, then this has to be a para unless it is a list_item, basically no headers
|
LINE_COMMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>nlm-ingestor/nlm_ingestor/ingestor/processors.py<fim_prefix>import logging
import re
from collections import Counter
from collections import defaultdict
from . import formatter
from . import line_parser
from . import patterns
from nlm_ingestor.ingestor_utils import spell_utils
from nlm_ingestor.ingestor_utils.utils import sent_tokenize
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
su = spell_utils.SpellUtil()
def stem(line):
line = line.replace("'s", "")
line = line.replace("’s", "")
return line
def check_parentheses(text):
count = 0
for i in text:
if i == "(":
count += 1
elif i == ")":
count -= 1
return count == 0
def nlm_tokenize(line):
# print(line)
tokens = []
if not line:
line = ""
line = line.lower()
trans_table = line.maketrans("-/", " ")
line = line.translate(trans_table)
line = line.translate(str.maketrans("", "", "�\\(*,.?•\\➢ƒ–\\)'\"—"))
# line = patterns.num_unit.sub(r"100 \1", line)
line = patterns.num_unit.sub(r"", line)
line = stem(line)
words = line.split()
for word in words:
if (
not word.isdigit()
and not word.endswith("%")
and not word.startswith("$")
and not word.endswith("$")
):
tokens.append(word)
if len(tokens) == 0:
tokens.append("unknown")
return tokens
# make sure that there is at least one word longer than two characters
def find_floating_chars(line):
words = line.split(" ")
for word in words:
if len(word) > 2:
return False
return True
def is_table_row(line):
line = line_parser.Line(line)
return line.is_table_row
def should_skip(line, xml=False):
return len(line) <= 2 if not xml else len(line) == 0
def clean_lines(lines, xml=False):
result = []
running_line = ""
line_buffer = []
line_type = "para"
header_block_idx = -1
block_idx = 0
line_set = set()
for line_str in lines:
# print(line_str)
line_str = clean_line(line_str)
if should_skip(line_str, xml=xml):
continue
line_without_numbers = re.sub(r"\d+", "", line_str)
if line_without_numbers in line_set:
continue
else:
line_set.add(line_without_numbers)
curr_line = line_parser.Line(line_str)
# this converts strings like 'e x e c u t i v e summary' to 'executive summary'
if not xml and curr_line.has_spaced_characters:
line_str = fix_spaced_characters(line_str)
curr_line = line_parser.Line(line_str)
if len(line_buffer) > 0:
# find out if previous line was a discontinuous line
prev_line = line_buffer[-1]
logger.debug("========")
logger.debug(f"{prev_line.incomplete_line} >> {prev_line.text} \n")
logger.debug(f"{curr_line.continuing_line} >> {curr_line.text} \n")
# keep connecting lines as long as they seem incomplete
is_incomplete = prev_line.incomplete_line or (
len(line_buffer) > 1 and not prev_line.ends_with_period
)
logger.debug(
f"incomplete: {is_incomplete}, is_list_or_row: {curr_line.is_list_or_row}, continuing_line: {curr_line.continuing_line}",
)
if (
is_incomplete
and not (curr_line.is_list_or_row or curr_line.line_type == "list_item")
) or curr_line.continuing_line:
logger.debug("connecting..")
running_line = formatter.connect(running_line, curr_line.text)
line_buffer.append(curr_line)
# if we are connecting lines, then this has to be a para unless it is a list_item, basically no headers
if not line_type == "list_item":
line_type = "para"
else: # commit the line and start a new line
# remove different types of bulleted list (for better formatting) but do not touch numbered line
logger.debug("starting new line..")
# if line_type == "list_item":
#<fim_suffix><fim_middle> running_line = running_line[1:].lstrip()
|
running_line = running_line[1:].lstrip()
|
LINE_COMMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>nlm-ingestor/nlm_ingestor/ingestor_utils/utils.py<fim_prefix>import json
import re
import numpy as np
from nltk import load
from nltk import PunktSentenceTokenizer
nltk_abbs = load("tokenizers/punkt/{}.pickle".format("english"))._params.abbrev_types
class NpEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
if isinstance(obj, np.floating):
return float(obj)
if isinstance(obj, np.ndarray):
return obj.tolist()
return super(NpEncoder, self).default(obj)
nlm_abbs = {
"u.s",
"u.s.a",
"n.w",
"p.o",
"po",
"st",
"ave",
"blvd",
"ctr",
"cir",
"ct",
"dr",
"mtn",
"apt",
"hwy",
"esq",
"fig",
"no",
"sec",
"n.a",
"s.a.b",
"non-u.s",
"cap",
'u.s.c',
"ste",
}
nlm_special_abbs = {
"inc",
}
abbs = nltk_abbs | nlm_abbs
nltk_tokenzier = PunktSentenceTokenizer()
rules = []
for abb in abbs:
# match start of the sentence
pattern = fr"^{abb}.\s"
replaced = f"{abb}_ "
# case insensitive replacement for synonyms
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match token in sentence
pattern = fr"\s{abb}.\s"
replaced = f" {abb}_ "
# case insensitive replacement for synonyms
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
for abb in nlm_special_abbs:
pattern = fr"{abb}\."
replaced = f"{abb}_"
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match content inside brackets
# (?<=\() ==> starts with "("
# ([^)]+) ==> repeat not ")"
# (?=\))") ==> ends with ")"
bracket_rule = re.compile(r"(?<=\()([^)]+)(?=\))")
space_rule = re.compile(r"\s([.'](?:\s|$|\D))", re.IGNORECASE) # Remove any space between punctuations (.')
quotation_pattern = re.compile(r'[”“"‘’\']')
def sent_tokenize(org_texts):
if not org_texts:
return org_texts
sents = []
# in case org_texts has \n, break it into multiple paragraph
# edge case for html and markdown
for org_text in org_texts.split("\n"):
org_text = space_rule.sub(r'\1', org_text)
modified_text = re.sub(r'^([.,?!]\s+)+', "", org_text) # To handle bug https://github.com/nltk/nltk/issues/2925
orig_offset = abs(len(org_text) - len(modified_text))
#<fim_suffix><fim_middle> do not break bracket
|
do not break bracket
|
LINE_COMMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>nlm-ingestor/nlm_ingestor/ingestor_utils/utils.py<fim_prefix>import json
import re
import numpy as np
from nltk import load
from nltk import PunktSentenceTokenizer
nltk_abbs = load("tokenizers/punkt/{}.pickle".format("english"))._params.abbrev_types
class NpEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
if isinstance(obj, np.floating):
return float(obj)
if isinstance(obj, np.ndarray):
return obj.tolist()
return super(NpEncoder, self).default(obj)
nlm_abbs = {
"u.s",
"u.s.a",
"n.w",
"p.o",
"po",
"st",
"ave",
"blvd",
"ctr",
"cir",
"ct",
"dr",
"mtn",
"apt",
"hwy",
"esq",
"fig",
"no",
"sec",
"n.a",
"s.a.b",
"non-u.s",
"cap",
'u.s.c',
"ste",
}
nlm_special_abbs = {
"inc",
}
abbs = nltk_abbs | nlm_abbs
nltk_tokenzier = PunktSentenceTokenizer()
rules = []
for abb in abbs:
# match start of the sentence
pattern = fr"^{abb}.\s"
replaced = f"{abb}_ "
# case insensitive replacement for synonyms
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match token in sentence
pattern = fr"\s{abb}.\s"
replaced = f" {abb}_ "
# case insensitive replacement for synonyms
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
for abb in nlm_special_abbs:
pattern = fr"{abb}\."
replaced = f"{abb}_"
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match content inside brackets
# (?<=\() ==> starts with "("
# ([^)]+) ==> repeat not ")"
# (?=\))") ==> ends with ")"
bracket_rule = re.compile(r"(?<=\()([^)]+)(?=\))")
space_rule = re.compile(r"\s([.'](?:\s|$|\D))", re.IGNORECASE) # Remove any space between punctuations (.')
quotation_pattern = re.compile(r'[”“"‘’\']')
def sent_tokenize(org_texts):
if not org_texts:
return org_texts
sents = []
# in case org_texts has \n, break it into multiple paragraph
# edge case for html and markdown
for org_text in org_texts.split("\n"):
org_text = space_rule.sub(r'\1', org_text)
modified_text = re.sub(r'^([.,?!]\s+)+', "", org_text) # To handle bug https://github.com/nltk/nltk/issues/2925
orig_offset = abs(len(org_text) - len(modified_text))
# do not break bracket
for span_group in bracket_rule.finditer(modified_text):
start_byte, end_byte = span_group.span()
span = modified_text[start_byte:end_byte]
# skip this logic when span is too big? disabled for now
# if len(span.split()) >= 10:
# continue
modified_text = modified_text.replace(
f"({span})", f"_{span.replace('.','_')}_",
)
for rule, replaced in rules:
modified_text = rule.sub(replaced, modified_text)
# Normalize all the quotation.
modified_text = quotation_pattern.sub("\"", modified_text)
modified_sents = nltk_tokenzier.tokenize(modified_text)
offset = orig_offset
sent_idx = 0
while<fim_suffix><fim_middle> offset < len(modified_text) and sent_idx < len(modified_sents):
if modified_text[offset] == " ":
offset += 1
continue
# cut org_text based on lengths of modified_sent
modified_sent = modified_sents[sent_idx]
sents.append(org_text[offset: offset + len(modified_sent)])
offset += len(modified_sent)
sent_idx += 1
|
offset < len(modified_text) and sent_idx < len(modified_sents):
if modified_text[offset] == " ":
offset += 1
continue
# cut org_text based on lengths of modified_sent
modified_sent = modified_sents[sent_idx]
sents.append(org_text[offset: offset + len(modified_sent)])
offset += len(modified_sent)
sent_idx += 1
|
WHILE
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>nlm-ingestor/nlm_ingestor/ingestor/line_parser.py<fim_prefix>import datetime
import logging
import math
import re
import string
from nltk.corpus import stopwords
from .patterns import abbreviations
from .patterns import states
from .patterns import states_abbreviations
from .styling_utils import mode_of_list
try:
stop_words = set(stopwords.words("english"))
except Exception as e:
logging.error(e)
import nltk
stopwords = nltk.download("stopwords")
stop_words = set(stopwords.words("english"))
stop_words.add("per")
continuing_chars = "!\"&'+,./:;<=?@\\]^_`|}~"
list_chars = [
"•",
"➢",
"*",
"ƒ",
"",
"",
"",
"",
"»",
"☐",
"·",
"�",
"▪",
"▪",
"○",
"",
"–",
]
list_types = {
"•": "circle",
"➢": "wide_symbol_arrow",
"*": "star",
"ƒ": "f",
"": "clock",
"": "small_square",
"": "narrow_symbol_arrow",
"": "large_square",
"»": "double_arrow",
"☐": "hollow_square",
"·": "circle",
"�": "special_char",
"▪": "very_small_square",
"▪": "very_small_square",
"○": "hollow_circle",
"": "hollow_squere",
"–": "dash",
"‒": "another-dash",
"̶": "underscore",
}
unicode_list_types = {
"\\uf0b7": "•",
"\\uf0fc": "",
}
footnote_types = {
"©"
}
ambiguous_list_chars = ["+", "-"]
units = ["acres", "miles", "-"] # - could represent a null value in a row
punctuations = string.punctuation + "“"
start_quotations = ["'", '"', "“"]
end_quotations = ["'", '"', "”"]
"""
Quote Pattern details:
\\W ==> Match non-alphanumeric characters. Helps in mitigating words like O'Reilly.
["“\'] ==> Quote patterns
(?!\\D\\s) ==> Negative Lookahead for single character following the quote.
Helps in removing words like Macy's, don't ...
(?!\\d+) ==> Negative Lookahead for one or more digits following the pattern.
Helps in removing words like '19, '2019
(.*?)[,;.]?[”"\'] ==> Match all other data.
"""
# Add / Modify Quotation pattern in ingestor_utils/utils.py also.
quote_pattern = re.compile(
r'(?:(?<=\W)|(?<=^))["“‘’\']+(?!\D\s)(?!\d+)(.*?)[,;.]?[”"‘’\']+',
) # (r'["“\'](.*?)[,;.]?[”"\']')
single_char_pattern = re.compile(r'[a-zA-Z]')
multi_char_pattern = re.compile(r'[a-zA-Z]+')
roman_number_pattern = re.compile(r'[ixvIXV]+$')
ends_with_sentence_delimiter_pattern = re.compile(r"(?<![.;:][a-zA-Z0-9])(?<!INC|inc|Inc)[.;:]+(?![\w])[\"“‘’”\'\s]*$")
conjunction_list = ["for", "and", "not", "but", "or", "yet", "so", "between"]
class Word:
def __init__(self, token):
self.text = token
self.is_percent = False
self.is_number = False
self.is_year = False # year does not count as a number
self.is_dollar = False
self.is_million = False
self.is_billion = False
self.is_thousand = False
self.is_date_entry = False
self.is_negative = False
self.length = len(self.text)
self.is_stop_word = self.text.lower() in stop_words
self.is_number_range = False
self.parts = []
text_without_punct = self.text
while (
len(text_without_punct) > 1 and
(text_without_punct[-1] in string.punctuation or text_without_punct[-1] in end_quotations)
):
text_without_punct = text_without_punct[0:-1]
# remove leading unbalanced punctuations
while (
len(text_without_punct) > 1 and
(text_without_punct[0] in string.punctuation or text_without_punct[0] in start_quotations)
):
text_without_punct = text_without_punct[1:]
self.text_without_punct = text_without_punct
self.is_noun = self.text_without_punct[0].isupper()
n = self.check_numeric()
self.check_date()
try:<fim_suffix><fim_middle>
if n:
n = round(float(n))
if n > 0:
digits = int(math.log10(n)) + 1
elif n == 0:
digits = 1
else:
digits = int(math.log10(-n)) + 2
self.num_digits = digits
if digits == 4 and self.text.replace(",", "") == self.text:
self.is_year = True
self.is_number = False
else:
self.num_digits = 0
|
if n:
n = round(float(n))
if n > 0:
digits = int(math.log10(n)) + 1
elif n == 0:
digits = 1
else:
digits = int(math.log10(-n)) + 2
self.num_digits = digits
if digits == 4 and self.text.replace(",", "") == self.text:
self.is_year = True
self.is_number = False
else:
self.num_digits = 0
|
TRY
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
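The try-block target computes a digit count with log10 and then flags comma-free four-digit tokens as years rather than plain numbers. A self-contained sketch of that heuristic, under the assumption that the numeric value has already been parsed (as `check_numeric` does upstream):

import math

def classify_number(n, raw_text):
    # Digit count via log10 (negatives get one extra digit for the sign),
    # then the year heuristic: four digits and no comma in the raw token.
    n = round(float(n))
    if n > 0:
        digits = int(math.log10(n)) + 1
    elif n == 0:
        digits = 1
    else:
        digits = int(math.log10(-n)) + 2
    return digits, digits == 4 and "," not in raw_text

print(classify_number(2019, "2019"))   # (4, True)  -> treated as a year
print(classify_number(2019, "2,019"))  # (4, False) -> a plain number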
<filename>nlm-ingestor/nlm_ingestor/ingestor/line_parser.py<fim_prefix>import datetime
import logging
import math
import re
import string
from nltk.corpus import stopwords
from .patterns import abbreviations
from .patterns import states
from .patterns import states_abbreviations
from .styling_utils import mode_of_list
try:
stop_words = set(stopwords.words("english"))
except Exception as e:
logging.error(e)
import nltk
stopwords = nltk.download("stopwords")
stop_words = set(stopwords.words("english"))
stop_words.add("per")
continuing_chars = "!\"&'+,./:;<=?@\\]^_`|}~"
list_chars = [
"•",
"➢",
"*",
"ƒ",
"",
"",
"",
"",
"»",
"☐",
"·",
"�",
"▪",
"▪",
"○",
"",
"–",
]
list_types = {
"•": "circle",
"➢": "wide_symbol_arrow",
"*": "star",
"ƒ": "f",
"": "clock",
"": "small_square",
"": "narrow_symbol_arrow",
"": "large_square",
"»": "double_arrow",
"☐": "hollow_square",
"·": "circle",
"�": "special_char",
"▪": "very_small_square",
"▪": "very_small_square",
"○": "hollow_circle",
"": "hollow_squere",
"–": "dash",
"‒": "another-dash",
"̶": "underscore",
}
unicode_list_types = {
"\\uf0b7": "•",
"\\uf0fc": "",
}
footnote_types = {
"©"
}
ambiguous_list_chars = ["+", "-"]
units = ["acres", "miles", "-"] # - could represent a null value in a row
punctuations = string.punctuation + "“"
start_quotations = ["'", '"', "“"]
end_quotations = ["'", '"', "”"]
"""
Quote Pattern details:
\\W ==> Match non-alphanumeric characters. Helps in mitigating words like O'Reilly.
["“\'] ==> Quote patterns
(?!\\D\\s) ==> Negative Lookahead for single character following the quote.
Helps in removing words like Macy's, don't ...
(?!\\d+) ==> Negative Lookahead for one or more digits following the pattern.
Helps in removing words like '19, '2019
(.*?)[,;.]?[”"\'] ==> Match all other data.
"""
# Add / Modify Quotation pattern in ingestor_utils/utils.py also.
quote_pattern = re.compile(
r'(?:(?<=\W)|(?<=^))["“‘’\']+(?!\D\s)(?!\d+)(.*?)[,;.]?[”"‘’\']+',
) # (r'["“\'](.*?)[,;.]?[”"\']')
single_char_pattern = re.compile(r'[a-zA-Z]')
multi_char_pattern = re.compile(r'[a-zA-Z]+')
roman_number_pattern = re.compile(r'[ixvIXV]+$')
ends_with_sentence_delimiter_pattern = re.compile(r"(?<![.;:][a-zA-Z0-9])(?<!INC|inc|Inc)[.;:]+(?![\w])[\"“‘’”\'\s]*$")
conjunction_list = ["for", "and", "not", "but", "or", "yet", "so", "between"]
class Word:
def __init__(self, token):
self.text = token
self.is_percent = False
self.is_number = False
self.is_year = False # year does not count as a number
self.is_dollar = False
self.is_million = False
self.is_billion = False
self.is_thousand = False
self.is_date_entry = False
self.is_negative = False
self.length = len(self.text)
self.is_stop_word = self.text.lower() in stop_words
self.is_number_range = False
self.parts = []
text_without_punct = self.text
while (
len(text_without_punct) > 1 and
(text_without_punct[-1] in string.punctuation or text_without_punct[-1] in end_quotations)
):
text_without_punct = text_without_punct[0:-1]
# remove leading unbalanced punctuations
while (
len(text_without_punct) > 1 and
(text_without_punct[0] in string.punctuation or text_without_punct[0] in start_quotations)
):
text_without_punct = text_without_punct[1:]
self.text_without_punct = text_without_punct
self.is_noun = self.text_without_punct[0].isupper()
n = self.check_numeric()
self.check_date()
try:
if n:
n = round(float(n))
if n > 0:
digits = int(math.log10(n)) + 1
elif n == 0:
digits = 1
else:
digits = int(math.log10(-n)) + 2
self.num_digits = digits
if digits == 4 and self.text.replace(",", "") == self.text:
self.is_year = True
self.is_number = False
else:
self.num_digits = 0
except<fim_suffix><fim_middle> Exception as e:
logging.error(e)
self.num_digits = 0
|
Exception as e:
logging.error(e)
self.num_digits = 0
|
CATCH
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
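The except-block target shows the file's defensive fallback: log the parse failure and collapse to zero digits instead of raising. A simplified sketch of that pattern (it ignores the original's extra sign digit for negatives):

import logging
import math

def safe_num_digits(token):
    # Same fallback shape as the except-block above: any failure is logged
    # and the digit count collapses to zero rather than propagating.
    try:
        n = round(float(token))
        return int(math.log10(abs(n))) + 1 if n else 1
    except Exception as e:
        logging.error(e)
        return 0

print(safe_num_digits("1234"))  # 4
print(safe_num_digits("n/a"))   # 0, with the ValueError logged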
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates a httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if<fim_suffix><fim_middle> (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
|
(realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
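The if/elif/else target resolves a modelfile from either a filesystem path or an inline string, binding the resolved path with a walrus assignment inside the condition. A hedged sketch of the same precedence, with a plain `Path` standing in for the library's `_as_path` helper:

from pathlib import Path
from typing import Optional, Union

def resolve_modelfile(path: Optional[Union[str, Path]],
                      modelfile: Optional[str]) -> str:
    # An existing path wins, then an inline modelfile string; otherwise
    # the caller made an unusable request.
    if (realpath := Path(path).expanduser() if path else None) and realpath.exists():
        return realpath.read_text()
    elif modelfile:
        return modelfile
    raise ValueError('must provide either path or modelfile')

print(resolve_modelfile(None, 'FROM llama2'))  # -> 'FROM llama2'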
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates a httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if<fim_suffix><fim_middle> stream:
return await self._stream(*args, **kwargs)
|
stream:
return await self._stream(*args, **kwargs)
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
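The async if-block target returns the stream generator itself when `stream` is true and otherwise awaits a single response. A toy reproduction of that branching; `fake_stream` and the sleep-based result are stand-ins for the real HTTP calls:

import asyncio
from typing import AsyncIterator, Union

async def fake_stream() -> AsyncIterator[int]:
    for i in range(3):
        yield i

async def request_stream(stream: bool) -> Union[int, AsyncIterator[int]]:
    # Hand back the async generator itself when streaming,
    # otherwise await the one-shot result.
    if stream:
        return fake_stream()
    return await asyncio.sleep(0, result=42)

async def main():
    print(await request_stream(False))          # 42
    async for part in await request_stream(True):
        print(part)                             # 0, 1, 2

asyncio.run(main())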
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates a httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if<fim_suffix><fim_middle> path.exists():
args = f'@{await self._create_blob(path)}\n'
|
path.exists():
args = f'@{await self._create_blob(path)}\n'
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
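Around the if-block target, `_create_blob` hashes the file in 32 KiB chunks before uploading it under a content-addressed digest. A minimal sketch of just the digest step:

from hashlib import sha256

def blob_digest(path):
    # Chunked sha256, as in _create_blob: 32 KiB reads keep memory flat
    # even for multi-gigabyte weight files.
    h = sha256()
    with open(path, 'rb') as r:
        while chunk := r.read(32 * 1024):
            h.update(chunk)
    return f'sha256:{h.hexdigest()}'

# blob_digest('weights.bin') -> 'sha256:...'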
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates a httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if<fim_suffix><fim_middle> command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
|
command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
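The if-block target filters modelfile lines with `str.partition`, passing everything except FROM/ADAPTER commands through untouched. A runnable sketch of that filtering loop; the `@<digest-for …>` tag is only a placeholder for the blob rewrite the client performs:

import io

MODELFILE = "FROM ./weights.bin\nPARAMETER temperature 0.7\n"

out = io.StringIO()
for line in io.StringIO(MODELFILE):
    command, _, args = line.partition(' ')
    if command.upper() not in ['FROM', 'ADAPTER']:
        # Everything that is not a FROM/ADAPTER path passes through as-is.
        print(line, end='', file=out)
        continue
    # The real client swaps the path for an uploaded blob digest here.
    print(command, f'@<digest-for {args.strip()}>', file=out)
print(out.getvalue(), end='')
# FROM @<digest-for ./weights.bin>
# PARAMETER temperature 0.7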
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates a httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
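# Usage sketch (illustrative; the model name is a placeholder):
#
#   reply = client.chat(
#       model='llama2',
#       messages=[{'role': 'user', 'content': 'Hello!'}],
#   )
#   print(reply['message']['content'])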
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
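# Usage sketch (illustrative): note that despite the Sequence[float] annotation,
# the decoded JSON body is returned as-is; for the /api/embeddings endpoint that
# is a mapping of the form {'embedding': [...]}:
#
#   result = client.embeddings(model='llama2', prompt='Hello!')
#   vector = result['embedding']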
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
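# Note on the flow above: blobs are content-addressed by SHA-256 digest. The
# HEAD request probes whether the server already holds the blob; a 404 means it
# is missing, so the file is re-opened and uploaded via POST, while any other
# ResponseError is re-raised unchanged.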
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()  # async streamed responses must be read with aread() before .text is available
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
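# Note: the yields live in inner() so that _stream itself remains a plain
# coroutine. If _stream yielded directly it would be an async generator and
# `await self._stream(...)` in _request_stream below would fail; awaiting the
# coroutine instead hands back the generator without consuming it.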
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
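# Usage sketch (illustrative; assumes an event loop and a pulled model):
#
#   import asyncio
#
#   async def main():
#       client = AsyncClient()
#       reply = await client.chat(
#           model='llama2',
#           messages=[{'role': 'user', 'content': 'Hello!'}],
#       )
#       print(reply['message']['content'])
#
#   asyncio.run(main())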
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
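# Note on the flow above: unlike the synchronous client, which hands the open
# file object to httpx directly, the async client feeds httpx an async
# generator (upload_bytes) that re-reads the file in 32 KiB chunks, mirroring
# the chunk size used for hashing.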
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if<fim_suffix><fim_middle> b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
|
b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
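# Usage sketch (illustrative): with stream=True the call yields progress
# mappings that mirror the /api/pull responses, e.g. status/completed/total:
#
#   for progress in client.pull('llama2', stream=True):
#       print(progress.get('status'), progress.get('completed'), progress.get('total'))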
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
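# Usage sketch (illustrative; both model names are placeholders):
#
#   modelfile = 'FROM llama2\nSYSTEM You are a terse assistant.'
#   for progress in client.create(model='terse-llama', modelfile=modelfile, stream=True):
#       print(progress.get('status'))
#
# Alternatively pass path= to read, rewrite, and upload a Modelfile from disk.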
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if<fim_suffix><fim_middle> command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
|
command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()  # async streamed responses must be read with aread() before .text is available
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
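# Note: with stream=True this coroutine resolves to an async generator, so the
# public methods are consumed as `async for part in await client.generate(...,
# stream=True)`; the extra await is what retrieves the generator.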
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if<fim_suffix><fim_middle> (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
|
(realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
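# Example of the rewrite above (illustrative path): a Modelfile line such as
#
#   FROM ./weights/model.bin
#
# is emitted as `FROM @sha256:<hex digest>` once the local file is uploaded as
# a blob, while every non-FROM/ADAPTER line passes through unchanged.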
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()  # async streamed responses must be read with aread() before .text is available
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if<fim_suffix><fim_middle> not chunk:
break
|
not chunk:
break
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
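# Hedged sketch of consuming pull() above as a progress stream; the 'status',
# 'completed', and 'total' keys are assumptions suggested by the
# ProgressResponse naming, not guaranteed by this file.
#
# client = Client()
# for progress in client.pull('llama2', stream=True):
#     print(progress.get('status'), progress.get('completed'), progress.get('total'))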
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
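# Hedged sketch for create() above, which takes either a Modelfile path or its
# text; the Modelfile syntax shown is assumed for illustration. A 'FROM llama2'
# line survives _parse_modelfile unchanged because it resolves to no local file.
#
# client = Client()
# modelfile = 'FROM llama2\nPARAMETER temperature 0.2\n'
# for progress in client.create('my-model', modelfile=modelfile, stream=True):
#     print(progress)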
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
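# Worked example (hypothetical paths) of the rewrite _parse_modelfile performs:
# a FROM or ADAPTER argument that resolves to an existing local file is replaced
# with '@<digest>' after the file is uploaded via _create_blob; every other line
# passes through untouched. Assuming ./weights.bin exists:
#
#   FROM ./weights.bin         ->  FROM @sha256:<hex digest of weights.bin>
#   PARAMETER temperature 1    ->  PARAMETER temperature 1   (unchanged)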
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if<fim_suffix><fim_middle> not chunk:
break
|
not chunk:
break
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
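# Design note on _create_blob above: the file is hashed in 32 KiB chunks to
# bound memory, the server is probed with HEAD /api/blobs/<digest>, and only a
# 404 (blob not yet present) triggers the POST upload; any other error status
# is re-raised unchanged.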
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
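# Hedged sketch of the management helpers above; 'llama2' and 'my-copy' are
# placeholder names.
#
# client = Client()
# print(client.list())              # installed models, via GET /api/tags
# print(client.show('llama2'))      # model metadata
# client.copy('llama2', 'my-copy')  # -> {'status': 'success'} on HTTP 200
# client.delete('my-copy')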
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()  # an async-streamed response must be read with aread(), not the sync read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
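# Hedged async usage sketch for AsyncClient.chat() above; 'llama2' is an
# assumed model name. With stream=True the awaited call returns an async
# iterator, so it is consumed with 'async for'.
#
# import asyncio
#
# async def main():
#     client = AsyncClient()
#     async for part in await client.chat(
#         model='llama2',
#         messages=[{'role': 'user', 'content': 'hi'}],
#         stream=True,
#     ):
#         print(part)
#
# asyncio.run(main())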
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
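# Design note, read hedged: unlike the synchronous client, which hands the open
# file object straight to httpx, the async path re-reads the file through the
# upload_bytes async generator; a plausible rationale is that httpx expects an
# async byte iterable for streamed request content on an AsyncClient.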
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if<fim_suffix><fim_middle> isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
|
isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
|
IF
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command,<fim_suffix><fim_middle> args, end='', file=out)
|
args, end='', file=out)
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()  # an async-streamed response must be read with aread(), not the sync read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path<fim_suffix><fim_middle> = path if path.is_absolute() else base / path
|
= path if path.is_absolute() else base / path
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()  # an async-streamed response must be read with aread(), not the sync read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return<fim_suffix><fim_middle> b64encode(b.read()).decode('utf-8')
|
b64encode(b.read()).decode('utf-8')
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
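A minimal usage sketch of the asynchronous dispatch completed in the row above. It assumes the package exposes AsyncClient at the top level and that a reachable Ollama server has a pulled 'llama2' model; both are assumptions for illustration, not part of the row.

import asyncio
from ollama import AsyncClient  # top-level export assumed

async def main():
    client = AsyncClient()
    # stream=False: _request_stream awaits _request and returns one mapping.
    full = await client.chat(
        model='llama2',
        messages=[{'role': 'user', 'content': 'Why is the sky blue?'}],
    )
    print(full['message']['content'])
    # stream=True: _request_stream awaits _stream, which returns the async
    # generator built by inner(), so the result is driven with async for.
    async for part in await client.chat(
        model='llama2',
        messages=[{'role': 'user', 'content': 'Why is the sky blue?'}],
        stream=True,
    ):
        print(part['message']['content'], end='', flush=True)

asyncio.run(main())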
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image,<fim_suffix><fim_middle> validate=True)
|
validate=True)
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
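The `validate=True` completion above is what lets already-encoded input pass through `_encode_image` untouched. A self-contained sketch of the same fallback chain, with plain isinstance checks standing in for the module's `_as_path`/`_as_bytesio` helpers (which are not shown in this row):

import io
import binascii
from base64 import b64encode, b64decode
from pathlib import Path

def encode_image_sketch(image):
    # Existing file paths are read and encoded first.
    if isinstance(image, (str, Path)) and Path(image).exists():
        return b64encode(Path(image).read_bytes()).decode('utf-8')
    try:
        # validate=True rejects anything that is not well-formed base64,
        # so only genuine base64 input survives and is returned unchanged.
        b64decode(image, validate=True)
        return image if isinstance(image, str) else image.decode('utf-8')
    except (binascii.Error, TypeError):
        pass
    # Raw bytes and file-like objects are encoded as a last resort.
    if isinstance(image, (bytes, bytearray)):
        return b64encode(image).decode('utf-8')
    if isinstance(image, io.BytesIO):
        return b64encode(image.read()).decode('utf-8')
    raise ValueError('expected a path, raw bytes, or base64 data')

print(encode_image_sketch(b'ollama'))              # b2xsYW1h
print(encode_image_sketch('YWJj'))                 # passes through unchanged
print(encode_image_sketch(io.BytesIO(b'ollama')))  # b2xsYW1h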
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command,<fim_suffix><fim_middle> _, args = line.partition(' ')
|
_, args = line.partition(' ')
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
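The completed `line.partition(' ')` is what splits each Modelfile line into a command and its argument while preserving the trailing newline. A quick illustration (the .gguf filename is hypothetical):

# FROM/ADAPTER arguments are paths that get replaced by blob digests.
line = 'FROM ./vicuna-33b.Q4_0.gguf\n'
command, _, args = line.partition(' ')
print(command)       # 'FROM'
print(repr(args))    # './vicuna-33b.Q4_0.gguf\n' -- newline preserved

# Every other command is copied through to the output unchanged.
line = 'PARAMETER temperature 0.8\n'
command, _, args = line.partition(' ')
print(command.upper() in ['FROM', 'ADAPTER'])  # False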
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return<fim_suffix><fim_middle> self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
|
self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
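The one-line dispatch completed above is the whole difference between the two calling conventions of the synchronous client. A usage sketch, assuming a top-level Client export, a local server at the default address, and a pulled 'llama2' model (all assumptions):

from ollama import Client  # top-level export assumed

client = Client(host='http://localhost:11434')

# stream=False -> _request(...).json(): a single response mapping.
full = client.generate(model='llama2', prompt='Why is the sky blue?')
print(full['response'])

# stream=True -> _stream(...): a plain iterator of partial mappings.
for part in client.generate(model='llama2', prompt='Why is the sky blue?', stream=True):
    print(part['response'], end='', flush=True)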
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile<fim_suffix><fim_middle> = self._parse_modelfile(modelfile)
|
= self._parse_modelfile(modelfile)
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
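The completed branch above handles an inline modelfile string rather than a path. A sketch of that route through `create`, with placeholder model names:

from ollama import Client  # top-level export assumed

# Passing modelfile directly exercises the elif branch completed above.
modelfile = '''FROM llama2
SYSTEM You are a terse assistant.'''

client = Client()
for progress in client.create(model='terse-llama', modelfile=modelfile, stream=True):
    print(progress.get('status'))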
<filename>ollama-python/ollama/_types.py<fim_prefix>import json
from typing import Any, TypedDict, Sequence, Literal
import sys
if sys.version_info < (3, 11):
from typing_extensions import NotRequired
else:
from typing import NotRequired
class BaseGenerateResponse(TypedDict):
model: str
'Model used to generate response.'
created_at: str
'Time when the request was created.'
done: bool
'True if response is complete, otherwise False. Useful for streaming to detect the final response.'
total_duration: int
'Total duration in nanoseconds.'
load_duration: int
'Load duration in nanoseconds.'
prompt_eval_count: int
'Number of tokens evaluated in the prompt.'
prompt_eval_duration: int
'Duration of evaluating the prompt in nanoseconds.'
eval_count: int
'Number of tokens evaluated in inference.'
eval_duration: int
'Duration of evaluating inference in nanoseconds.'
class GenerateResponse(BaseGenerateResponse):
"""
Response returned by generate requests.
"""
response: str
'Response content. When streaming, this contains a fragment of the response.'
context: Sequence[int]
'Tokenized history up to the point of the response.'
class Message(TypedDict):
"""
Chat message.
"""
role: Literal['user', 'assistant', 'system']
"Assumed role of the message. Response messages always has role 'assistant'."
content: str
'Content of the message. Response messages contains message fragments when streaming.'
images: NotRequired[Sequence[Any]]
"""
Optional list of image data for multimodal models.
Valid input types are:
- `str` or path-like object: path to image file
- `bytes` or bytes-like object: raw image data
Valid image formats depend on the model. See the model card for more information.
"""
class ChatResponse(BaseGenerateResponse):
"""
Response returned by chat requests.
"""
message: Message
'Response message.'
class ProgressResponse(TypedDict):
status: str
completed: int
total: int
digest: str
class Options(TypedDict, total=False):
# load time options
numa: bool
num_ctx: int
num_batch: int
num_gqa: int
num_gpu: int
main_gpu: int
low_vram: bool
f16_kv: bool
logits_all: bool
vocab_only: bool
use_mmap: bool
use_mlock: bool
embedding_only: bool
rope_frequency_base: float
rope_frequency_scale: float
num_thread: int
# runtime options
num_keep: int
seed: int
num_predict: int
top_k: int
top_p: float
tfs_z: float
typical_p: float
repeat_last_n: int
temperature: float
repeat_penalty: float
presence_penalty: float
frequency_penalty: float
mirostat: int
mirostat_tau: float
mirostat_eta: float
penalize_newline: bool
stop: Sequence[str]
class RequestError(Exception):
"""
Common class for request errors.
"""
def __init__(self, error: str):
super().__init__(error)
self.error = error
'Reason for the error.'
class ResponseError(Exception):
"""
Common class for response errors.
"""
def __init__(self, error: str, status_code: int = -1):
try:
# try to parse content as JSON and extract 'error'
# fallback to raw content if JSON parsing fails
error<fim_suffix><fim_middle> = json.loads(error).get('error', error)
|
= json.loads(error).get('error', error)
|
STATEMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
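The completed line is a parse-then-fallback: prefer the 'error' field of a JSON error body, keep the raw text otherwise. A standalone sketch of the same idea (the except clause of the original lies beyond this row, so AttributeError is also caught here defensively for JSON bodies that are not objects):

import json

def extract_error(body: str) -> str:
    # Prefer the server's structured 'error' field; fall back to raw text.
    try:
        return json.loads(body).get('error', body)
    except (json.JSONDecodeError, AttributeError):
        return body

print(extract_error('{"error": "model not found"}'))  # model not found
print(extract_error('<html>502 Bad Gateway</html>'))  # raw text preserved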
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, str) or isinstance(s, Path):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split<fim_suffix><fim_middle> = urllib.parse.urlsplit('://'.join([scheme, hostport]))
| targets: = urllib.parse.urlsplit('://'.join([scheme, hostport]))
| block_type: STATEMENT
| scenario: prefix_full_suffix_empty_complete_current_block_no_evidence
|
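The row above completes the statement in `_parse_host` that splits a normalized host string into scheme, hostname, and port. A minimal standalone sketch of that normalization, assuming only the standard library (the host value is illustrative):

import urllib.parse

host = 'example.com:56789'
scheme, _, hostport = host.partition('://')
if not hostport:
    # No explicit scheme: default to http and treat the whole string as host:port.
    scheme, hostport = 'http', host
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
print(f'{scheme}://{split.hostname}:{split.port or 11434}')  # http://example.com:56789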
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield<fim_suffix><fim_middle> partial
| targets: partial
| block_type: STATEMENT
| scenario: prefix_full_suffix_empty_complete_current_block_no_evidence
|
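The target in the row above is the `yield partial` that drives `AsyncClient` streaming: each NDJSON line of the chunked response body is decoded into one mapping. A hedged usage sketch, assuming `AsyncClient` is re-exported at package level as in the upstream project, a local server is running, and an illustrative model name:

import asyncio
from ollama import AsyncClient  # assumes the package-level re-export

async def main():
    client = AsyncClient()
    # stream=True routes through _stream(), so each iteration below receives
    # one decoded JSON object ("partial") as the server produces it.
    async for part in await client.chat(
        model='llama2',  # illustrative model name
        messages=[{'role': 'user', 'content': 'Why is the sky blue?'}],
        stream=True,
    ):
        print(part['message']['content'], end='', flush=True)

asyncio.run(main())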
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, str) or isinstance(s, Path):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""<fim_suffix><fim_middle>
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
| targets:
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
| block_type: BLOCK_COMMENT
| scenario: prefix_full_suffix_empty_complete_current_block_no_evidence
|
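The completed block above is a doctest-style docstring, so the documented host mappings are directly executable. A sketch of checking them with the standard doctest runner, assuming the module is importable as `ollama._client` (the `_encode_image` examples read a LICENSE file relative to the working directory, so run this from the repository root):

import doctest
import ollama._client

# Executes every >>> example in the module's docstrings, including the
# _parse_host table completed above; failed=0 means all mappings hold.
print(doctest.testmod(ollama._client))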
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""<fim_suffix><fim_middle>
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
| targets:
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
| block_type: BLOCK_COMMENT
| scenario: prefix_full_suffix_empty_complete_current_block_no_evidence
|
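The row above completes the docstring of `Client.create`. A hedged sketch of the call it documents, assuming the package-level `Client` re-export; the model names and Modelfile contents are illustrative:

from ollama import Client

client = Client()
modelfile = '''
FROM llama2
SYSTEM You are a terse assistant.
'''
# stream=True yields ProgressResponse-style dicts while the server builds the
# model; stream=False would return the final status mapping instead.
for progress in client.create(model='terse-llama', modelfile=modelfile, stream=True):
    print(progress.get('status', ''))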
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""<fim_suffix><fim_middle>
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
|
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
|
BLOCK_COMMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
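The AsyncClient rows mirror the sync API; below is a sketch of the streaming path, under the same assumptions about the import path, model name, and response shape. Note that chat must be awaited first: with stream=True the coroutine resolves to an async generator produced by _stream.

import asyncio

from ollama import AsyncClient  # assumed public import path

async def main() -> None:
    client = AsyncClient()  # host falls back to the OLLAMA_HOST env var
    stream = await client.chat(  # awaiting yields the async generator
        model='llama2',  # illustrative model name
        messages=[{'role': 'user', 'content': 'Hello!'}],
        stream=True,
    )
    async for part in stream:
        print(part['message']['content'], end='', flush=True)  # assumed shape

asyncio.run(main())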
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates a httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""<fim_suffix><fim_middle>
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
|
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
|
BLOCK_COMMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
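A sketch of pull() with streaming progress, based on the method above; the 'status'/'completed'/'total' keys are an assumption about the ProgressResponse payload, which is not defined in this file.

from ollama import Client  # assumed public import path

client = Client()
for progress in client.pull('llama2', stream=True):  # illustrative model name
    status = progress.get('status', '')  # assumed progress fields
    completed = progress.get('completed')
    total = progress.get('total')
    if completed is not None and total:
        print(f'{status}: {completed}/{total} bytes')
    else:
        print(status)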
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates a httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""<fim_suffix><fim_middle>
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
|
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
|
BLOCK_COMMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
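embeddings() is annotated as returning Sequence[float] but in fact returns the parsed JSON body; the sketch below treats that body as a mapping with an 'embedding' key, which is an assumption about the server response rather than something this file defines.

from ollama import Client  # assumed public import path

client = Client()
result = client.embeddings(model='llama2', prompt='The sky is blue.')
vector = result['embedding']  # assumed response key
print(f'embedding dimensions: {len(vector)}')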
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates a httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""<fim_suffix><fim_middle>
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
|
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
|
BLOCK_COMMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
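A sketch of create() with an inline modelfile, following the path/modelfile branch above. _parse_modelfile rewrites FROM/ADAPTER lines that point at existing local files into @sha256 blob references; a plain base-model name like the one below passes through unchanged. The modelfile content and model names are illustrative.

from ollama import Client  # assumed public import path

client = Client()
modelfile = (
    'FROM llama2\n'  # passes through: not an existing local path
    'SYSTEM You are a terse assistant.\n'
)
for progress in client.create(model='terse-llama2', modelfile=modelfile, stream=True):
    print(progress.get('status', ''))  # assumed ProgressResponse field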
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates a httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""<fim_suffix><fim_middle>
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
|
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
|
BLOCK_COMMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
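BaseClient forwards extra kwargs straight to the httpx client, so transport settings can be tuned per the constructor shown above; the values here are illustrative. Custom headers are merged first, then the mandatory Content-Type/Accept/User-Agent entries overwrite those three keys.

from ollama import Client  # assumed public import path

client = Client(
    host='http://127.0.0.1:11434',   # omitted -> OLLAMA_HOST, via _parse_host
    timeout=120,                     # httpx timeout; library default is None
    headers={'X-Trace-Id': 'demo'},  # merged with the mandatory JSON headers
)
print(client.list())  # GET /api/tags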
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""<fim_suffix><fim_middle>
Creates a httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
|
Creates a httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
|
BLOCK_COMMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates a httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
        raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""<fim_suffix><fim_middle>
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
|
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
|
BLOCK_COMMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
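A minimal usage sketch, assuming a locally running Ollama server, a pulled multimodal model named 'llava', and an existing file 'photo.png' (none of these are given by the rows above): it shows how Client.chat consumes image inputs, each of which is run through the _encode_image helper whose doctest appears in the preceding row.

from ollama import Client

client = Client()  # base URL comes from OLLAMA_HOST when set
response = client.chat(
    model='llava',  # assumed multimodal model
    messages=[{
        'role': 'user',
        'content': 'What is in this image?',
        # bytes, file paths, and file-like objects are all accepted here;
        # chat() converts each one to a base64 string via _encode_image
        'images': ['photo.png'],
    }],
)
print(response['message']['content'])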
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
    Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""<fim_suffix><fim_middle>
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
|
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
|
BLOCK_COMMENT
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
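A sketch of driving the pull() method documented in the row above, assuming a reachable registry and a model named 'llama2': with stream=True the call returns an iterator of ProgressResponse-shaped mappings rather than a single result.

from ollama import Client

client = Client()
for progress in client.pull('llama2', stream=True):
    status = progress.get('status', '')
    # 'completed' and 'total' are assumed to appear during layer downloads
    if (total := progress.get('total')) and (done := progress.get('completed')):
        print(f'{status}: {done}/{total} bytes')
    else:
        print(status)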
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
    Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:<fim_suffix><fim_middle>
r.raise_for_status()
|
r.raise_for_status()
|
TRY
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
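A sketch of consuming the AsyncClient streaming path completed in the row above: with stream=True, generate() resolves to the async generator built by _stream()/inner(). The model name and prompt are placeholders.

import asyncio

from ollama import AsyncClient

async def main() -> None:
    client = AsyncClient()
    # the awaited call returns an async iterator of GenerateResponse chunks
    stream = await client.generate(model='llama2', prompt='Why is the sky blue?', stream=True)
    async for part in stream:
        print(part['response'], end='', flush=True)

asyncio.run(main())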
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
    Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
        raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:<fim_suffix><fim_middle>
await self._request('HEAD', f'/api/blobs/{digest}')
|
await self._request('HEAD', f'/api/blobs/{digest}')
|
TRY
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
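A sketch of the create() flow whose blob handshake (HEAD the digest, POST on 404) appears in the row above, assuming a base model 'llama2' is already present. FROM or ADAPTER lines that name local files would instead be rewritten to @sha256:... blob references by _parse_modelfile/_create_blob.

import asyncio

from ollama import AsyncClient

modelfile = '''
FROM llama2
SYSTEM You are a terse assistant.
'''

async def main() -> None:
    client = AsyncClient()
    # 'llama2' is not a local path, so _parse_modelfile passes it through
    result = await client.create(model='terse-llama', modelfile=modelfile)
    print(result)  # non-streaming create returns the parsed JSON body

asyncio.run(main())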
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
    Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:<fim_suffix><fim_middle>
self._request('HEAD', f'/api/blobs/{digest}')
|
self._request('HEAD', f'/api/blobs/{digest}')
|
TRY
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
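A sketch of the error contract implied by _request() and mirrored in _create_blob's HEAD-before-POST pattern above: non-2xx responses surface as ResponseError carrying a status_code. It assumes 'no-such-model' is absent locally; ResponseError is re-exported at the ollama package root, otherwise import it from ollama._types as the file itself does.

from ollama import Client, ResponseError

client = Client()
try:
    client.show('no-such-model')
except ResponseError as e:
    if e.status_code == 404:
        print('model not found:', e)
    else:
        raise  # anything other than 404 is a real failure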
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
    Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)

    def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()

    def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest

    def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}

    def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()

    def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}

    def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()


class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)

    async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response

    async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:<fim_suffix><fim_middle>
r.raise_for_status()
|
r.raise_for_status()
|
TRY
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal

import sys

if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator

from importlib import metadata

try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'

from ollama._types import Message, Options, RequestError, ResponseError


class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates a httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)


class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)

    def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response

    def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:<fim_suffix><fim_middle>
r.raise_for_status()
|
r.raise_for_status()
|
TRY
|
prefix_full_suffix_empty_complete_current_block_no_evidence
|