# nlm-ingestor/nlm_ingestor/ingestor/processors.py
import logging
import re
from collections import Counter
from collections import defaultdict
from . import formatter
from . import line_parser
from . import patterns
from nlm_ingestor.ingestor_utils import spell_utils
from nlm_ingestor.ingestor_utils.utils import sent_tokenize
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
su = spell_utils.SpellUtil()
def stem(line):
line = line.replace("'s", "")
line = line.replace("’s", "")
return line
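# Illustrative examples (added, not in the original source): stem() only strips
# possessive suffixes, e.g.
#   >>> stem("the market's rise")
#   'the market rise'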
def check_parentheses(text):
count = 0
for i in text:
if i == "(":
count += 1
elif i == ")":
count -= 1
return count == 0
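# Illustrative sketch (added, not in the original source): only the counts are
# compared, so reversed pairs like ")(" also pass the check.
#   >>> check_parentheses("a (b) c")
#   True
#   >>> check_parentheses("a (b c")
#   False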
def nlm_tokenize(line):
# print(line)
tokens = []
if not line:
line = ""
line = line.lower()
# map hyphens and slashes to spaces; maketrans requires both strings to have equal length
trans_table = line.maketrans("-/", "  ")
line = line.translate(trans_table)
line = line.translate(str.maketrans("", "", "�\\(*,.?•\\➢ƒ–\\)'\"—"))
# line = patterns.num_unit.sub(r"100 \1", line)
line = patterns.num_unit.sub(r"", line)
line = stem(line)
words = line.split()
for word in words:
if (
not word.isdigit()
and not word.endswith("%")
and not word.startswith("$")
and not word.endswith("$")
):
tokens.append(word)
if len(tokens) == 0:
tokens.append("unknown")
return tokens
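# Illustrative trace (added; assumes patterns.num_unit matches nothing here):
#   >>> nlm_tokenize("The Company's Results - Overview (2021)")
#   ['the', 'companys', 'results', 'overview']
# the apostrophe is stripped before stem() runs, and the bare year is dropped
# by the isdigit() filter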
# returns True when no word is longer than two characters,
# i.e. the line consists only of "floating" characters
def find_floating_chars(line):
words = line.split(" ")
for word in words:
if len(word) > 2:
return False
return True
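# Illustrative examples (added, not in the original source):
#   >>> find_floating_chars("a b c")
#   True
#   >>> find_floating_chars("a big c")
#   False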
def is_table_row(line):
line = line_parser.Line(line)
return line.is_table_row
def should_skip(line, xml=False):
return len(line) <= 2 if not xml else len(line) == 0
def clean_lines(lines, xml=False):
result = []
running_line = ""
line_buffer = []
line_type = "para"
header_block_idx = -1
block_idx = 0
line_set = set()
for line_str in lines:
# print(line_str)
line_str = clean_line(line_str)
if should_skip(line_str, xml=xml):
continue
line_without_numbers = re.sub(r"\d+", "", line_str)
if line_without_numbers in line_set:
continue
else:
line_set.add(line_without_numbers)
curr_line = line_parser.Line(line_str)
# this converts strings like 'e x e c u t i v e summary' to 'executive summary'
if not xml and curr_line.has_spaced_characters:
line_str = fix_spaced_characters(line_str)
curr_line = line_parser.Line(line_str)
if len(line_buffer) > 0:
# find out if the previous line was a discontinuous line
prev_line = line_buffer[-1]
logger.debug("========")
logger.debug(f"{prev_line.incomplete_line} >> {prev_line.text} \n")
logger.debug(f"{curr_line.continuing_line} >> {curr_line.text} \n")
# keep connecting lines as long as they seem incomplete
is_incomplete = prev_line.incomplete_line or (
len(line_buffer) > 1 and not prev_line.ends_with_period
)
logger.debug(
f"incomplete: {is_incomplete}, is_list_or_row: {curr_line.is_list_or_row}, continuing_line: {curr_line.continuing_line}",
)
if (
is_incomplete
and not (curr_line.is_list_or_row or curr_line.line_type == "list_item")
) or curr_line.continuing_line:
logger.debug("connecting..")
running_line = formatter.connect(running_line, curr_line.text)
line_buffer.append(curr_line)
# if we are connecting lines, then this has to be a para unless it is a list_item; basically no headers
if not line_type == "list_item":
line_type = "para"
else: # commit the line and start a new line
# remove different types of bulleted lists (for better formatting) but do not touch numbered lines
logger.debug("starting new line..")
# if line_type == "list_item":
# running_line = running_line[1:].lstrip()
if line_type == "header":
header_block_idx = block_idx
block = {
"block_idx": block_idx,
"block_text": running_line,
"block_type": line_type,
"text_group_start_idx": -1,
"block_list": [],
"header_block_idx": header_block_idx,
"level": 0,
}
result.append(block)
block_idx = block_idx + 1
running_line = curr_line.text
line_buffer = [curr_line]
line_type = curr_line.line_type
logger.debug("========")
else:
running_line = curr_line.text
line_type = curr_line.line_type
line_buffer = [curr_line]
if line_type == "list_item" and running_line[0] in "�\\*,.?•\\➢ƒ–\\'\"—":
running_line = running_line[1:].lstrip()
block = {
"block_idx": block_idx,
"block_text": running_line,
"block_type": line_type,
"text_group_start_idx": -1,
"block_list": [],
"header_block_idx": header_block_idx,
"level": 0,
}
result.append(block)
return result
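# For reference (added note): each block emitted above has the shape
# {"block_idx": int, "block_text": str, "block_type": "para"|"header"|"list_item"|...,
#  "text_group_start_idx": -1, "block_list": [], "header_block_idx": int, "level": 0}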
def line_list_check(prev_line, curr_line, list_char):
# if prev_line is list_item and list_char matches curr_line
if list_char == curr_line.text[0] and list_char not in ["”", "'", '"', "("]:
return True
# same char is alpha
if prev_line.text[0] == curr_line.text[0] and prev_line.text[0].isalpha():
if len(prev_line.text) >= 2 and prev_line.text[1].isupper():
# spell check first word
first_word = prev_line.text.split(" ")[0]
first_word = first_word.replace("'", "")
correct_word = su.segment(first_word)
if first_word[1:] == correct_word:
return True
# same first char, neither alpha nor digit
if prev_line.text[0] == curr_line.text[0] and not (
prev_line.text[0].isalpha()
or prev_line.text[0].isdigit()
or list_char not in ["”", "'", '"', "("]
):
return True
return False
def should_join_table(prev_line, curr_line, ents_aligned):
"""
Check if next line should be joined as a tr. This makes no assumption if the current line is a table
"""
# print()
# print("Checking to join tr", prev_line.visual_line.text_list, "\n", curr_line.visual_line.text_list)
# check list of spaced words
curr_line_ents = len(prev_line.visual_line.text_list)
next_line_ents = len(curr_line.visual_line.text_list)
ent_match = (
curr_line_ents == next_line_ents and curr_line_ents >= 2
) # tr should have at least two elements
# print("tab check", prev_line.visual_line.tab_count, curr_line.visual_line.tab_count)
tab_match = (
prev_line.visual_line.tab_count == curr_line.visual_line.tab_count
and curr_line.visual_line.tab_count > 0
)
# casing should also be the same
same_case = (
prev_line.text[0].islower() == curr_line.text[0].islower()
or prev_line.text[0].isupper() == curr_line.text[0].isupper()
)
colon_check = (
prev_line.hit_colon
and curr_line.hit_colon
and prev_line
and same_case
and not prev_line.incomplete_line
)
# if prev_line.hit_colon and curr_line.hit_colon:
# print()
# print("colon check")
# print(prev_line.visual_line.text_list)
# print(curr_line.visual_line.text_list)
# col_check
# print(tab_match, ent_match, colon_check)
tab_check = prev_line.visual_line.tab_count or curr_line.visual_line.tab_count
return (
(tab_match and ent_match)
or colon_check
or (ents_aligned and ent_match and tab_check)
)
def check_page_spacing(prev_line, curr_line, spacing_dict):
# print("^"*50)
# print("checking page stats")
# print(prev_line.visual_line.start_fs, prev_line.visual_line.end_fs, prev_line.text)
# print(curr_line.visual_line.start_fs, curr_line.visual_line.end_fs, curr_line.text)
# print()
diff_top = round(curr_line.visual_line.start_y - prev_line.visual_line.end_y)
# find best fs reference
prev_line_fs = {prev_line.visual_line.start_fs, prev_line.visual_line.end_fs}
curr_line_fs = {curr_line.visual_line.start_fs, curr_line.visual_line.end_fs}
same_fs = prev_line_fs.intersection(curr_line_fs)
fs = min(same_fs) if same_fs else curr_line.visual_line.start_fs
min_check = (
spacing_dict[(fs, diff_top - 1)] if (fs, diff_top - 1) in spacing_dict else None
)
max_check = (
spacing_dict[(fs, diff_top + 1)] if (fs, diff_top + 1) in spacing_dict else None
)
normal_check = (fs, diff_top) in spacing_dict and spacing_dict[(fs, diff_top)] > 3
if min_check or normal_check or max_check:
# get all fs in spacing dict
# see if the diff top is a min
# print("checking space dict")
distance_list = []
for val in spacing_dict:
if val[0] == fs and val[1] > 0 and spacing_dict[val] > 2:
distance_list.append((val, val[1]))
# print(distance_list)
val = min(distance_list) if len(distance_list) else []
if len(val):
join_fs, join_top = val[0]
if val[0] == (fs, diff_top): # or close
# print("SHOULDJOIN")
return True
elif (
join_fs == fs
and ((diff_top - 1) == join_top)
or ((diff_top + 1) == join_top)
):
return True
return False
def compute_overlap(
start_x0: float,
end_x0: float,
start_x1: float,
end_x1: float,
divide_by_min=True,
) -> float:
"""
Computes the % of intersection (overlap) of two lines w.r.t. the shortest line
"""
width_x0 = abs(end_x0 - start_x0)
width_x1 = abs(end_x1 - start_x1)
if start_x0 <= start_x1 <= end_x0:
intersect = min(abs(end_x0 - start_x1), width_x1)
elif start_x0 <= end_x1 <= end_x0:
intersect = min(abs(end_x1 - start_x0), width_x1)
elif start_x1 <= start_x0 <= end_x0 <= end_x1:
intersect = abs(end_x0 - start_x0)
else:
intersect = 0.0
if divide_by_min:
intersect /= min(width_x0, width_x1) + 1e-5
else:
intersect /= max(width_x0, width_x1) + 1e-5
return intersect
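# Illustrative worked example (added, not in the original source):
#   >>> round(compute_overlap(0, 10, 5, 20, divide_by_min=True), 2)
#   0.5   # 5 shared units relative to the shorter, 10-unit line
#   >>> round(compute_overlap(0, 10, 5, 20, divide_by_min=False), 2)
#   0.33  # the same 5 units relative to the longer, 15-unit line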
def compute_overlap_top_bottom(
start_x0: float,
end_x0: float,
start_x1: float,
end_x1: float,
) -> float:
"""
This is different from the above function.
Finds percentage overlap of top to bottom.
Score of 100% is possible doesn't reference the shortest line
"""
width_x1 = abs(end_x1 - start_x1)
if width_x1 == 0:
return 0.0
if start_x0 <= start_x1:
# measure from left to right
if end_x1 <= end_x0:
# if start and end both less, full in subset
return 1.0
return (end_x1 - start_x0) / width_x1
else:
# measure from bottom start
if end_x1 <= start_x0:
return 0.0
return (end_x1 - start_x0) / width_x1
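# Illustrative examples (added, not in the original source):
#   >>> compute_overlap_top_bottom(0, 10, 2, 8)
#   1.0   # the second span sits fully inside the first
#   >>> compute_overlap_top_bottom(12, 20, 2, 10)
#   0.0   # no horizontal overlap at all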
def compute_bottom_top_overlap(start_x0, end_x0, start_x1, end_x1):
"""
This is different from the above function.
Finds percentage overlap of top to bottom.
Score of 100% is possible doesn't reference the shortest line
"""
# print(start_x0, end_x0)
# print(start_x1, end_x1)
if start_x0 == start_x1 and end_x0 != start_x0: # aligned with bottom line
# print()
# print("bottom overlap", (end_x1 - start_x1) / (end_x0 - start_x0))
return (end_x1 - start_x1) / (end_x0 - start_x0)
# other conditions
# elif start_x0 < start_x1 and end_x0 > end_x1: # to the left of bottom line
# return
# else: #to the right of bottom line
return 1.0
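# Illustrative examples (added, not in the original source):
#   >>> compute_bottom_top_overlap(0, 10, 0, 5)
#   0.5   # left-aligned: ratio of the two widths
#   >>> compute_bottom_top_overlap(0, 10, 3, 8)
#   1.0   # any other arrangement falls through to the default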
# header check for lines with similar font
def visual_header_check(prev_line, curr_line, same_font):
# check top overlap (small) if the font size is bigger
# print()
# print("visual_header check:")
# print("prev", prev_line.text)
# print("checking", curr_line.text)
# top also has to be higher
# print("prev_line.visual_line.start_y, prev_line.visual_line.end_y")
# print(prev_line.visual_line.start_y, prev_line.visual_line.end_y)
# print(prev_line.visual_line.start_y, curr_line.visual_line.start_y)
if prev_line.visual_line.wrapped_page:
return False
if prev_line.visual_line.start_y < curr_line.visual_line.start_y:
prev_line_width = prev_line.visual_line.max_x - prev_line.visual_line.min_x
curr_line_width = curr_line.visual_line.max_x - curr_line.visual_line.min_x
# print("prev_line.visual_line.min_x, prev_line.visual_line.max_x, prev_line.visual_line.end_x")
# print(prev_line.visual_line.min_x, prev_line.visual_line.max_x, prev_line.visual_line.end_x)
# print("curr_line.visual_line.min_x, curr_line.visual_line.max_x")
# print(curr_line.visual_line.min_x, curr_line.visual_line.max_x)
# print("prev_line_width / curr_line_width")
# print(prev_line_width / curr_line_width)
# print("prev_line_width, curr_line_width")
# print(prev_line_width, curr_line_width)
if curr_line_width == 0:
return False
# print(round(prev_line.visual_line.min_x), round(curr_line.visual_line.min_x))
if round(prev_line.visual_line.min_x) == round(curr_line.visual_line.min_x):
if round(prev_line_width) == round(curr_line_width):
# print()
# print("NOT A HEADER1")
return False
offset = 0
# print(prev_line.visual_line.min_x, curr_line.visual_line.min_x)
# print(prev_line.visual_line.min_x <= curr_line.visual_line.min_x)
if prev_line.visual_line.min_x <= curr_line.visual_line.min_x:
offset = curr_line.visual_line.min_x - prev_line.visual_line.min_x # offset
# print("(prev_line_width - offset) / curr_line_width")
# print((prev_line_width - offset) / curr_line_width)
overlap_percentage = (prev_line_width - offset) / curr_line_width
different_font_style = (
prev_line.visual_line.fw != curr_line.visual_line.fw
or prev_line.visual_line[1] != curr_line.visual_line[1]
or prev_line.visual_line.fs > curr_line.visual_line.fs
)
if (
overlap_percentage < 0.3
or (different_font_style and overlap_percentage < 0.6)
or (prev_line.line_type == "header" and different_font_style)
# or (prev_line.is_header and different_font_style)
):
# print("HEADER INDENT", prev_line.is_header)
# print("overlap rule::", (prev_line_width - offset) / curr_line_width)
# print(True)
return True
# print(False)
# print()
# print("NOT A HEADER")
return False
def visual_header_from_stats(prev_line, curr_line, page_stats):
prev_fs = prev_line.visual_line.fs
curr_fs = curr_line.visual_line.fs
median_val = round(page_stats["median_fs"])
max_val = round(max(page_stats["fs_list"]))
max_val_diff = ((max_val - prev_fs) / max_val) < 0.2 if max_val != 0 else True
prev_fs_diff = round(prev_fs - median_val)
curr_fs_diff = (
round(curr_fs - median_val) if round(curr_fs - median_val) else 0.8
) # fall back to 0.8 when curr_fs equals the median, to avoid dividing by zero below
varied_set = len(set(page_stats["fs_list"])) >= 4
rounded_fs_count = Counter([round(x, 3) for x in page_stats["fs_list"]])
unique_text = rounded_fs_count[round(prev_fs, 3)] / len(page_stats["fs_list"])
prev_curr_ratio_from_median = prev_fs_diff / curr_fs_diff
# print("prev_fs, curr_fs", prev_fs, curr_fs)
# print("unique text")
# print(rounded_fs_count[round(prev_fs, 3)], len(page_stats["fs_list"]) )
# print("visual_header check", len(set(page_stats["fs_list"])))
# print("varied_set", varied_set, "unique_text", unique_text)
# print(rounded_fs_count)
# print()
# close from max or far enough from median
bigger_text = max_val_diff or (
prev_curr_ratio_from_median > 2
) # TODO text must also be relatively uncommon
if varied_set and (unique_text <= 0.08):
if bigger_text and (prev_fs_diff > 1) and (prev_fs_diff - curr_fs_diff) > 0.3:
# print(max_val_diff)
# print(prev_fs, prev_line.text)
# print(curr_fs, curr_line.text)
# print()
return True
# header join
if bigger_text and curr_fs == prev_fs and (prev_fs_diff > 1):
# print(max_val_diff)
# print(prev_fs, prev_line.text)
# print(curr_fs, curr_line.text)
# print()
return True
return False
# def visual_clean_lines(lines, page_stats={}, page_info_dict={}):
def check_tr_alignment(prev_line, curr_line):
# print("-=" * 50)
# print("check_tr_alignment!")
# print(prev_line.text)
# print(curr_line.text)
# print()
prev_ents = len(prev_line.visual_line.text_list)
curr_ents = len(curr_line.visual_line.text_list)
prev_positions = prev_line.visual_line.start_x_list
curr_positions = curr_line.visual_line.start_x_list
prev_line_start_ents = prev_line.visual_line.start_x_list_single_ent
curr_line_start_ents = curr_line.visual_line.start_x_list_single_ent
# print(prev_line_start_ents)
# print(curr_line_start_ents)
same_ents = prev_ents > 1 and abs(prev_ents - curr_ents) <= 1
if len(prev_line_start_ents) == len(curr_line_start_ents):
prev_positions = prev_line_start_ents
curr_positions = curr_line_start_ents
if len(prev_line_start_ents) == len(curr_positions) and len(prev_line_start_ents) != len(prev_positions): # joined p_tags
prev_positions = prev_line_start_ents
if not same_ents:
# print("check_tr_alignment False1")
# print(prev_ents, curr_ents)
return False
# print("CHECKING POSITIONS")
# print(prev_positions)
# print(curr_positions)
for p_x, c_x in zip(prev_positions, curr_positions):
p_x = round(p_x)
c_x = round(c_x)
if abs(p_x - c_x) > 100:
# print("False")
# print("check_tr_alignment False3")
return False
# print("check_tr_alignment True")
return True
def check_layout(prev_line, curr_line, prev_above_curr):
prev_line_width = range(
int(prev_line.visual_line.min_x),
int(prev_line.visual_line.max_x),
)
# weird edge case
if not prev_line_width:
prev_line_width = range(
int(prev_line.visual_line.max_x),
int(prev_line.visual_line.min_x),
)
curr_line_width = range(
int(curr_line.visual_line.min_x),
int(curr_line.visual_line.max_x),
)
prev_line_width = set(prev_line_width)
prev_curr_overlap = prev_line_width.intersection(curr_line_width)
if prev_curr_overlap and not prev_above_curr:
# print(prev_line.text)
# print(curr_line.text)
# print("misplaced text group")
# print()
return True
return False
def order_blocks(blocks):
block_group_dict = defaultdict(list)
for idx, block in enumerate(blocks):
# print(idx, "block-group", block["group_id"], block["block_type"], block['block_text'])
group_id = block["group_id"]
block_group_dict[group_id].append(block)
block_group_list = [] # list that holds tuples (group_id, y_pos)
for block_group_id in block_group_dict:
block_group_list.append(
(block_group_id, block_group_dict[block_group_id][0]["y"]),
) # append starting y position of group
block_group_list = sorted(
block_group_list,
key=lambda x: x[1],
) # sort block groups by y position
# get list of ordered block group keys
ordered_blocks = []
for block_group_id, y in block_group_list:
ordered_blocks += block_group_dict[block_group_id]
# for b in original_blocks:
# re-index blocks and headers based off of new ordering
header_idx = 0
for idx, block in enumerate(ordered_blocks):
block["block_idx"] = idx
if block["block_type"] == "header":
header_idx = idx
ordered_blocks[idx]["header_block_idx"] = header_idx
return ordered_blocks
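# Illustrative sketch (added; minimal dicts with only the keys order_blocks touches):
# blocks = [
#     {"group_id": 1, "y": 50.0, "block_type": "para", "block_idx": 0, "header_block_idx": 0},
#     {"group_id": 0, "y": 10.0, "block_type": "header", "block_idx": 1, "header_block_idx": 0},
# ]
# order_blocks(blocks) puts the y=10 header group first, re-indexes block_idx to
# 0 and 1, and points the para's header_block_idx at the header above it.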
def visual_clean_lines(
lines,
page_stats=None,
page_info_dict=None,
page_idx=0,
line_set=None,
):
# avoid shared mutable default arguments; the old line_set={} default was also
# a dict, which has no .add() and would break the de-duplication below
page_stats = page_stats if page_stats is not None else {}
page_info_dict = page_info_dict if page_info_dict is not None else {}
line_set = line_set if line_set is not None else set()
page_blocks = []
header_block_idx = -1
block_idx = 0
# block_idx = page_idx
style_dict = {}
join_font_spacing = False
prev_line = None
text_list = []
prev_ents = 0
curr_ents = 0
is_incomplete = False
colon_rule = False
text_group_start = True
text_group_start_idx = 0
next_line = None
# for idx, line in enumerate(lines[12:14]):
sentence_visual_end = False
group_id = 0
for idx, line in enumerate(lines):
# print(idx)
line_str, style_dict, text_list = (
line["text"],
line["style"],
line["text_list"],
)
line_str = " ".join(line_str.split())
if should_skip(line_str):
continue
if line_str in line_set:
continue
if len(line_str.split()) > 8:
line_set.add(line_str)
curr_line = line_parser.Line(
line_str=line_str,
style_dict=style_dict,
text_list=text_list,
page_details=page_stats,
)
if prev_line is None:
# initialize memory of previous line.
# this will update with join decisions
list_char = ""
if curr_line.line_type == "list_item":
list_char = curr_line.text[0]
curr_line.text = curr_line.text[1:].lstrip()
if curr_line.line_type == "header":
header_block_idx = block_idx
block = {
"block_idx": block_idx,
"block_text": curr_line.text,
"block_type": curr_line.line_type,
"header_block_idx": header_block_idx,
"block_group": [curr_line.visual_line.text_list],
"list_char": list_char,
"fs": curr_line.visual_line.start_fs,
"text_group_start_idx": text_group_start_idx,
"block_list": curr_line.visual_line.text_list,
"line": curr_line,
"y": curr_line.visual_line.start_y,
"group_id": group_id,
}
prev_line = curr_line
block_idx += 1
# if (idx <= 3) or (idx >= len(lines) - 3):
# line_without_numbers = re.sub(r"[^a-zA-Z]+", "", line_str).strip()
# if line_without_numbers:
# # track block_idx for de-duplication
# line_set[line_without_numbers].append((page_idx, block_idx))
page_blocks.append(block)
continue
# print("--" * 50)
# print(prev_line.line_type, "\n", prev_line.text)
# print(prev_ents)
# print(prev_line.visual_line.fw_list)
# print(prev_line.visual_line.font_family)
# print(prev_line.visual_line.fs, prev_line.visual_line.fw, "prev_line:", prev_line.line_type, prev_line.text)
# print(prev_line.visual_line.mode_fs)
# print(curr_line.line_type, "\n", curr_line.text)
# print(curr_ents)
# print()
# print(curr_line.visual_line.font_family)
# print(curr_line.visual_line.mode_fs)
# print(curr_line.visual_line.fs, curr_line.visual_line.fw, "curr_line:", curr_line.line_type, curr_line.text)
if (
len(prev_line.text) > 1
and len(curr_line.text) > 1
and prev_line.text[:2] == curr_line.text[:2]
and prev_line.text[1] == " "
and not (prev_line.text[0].isdigit() or curr_line.text[0].isdigit())
and not (prev_line.text[0].isalpha() or curr_line.text[0].isalpha())
):
curr_line.line_type = "list_item"
curr_line.is_list_item = True
curr_line.is_list_or_row = True
if page_blocks[-1]["block_type"] != "list_item":
page_blocks[-1]["block_type"] = "list_item"
page_blocks[-1]["list_char"] = page_blocks[-1]["block_text"][0]
page_blocks[-1]["block_text"] = page_blocks[-1]["block_text"][
1:
].lstrip()
same_start_fs = (
abs(prev_line.visual_line.start_fs - curr_line.visual_line.start_fs) < 0.5
)
same_end_fs = (
abs(prev_line.visual_line.end_fs - curr_line.visual_line.end_fs) < 0.5
)
same_end_start_fs = (
abs(prev_line.visual_line.end_fs - curr_line.visual_line.start_fs) < 0.5
)
prev_above_curr = prev_line.visual_line.end_y < curr_line.visual_line.start_y
y_diff = curr_line.visual_line.start_y - prev_line.visual_line.start_y
top_overlap = compute_overlap_top_bottom(
start_x0=prev_line.visual_line.start_x,
end_x0=prev_line.visual_line.end_x,
start_x1=curr_line.visual_line.start_x,
end_x1=curr_line.visual_line.end_x,
)
bottom_overlap = compute_bottom_top_overlap(
start_x0=prev_line.visual_line.start_x,
end_x0=prev_line.visual_line.end_x,
start_x1=curr_line.visual_line.start_x,
end_x1=curr_line.visual_line.end_x,
)
prev_overlap_curr = bool(bottom_overlap or top_overlap)
use_visual_join = prev_above_curr and prev_overlap_curr
if not use_visual_join and prev_line.incomplete_line:
join_font_spacing = True
if not (prev_line.is_table_row or curr_line.is_table_row):
if page_stats["n_lines"] <= 3:
join_font_spacing = True
else:
join_font_spacing = check_page_spacing(
prev_line,
curr_line,
page_stats["fs_and_diff_next_y"],
)
# if the font is different and font-family is different
different_font_family = (
curr_line.visual_line.font_family != prev_line.visual_line.font_family
)
different_common_fs = (
prev_line.visual_line.mode_fs != curr_line.visual_line.mode_fs
and prev_line.visual_line.start_fs != curr_line.visual_line.start_fs
)
different_font = (
different_font_family and different_common_fs and not join_font_spacing
)
# start and end characters are same font or the mode of fonts of both lines is the same
same_font = (
(prev_line.visual_line.fs == curr_line.visual_line.fs)
or (same_start_fs and same_end_fs)
or same_end_start_fs
or prev_line.visual_line.mode_fs == curr_line.visual_line.mode_fs
) and not different_font
prev_ents = (
len(prev_line.visual_line.text_list)
if not prev_line.line_type == "list_item"
else 0
)
curr_ents = (
len(curr_line.visual_line.text_list) if not curr_line.is_list_item else 0
)
ents_aligned = check_tr_alignment(prev_line, curr_line)
is_incomplete_sent = (
prev_line.incomplete_line
and not prev_line.ends_with_period
or prev_line.ends_with_comma
)
# logic using line after curr
if idx + 1 < len(lines):
# this is inefficient, as line_parser is called twice on each line:
# once as next_line here and again as curr_line on the next iteration
next_line = lines[idx + 1]
# print("NEXT LINE\n", next_line['text'])
next_line_str, next_style_dict, next_text_list = (
next_line["text"],
next_line["style"],
next_line["text_list"],
)
next_line = line_parser.Line(
line_str=next_line_str,
style_dict=next_style_dict,
text_list=next_text_list,
page_details=page_stats,
)
# if the last line was not a table, check if the next line is a table to avoid single tr
if prev_line.line_type != "table_row" and not ents_aligned:
# check if the next line is a table and matches curr_line
next_line_tr = next_line.line_type == "table_row" or should_join_table(
curr_line,
next_line,
False,
)
if not next_line_tr and curr_line.line_type == "table_row":
curr_line.line_type = "para"
# if the next line is visually joinable but prev and curr are not,
# don't join the lines (only true when the x-span check passes and
# next sits below prev); if that doesn't hold, ignore the rule
prev_not_above_next = (
next_line and prev_line.visual_line.start_y > next_line.visual_line.start_y
)
next_line_join = False
if next_line and check_layout(prev_line, next_line, prev_not_above_next):
next_line_join = check_page_spacing(
curr_line,
next_line,
page_stats["fs_and_diff_next_y"],
)
# if the prev line is not visually joinable and the curr_next is
# make sure the prev_line doesn't join the curr_line
curr_next_visual_join = not join_font_spacing and next_line_join
# print()
# print("is_incomplete_sent, (join_font_spacing and not sentence_visual_end), curr_line.continuing_line")
# print(is_incomplete_sent, (join_font_spacing and not sentence_visual_end), curr_line.continuing_line)
# print("join_font_spacing:,", join_font_spacing)
is_incomplete = (
is_incomplete_sent
or (join_font_spacing and not sentence_visual_end)
or curr_line.continuing_line
)
# print("is_incomplete", is_incomplete)
has_overlap_with_min = (
compute_overlap(
curr_line.visual_line.start_x,
curr_line.visual_line.end_x,
prev_line.visual_line.start_x,
prev_line.visual_line.end_x,
divide_by_min=True,
)
> 0.7
)
is_below = curr_line.visual_line.start_y - prev_line.visual_line.start_y > 0
is_visually_apart = (has_overlap_with_min and not is_below) or (
not has_overlap_with_min and is_below
)
above_bold_below_not = (
prev_line.visual_line.fw >= 600.0 and curr_line.visual_line.fw <= 400.0
)
has_overlap_with_max = (
compute_overlap(
curr_line.visual_line.start_x,
curr_line.visual_line.end_x,
prev_line.visual_line.start_x,
prev_line.visual_line.end_x,
divide_by_min=False,
)
> 0.3
)
is_not_header_over_para = True
if (
above_bold_below_not
and not has_overlap_with_max
and prev_line.line_type == "header"
and not prev_line.incomplete_line
):
is_not_header_over_para = False
# print("header over para check")
# print("""above_bold_below_not
# and not has_overlap_with_max
# and prev_line.line_type == "header"
# """)
# print(above_bold_below_not)
# print(has_overlap_with_max, j)
# print(prev_line.line_type == "header")
# print()
# print(is_not_header_over_para)
###########
# List item
if line_list_check(prev_line, curr_line, page_blocks[-1]["list_char"]):
prev_line.line_type = "list_item"
curr_line.line_type = "list_item"
curr_line.is_list_item = True
# change prev_line to list item
if page_blocks[-1]["block_type"] != "list_item":
page_blocks[-1]["list_char"] = page_blocks[-1]["block_text"][0]
page_blocks[-1]["block_text"] = page_blocks[-1]["block_text"][
1:
].lstrip()
page_blocks[-1]["block_type"] = "list_item"
close_text_y = (
curr_line.visual_line.start_y
- curr_line.visual_line.mode_fs
- prev_line.visual_line.start_y
- prev_line.visual_line.mode_fs
) <= 0
aligned_text = curr_line.visual_line.start_x == prev_line.visual_line.start_x
title_text = False
if len(lines) < 10:
title_text = top_overlap == 1.0 and close_text_y and aligned_text
visual_header = visual_header_check(prev_line, curr_line, same_font)
list_item_rule = curr_line.has_list_char or (
curr_line.numbered_line
and not (
(prev_line.incomplete_line and curr_line.continuing_line)
or join_font_spacing
)
)
last_2_block_tr = False
if len(page_blocks) >= 2:
last_block_tr = (
page_blocks[-1]["block_type"] == "table_row"
and page_blocks[-2]["block_type"] == "table_row"
)
if not last_block_tr and curr_line.line_type == "para":
# check to join
if prev_line.incomplete_line and curr_line.continuing_line:
last_2_block_tr = True
no_space_join = prev_line.ends_with_period and curr_line.text[0] != " "
visual_header_by_stats = visual_header_from_stats(
prev_line,
curr_line,
page_stats,
)
header_join = False
common_list = curr_line.has_list_char or prev_line.has_list_char
if (
visual_header_by_stats
and curr_line.incomplete_line
and same_font
and not (prev_line.is_table_row or curr_line.is_table_row or common_list)
):
header_join = True
# print("LINEJOIN CHECK")
# print("positive\n", "*" * 10)
# print(f"\nsame_font:{same_font}",
# f"\nis_incomplete:{is_incomplete}",
# f"\nis_not_header_over_para:{is_not_header_over_para}")
# print("join_font_spacing", join_font_spacing)
# print("header join", header_join)
# print()
# print("negative\n", "*" * 10)
# print(f"\nis_visually_apart:{is_visually_apart}",
# f"\nshould_join_table(prev_line, curr_line): {should_join_table(prev_line, curr_line, ents_aligned)}",
# f"\ncurr_line.is_list_or_row:{curr_line.is_list_or_row}",
# f"\ncurr_line table {curr_line.line_type == 'table_row'}",
# f"\ncurr_line list {curr_line.is_list_item}",
# f"\nvisual_header {visual_header}",
# f'\nprev_line.line_type == "table_row", {prev_line.line_type == "table_row"}')
if (
same_font
and not should_join_table(prev_line, curr_line, ents_aligned)
and not (curr_line.line_type == "table_row" or list_item_rule)
and not (prev_line.line_type == "table_row" and not last_2_block_tr)
and is_incomplete
and not curr_next_visual_join # is_visually_apart
and not visual_header
or not check_parentheses(prev_line.text)
and is_not_header_over_para
and not no_space_join
or title_text
or header_join
):
# print("JOIN")
if not is_visually_apart and bottom_overlap < 0.5:
# this would signify end of paragraph
sentence_visual_end = True
else:
sentence_visual_end = False
if page_stats["n_lines"] <= 3:
page_blocks[-1]["block_type"] = "header"
elif (
not prev_line.line_type == "list_item"
): # and not curr_line.visual_line.is_header:
page_blocks[-1]["block_type"] = "para"
new_text = formatter.connect(
prev_line.text.rstrip(),
curr_line.text.lstrip(),
)
new_text_list = (
prev_line.visual_line.text_list + curr_line.visual_line.text_list
)
# print("Max ex min ex assignment")
max_x = max(prev_line.visual_line.max_x, curr_line.visual_line.max_x)
min_x = min(prev_line.visual_line.min_x, curr_line.visual_line.min_x)
prev_line_type = prev_line.line_type
page_blocks[-1]["block_text"] = new_text
prev_start_y = prev_line.visual_line.start_y
curr_start_y = curr_line.visual_line.start_y
prev_end_y = prev_line.visual_line.end_y
wrapped_page = prev_line.visual_line.wrapped_page
# pass the line parser attributes
prev_line = curr_line
# add appended text and text_list, preserve the line type
prev_line.text = new_text
prev_line.visual_line.start_y = prev_start_y
prev_line.visual_line.text_list = new_text_list
prev_line.line_type = prev_line_type
prev_line.visual_line.min_x = min_x
prev_line.visual_line.max_x = max_x
prev_line.visual_line.wrapped_page = wrapped_page
if curr_start_y < prev_end_y:
prev_line.visual_line.wrapped_page = True
# print(prev_start_y)
# print("Join")
# print()
# print("-" * 50)
# print()
# new block
else:
# print("NEW block")
# print("*" * 50)
if not is_visually_apart and bottom_overlap < 0.5:
# this would signify end of paragraph
sentence_visual_end = True
else:
sentence_visual_end = False
# print("-"*50)
colon_rule = (
prev_line.hit_colon and curr_line.hit_colon and prev_ents == curr_ents
)
# normal case
tab_check_join = {
prev_line.visual_line.tab_count_join,
prev_line.visual_line.tab_count,
} & {curr_line.visual_line.tab_count_join, curr_line.visual_line.tab_count}
tab_check = sum(tab_check_join) > 0
# print("-+" * 50)
# print("TAB POSITIONS")
# print(prev_line.text)
# print(prev_line.visual_line.start_x_list)
# print(prev_line.visual_line.start_x_list_single_ent)
# print(prev_line.visual_line.tab_count)
# print(prev_line.visual_line.tab_count_join)
#
# print(curr_line.text)
# print(curr_line.visual_line.start_x_list)
# print(curr_line.visual_line.start_x_list_single_ent)
# print(curr_line.visual_line.tab_count)
# print(curr_line.visual_line.tab_count_join)
# print("tabcheck", tab_check)
# print("ents_aligned", ents_aligned)
# print(prev_ents, curr_ents)
# print(curr_line.visual_line.text_list)
# print("-+" * 50)
if visual_header_by_stats and prev_line.line_type != "table_row":
page_blocks[-1]["block_type"] = "header"
elif (
colon_rule
and prev_ents == 1
and prev_line.line_type != "list_item"
and not (prev_line.incomplete_line and curr_line.continuing_line)
):
# print("Table Conversion")
# print()
# print("colon check")
# print(prev_line.text.split(":"))
# print(curr_line.text.split(":"))
# print("TR1")
new_text_list = prev_line.text.split(":")
new_text_list = [new_text_list[0] + ":", new_text_list[1:]]
page_blocks[-1]["block_type"] = "table_row"
page_blocks[-1]["block_list"]: new_text_list
if text_group_start:
text_group_start = False
text_group_start_idx = page_blocks[-1]["block_idx"]
page_blocks[-1]["text_group_start_idx"] = text_group_start_idx
curr_line.line_type = "table_row"
curr_line.is_list_or_row = True
# print("Table Conversion!")
# print(prev_ents, curr_ents)
# print(page_blocks[-1]["block_text"])
# print("TR3")
elif (
tab_check and ents_aligned and prev_line.line_type != "list_item"
) or (colon_rule and not prev_line.incomplete_line):
# print("Table Conversion")
# print(prev_ents, curr_ents)
# print(page_blocks[-1]["block_text"])
# print("TR2")
page_blocks[-1]["block_type"] = "table_row"
if text_group_start:
text_group_start = False
text_group_start_idx = page_blocks[-1]["block_idx"]
page_blocks[-1]["text_group_start_idx"] = text_group_start_idx
curr_line.line_type = "table_row"
else:
text_group_start = True
text_group_start_idx = -1
list_char = ""
if curr_line.line_type == "list_item":
list_char = curr_line.text[0]
curr_line.text = curr_line.text[1:].lstrip()
if curr_line.line_type == "header":
header_block_idx = block_idx
if (visual_header or visual_header_by_stats) and not (
prev_line.line_type == "list_item"
or prev_line.line_type == "numbered_list_item"
):
page_blocks[-1]["block_type"] = "header"
# print()
# print("*" * 40)
# print("NEW BLOCK")
# print()
# print("*" * 40)
# print(curr_line.line_type, curr_line.text)
# group attribute
if check_layout(prev_line, curr_line, prev_above_curr) or y_diff < 0:
group_id += 1
block = {
"block_idx": block_idx,
"block_text": curr_line.text,
"block_type": curr_line.line_type,
"header_block_idx": header_block_idx,
"block_group": [curr_line.visual_line.text_list],
"text_group_start_idx": text_group_start_idx,
"list_char": list_char,
"group_id": group_id,
"fs": curr_line.visual_line.start_fs,
"x": curr_line.visual_line.start_x,
"y": curr_line.visual_line.start_y,
"line": curr_line,
"block_list": curr_line.visual_line.text_list,
}
# This accounts for header false positives # TODO: improve the header code
prev_text = page_blocks[-1]["block_text"]
if page_blocks[-1]["block_type"] == "header" and (
len(sent_tokenize(prev_text)) >= 2 or len(prev_text.split()) > 16
):
page_blocks[-1]["block_type"] = "para"
prev_line = curr_line
block_idx += 1
page_blocks.append(block)
# when there are only a couple of blocks, title text may have been missed
if len(page_blocks) <= 2:
for idx, block in enumerate(page_blocks):
if "." not in block["block_text"] and len(block["block_text"].split()) < 10:
page_blocks[idx]["block_type"] = "header"
page_blocks = order_blocks(page_blocks)
return page_blocks, line_set
def clean_line(line):
line = line.replace("\n", " ")
line = line.replace("\t", " ")
line = line.strip()
return line
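# Illustrative example (added, not in the original source):
#   >>> clean_line("Total\trevenue\n")
#   'Total revenue'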
def fix_spaced_characters(line_text):
line_text = re.sub(r"\s+", "", line_text)
return su.segment(line_text)
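# Illustrative example (added; assumes the spell-checker segments the joined
# string back into words):
#   >>> fix_spaced_characters("e x e c u t i v e summary")
#   'executive summary'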
def connect(prev, curr):
has_space = prev.endswith(" ")
result = prev + ("" if has_space else " ") + curr
return result
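# Illustrative examples (added, not in the original source):
#   >>> connect("foo", "bar")
#   'foo bar'
#   >>> connect("foo ", "bar")
#   'foo bar'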
def get_numbers(line):
# test = re.compile(r"[0-9]+\.?[0-9]?")
regex = re.compile(r"\$?(\d*(\d\.?|\.\d{1,2}))$")
return regex.search(line)
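# Illustrative examples (added, not in the original source); the regex is
# anchored at the end of the line:
#   >>> get_numbers("Total $12.34").group()
#   '$12.34'
#   >>> get_numbers("no trailing number here") is None
#   True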
def check_block_join(prev_block, block):
prev_text = prev_block["block_text"]
curr_text = block["block_text"]
blocks_are_paras = (
prev_block["block_type"] == "para" and block["block_type"] == "para"
)
if len(prev_text.strip()) and len(curr_text.strip()) and blocks_are_paras:
prev_line = line_parser.Line(prev_block["block_text"])
curr_line = line_parser.Line(block["block_text"])
if prev_line.incomplete_line or curr_line.continuing_line:
return True
return False
def join_blocks(page_blocks, blocks):
prev_last_block = page_blocks[-1][-1]
# update page blocks and blocks
# prev_blocks = page_blocks[-1]
# last_prev_block = prev_blocks[-1]
# check to join last_prev_block with first blocks[0]
# if it's a join, pop the block and join, subtract block indexes
prev_last_block["block_text"] = (
prev_last_block["block_text"].rstrip() + " " + blocks[0]["block_text"].lstrip()
)
prev_last_block["block_list"].append(blocks[0]["block_list"])
# print(prev_block)
page_blocks[-1][-1] = prev_last_block
for block in blocks[1:]:
block["block_idx"] -= 1
return page_blocks, blocks[1:]
<fim_middle> | null | LINE_COMMENT | complete_current_header_empty_completion |
<filename>nlm-ingestor/nlm_ingestor/ingestor/processors.py<fim_prefix>import logging
import re
from collections import Counter
from collections import defaultdict
from . import formatter
from . import line_parser
from . import patterns
from nlm_ingestor.ingestor_utils import spell_utils
from nlm_ingestor.ingestor_utils.utils import sent_tokenize
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
su = spell_utils.SpellUtil()
def stem(line):
line = line.replace("'s", "")
line = line.replace("’s", "")
return line
def check_parentheses(text):
count = 0
for i in text:
if i == "(":
count += 1
elif i == ")":
count -= 1
return count == 0
def nlm_tokenize(line):
# print(line)
tokens = []
if not line:
line = ""
line = line.lower()
trans_table = line.maketrans("-/", " ")
line = line.translate(trans_table)
line = line.translate(str.maketrans("", "", "�\\(*,.?•\\➢ƒ–\\)'\"—"))
# line = patterns.num_unit.sub(r"100 \1", line)
line = patterns.num_unit.sub(r"", line)
line = stem(line)
words = line.split()
for word in words:
if (
not word.isdigit()
and not word.endswith("%")
and not word.startswith("$")
and not word.endswith("$")
):
tokens.append(word)
if len(tokens) == 0:
tokens.append("unknown")
return tokens
# make sure that there is at least one word which is greater than two characters
def find_floating_chars(line):
words = line.split(" ")
for word in words:
if len(word) > 2:
return False
return True
def is_table_row(line):
line = line_parser.Line(line)
return line.is_table_row
def should_skip(line, xml=False):
return len(line) <= 2 if not xml else len(line) == 0
def clean_lines(lines, xml=False):
result = []
running_line = ""
line_buffer = []
line_type = "para"
header_block_idx = -1
block_idx = 0
line_set = set()
for line_str in lines:
# print(line_str)
line_str = clean_line(line_str)
if should_skip(line_str, xml=xml):
continue
line_without_numbers = re.sub(r"\d+", "", line_str)
if line_without_numbers in line_set:
continue
else:
line_set.add(line_without_numbers)
curr_line = line_parser.Line(line_str)
# this converst strings like 'e x e c u t i v e summary' to 'executive summary'
if not xml and curr_line.has_spaced_characters:
line_str = fix_spaced_characters(line_str)
curr_line = line_parser.Line(line_str)
if len(line_buffer) > 0:
# find out if previous line was a discontinous line
prev_line = line_buffer[-1]
logger.debug("========")
logger.debug(f"{prev_line.incomplete_line} >> {prev_line.text} \n")
logger.debug(f"{curr_line.continuing_line} >> {curr_line.text} \n")
# keep connecting lines as long as they seem incomplete
is_incomplete = prev_line.incomplete_line or (
len(line_buffer) > 1 and not prev_line.ends_with_period
)
logger.debug(
f"incomplete: {is_incomplete}, is_list_or_row: {curr_line.is_list_or_row}, continuing_line: {curr_line.continuing_line}",
)
if (
is_incomplete
and not (curr_line.is_list_or_row or curr_line.line_type == "list_item")
) or curr_line.continuing_line:
logger.debug("connecting..")
running_line = formatter.connect(running_line, curr_line.text)
line_buffer.append(curr_line)
# if we are connecting lines, then this has to be a para unless it is a list_item, basically no headers
if not line_type == "list_item":
line_type = "para"
else: # commit the line and start a new line
# remove different types of bulletted list (for better formatting) but do not touch numbered line
logger.debug("starting new line..")
# if line_type == "list_item":
# running_line = running_line[1:<fim_suffix>].lstrip()
if line_type == "header":
header_block_idx = block_idx
block = {
"block_idx": block_idx,
"block_text": running_line,
"block_type": line_type,
"text_group_start_idx": -1,
"block_list": [],
"header_block_idx": header_block_idx,
"level": 0,
}
result.append(block)
block_idx = block_idx + 1
running_line = curr_line.text
line_buffer = [curr_line]
line_type = curr_line.line_type
logger.debug("========")
else:
running_line = curr_line.text
line_type = curr_line.line_type
line_buffer = [curr_line]
if line_type == "list_item" and running_line[0] in "�\\*,.?•\\➢ƒ–\\'\"—":
running_line = running_line[1:].lstrip()
block = {
"block_idx": block_idx,
"block_text": running_line,
"block_type": line_type,
"text_group_start_idx": -1,
"block_list": [],
"header_block_idx": header_block_idx,
"level": 0,
}
result.append(block)
return result
def line_list_check(prev_line, curr_line, list_char):
# if prev_line is list_item and list_char matches curr_line
if list_char == curr_line.text[0] and list_char not in ["”", "'", '"', "("]:
return True
# same char is alpha
if prev_line.text[0] == curr_line.text[0] and prev_line.text[0].isalpha():
if len(prev_line.text) >= 2 and prev_line.text[1].isupper():
# spell check first word
first_word = prev_line.text.split(" ")[0]
first_word = first_word.replace("'", "")
correct_word = su.segment(first_word)
if first_word[1:] == correct_word:
return True
# same char is not alpha but not digit
if prev_line.text[0] == curr_line.text[0] and not (
prev_line.text[0].isalpha()
or prev_line.text[0].isdigit()
or list_char not in ["”", "'", '"', "("]
):
return True
return False
def should_join_table(prev_line, curr_line, ents_aligned):
"""
Check if next line should be joined as a tr. This makes no assumption if the current line is a table
"""
# print()
# print("Checking to join tr", prev_line.visual_line.text_list, "\n", curr_line.visual_line.text_list)
# check list of spaced words
curr_line_ents = len(prev_line.visual_line.text_list)
next_line_ents = len(curr_line.visual_line.text_list)
ent_match = (
curr_line_ents == next_line_ents and curr_line_ents >= 2
) # tr should have at least two elements
# print("tab check", prev_line.visual_line.tab_count, curr_line.visual_line.tab_count)
tab_match = (
prev_line.visual_line.tab_count == curr_line.visual_line.tab_count
and curr_line.visual_line.tab_count > 0
)
# casing should also be the same
same_case = (
prev_line.text[0].islower() == curr_line.text[0].islower()
or prev_line.text[0].isupper() == curr_line.text[0].isupper()
)
colon_check = (
prev_line.hit_colon
and curr_line.hit_colon
and prev_line
and same_case
and not prev_line.incomplete_line
)
# if prev_line.hit_colon and curr_line.hit_colon:
# print()
# print("colon check")
# print(prev_line.visual_line.text_list)
# print(curr_line.visual_line.text_list)
# col_check
# print(tab_match, ent_match, colon_check)
tab_check = prev_line.visual_line.tab_count or curr_line.visual_line.tab_count
return (
(tab_match and ent_match)
or colon_check
or (ents_aligned and ent_match and tab_check)
)
def check_page_spacing(prev_line, curr_line, spacing_dict):
# print("^"*50)
# print("checking page stats")
# print(prev_line.visual_line.start_fs, prev_line.visual_line.end_fs, prev_line.text)
# print(curr_line.visual_line.start_fs, curr_line.visual_line.end_fs, curr_line.text)
# print()
diff_top = round(curr_line.visual_line.start_y - prev_line.visual_line.end_y)
# find best fs reference
prev_line_fs = {prev_line.visual_line.start_fs, prev_line.visual_line.end_fs}
curr_line_fs = {curr_line.visual_line.start_fs, curr_line.visual_line.end_fs}
same_fs = prev_line_fs.intersection(curr_line_fs)
fs = min(same_fs) if same_fs else curr_line.visual_line.start_fs
min_check = (
spacing_dict[(fs, diff_top - 1)] if (fs, diff_top - 1) in spacing_dict else None
)
max_check = (
spacing_dict[(fs, diff_top + 1)] if (fs, diff_top + 1) in spacing_dict else None
)
normal_check = (fs, diff_top) in spacing_dict and spacing_dict[(fs, diff_top)] > 3
if min_check or normal_check or max_check:
# get all fs in spacing dict
# see if the diff top is a min
# print("checking space dict")
distance_list = []
for val in spacing_dict:
if val[0] == fs and val[1] > 0 and spacing_dict[val] > 2:
distance_list.append((val, val[1]))
# print(distance_list)
val = min(distance_list) if len(distance_list) else []
if len(val):
join_fs, join_top = val[0]
if len(val):
join_fs, join_top = val[0]
if val[0] == (fs, diff_top): # or close
# print("SHOULDJOIN")
return True
elif (
join_fs == fs
and ((diff_top - 1) == join_top)
or ((diff_top + 1) == join_top)
):
return True
return False
def compute_overlap(
start_x0: float,
end_x0: float,
start_x1: float,
end_x1: float,
divide_by_min=True,
) -> float:
"""
Computes the % of intersection (overlap) of two lines w.r.t. the shortest line
"""
width_x0 = abs(end_x0 - start_x0)
width_x1 = abs(end_x1 - start_x1)
if start_x0 <= start_x1 <= end_x0:
intersect = min(abs(end_x0 - start_x1), width_x1)
elif start_x0 <= end_x1 <= end_x0:
intersect = min(abs(end_x1 - start_x0), width_x1)
elif start_x1 <= start_x0 <= end_x0 <= end_x1:
intersect = abs(end_x0 - start_x0)
else:
intersect = 0.0
if divide_by_min:
intersect /= min(width_x0, width_x1) + 1e-5
else:
intersect /= max(width_x0, width_x1) + 1e-5
return intersect
def compute_overlap_top_bottom(
start_x0: float,
end_x0: float,
start_x1: float,
end_x1: float,
) -> float:
"""
This is different from the above function.
Finds percentage overlap of top to bottom.
Score of 100% is possible doesn't reference the shortest line
"""
width_x1 = abs(end_x1 - start_x1)
if width_x1 == 0:
return 0.0
if start_x0 <= start_x1:
# measure from left to right
if end_x1 <= end_x0:
# if start and end both less, full in subset
return 1.0
return (end_x1 - start_x0) / width_x1
else:
# measure from bottom start
if end_x1 <= start_x0:
return 0.0
return (end_x1 - start_x0) / width_x1
def compute_bottom_top_overlap(start_x0, end_x0, start_x1, end_x1):
"""
This is different from the above function.
Finds percentage overlap of top to bottom.
Score of 100% is possible doesn't reference the shortest line
"""
# print(start_x0, end_x0)
# print(start_x1, end_x1)
if start_x0 == start_x1 and end_x0 != start_x0: # aligned with bottom line
# print()
# print("bottom overlap", (end_x1 - start_x1) / (end_x0 - start_x0))
return (end_x1 - start_x1) / (end_x0 - start_x0)
# other conditions
# elif start_x0 < start_x1 and end_x0 > end_x1: # to the left of bottom line
# return
# else: #to the right of bottom line
return 1.0
# header check for lines with similar font
# header check for lines with similar font
def visual_header_check(prev_line, curr_line, same_font):
# check top overlap (small) if the font size is bigger
# print()
# print("visual_header check:")
# print("prev", prev_line.text)
# print("checking", curr_line.text)
# top also has to be higher
# print("prev_line.visual_line.start_y, prev_line.visual_line.end_y")
# print(prev_line.visual_line.start_y, prev_line.visual_line.end_y)
# print(prev_line.visual_line.start_y, curr_line.visual_line.start_y)
if prev_line.visual_line.wrapped_page:
return False
if prev_line.visual_line.start_y < curr_line.visual_line.start_y:
prev_line_width = prev_line.visual_line.max_x - prev_line.visual_line.min_x
curr_line_width = curr_line.visual_line.max_x - curr_line.visual_line.min_x
# print("prev_line.visual_line.min_x, prev_line.visual_line.max_x, prev_line.visual_line.end_x")
# print(prev_line.visual_line.min_x, prev_line.visual_line.max_x, prev_line.visual_line.end_x)
# print("curr_line.visual_line.min_x, curr_line.visual_line.max_x")
# print(curr_line.visual_line.min_x, curr_line.visual_line.max_x)
# print("prev_line_width / curr_line_width")
# print(prev_line_width / curr_line_width)
# print("prev_line_width, curr_line_width")
# print(prev_line_width, curr_line_width)
if curr_line_width == 0:
return False
# print(round(prev_line.visual_line.min_x), round(curr_line.visual_line.min_x))
if round(prev_line.visual_line.min_x) == round(curr_line.visual_line.min_x):
if round(prev_line_width) == round(curr_line_width):
# print()
# print("NOT A HEADER1")
return False
offset = 0
# print(prev_line.visual_line.min_x, curr_line.visual_line.min_x)
# print(prev_line.visual_line.min_x <= curr_line.visual_line.min_x)
if prev_line.visual_line.min_x <= curr_line.visual_line.min_x:
offset = curr_line.visual_line.min_x - prev_line.visual_line.min_x # offset
# print("(prev_line_width - offset) / curr_line_width")
# print((prev_line_width - offset) / curr_line_width)
overlap_percentage = (prev_line_width - offset) / curr_line_width
different_font_style = (
prev_line.visual_line.fw != curr_line.visual_line.fw
or prev_line.visual_line[1] != curr_line.visual_line[1]
or prev_line.visual_line.fs > curr_line.visual_line.fs
)
if (
overlap_percentage < 0.3
or (different_font_style and overlap_percentage < 0.6)
or (prev_line.line_type == "header" and different_font_style)
# or (prev_line.is_header and different_font_style)
):
# print("HEADER INDENT", prev_line.is_header)
# print("overlap rule::", (prev_line_width - offset) / curr_line_width)
# print(True)
return True
# print(False)
# print()
# print("NOT A HEADER")
return False
def visual_header_from_stats(prev_line, curr_line, page_stats):
prev_fs = prev_line.visual_line.fs
curr_fs = curr_line.visual_line.fs
median_val = round(page_stats["median_fs"])
max_val = round(max(page_stats["fs_list"]))
max_val_diff = ((max_val - prev_fs) / max_val) < 0.2 if max_val != 0 else True
prev_fs_diff = round(prev_fs - median_val)
curr_fs_diff = (
round(curr_fs - median_val) if round(curr_fs - median_val) else 0.8
) # curr_fs is the median
varied_set = len(set(page_stats["fs_list"])) >= 4
rounded_fs_count = Counter([round(x, 3) for x in page_stats["fs_list"]])
unique_text = rounded_fs_count[round(prev_fs, 3)] / len(page_stats["fs_list"])
prev_curr_ratio_from_median = prev_fs_diff / curr_fs_diff
# print("prev_fs, curr_fs", prev_fs, curr_fs)
# print("unique text")
# print(rounded_fs_count[round(prev_fs, 3)], len(page_stats["fs_list"]) )
# print("visual_header check", len(set(page_stats["fs_list"])))
# print("varied_set", varied_set, "unique_text", unique_text)
# print(rounded_fs_count)
# print()
# close from max or far enough from median
bigger_text = max_val_diff or (
prev_curr_ratio_from_median > 2
) # TODO text must also be relatively uncommon
if varied_set and (unique_text <= 0.08):
if bigger_text and (prev_fs_diff > 1) and (prev_fs_diff - curr_fs_diff) > 0.3:
# print(max_val_diff)
# print(prev_fs, prev_line.text)
# print(curr_fs, curr_line.text)
# print()
return True
# header join
if bigger_text and curr_fs == prev_fs and (prev_fs_diff > 1):
# print(max_val_diff)
# print(prev_fs, prev_line.text)
# print(curr_fs, curr_line.text)
# print()
return True
return False
# def visual_clean_lines(lines, page_stats={}, page_info_dict={}):
# def visual_clean_lines(lines, page_stats={}, page_info_dict={}):
# def visual_clean_lines(lines, page_stats={}, page_info_dict={}):
def check_tr_alignment(prev_line, curr_line):
# print("-=" * 50)
# print("check_tr_alignment!")
# print(prev_line.text)
# print(curr_line.text)
# print()
prev_ents = len(prev_line.visual_line.text_list)
curr_ents = len(curr_line.visual_line.text_list)
prev_positions = prev_line.visual_line.start_x_list
curr_positions = curr_line.visual_line.start_x_list
prev_line_start_ents = prev_line.visual_line.start_x_list_single_ent
curr_line_start_ents = curr_line.visual_line.start_x_list_single_ent
# print(prev_line_start_ents)
# print(curr_line_start_ents)
same_ents = prev_ents > 1 and abs(prev_ents - curr_ents) <= 1
if len(prev_line_start_ents) == len(curr_line_start_ents):
prev_positions = prev_line_start_ents
curr_positions = curr_line_start_ents
if len(prev_line_start_ents) == len(curr_positions) and len(
prev_line_start_ents,
) != len(
prev_positions,
): # joined p_tags
prev_positions = prev_line_start_ents
if not same_ents:
# print("check_tr_alignment False1")
# print(prev_ents, curr_ents)
return False
# print("CHECKING POSITIONS")
# print(prev_positions)
# print(curr_positions)
for p_x, c_x in zip(prev_positions, curr_positions):
p_x = round(p_x)
c_x = round(c_x)
if abs(p_x - c_x) > 100:
# print("False")
# print("check_tr_alignment False3")
return False
# print("check_tr_alignment True")
return True
def check_layout(prev_line, curr_line, prev_above_curr):
prev_line_width = range(
int(prev_line.visual_line.min_x),
int(prev_line.visual_line.max_x),
)
# weird edge case
if not prev_line_width:
prev_line_width = range(
int(prev_line.visual_line.max_x),
int(prev_line.visual_line.min_x),
)
curr_line_width = range(
int(curr_line.visual_line.min_x),
int(curr_line.visual_line.max_x),
)
prev_line_width = set(prev_line_width)
prev_curr_overlap = prev_line_width.intersection(curr_line_width)
if prev_curr_overlap and not prev_above_curr:
# print(prev_line.text)
# print(curr_line.text)
# print("misplaced text group")
# print()
return True
return False
def order_blocks(blocks):
block_group_dict = defaultdict(list)
for idx, block in enumerate(blocks):
# print(idx, "block-group", block["group_id"], block["block_type"], block['block_text'])
group_id = block["group_id"]
block_group_dict[group_id].append(block)
block_group_list = [] # list that holds tuples (group_id, y_pos)
for block_group_id in block_group_dict:
block_group_list.append(
(block_group_id, block_group_dict[block_group_id][0]["y"]),
) # append starting y position of group
block_group_list = sorted(
block_group_list,
key=lambda x: x[1],
) # sort block groups by y position
# get list of ordered block group keys
ordered_blocks = []
for block_group_id, y in block_group_list:
ordered_blocks += block_group_dict[block_group_id]
# for b in original_blocks:
# re-index blocks and headers based off of new ordering
header_idx = 0
for idx, block in enumerate(ordered_blocks):
block["block_idx"] = idx
if block["block_type"] == "header":
header_idx = idx
ordered_blocks[idx]["header_block_idx"] = header_idx
return ordered_blocks
def visual_clean_lines(
lines,
page_stats={},
page_info_dict={},
page_idx=0,
line_set={},
):
page_blocks = []
header_block_idx = -1
block_idx = 0
# block_idx = page_idx
style_dict = {}
join_font_spacing = False
prev_line = None
text_list = []
prev_ents = 0
curr_ents = 0
is_incomplete = False
colon_rule = False
text_group_start = True
text_group_start_idx = 0
prev_line = None
next_line = None
# for idx, line in enumerate(lines[12:14]):
sentence_visual_end = False
group_id = 0
for idx, line in enumerate(lines):
# print(idx)
line_str, style_dict, text_list = (
line["text"],
line["style"],
line["text_list"],
)
line_str = " ".join(line_str.split())
if should_skip(line_str):
continue
if line_str in line_set:
continue
if len(line_str.split()) > 8:
line_set.add(line_str)
curr_line = line_parser.Line(
line_str=line_str,
style_dict=style_dict,
text_list=text_list,
page_details=page_stats,
)
if prev_line is None:
# initialize memory of previous line.
# this will update with join decisions
list_char = ""
if curr_line.line_type == "list_item":
list_char = curr_line.text[0]
curr_line.text = curr_line.text[1:].lstrip()
if curr_line.line_type == "header":
header_block_idx = block_idx
block = {
"block_idx": block_idx,
"block_text": curr_line.text,
"block_type": curr_line.line_type,
"header_block_idx": header_block_idx,
"block_group": [curr_line.visual_line.text_list],
"list_char": list_char,
"fs": curr_line.visual_line.start_fs,
"text_group_start_idx": text_group_start_idx,
"block_list": curr_line.visual_line.text_list,
"line": curr_line,
"y": curr_line.visual_line.start_y,
"group_id": group_id,
}
prev_line = curr_line
block_idx += 1
# if (idx <= 3) or (idx >= len(lines) - 3):
# line_without_numbers = re.sub(r"[^a-zA-Z]+", "", line_str).strip()
# if line_without_numbers:
# # track block_idx for de-duplication
# line_set[line_without_numbers].append((page_idx, block_idx))
page_blocks.append(block)
continue
# print("--" * 50)
# print(prev_line.line_type, "\n", prev_line.text)
# print(prev_ents)
# print(prev_line.visual_line.fw_list)
# print(prev_line.visual_line.font_family)
# print(prev_line.visual_line.fs, prev_line.visual_line.fw, "prev_line:", prev_line.line_type, prev_line.text)
# print(prev_line.visual_line.mode_fs)
# print(curr_line.line_type, "\n", curr_line.text)
# print(curr_ents)
# print()
# print(curr_line.visual_line.font_family)
# print(curr_line.visual_line.mode_fs)
# print(curr_line.visual_line.fs, curr_line.visual_line.fw, "curr_line:", curr_line.line_type, curr_line.text)
if (
len(prev_line.text) > 1
and len(curr_line.text) > 1
and prev_line.text[:2] == curr_line.text[:2]
and prev_line.text[1] == " "
and not (prev_line.text[0].isdigit() or curr_line.text[0].isdigit())
and not (prev_line.text[0].isalpha() or curr_line.text[0].isalpha())
):
curr_line.line_type = "list_item"
curr_line.is_list_item = True
curr_line.is_list_or_row = True
if page_blocks[-1]["block_type"] != "list_item":
page_blocks[-1]["block_type"] = "list_item"
page_blocks[-1]["list_char"] = page_blocks[-1]["block_text"][0]
page_blocks[-1]["block_text"] = page_blocks[-1]["block_text"][
1:
].lstrip()
same_start_fs = (
abs(prev_line.visual_line.start_fs - curr_line.visual_line.start_fs) < 0.5
)
same_end_fs = (
abs(prev_line.visual_line.end_fs - curr_line.visual_line.end_fs) < 0.5
)
same_end_start_fs = (
abs(prev_line.visual_line.end_fs - curr_line.visual_line.start_fs) < 0.5
)
        prev_above_curr = prev_line.visual_line.end_y < curr_line.visual_line.start_y
y_diff = curr_line.visual_line.start_y - prev_line.visual_line.start_y
top_overlap = compute_overlap_top_bottom(
start_x0=prev_line.visual_line.start_x,
end_x0=prev_line.visual_line.end_x,
start_x1=curr_line.visual_line.start_x,
end_x1=curr_line.visual_line.end_x,
)
bottom_overlap = compute_bottom_top_overlap(
start_x0=prev_line.visual_line.start_x,
end_x0=prev_line.visual_line.end_x,
start_x1=curr_line.visual_line.start_x,
end_x1=curr_line.visual_line.end_x,
)
        prev_overlap_curr = bool(bottom_overlap or top_overlap)
        use_visual_join = prev_above_curr and prev_overlap_curr
if not use_visual_join and prev_line.incomplete_line:
join_font_spacing = True
if not (prev_line.is_table_row or curr_line.is_table_row):
if page_stats["n_lines"] <= 3:
join_font_spacing = True
else:
join_font_spacing = check_page_spacing(
prev_line,
curr_line,
page_stats["fs_and_diff_next_y"],
)
# if the font is different and font-family is different
different_font_family = (
curr_line.visual_line.font_family != prev_line.visual_line.font_family
)
different_common_fs = (
prev_line.visual_line.mode_fs != curr_line.visual_line.mode_fs
and prev_line.visual_line.start_fs != curr_line.visual_line.start_fs
)
different_font = (
different_font_family and different_common_fs and not join_font_spacing
)
# start and end characters are same font or the mode of fonts of both lines is the same
same_font = (
(prev_line.visual_line.fs == curr_line.visual_line.fs)
or (same_start_fs and same_end_fs)
or same_end_start_fs
or prev_line.visual_line.mode_fs == curr_line.visual_line.mode_fs
) and not different_font
prev_ents = (
len(prev_line.visual_line.text_list)
if not prev_line.line_type == "list_item"
else 0
)
curr_ents = (
len(curr_line.visual_line.text_list) if not curr_line.is_list_item else 0
)
ents_aligned = check_tr_alignment(prev_line, curr_line)
        # grouping: (incomplete and no period) or a trailing comma
        is_incomplete_sent = (
            (prev_line.incomplete_line and not prev_line.ends_with_period)
            or prev_line.ends_with_comma
        )
# logic using line after curr
if idx + 1 < len(lines):
            # this is inefficient, as line_parser runs twice per line:
            # once when it is next_line and once when it becomes curr_line
            next_line = lines[idx + 1]
next_line_str, next_style_dict, next_text_list = (
next_line["text"],
next_line["style"],
next_line["text_list"],
)
next_line = line_parser.Line(
line_str=next_line_str,
style_dict=next_style_dict,
text_list=next_text_list,
page_details=page_stats,
)
# if the last line was not a table, check if the next line is a table to avoid single tr
if prev_line.line_type != "table_row" and not ents_aligned:
# check if the next line is a table and matches curr_line
next_line_tr = next_line.line_type == "table_row" or should_join_table(
curr_line,
next_line,
False,
)
if not next_line_tr and curr_line.line_type == "table_row":
curr_line.line_type = "para"
        # if the next line is visually joinable with curr_line but prev and
        # curr are not (judged by x-span overlap with prev above), don't join
        # curr_line to prev_line; otherwise this rule is ignored
prev_not_above_next = (
next_line and prev_line.visual_line.start_y > next_line.visual_line.start_y
)
next_line_join = False
if next_line and check_layout(prev_line, next_line, prev_not_above_next):
next_line_join = check_page_spacing(
curr_line,
next_line,
page_stats["fs_and_diff_next_y"],
)
# if the prev line is not visually joinable and the curr_next is
# make sure the prev_line doesn't join the curr_line
curr_next_visual_join = not join_font_spacing and next_line_join
is_incomplete = (
is_incomplete_sent
or (join_font_spacing and not sentence_visual_end)
or curr_line.continuing_line
)
has_overlap_with_min = (
compute_overlap(
curr_line.visual_line.start_x,
curr_line.visual_line.end_x,
prev_line.visual_line.start_x,
prev_line.visual_line.end_x,
divide_by_min=True,
)
> 0.7
)
is_below = curr_line.visual_line.start_y - prev_line.visual_line.start_y > 0
is_visually_apart = (has_overlap_with_min and not is_below) or (
not has_overlap_with_min and is_below
)
above_bold_below_not = (
prev_line.visual_line.fw >= 600.0 and curr_line.visual_line.fw <= 400.0
)
has_overlap_with_max = (
compute_overlap(
curr_line.visual_line.start_x,
curr_line.visual_line.end_x,
prev_line.visual_line.start_x,
prev_line.visual_line.end_x,
divide_by_min=False,
)
> 0.3
)
is_not_header_over_para = True
if (
above_bold_below_not
and not has_overlap_with_max
and prev_line.line_type == "header"
and not prev_line.incomplete_line
):
is_not_header_over_para = False
###########
# List item
if line_list_check(prev_line, curr_line, page_blocks[-1]["list_char"]):
prev_line.line_type = "list_item"
curr_line.line_type = "list_item"
curr_line.is_list_item = True
# change prev_line to list item
if page_blocks[-1]["block_type"] != "list_item":
page_blocks[-1]["list_char"] = page_blocks[-1]["block_text"][0]
page_blocks[-1]["block_text"] = page_blocks[-1]["block_text"][
1:
].lstrip()
page_blocks[-1]["block_type"] = "list_item"
close_text_y = (
curr_line.visual_line.start_y
- curr_line.visual_line.mode_fs
- prev_line.visual_line.start_y
- prev_line.visual_line.mode_fs
) <= 0
aligned_text = curr_line.visual_line.start_x == prev_line.visual_line.start_x
title_text = False
if len(lines) < 10:
title_text = top_overlap == 1.0 and close_text_y and aligned_text
visual_header = visual_header_check(prev_line, curr_line, same_font)
list_item_rule = curr_line.has_list_char or (
curr_line.numbered_line
and not (
(prev_line.incomplete_line and curr_line.continuing_line)
or join_font_spacing
)
)
last_2_block_tr = False
if len(page_blocks) >= 2:
last_block_tr = (
page_blocks[-1]["block_type"] == "table_row"
and page_blocks[-2]["block_type"] == "table_row"
)
if not last_block_tr and curr_line.line_type == "para":
# check to join
if prev_line.incomplete_line and curr_line.continuing_line:
last_2_block_tr = True
no_space_join = prev_line.ends_with_period and curr_line.text[0] != " "
visual_header_by_stats = visual_header_from_stats(
prev_line,
curr_line,
page_stats,
)
header_join = False
common_list = curr_line.has_list_char or prev_line.has_list_char
if (
visual_header_by_stats
and curr_line.incomplete_line
and same_font
and not (prev_line.is_table_row or curr_line.is_table_row or common_list)
):
header_join = True
        # join when all the same-font/incompleteness checks pass; or when
        # prev_line has an unbalanced parenthesis and the header/no-space
        # guards allow it; or on the title-text / header-join special cases
        if (
            (
                same_font
                and not should_join_table(prev_line, curr_line, ents_aligned)
                and not (curr_line.line_type == "table_row" or list_item_rule)
                and not (prev_line.line_type == "table_row" and not last_2_block_tr)
                and is_incomplete
                and not curr_next_visual_join  # is_visually_apart
                and not visual_header
            )
            or (
                not check_parentheses(prev_line.text)
                and is_not_header_over_para
                and not no_space_join
            )
            or title_text
            or header_join
        ):
            # weak bottom overlap without visual separation signals the end of
            # a paragraph
            sentence_visual_end = not is_visually_apart and bottom_overlap < 0.5
if page_stats["n_lines"] <= 3:
page_blocks[-1]["block_type"] = "header"
elif (
not prev_line.line_type == "list_item"
): # and not curr_line.visual_line.is_header:
page_blocks[-1]["block_type"] = "para"
new_text = formatter.connect(
prev_line.text.rstrip(),
curr_line.text.lstrip(),
)
new_text_list = (
prev_line.visual_line.text_list + curr_line.visual_line.text_list
)
            # widen the joined line's extents to cover both lines
            max_x = max(prev_line.visual_line.max_x, curr_line.visual_line.max_x)
            min_x = min(prev_line.visual_line.min_x, curr_line.visual_line.min_x)
prev_line_type = prev_line.line_type
page_blocks[-1]["block_text"] = new_text
prev_start_y = prev_line.visual_line.start_y
curr_start_y = curr_line.visual_line.start_y
prev_end_y = prev_line.visual_line.end_y
wrapped_page = prev_line.visual_line.wrapped_page
# pass the line parser attributes
prev_line = curr_line
# add appended text and text_list, preserve the line type
prev_line.text = new_text
prev_line.visual_line.start_y = prev_start_y
prev_line.visual_line.text_list = new_text_list
prev_line.line_type = prev_line_type
prev_line.visual_line.min_x = min_x
prev_line.visual_line.max_x = max_x
prev_line.visual_line.wrapped_page = wrapped_page
if curr_start_y < prev_end_y:
prev_line.visual_line.wrapped_page = True
# new block
else:
            # weak bottom overlap without visual separation signals the end of
            # a paragraph
            sentence_visual_end = not is_visually_apart and bottom_overlap < 0.5
colon_rule = (
prev_line.hit_colon and curr_line.hit_colon and prev_ents == curr_ents
)
# normal case
tab_check_join = {
prev_line.visual_line.tab_count_join,
prev_line.visual_line.tab_count,
} & {curr_line.visual_line.tab_count_join, curr_line.visual_line.tab_count}
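            # tab_check_join is the set of tab counts the two lines share; a
            # common non-zero tab count is evidence of columnar, table-like text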
tab_check = sum(tab_check_join) > 0
if visual_header_by_stats and prev_line.line_type != "table_row":
page_blocks[-1]["block_type"] = "header"
elif (
colon_rule
and prev_ents == 1
and prev_line.line_type != "list_item"
and not (prev_line.incomplete_line and curr_line.continuing_line)
):
                # split "key: value" into a two-cell row; extra colons stay in
                # the value cell
                parts = prev_line.text.split(":")
                new_text_list = [parts[0] + ":", ":".join(parts[1:]).strip()]
                page_blocks[-1]["block_type"] = "table_row"
                page_blocks[-1]["block_list"] = new_text_list
if text_group_start:
text_group_start = False
text_group_start_idx = page_blocks[-1]["block_idx"]
page_blocks[-1]["text_group_start_idx"] = text_group_start_idx
curr_line.line_type = "table_row"
curr_line.is_list_or_row = True
elif (
tab_check and ents_aligned and prev_line.line_type != "list_item"
) or (colon_rule and not prev_line.incomplete_line):
page_blocks[-1]["block_type"] = "table_row"
if text_group_start:
text_group_start = False
text_group_start_idx = page_blocks[-1]["block_idx"]
page_blocks[-1]["text_group_start_idx"] = text_group_start_idx
curr_line.line_type = "table_row"
else:
text_group_start = True
text_group_start_idx = -1
list_char = ""
if curr_line.line_type == "list_item":
list_char = curr_line.text[0]
curr_line.text = curr_line.text[1:].lstrip()
if curr_line.line_type == "header":
header_block_idx = block_idx
if (visual_header or visual_header_by_stats) and not (
prev_line.line_type == "list_item"
or prev_line.line_type == "numbered_list_item"
):
page_blocks[-1]["block_type"] = "header"
            # start a new visual group when the layout changes or the y position
            # jumps back up (e.g. a new column or region)
if check_layout(prev_line, curr_line, prev_above_curr) or y_diff < 0:
group_id += 1
block = {
"block_idx": block_idx,
"block_text": curr_line.text,
"block_type": curr_line.line_type,
"header_block_idx": header_block_idx,
"block_group": [curr_line.visual_line.text_list],
"text_group_start_idx": text_group_start_idx,
"list_char": list_char,
"group_id": group_id,
"fs": curr_line.visual_line.start_fs,
"x": curr_line.visual_line.start_x,
"y": curr_line.visual_line.start_y,
"line": curr_line,
"block_list": curr_line.visual_line.text_list,
}
            # demote false-positive headers: a real header should not contain
            # multiple sentences or run past ~16 words (TODO: improve header code)
prev_text = page_blocks[-1]["block_text"]
if page_blocks[-1]["block_type"] == "header" and (
len(sent_tokenize(prev_text)) >= 2 or len(prev_text.split()) > 16
):
page_blocks[-1]["block_type"] = "para"
prev_line = curr_line
block_idx += 1
page_blocks.append(block)
    # very few blocks on the page: short, period-free blocks are likely
    # missed title text
if len(page_blocks) <= 2:
for idx, block in enumerate(page_blocks):
if "." not in block["block_text"] and len(block["block_text"].split()) < 10:
page_blocks[idx]["block_type"] = "header"
page_blocks = order_blocks(page_blocks)
return page_blocks, line_set
def clean_line(line):
line = line.replace("\n", " ")
line = line.replace("\t", " ")
line = line.strip()
return line
def fix_spaced_characters(line_text):
line_text = re.sub(r"\s+", "", line_text)
return su.segment(line_text)
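# Example (illustrative): a spaced-out heading such as
#   fix_spaced_characters("e x e c u t i v e s u m m a r y")
# collapses to "executive summary", assuming the spell-utils segmenter can
# recover the word boundaries.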
def connect(prev, curr):
has_space = prev.endswith(" ")
result = prev + ("" if has_space else " ") + curr
return result
def get_numbers(line):
regex = re.compile(r"\$?(\d*(\d\.?|\.\d{1,2}))$")
return regex.search(line)
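# Examples (illustrative) of the trailing-number match above:
#   get_numbers("total 12.34")  -> matches "12.34"
#   get_numbers("cost $45")     -> matches "$45"
#   get_numbers("see note 3a")  -> None (the line must end in the number)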
def check_block_join(prev_block, block):
prev_text = prev_block["block_text"]
curr_text = block["block_text"]
blocks_are_paras = (
prev_block["block_type"] == "para" and block["block_type"] == "para"
)
if len(prev_text.strip()) and len(curr_text.strip()) and blocks_are_paras:
prev_line = line_parser.Line(prev_block["block_text"])
curr_line = line_parser.Line(block["block_text"])
if prev_line.incomplete_line or curr_line.continuing_line:
return True
return False
def join_blocks(page_blocks, blocks):
prev_last_block = page_blocks[-1][-1]
    # join the last block of the previous page with the first block of the
    # current batch, then shift the remaining block indexes down by one
prev_last_block["block_text"] = (
prev_last_block["block_text"].rstrip() + " " + blocks[0]["block_text"].lstrip()
)
prev_last_block["block_list"].append(blocks[0]["block_list"])
page_blocks[-1][-1] = prev_last_block
for block in blocks[1:]:
block["block_idx"] -= 1
return page_blocks, blocks[1:]
<fim_middle> | null | LINE_COMMENT | complete_current_header_empty_completion |
<filename>nlm-ingestor/nlm_ingestor/ingestor_utils/utils.py<fim_prefix>import json
import re
import numpy as np
from nltk import load
from nltk import PunktSentenceTokenizer
nltk_abbs = load("tokenizers/punkt/{}.pickle".format("english"))._params.abbrev_types
class NpEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
if isinstance(obj, np.floating):
return float(obj)
if isinstance(obj, np.ndarray):
return obj.tolist()
return super(NpEncoder, self).default(obj)
nlm_abbs = {
"u.s",
"u.s.a",
"n.w",
"p.o",
"po",
"st",
"ave",
"blvd",
"ctr",
"cir",
"ct",
"dr",
"mtn",
"apt",
"hwy",
"esq",
"fig",
"no",
"sec",
"n.a",
"s.a.b",
"non-u.s",
"cap",
'u.s.c',
"ste",
}
nlm_special_abbs = {
"inc",
}
abbs = nltk_abbs | nlm_abbs
nltk_tokenzier = PunktSentenceTokenizer()
rules = []
for abb in abbs:
    # match the abbreviation (with its trailing period) at the start of the
    # text; re.escape keeps dots inside e.g. "u.s" from matching any character
    pattern = fr"^{re.escape(abb)}\.\s"
    replaced = f"{abb}_ "
    # case-insensitive replacement
    rule = re.compile(pattern, re.IGNORECASE)
    rules.append((rule, replaced))
    # match the abbreviation as a token inside the sentence
    pattern = fr"\s{re.escape(abb)}\.\s"
    replaced = f" {abb}_ "
    rule = re.compile(pattern, re.IGNORECASE)
    rules.append((rule, replaced))
for abb in nlm_special_abbs:
pattern = fr"{abb}\."
replaced = f"{abb}_"
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match content inside brackets
# (?<=\() ==> starts with "("
# ([^)]+) ==> repeat not ")"
# (?=\)) ==> ends with ")"
bracket_rule = re.compile(r"(?<=\()([^)]+)(?=\))")
space_rule = re.compile(r"\s([.'](?:\s|$|\D))", re.IGNORECASE) # Remove any space between punctuations (.')
quotation_pattern = re.compile(r'[”“"‘’\']')
def sent_tokenize(org_texts):
if not org_texts:
return org_texts
sents = []
# in case org_texts has \n, break it into multiple paragraph
# edge case for html and markdown
for org_text in org_texts.split("\n"):
org_text = space_rule.sub(r'\1', org_text)
modified_text = re.sub(r'^([.,?!]\s+)+', "", org_text) # To handle bug https://github.com/nltk/nltk/issues/2925
orig_offset = abs(len(org_text) - len(modified_text))
# do not break b<fim_suffix>racket
for span_group in bracket_rule.finditer(modified_text):
start_byte, end_byte = span_group.span()
span = modified_text[start_byte:end_byte]
# skip this logic when span is too big? disabled for now
# if len(span.split()) >= 10:
# continue
modified_text = modified_text.replace(
f"({span})", f"_{span.replace('.','_')}_",
)
for rule, replaced in rules:
modified_text = rule.sub(replaced, modified_text)
# Normalize all the quotation.
modified_text = quotation_pattern.sub("\"", modified_text)
modified_sents = nltk_tokenzier.tokenize(modified_text)
offset = orig_offset
sent_idx = 0
while offset < len(modified_text) and sent_idx < len(modified_sents):
if modified_text[offset] == " ":
offset += 1
continue
# cut org_text based on lengths of modified_sent
modified_sent = modified_sents[sent_idx]
sents.append(org_text[offset: offset + len(modified_sent)])
offset += len(modified_sent)
sent_idx += 1
if len(sents) >= 2 and re.match(r"^.\.$", sents[0]):
sents[1] = sents[0] + " " + sents[1]
sents = sents[1:]
return sents
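# Illustrative usage sketch (exact splits depend on the loaded punkt model):
def _sent_tokenize_example():
    text = "Mr. Smith arrived at 5 p.m. He left St. James Park after dinner."
    # abbreviations such as "Mr." and "St." are masked before tokenizing,
    # so they do not trigger spurious sentence breaks
    return sent_tokenize(text)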
def divide_list_into_chunks(lst, n):
    # yield successive n-sized chunks of lst
for i in range(0, len(lst), n):
yield lst[i : i + n]
def normalize(X):
norms = np.einsum("ij,ij->i", X, X)
np.sqrt(norms, norms)
X /= norms[:, np.newaxis]
return X
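# Note on normalize: np.einsum("ij,ij->i", X, X) gives each row's squared L2
# norm, np.sqrt(norms, norms) takes the square root in place, and the final
# division rescales every row to unit length; X itself is modified in place.
# Minimal example (illustrative):
#   normalize(np.array([[3.0, 4.0]]))  # -> array([[0.6, 0.8]])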
def detect_block_center_aligned(block, page_width):
center_location = block["box_style"][1] + block["box_style"][3] / 2
center_aligned = abs(center_location - page_width / 2) < page_width * 0.01
width_check = block["box_style"][3] * 2 < page_width
return center_aligned and width_check
def detect_block_center_of_page(block, page_height):
bottom = block["box_style"][0] + block["box_style"][4]
center_of_page = (page_height / 3) <= bottom <= ((2 * page_height) / 3)
return center_of_page
def check_char_is_word_boundary(c):
if c.isalnum():
return False
if c in ['-', '_']:
return False
return True
def blocks_to_sents(blocks, flatten_merged_table=False, debug=False):
block_texts = []
block_info = []
header_block_idx = -1
header_match_idx = -1
header_match_idx_offset = -1
header_block_text = ""
is_rendering_table = False
is_rendering_merged_cells = False
table_idx = 0
levels = []
prev_header = None
block_idx = 0
for block_idx, block in enumerate(blocks):
block_type = block["block_type"]
if block_type == "header":
if debug:
print("---", block["level"], block["block_text"])
header_block_text = block["block_text"]
header_block_idx = block["block_idx"]
header_match_idx = header_match_idx_offset + 1
if prev_header and block["level"] <= prev_header['level'] and len(levels) > 0:
while len(levels) > 0 and levels[-1]["level"] >= block["level"]:
if debug:
print("<<", levels[-1]["level"], levels[-1]["block_text"])
levels.pop(-1)
if debug:
print(">>", block["block_text"])
levels.append(block)
prev_header = block
if debug:
print("-", [str(level['level']) + "-" + level['block_text'] for level in levels])
block["header_text"] = header_block_text
block["header_block_idx"] = header_block_idx
block["header_match_idx"] = header_match_idx
block["block_idx"] = block_idx
level_chain = []
for level in levels:
level_chain.append({"block_idx": level["block_idx"], "block_text": level["block_text"]})
# remove a level for header
if block_type == "header":
level_chain = level_chain[:-1]
level_chain.reverse()
block["level_chain"] = level_chain
# if block_type == "header" or block_type == "table_row":
if (
block_type == "header"
and not is_rendering_table and 'is_table_start' not in block
):
block_texts.append(block["block_text"])
# append text from next block to header block
# TODO: something happened here, it messed up the match_text
# if block_type == "header" and block_idx + 1 < len(blocks):
# block[
# "block_text"
# ] += blocks[block_idx+1]['block_text']
block_info.append(block)
header_match_idx_offset += 1
elif (
block_type == "list_item" or block_type == "para" or block_type == "numbered_list_item"
) and not is_rendering_table:
block_sents = block["block_sents"]
header_match_idx_offset += len(block_sents)
for sent in block_sents:
block_texts.append(sent)
block_info.append(block)
elif 'is_table_start' in block:
is_rendering_table = True
if 'has_merged_cells' in block:
is_rendering_merged_cells = True
elif 'is_table_start' not in block and not is_rendering_table and block_type == "table_row":
block_info.append(block)
block_texts.append(block["block_text"])
header_match_idx_offset += 1
if is_rendering_table:
if is_rendering_merged_cells and "effective_para" in block and flatten_merged_table:
eff_header_block = block["effective_header"]
eff_para_block = block["effective_para"]
eff_header_block["header_text"] = block["header_text"]
eff_header_block["header_block_idx"] = block["block_idx"]
eff_header_block["header_match_idx"] = header_match_idx_offset + 1
eff_header_block["level"] = block["level"] + 1
eff_header_block["level_chain"] = block["level_chain"]
eff_para_block["header_block_idx"] = block["block_idx"]
eff_para_block["header_match_idx"] = header_match_idx_offset + 1
eff_para_block["level"] = block["level"] + 2
eff_para_block["level_chain"] = [
{
"block_idx": eff_header_block["block_idx"],
"block_text": eff_header_block["block_text"],
},
] + eff_header_block["level_chain"]
header_match_idx_offset += 1
block_info.append(block["effective_header"])
block_texts.append(block["effective_header"]["block_text"])
for sent in block["effective_para"]["block_sents"]:
block_texts.append(sent)
block_info.append(block["effective_para"])
header_match_idx_offset += len(block["effective_para"]["block_sents"])
else:
block["table_idx"] = table_idx
block_info.append(block)
block_texts.append(block["block_text"])
header_match_idx_offset += 1
if 'is_table_end' in block:
is_rendering_table = False
table_idx += 1
return block_texts, block_info
def get_block_texts(blocks):
block_texts = []
block_info = []
for block in blocks:
block_type = block["block_type"]
if (
block_type == "list_item"
or block_type == "para"
or block_type == "numbered_list_item"
or block_type == "header"
):
block_texts.append(block["block_text"])
block_info.append(block)
return block_texts, block_info<fim_middle> | null | LINE_COMMENT | complete_current_header_empty_completion |
<filename>nlm-ingestor/nlm_ingestor/ingestor_utils/utils.py<fim_prefix>import json
import re
import numpy as np
from nltk import load
from nltk import PunktSentenceTokenizer
nltk_abbs = load("tokenizers/punkt/{}.pickle".format("english"))._params.abbrev_types
class NpEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
if isinstance(obj, np.floating):
return float(obj)
if isinstance(obj, np.ndarray):
return obj.tolist()
return super(NpEncoder, self).default(obj)
nlm_abbs = {
"u.s",
"u.s.a",
"n.w",
"p.o",
"po",
"st",
"ave",
"blvd",
"ctr",
"cir",
"ct",
"dr",
"mtn",
"apt",
"hwy",
"esq",
"fig",
"no",
"sec",
"n.a",
"s.a.b",
"non-u.s",
"cap",
'u.s.c',
"ste",
}
nlm_special_abbs = {
"inc",
}
abbs = nltk_abbs | nlm_abbs
nltk_tokenzier = PunktSentenceTokenizer()
rules = []
for abb in abbs:
    # match the abbreviation (with its trailing period) at the start of the
    # text; re.escape keeps dots inside e.g. "u.s" from matching any character
    pattern = fr"^{re.escape(abb)}\.\s"
    replaced = f"{abb}_ "
    # case-insensitive replacement
    rule = re.compile(pattern, re.IGNORECASE)
    rules.append((rule, replaced))
    # match the abbreviation as a token inside the sentence
    pattern = fr"\s{re.escape(abb)}\.\s"
    replaced = f" {abb}_ "
    rule = re.compile(pattern, re.IGNORECASE)
    rules.append((rule, replaced))
for abb in nlm_special_abbs:
pattern = fr"{abb}\."
replaced = f"{abb}_"
rule = re.compile(pattern, re.IGNORECASE)
rules.append((rule, replaced))
# match content inside brackets
# (?<=\() ==> starts with "("
# ([^)]+) ==> repeat not ")"
# (?=\)) ==> ends with ")"
bracket_rule = re.compile(r"(?<=\()([^)]+)(?=\))")
space_rule = re.compile(r"\s([.'](?:\s|$|\D))", re.IGNORECASE) # Remove any space between punctuations (.')
quotation_pattern = re.compile(r'[”“"‘’\']')
def sent_tokenize(org_texts):
if not org_texts:
return org_texts
sents = []
# in case org_texts has \n, break it into multiple paragraph
# edge case for html and markdown
for org_text in org_texts.split("\n"):
org_text = space_rule.sub(r'\1', org_text)
modified_text = re.sub(r'^([.,?!]\s+)+', "", org_text) # To handle bug https://github.com/nltk/nltk/issues/2925
orig_offset = abs(len(org_text) - len(modified_text))
# do not break bracket
for span_group in bracket_rule.finditer(modified_text):
start_byte, end_byte = span_group.span()
span = modified_text[start_byte:end_byte]
# skip this logic when span is too big? disabled for now
# if len(span.split()) >= 10:
# continue
modified_text = modified_text.replace(
f"({span})", f"_{span.replace('.','_')}_",
)
for rule, replaced in rules:
modified_text = rule.sub(replaced, modified_text)
# Normalize all the quotation.
modified_text = quotation_pattern.sub("\"", modified_text)
modified_sents = nltk_tokenzier.tokenize(modified_text)
offset = orig_offset
sent_idx = 0
while offset < len(modified_text) and sent_idx < len(mod<fim_suffix>ified_sents):
if modified_text[offset] == " ":
offset += 1
continue
# cut org_text based on lengths of modified_sent
modified_sent = modified_sents[sent_idx]
sents.append(org_text[offset: offset + len(modified_sent)])
offset += len(modified_sent)
sent_idx += 1
if len(sents) >= 2 and re.match(r"^.\.$", sents[0]):
sents[1] = sents[0] + " " + sents[1]
sents = sents[1:]
return sents
def divide_list_into_chunks(lst, n):
    # yield successive n-sized chunks of lst
for i in range(0, len(lst), n):
yield lst[i : i + n]
def normalize(X):
norms = np.einsum("ij,ij->i", X, X)
np.sqrt(norms, norms)
X /= norms[:, np.newaxis]
return X
def detect_block_center_aligned(block, page_width):
center_location = block["box_style"][1] + block["box_style"][3] / 2
center_aligned = abs(center_location - page_width / 2) < page_width * 0.01
width_check = block["box_style"][3] * 2 < page_width
return center_aligned and width_check
def detect_block_center_of_page(block, page_height):
bottom = block["box_style"][0] + block["box_style"][4]
center_of_page = (page_height / 3) <= bottom <= ((2 * page_height) / 3)
return center_of_page
def check_char_is_word_boundary(c):
if c.isalnum():
return False
if c in ['-', '_']:
return False
return True
def blocks_to_sents(blocks, flatten_merged_table=False, debug=False):
block_texts = []
block_info = []
header_block_idx = -1
header_match_idx = -1
header_match_idx_offset = -1
header_block_text = ""
is_rendering_table = False
is_rendering_merged_cells = False
table_idx = 0
levels = []
prev_header = None
block_idx = 0
for block_idx, block in enumerate(blocks):
block_type = block["block_type"]
if block_type == "header":
if debug:
print("---", block["level"], block["block_text"])
header_block_text = block["block_text"]
header_block_idx = block["block_idx"]
header_match_idx = header_match_idx_offset + 1
if prev_header and block["level"] <= prev_header['level'] and len(levels) > 0:
while len(levels) > 0 and levels[-1]["level"] >= block["level"]:
if debug:
print("<<", levels[-1]["level"], levels[-1]["block_text"])
levels.pop(-1)
if debug:
print(">>", block["block_text"])
levels.append(block)
prev_header = block
if debug:
print("-", [str(level['level']) + "-" + level['block_text'] for level in levels])
block["header_text"] = header_block_text
block["header_block_idx"] = header_block_idx
block["header_match_idx"] = header_match_idx
block["block_idx"] = block_idx
level_chain = []
for level in levels:
level_chain.append({"block_idx": level["block_idx"], "block_text": level["block_text"]})
# remove a level for header
if block_type == "header":
level_chain = level_chain[:-1]
level_chain.reverse()
block["level_chain"] = level_chain
# if block_type == "header" or block_type == "table_row":
if (
block_type == "header"
and not is_rendering_table and 'is_table_start' not in block
):
block_texts.append(block["block_text"])
# append text from next block to header block
# TODO: something happened here, it messed up the match_text
# if block_type == "header" and block_idx + 1 < len(blocks):
# block[
# "block_text"
# ] += blocks[block_idx+1]['block_text']
block_info.append(block)
header_match_idx_offset += 1
elif (
block_type == "list_item" or block_type == "para" or block_type == "numbered_list_item"
) and not is_rendering_table:
block_sents = block["block_sents"]
header_match_idx_offset += len(block_sents)
for sent in block_sents:
block_texts.append(sent)
block_info.append(block)
elif 'is_table_start' in block:
is_rendering_table = True
if 'has_merged_cells' in block:
is_rendering_merged_cells = True
elif 'is_table_start' not in block and not is_rendering_table and block_type == "table_row":
block_info.append(block)
block_texts.append(block["block_text"])
header_match_idx_offset += 1
if is_rendering_table:
if is_rendering_merged_cells and "effective_para" in block and flatten_merged_table:
eff_header_block = block["effective_header"]
eff_para_block = block["effective_para"]
eff_header_block["header_text"] = block["header_text"]
eff_header_block["header_block_idx"] = block["block_idx"]
eff_header_block["header_match_idx"] = header_match_idx_offset + 1
eff_header_block["level"] = block["level"] + 1
eff_header_block["level_chain"] = block["level_chain"]
eff_para_block["header_block_idx"] = block["block_idx"]
eff_para_block["header_match_idx"] = header_match_idx_offset + 1
eff_para_block["level"] = block["level"] + 2
eff_para_block["level_chain"] = [
{
"block_idx": eff_header_block["block_idx"],
"block_text": eff_header_block["block_text"],
},
] + eff_header_block["level_chain"]
header_match_idx_offset += 1
block_info.append(block["effective_header"])
block_texts.append(block["effective_header"]["block_text"])
for sent in block["effective_para"]["block_sents"]:
block_texts.append(sent)
block_info.append(block["effective_para"])
header_match_idx_offset += len(block["effective_para"]["block_sents"])
else:
block["table_idx"] = table_idx
block_info.append(block)
block_texts.append(block["block_text"])
header_match_idx_offset += 1
if 'is_table_end' in block:
is_rendering_table = False
table_idx += 1
return block_texts, block_info
def get_block_texts(blocks):
block_texts = []
block_info = []
for block in blocks:
block_type = block["block_type"]
if (
block_type == "list_item"
or block_type == "para"
or block_type == "numbered_list_item"
or block_type == "header"
):
block_texts.append(block["block_text"])
block_info.append(block)
return block_texts, block_info<fim_middle> | null | WHILE | complete_current_header_empty_completion |
<filename>nlm-ingestor/nlm_ingestor/ingestor/line_parser.py<fim_prefix>import datetime
import logging
import math
import re
import string
from nltk.corpus import stopwords
from .patterns import abbreviations
from .patterns import states
from .patterns import states_abbreviations
from .styling_utils import mode_of_list
try:
    stop_words = set(stopwords.words("english"))
except Exception as e:
    logging.error(e)
    import nltk
    # nltk.download returns a bool, so re-import the corpus rather than
    # binding the download result to the `stopwords` name
    nltk.download("stopwords")
    from nltk.corpus import stopwords
    stop_words = set(stopwords.words("english"))
stop_words.add("per")
continuing_chars = "!\"&'+,./:;<=?@\\]^_`|}~"
list_chars = [
"•",
"➢",
"*",
"ƒ",
"",
"",
"",
"",
"»",
"☐",
"·",
"�",
"▪",
"▪",
"○",
"",
"–",
]
list_types = {
"•": "circle",
"➢": "wide_symbol_arrow",
"*": "star",
"ƒ": "f",
"": "clock",
"": "small_square",
"": "narrow_symbol_arrow",
"": "large_square",
"»": "double_arrow",
"☐": "hollow_square",
"·": "circle",
"�": "special_char",
"▪": "very_small_square",
"▪": "very_small_square",
"○": "hollow_circle",
"": "hollow_squere",
"–": "dash",
"‒": "another-dash",
"̶": "underscore",
}
unicode_list_types = {
"\\uf0b7": "•",
"\\uf0fc": "",
}
footnote_types = {
"©"
}
ambiguous_list_chars = ["+", "-"]
units = ["acres", "miles", "-"] # - could represent a null value in a row
punctuations = string.punctuation + "“"
start_quotations = ["'", '"', "“"]
end_quotations = ["'", '"', "”"]
"""
Quote Pattern details:
\\W ==> Match non-alphanumeric characters. Helps in mitigating words like O'Reilly.
["“\'] ==> Quote patterns
(?!\\D\\s) ==> Negative Lookahead for single character following the quote.
Helps in removing words like Macy's, don't ...
(?!\\d+) ==> Negative Lookahead for one or more digits following the pattern.
Helps in removing words like '19, '2019
(.*?)[,;.]?[”"\'] ==> Match all other data.
"""
# Add / Modify Quotation pattern in ingestor_utils/utils.py also.
quote_pattern = re.compile(
r'(?:(?<=\W)|(?<=^))["“‘’\']+(?!\D\s)(?!\d+)(.*?)[,;.]?[”"‘’\']+',
) # (r'["“\'](.*?)[,;.]?[”"\']')
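# Example (illustrative): quote_pattern.findall('He said "hello world" loudly')
# returns ['hello world']; possessives such as "Macy's" are not matched because
# their quote is not preceded by a non-word character.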
single_char_pattern = re.compile(r'[a-zA-Z]')
multi_char_pattern = re.compile(r'[a-zA-Z]+')
roman_number_pattern = re.compile(r'[ixvIXV]+$')
ends_with_sentence_delimiter_pattern = re.compile(r"(?<![.;:][a-zA-Z0-9])(?<!INC|inc|Inc)[.;:]+(?![\w])[\"“‘’”\'\s]*$")
conjunction_list = ["for", "and", "not", "but", "or", "yet", "so", "between"]
class Word:
def __init__(self, token):
self.text = token
self.is_percent = False
self.is_number = False
self.is_year = False # year does not count as a number
self.is_dollar = False
self.is_million = False
self.is_billion = False
self.is_thousand = False
self.is_date_entry = False
self.is_negative = False
self.length = len(self.text)
self.is_stop_word = self.text.lower() in stop_words
self.is_number_range = False
self.parts = []
text_without_punct = self.text
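        # strip trailing punctuation and closing quotes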
while (
len(text_without_punct) > 1 and
(text_without_punct[-1] in string.punctuation or text_without_punct[-1] in end_quotations)
):
text_without_punct = text_without_punct[0:-1]
        # remove leading unbalanced punctuation and opening quotes
while (
len(text_without_punct) > 1 and
(text_without_punct[0] in string.punctuation or text_without_punct[0] in start_quotations)
):
text_without_punct = text_without_punct[1:]
self.text_without_punct = text_without_punct
self.is_noun = self.text_without_punct[0].isupper()
n = self.check_numeric()
self.check_date()
tr<fim_suffix>y:
if n:
n = round(float(n))
if n > 0:
digits = int(math.log10(n)) + 1
elif n == 0:
digits = 1
else:
digits = int(math.log10(-n)) + 2
self.num_digits = digits
if digits == 4 and self.text.replace(",", "") == self.text:
self.is_year = True
self.is_number = False
else:
self.num_digits = 0
except Exception as e:
logging.error(e)
self.num_digits = 0
def check_date(self):
if "/" in self.text or "-" in self.text:
text = self.text.replace("/", "-")
date_patterns = [
"%b-%d",
"%B-%d",
"%B-%d-%y",
"%B-%d-%Y",
"%b-%d-%Y",
"%b-%d-%y",
"%m-%d",
"%m-%d-%y",
"%m-%d-%Y",
]
for pat in date_patterns:
try:
datetime.datetime.strptime(text, pat)
self.is_date_entry = True
return
except ValueError:
pass
else:
self.is_date_entry = False
def check_numeric(self):
word = self.text.lower()
if not word.isalpha():
if word.isprintable():
if not word.isnumeric():
if word.startswith("(") and word.endswith(")"):
word = word[1:-1]
if word.startswith("-"):
self.is_negative = True
word = word[1:]
if word.startswith("$"):
self.is_dollar = True
word = word[1:]
elif word.endswith("$"):
self.is_dollar = True
word = word[0:-1]
elif word.endswith("%"):
self.is_percent = True
word = word[0:-1]
elif word.endswith("m"):
self.is_million = True
elif word.endswith("bn"):
self.is_billion = True
if word.startswith("(") and word.endswith(")"):
word = word[1:-1]
word = word.replace(",", "")
if word.isnumeric() or word.replace(".", "", 1).isnumeric():
self.is_number = True
parts = word.split("-")
if (
len(parts) == 2
and parts[0].isnumeric()
and parts[1].isnumeric()
):
self.is_number_range = True
self.parts = parts
else:
self.is_number = True
if self.is_number:
numeric_part = word
return numeric_part
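    # Illustrative behaviour of the parsing above (sketch):
    #   Word("$1,234") -> is_dollar=True, is_number=True, num_digits=4
    #   Word("2019")   -> is_year=True, is_number=False (4 digits, no comma)
    #   Word("12%")    -> is_percent=True, is_number=True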
class Line:
def __init__(
self,
line_str,
text_list=[],
style_dict={},
page_details={},
noun_chunk_ending_tokens=[],
):
self.text = line_str.strip()
self.visual_line = VisualLine(text_list, style_dict, page_details)
self.words = []
self.is_independent = False
self.is_header = False
self.is_header_without_comma = False
self.noun_chunks = []
self.quoted_words = quote_pattern.findall(self.text)
self.noun_chunk_ending_tokens = {x.lower() for x in noun_chunk_ending_tokens}
self.parse_line()
def check_header(self):
# Section X, Article Y, Note 1 etc.
first_word_header = self.first_word.lower() in ["section", "article", "note"]
# If there are a certain percentage of title words (first letter capitalize)
title_ratio = (
self.title_word_count / self.eff_word_count
if self.eff_word_count > 0
else 1.0
)
# print(self.title_word_count, self.eff_word_count, title_ratio)
# Section 1 is a header but Section 1: Hello 3 is not
has_enough_titles = title_ratio > 0.9 and self.eff_word_count < 10
has_header_structure = (
(first_word_header or has_enough_titles) and self.number_count == 1
) or self.numbered_line or self.text.isupper()
# has_header_structure = has_header_structure and self.eff_word_count <
last_word_number = (
self.last_word.lower() in units
or self.last_word_number
and not has_header_structure
)
last_word_date = self.last_word_date and not has_header_structure
# Find lines ending with sentence delimiter. But exclude text like "L.P."
ends_with_delim = ends_with_sentence_delimiter_pattern.search(self.text) is not None
sentence_structure = self.ends_with_period and not (
has_header_structure and title_ratio > 0.9
) and ends_with_delim
last_letter_is_punctuation = (
self.last_word[-1] in punctuations and self.last_word[-1] not in ":?.)]%" and
ends_with_delim
)
self.is_header_without_comma = (
not sentence_structure
and not self.has_list_char
and not self.first_char in footnote_types
and has_enough_titles
and not last_word_number
and (
self.number_count == 0
or (has_header_structure and self.number_count <= 1)
)
and not self.has_continuing_chars
and not last_word_date
and self.first_word_title
and not self.last_word_is_stop_word
and not self.is_zipcode_or_po
and not last_letter_is_punctuation
            and "://" not in self.text  # url pattern
)
self.is_header = self.is_header_without_comma and \
((not self.text.count(',') > 1) if not self.text.lower().startswith('section') else True)
def check_ends_with_period(self):
# punct_rule = self.last_char in string.punctuation and self.last_char not in [':', '.']
last_word_is_title = self.last_word in ["Mr.", "Dr.", "Mrs."]
self.ends_with_period = self.last_char in ["."] and not last_word_is_title
def check_table_row(self):
if not self.is_header:
value_count = (
self.number_count
+ self.dollar_count
+ self.pct_count
+ self.text.count(" - ")
)
word_symbols = self.word_count - self.dollar_sign_count
if word_symbols == 0:
word_symbols = 1
word_ratio = (
value_count + self.title_word_count + self.date_entry_count
) / word_symbols
self.is_table_row = (
(
(value_count > 0 or self.date_entry_count > 0)
and word_ratio > 0.7
and not self.ends_with_period
and not self.is_zipcode_or_po
)
and not self.last_word_is_stop_word
or ("...." in self.text)
)
else:
self.is_table_row = False
def check_list_item(self):
text = self.text.strip()
self.has_list_char = text[0] in list_types.keys()
# if not self.has_list_char and text[0] in ambiguous_list_chars:
# self.has_list_char = text[1:].strip()[0].isalpha()
self.is_list_item = self.has_list_char and self.first_word[-1] not in ":?.)]%$"
if self.is_list_item:
self.list_type = list_types[text[0]]
# matches 1.1 1.2.1 1 etc.
def check_numbered_line(self, word):
trunc_word = word
ends_with_parens = word.endswith(")")
number_end_char = word.endswith(".") or ends_with_parens
number_start_char = word.startswith("(")
if number_start_char and not ends_with_parens:
return False
if word[-1] in ["%", "$", ","]:
return False
if number_end_char:
trunc_word = word[:-1]
if number_start_char:
trunc_word = trunc_word[1:]
# To handle scenarios like (ii)(A)
if ")(" in trunc_word:
trunc_word = trunc_word.split(")(")[0]
parts = trunc_word.split(".")
self.integer_numbered_line = False
self.roman_numbered_line = False
self.letter_numbered_line = False
self.dot_numbered_line = False
mixed_list_items = False
max_digits = 2
max_roman = 6
for idx, part in enumerate(parts):
# print(">part: ", part, re.sub(r"[a-zA-Z]+", "", part).isdigit() or idx > 0)
if len(part) <= max_digits:
# (1), (2), (3)
self.integer_numbered_line = part.isdigit() and (
len(parts) > 1 or word.endswith(")")
)
# 1. 2. 3.
self.dot_numbered_line = part.isdigit() and (
len(parts) > 1 or word.endswith(".")
)
# a. b. c. or a) b) c)
# idx > 0 for patterns like 10.a
# a1 b1 c1 etc.
self.letter_numbered_line = (
True
if single_char_pattern.match(part)
and (
(number_end_char and len(part) == 1 and len(parts) == 1)
or multi_char_pattern.sub("", part).isdigit()
or idx > 0
)
else False
)
if len(part) <= max_roman:
# xi, i, iv
self.roman_numbered_line = (
True if roman_number_pattern.match(part) and idx == 0 else False
)
if part.endswith(")") and part[0].isalnum() and "(" in part:
mixed_list_items = True
self.numbered_line = (
self.integer_numbered_line
or self.roman_numbered_line
or self.letter_numbered_line
or self.dot_numbered_line
) and not mixed_list_items
if not self.numbered_line:
break
if self.numbered_line:
self.start_number = trunc_word
self.line_without_number = self.text[len(word) + 1 :]
self.full_number = self.text[:len(word)]
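        # Illustrative classifications (sketch):
        #   "1."  -> dot_numbered_line      "(2)"  -> integer_numbered_line
        #   "a)"  -> letter_numbered_line   "(ii)" -> roman_numbered_line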
# check if line is part of address
def check_zipcode_or_pobox(self):
# check if line matches format P.O. box xxxxx
pobox = (
self.word_count == 3
and self.last_word_number
and self.first_word.lower() in ["po", "p.o", "p.o."]
)
# check if line is last part of address, matching format "city, state zipcode"
zipcode = (
self.word_count
< 7 # ensure line is standalone address, not part of larger sentence
and (
self.contains_state # line contains comma followed by state name or abbreviation
# line ends in zipcode, with format xxxxx or xxxxx-xxxx
and (
(self.last_word_number or self.last_word[-4:].isdigit())
and (
(len(self.last_word) == 10 and self.last_word[-5] == "-")
or len(self.last_word) == 5
)
)
and not self.ends_with_period
)
)
self.is_zipcode_or_po = pobox or zipcode
def set_line_type(self):
line_type = "para"
if self.is_table_row:
line_type = "table_row"
elif self.is_header:
line_type = "header"
elif self.is_list_item or self.numbered_line:
line_type = "list_item"
else:
line_type = "para"
self.line_type = line_type
def parse_line(self):
self.words = []
self.title_word_count = 0
self.alpha_count = 0
self.list_type = ""
self.integer_numbered_line = False
self.roman_numbered_line = False
self.dot_numbered_line = False
self.numbered_line = False
self.stop_word_count = 0
self.dollar_count = 0
self.pct_count = 0
self.number_count = 0
self.last_word_number = False
self.first_word_title = False
self.letter_numbered_line = False
self.ends_with_hyphen = False
self.last_word_date = False
self.is_reference_author_name = False
self.date_entry_count = 0
self.last_word_is_stop_word = False # self.last_word in self.stopwords
self.hit_colon = False
self.is_zipcode_or_po = False
self.contains_state = False
self.addresses = []
# todo - this is a stopgap solution, need to make it more efficient
tokens = self.text.split()
self.length = len(self.text)
self.word_count = len(tokens)
self.dollar_sign_count = tokens.count("$")
last_idx = self.word_count - 1
first_alpha_found = False
prev_token_comma = False
self.eff_length = 0
single_letter_word_count = 0
noun_chunk_buf = []
if self.length == 0:
return
for idx, token in enumerate(tokens):
if token in unicode_list_types.keys():
token = unicode_list_types[token]
            if ":" in token:
self.hit_colon = True
            # strip a trailing punctuation mark unless the token is fully
            # wrapped, e.g. "(word)", or it is the first token (a trailing
            # colon is stripped even there)
            last_char = token[-1]
if (
(token[-1] in string.punctuation or token[-1] in end_quotations)
and not (token[0] in string.punctuation or token[0] in start_quotations)
and (not idx == 0 or token[-1] == ":")
):
token = token[0:-1]
if len(token) == 0:
continue
# if prev token contained comma, check if current token is state name
if prev_token_comma and (
token.lower() in states or token.lower() in states_abbreviations
):
self.contains_state = True
prev_token_comma = False
if prev_token_comma:
prev_token_comma = False
if last_char == ",":
prev_token_comma = True
if idx == 0 and not token.lower() == "i" and not token.lower() == "a":
self.check_numbered_line(token)
if token.istitle() or token.isupper(): # and not self.hit_colon:
self.title_word_count = self.title_word_count + 1
if token.isalpha():
# if not self.hit_colon:
self.alpha_count = self.alpha_count + 1
if not first_alpha_found:
first_alpha_found = True
if idx == 0:
self.first_word_title = token[0].isupper()
word = Word(token)
if word.is_number:
self.number_count = self.number_count + 1
if idx == last_idx:
self.last_word_number = True
if word.is_date_entry:
self.date_entry_count += 1
if idx == last_idx:
self.last_word_date = True
if word.is_dollar:
self.dollar_count = self.dollar_count + 1
if idx == last_idx:
self.last_word_number = True
if word.is_percent:
self.pct_count = self.pct_count + 1
if idx == last_idx:
self.last_word_number = True
self.eff_length += word.length
if word.length == 1:
single_letter_word_count += 1
if word.is_stop_word:
if not self.hit_colon:
self.stop_word_count = self.stop_word_count + 1
if idx == last_idx and len(token) != 1 and not token.isupper():
self.last_word_is_stop_word = True
if word.is_noun or word.text == "&":
noun = word.text_without_punct
prev_word = self.words[-1] if len(self.words) > 0 else None
if prev_word and (prev_word.is_number or prev_word.is_number_range) and not noun_chunk_buf:
noun_chunk_buf.append(prev_word.text_without_punct) # get stuff like 150 Broadway
if noun.endswith("'s"):
noun = noun[0:-2]
noun_chunk_buf.append(noun)
self.noun_chunks.append(" ".join(noun_chunk_buf))
noun_chunk_buf = []
elif (
"".join([x.lower() for x in noun if x not in {".", ","}])
in self.noun_chunk_ending_tokens
):
noun_chunk_buf.append(noun)
self.noun_chunks.append(" ".join(noun_chunk_buf))
noun_chunk_buf = []
else:
noun_chunk_buf.append(noun)
elif len(noun_chunk_buf) and word.is_number and word.text[0] not in ["$"]:
noun_chunk_buf.append(word.text_without_punct)
elif len(noun_chunk_buf):
self.noun_chunks.append(" ".join(noun_chunk_buf))
noun_chunk_buf = []
self.words.append(word)
if len(noun_chunk_buf) > 0:
self.noun_chunks.append(" ".join(noun_chunk_buf))
self.noun_chunks = sorted(list(set(filter(lambda x: x.lower() not in stop_words, self.noun_chunks))))
self.first_word = tokens[0]
self.last_word = tokens[-1]
self.last_char = self.text[-1]
self.ends_with_period = self.last_char == "."
self.ends_with_comma = self.last_char == ","
self.end_with_period_single_char = len(self.text) > 2 and self.text[-2] == "."
self.eff_word_count = self.alpha_count - self.stop_word_count
self.check_ends_with_period()
self.first_char = self.text[0]
self.has_continuing_chars = not self.numbered_line and (
self.first_char.islower() or self.first_char in continuing_chars
)
self.last_continuing_char = self.last_char in continuing_chars
self.check_zipcode_or_pobox()
self.check_list_item()
self.check_header()
self.check_table_row()
self.separate_line = (
self.is_header
or self.is_table_row
or self.is_list_item
or self.is_zipcode_or_po
)
self.is_list_or_row = self.is_table_row or self.is_list_item
self.is_header_or_row = (
self.is_header or self.is_table_row or self.is_zipcode_or_po
)
self.ends_with_abbreviation = self.ends_with_period and (
(self.last_word.find(".") != len(self.last_word) - 1)
or self.last_word.lower() in abbreviations
or len(self.last_word) <= 3
)
self.incomplete_line = not self.is_header_or_row and (
not self.ends_with_period
or self.ends_with_abbreviation
or self.end_with_period_single_char
)
self.continuing_line = self.has_continuing_chars and not self.separate_line
self.has_spaced_characters = single_letter_word_count / self.word_count > 0.8
self.set_line_type()
if self.is_header or self.is_header_without_comma:
if "," in self.text or self.last_word.isupper() and len(self.last_word) <= 2:
self.is_reference_author_name = True
self.last_word_is_co_ordinate_conjunction = self.ends_with_comma or self.last_word in conjunction_list
def to_json(self):
json_lp = dict(self.__dict__)
del json_lp["visual_line"]
words = []
for word in self.words:
words.append(word.__dict__)
json_lp["words"] = words
return json_lp
class VisualLine:
def __init__(self, text_list=[], style_dict={}, page_stats={}):
self.text_list = text_list
self.start_x = None
self.start_y = None
self.end_x = None
self.end_y = None
self.fs = None
self.fw = None
self.start_fs = None
self.end_fs = None
self.diff_prev_y = None
self.diff_next_y = None
self.is_comparably_sized = False
self.is_comparably_bolded = False
self.is_prev_space_smallest = False
self.is_next_space_smallest = False
self.wrapped_page = False
self.text = " ".join(self.text_list)
if style_dict:
self.start_x = style_dict["start_x"][0]
self.start_y = style_dict["start_y"][0]
self.end_x = style_dict["end_x"][-1]
self.end_y = style_dict["end_y"][-1]
self.fs = style_dict["line_fs"][0]
self.fw = style_dict["line_fw"][0]
self.diff_prev_y = style_dict["diff_prev_y"][0]
self.diff_next_y = style_dict["diff_next_y"][0]
self.font_family = (
style_dict["font_family"][0] if len(style_dict["font_family"]) else None
)
self.font_style = (
style_dict["font_style"][0] if len(style_dict["font_style"]) else None
)
self.min_x = (
self.start_x
) # these variables are adjustable during line joins for line width
self.max_x = self.end_x
self.start_x_list = style_dict["start_x"] # joined ents
self.end_x_list = style_dict["end_x"] # joined ents
self.start_x_list_single_ent = style_dict["start_x_list"][0]
self.end_x_list_single_ent = style_dict["end_x_list"][0]
self.mode_fs = mode_of_list(style_dict["line_fs"])
self.tab_count = 0
# calculates tabs for when tika misses word split
if len(self.start_x_list_single_ent) == len(self.end_x_list_single_ent):
self.start_end_list = list(
zip(self.start_x_list_single_ent, self.end_x_list_single_ent),
)
for word_x, next_word_x in zip(
self.start_end_list[:-1],
self.start_end_list[1:],
):
word_start_x, word_end_x = word_x
next_word_start_x, next_word_end_x = next_word_x
word_distance = next_word_start_x - word_end_x
if word_distance > 20:
self.tab_count += 1
else:
self.start_end_list = []
self.tab_count_join = 0 # tab count after join in ptolines
# calculates tabs for when tika misses word split
if len(self.start_x_list) == len(self.end_x_list):
self.start_end_list_join = list(
zip(self.start_x_list, self.end_x_list),
)
for word_x, next_word_x in zip(
self.start_end_list_join[:-1],
self.start_end_list_join[1:],
):
word_start_x, word_end_x = word_x
next_word_start_x, next_word_end_x = next_word_x
word_distance = next_word_start_x - word_end_x
if word_distance > 20:
self.tab_count_join += 1
else:
self.start_end_list_join = []
if len(self.text.split()) == 2 and self.tab_count == 1:
self.text_list = self.text.split()
# Count tabs in text list, Eventually make it a function of font size
self.start_fs = round(style_dict["start_fs"][0], 1)
self.end_fs = round(style_dict["end_fs"][-1], 1)
self.compute_visual_features(page_stats)
def compute_visual_features(self, page_stats):
# compute font size relative to most common font
font_sizes_mode = page_stats["mode_fs"]
if self.fs > (4 / 3) * font_sizes_mode:
self.is_comparably_sized = True
else:
self.is_comparably_sized = False
# compute font weight relative to 600.0 which has generally
# been observed to correspond to bolding of some sort
font_weights_mode = page_stats["mode_fw"]
if font_weights_mode >= 600.0:
self.is_comparably_bolded = False
elif self.fw > 600.0:
self.is_comparably_bolded = True
# compare line height for similar type (same font) lines
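        # (the for/else blocks below use Python's loop-else: the else branch
        # runs only when the loop finishes without hitting break, i.e. no
        # same-font pair with a smaller gap was found on the page)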
if page_stats["fs_and_diff_prev_y"].get((self.fs, self.diff_prev_y), 0) > 2:
for k, v in page_stats["fs_and_diff_prev_y"].items():
if k == self.fs and 0 <= v < self.diff_prev_y:
break
else:
self.is_prev_space_smallest = True
if page_stats["fs_and_diff_next_y"].get((self.fs, self.diff_next_y), 0) > 2:
for k, v in page_stats["fs_and_diff_next_y"].items():
if k == self.fs and 0 <= v < self.diff_next_y:
break
else:
self.is_next_space_smallest = True
    def should_join_table(self, next_line):
        """
        Check if the next line should be joined as a table row.
        Makes no assumption about whether the current line is a table.
        """
        # compare the number of spaced entities in both lines; a table row
        # should have at least two elements (alignment of the elements is
        # not compared here)
        curr_line_ents = len(self.text_list)
        next_line_ents = len(next_line.text_list)
        ent_match = curr_line_ents == next_line_ents and curr_line_ents >= 2
        return ent_match
def should_join_para(self):
return False
def should_join_header(self):
return False
def __str__(self):
output_str = f"\ntext_list = {self.text_list},\nstart_x = {self.start_x}, \nstart_y = {self.start_y}\nend_x = {self.end_x},\nend_y = {self.end_y},\nfs = {self.fs},\nfw = {self.fw},\nstart_fs = {self.start_fs},\nend_fs = {self.end_fs},\ndiff_prev_y = {self.diff_prev_y},\ndiff_next_y = {self.diff_next_y},\nis_comparably_sized = {self.is_comparably_sized},\nis_comparably_bolded = {self.is_comparably_bolded},\nis_prev_space_small = {self.is_prev_space_smallest}\nis_next_space_small = {self.is_next_space_smallest},"
output_str += f"\nfont_style = {self.font_style}"
return output_str
<fim_middle> | null | TRY | complete_current_header_empty_completion |
<filename>nlm-ingestor/nlm_ingestor/ingestor/line_parser.py<fim_prefix>import datetime
import logging
import math
import re
import string
from nltk.corpus import stopwords
from .patterns import abbreviations
from .patterns import states
from .patterns import states_abbreviations
from .styling_utils import mode_of_list
try:
    stop_words = set(stopwords.words("english"))
except Exception as e:
    logging.error(e)
    import nltk
    # nltk.download returns a bool, so re-import the corpus rather than
    # binding the download result to the `stopwords` name
    nltk.download("stopwords")
    from nltk.corpus import stopwords
    stop_words = set(stopwords.words("english"))
stop_words.add("per")
continuing_chars = "!\"&'+,./:;<=?@\\]^_`|}~"
list_chars = [
"•",
"➢",
"*",
"ƒ",
"",
"",
"",
"",
"»",
"☐",
"·",
"�",
"▪",
"▪",
"○",
"",
"–",
]
list_types = {
"•": "circle",
"➢": "wide_symbol_arrow",
"*": "star",
"ƒ": "f",
"": "clock",
"": "small_square",
"": "narrow_symbol_arrow",
"": "large_square",
"»": "double_arrow",
"☐": "hollow_square",
"·": "circle",
"�": "special_char",
"▪": "very_small_square",
"▪": "very_small_square",
"○": "hollow_circle",
"": "hollow_squere",
"–": "dash",
"‒": "another-dash",
"̶": "underscore",
}
unicode_list_types = {
"\\uf0b7": "•",
"\\uf0fc": "",
}
footnote_types = {
"©"
}
ambiguous_list_chars = ["+", "-"]
units = ["acres", "miles", "-"] # - could represent a null value in a row
punctuations = string.punctuation + "“"
start_quotations = ["'", '"', "“"]
end_quotations = ["'", '"', "”"]
"""
Quote Pattern details:
\\W ==> Match non-alphanumeric characters. Helps in mitigating words like O'Reilly.
["“\'] ==> Quote patterns
(?!\\D\\s) ==> Negative Lookahead for single character following the quote.
Helps in removing words like Macy's, don't ...
(?!\\d+) ==> Negative Lookahead for one or more digits following the pattern.
Helps in removing words like '19, '2019
(.*?)[,;.]?[”"\'] ==> Match all other data.
"""
# Add / Modify Quotation pattern in ingestor_utils/utils.py also.
quote_pattern = re.compile(
r'(?:(?<=\W)|(?<=^))["“‘’\']+(?!\D\s)(?!\d+)(.*?)[,;.]?[”"‘’\']+',
) # (r'["“\'](.*?)[,;.]?[”"\']')
single_char_pattern = re.compile(r'[a-zA-Z]')
multi_char_pattern = re.compile(r'[a-zA-Z]+')
roman_number_pattern = re.compile(r'[ixvIXV]+$')
ends_with_sentence_delimiter_pattern = re.compile(r"(?<![.;:][a-zA-Z0-9])(?<!INC|inc|Inc)[.;:]+(?![\w])[\"“‘’”\'\s]*$")
conjunction_list = ["for", "and", "not", "but", "or", "yet", "so", "between"]
class Word:
def __init__(self, token):
self.text = token
self.is_percent = False
self.is_number = False
self.is_year = False # year does not count as a number
self.is_dollar = False
self.is_million = False
self.is_billion = False
self.is_thousand = False
self.is_date_entry = False
self.is_negative = False
self.length = len(self.text)
self.is_stop_word = self.text.lower() in stop_words
self.is_number_range = False
self.parts = []
text_without_punct = self.text
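# strip trailing punctuation and closing quotes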
while (
len(text_without_punct) > 1 and
(text_without_punct[-1] in string.punctuation or text_without_punct[-1] in end_quotations)
):
text_without_punct = text_without_punct[0:-1]
# remove leading unbalanced punctuation
while (
len(text_without_punct) > 1 and
(text_without_punct[0] in string.punctuation or text_without_punct[0] in start_quotations)
):
text_without_punct = text_without_punct[1:]
self.text_without_punct = text_without_punct
self.is_noun = self.text_without_punct[0].isupper()
n = self.check_numeric()
self.check_date()
try:
if n:
n = round(float(n))
if n > 0:
digits = int(math.log10(n)) + 1
elif n == 0:
digits = 1
else:
digits = int(math.log10(-n)) + 2
self.num_digits = digits
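# a 4-digit number written without thousands separators is treated as a year, not a number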
if digits == 4 and self.text.replace(",", "") == self.text:
self.is_year = True
self.is_number = False
else:
self.num_digits = 0
excep<fim_suffix>t Exception as e:
logging.error(e)
self.num_digits = 0
def check_date(self):
if "/" in self.text or "-" in self.text:
text = self.text.replace("/", "-")
date_patterns = [
"%b-%d",
"%B-%d",
"%B-%d-%y",
"%B-%d-%Y",
"%b-%d-%Y",
"%b-%d-%y",
"%m-%d",
"%m-%d-%y",
"%m-%d-%Y",
]
for pat in date_patterns:
try:
datetime.datetime.strptime(text, pat)
self.is_date_entry = True
return
except ValueError:
pass
else:
self.is_date_entry = False
def check_numeric(self):
word = self.text.lower()
if not word.isalpha():
if word.isprintable():
if not word.isnumeric():
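# strip surrounding parentheses, e.g. (1,000)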
if word.startswith("(") and word.endswith(")"):
word = word[1:-1]
if word.startswith("-"):
self.is_negative = True
word = word[1:]
if word.startswith("$"):
self.is_dollar = True
word = word[1:]
elif word.endswith("$"):
self.is_dollar = True
word = word[0:-1]
elif word.endswith("%"):
self.is_percent = True
word = word[0:-1]
elif word.endswith("m"):
self.is_million = True
elif word.endswith("bn"):
self.is_billion = True
if word.startswith("(") and word.endswith(")"):
word = word[1:-1]
word = word.replace(",", "")
if word.isnumeric() or word.replace(".", "", 1).isnumeric():
self.is_number = True
parts = word.split("-")
if (
len(parts) == 2
and parts[0].isnumeric()
and parts[1].isnumeric()
):
self.is_number_range = True
self.parts = parts
else:
self.is_number = True
if self.is_number:
numeric_part = word
return numeric_part
class Line:
def __init__(
self,
line_str,
text_list=[],
style_dict={},
page_details={},
noun_chunk_ending_tokens=[],
):
self.text = line_str.strip()
self.visual_line = VisualLine(text_list, style_dict, page_details)
self.words = []
self.is_independent = False
self.is_header = False
self.is_header_without_comma = False
self.noun_chunks = []
self.quoted_words = quote_pattern.findall(self.text)
self.noun_chunk_ending_tokens = {x.lower() for x in noun_chunk_ending_tokens}
self.parse_line()
def check_header(self):
# Section X, Article Y, Note 1 etc.
first_word_header = self.first_word.lower() in ["section", "article", "note"]
# check whether a high share of the words are title words (first letter capitalized)
title_ratio = (
self.title_word_count / self.eff_word_count
if self.eff_word_count > 0
else 1.0
)
# print(self.title_word_count, self.eff_word_count, title_ratio)
# Section 1 is a header but Section 1: Hello 3 is not
has_enough_titles = title_ratio > 0.9 and self.eff_word_count < 10
has_header_structure = (
(first_word_header or has_enough_titles) and self.number_count == 1
) or self.numbered_line or self.text.isupper()
# has_header_structure = has_header_structure and self.eff_word_count <
last_word_number = (
self.last_word.lower() in units
or self.last_word_number
and not has_header_structure
)
last_word_date = self.last_word_date and not has_header_structure
# Find lines ending with sentence delimiter. But exclude text like "L.P."
ends_with_delim = ends_with_sentence_delimiter_pattern.search(self.text) is not None
sentence_structure = self.ends_with_period and not (
has_header_structure and title_ratio > 0.9
) and ends_with_delim
last_letter_is_punctuation = (
self.last_word[-1] in punctuations and self.last_word[-1] not in ":?.)]%" and
ends_with_delim
)
self.is_header_without_comma = (
not sentence_structure
and not self.has_list_char
and not self.first_char in footnote_types
and has_enough_titles
and not last_word_number
and (
self.number_count == 0
or (has_header_structure and self.number_count <= 1)
)
and not self.has_continuing_chars
and not last_word_date
and self.first_word_title
and not self.last_word_is_stop_word
and not self.is_zipcode_or_po
and not last_letter_is_punctuation
and not "://" in self.text # url pattern
)
self.is_header = self.is_header_without_comma and \
((not self.text.count(',') > 1) if not self.text.lower().startswith('section') else True)
def check_ends_with_period(self):
# punct_rule = self.last_char in string.punctuation and self.last_char not in [':', '.']
last_word_is_title = self.last_word in ["Mr.", "Dr.", "Mrs."]
self.ends_with_period = self.last_char in ["."] and not last_word_is_title
def check_table_row(self):
if not self.is_header:
value_count = (
self.number_count
+ self.dollar_count
+ self.pct_count
+ self.text.count(" - ")
)
word_symbols = self.word_count - self.dollar_sign_count
if word_symbols == 0:
word_symbols = 1
word_ratio = (
value_count + self.title_word_count + self.date_entry_count
) / word_symbols
self.is_table_row = (
(
(value_count > 0 or self.date_entry_count > 0)
and word_ratio > 0.7
and not self.ends_with_period
and not self.is_zipcode_or_po
)
and not self.last_word_is_stop_word
or ("...." in self.text)
)
else:
self.is_table_row = False
def check_list_item(self):
text = self.text.strip()
self.has_list_char = text[0] in list_types.keys()
# if not self.has_list_char and text[0] in ambiguous_list_chars:
# self.has_list_char = text[1:].strip()[0].isalpha()
self.is_list_item = self.has_list_char and self.first_word[-1] not in ":?.)]%$"
if self.is_list_item:
self.list_type = list_types[text[0]]
# matches 1.1 1.2.1 1 etc.
def check_numbered_line(self, word):
trunc_word = word
ends_with_parens = word.endswith(")")
number_end_char = word.endswith(".") or ends_with_parens
number_start_char = word.startswith("(")
if number_start_char and not ends_with_parens:
return False
if word[-1] in ["%", "$", ","]:
return False
if number_end_char:
trunc_word = word[:-1]
if number_start_char:
trunc_word = trunc_word[1:]
# To handle scenarios like (ii)(A)
if ")(" in trunc_word:
trunc_word = trunc_word.split(")(")[0]
parts = trunc_word.split(".")
self.integer_numbered_line = False
self.roman_numbered_line = False
self.letter_numbered_line = False
self.dot_numbered_line = False
mixed_list_items = False
max_digits = 2
max_roman = 6
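# inspect each dot-separated part; a part that matches no numbering scheme clears the flags and stops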
for idx, part in enumerate(parts):
# print(">part: ", part, re.sub(r"[a-zA-Z]+", "", part).isdigit() or idx > 0)
if len(part) <= max_digits:
# (1), (2), (3)
self.integer_numbered_line = part.isdigit() and (
len(parts) > 1 or word.endswith(")")
)
# 1. 2. 3.
self.dot_numbered_line = part.isdigit() and (
len(parts) > 1 or word.endswith(".")
)
# a. b. c. or a) b) c)
# idx > 0 for patterns like 10.a
# a1 b1 c1 etc.
self.letter_numbered_line = (
True
if single_char_pattern.match(part)
and (
(number_end_char and len(part) == 1 and len(parts) == 1)
or multi_char_pattern.sub("", part).isdigit()
or idx > 0
)
else False
)
if len(part) <= max_roman:
# xi, i, iv
self.roman_numbered_line = (
True if roman_number_pattern.match(part) and idx == 0 else False
)
if part.endswith(")") and part[0].isalnum() and "(" in part:
mixed_list_items = True
# else:
# self.integer_numbered_line = False
# A-1
# self.letter_numbered_line = (
# True if re.match("[a-zA-Z]+-?[0-9]+$", part) else False
# )
self.numbered_line = (
self.integer_numbered_line
or self.roman_numbered_line
or self.letter_numbered_line
or self.dot_numbered_line
) and not mixed_list_items
if not self.numbered_line:
break
if self.numbered_line:
self.start_number = trunc_word
self.line_without_number = self.text[len(word) + 1 :]
self.full_number = self.text[:len(word)]
# check if the line is part of an address
def check_zipcode_or_pobox(self):
# check if line matches format P.O. box xxxxx
pobox = (
self.word_count == 3
and self.last_word_number
and self.first_word.lower() in ["po", "p.o", "p.o."]
)
# check if line is last part of address, matching format "city, state zipcode"
zipcode = (
self.word_count
< 7 # ensure line is standalone address, not part of larger sentence
and (
self.contains_state # line contains comma followed by state name or abbreviation
# line ends in zipcode, with format xxxxx or xxxxx-xxxx
and (
(self.last_word_number or self.last_word[-4:].isdigit())
and (
(len(self.last_word) == 10 and self.last_word[-5] == "-")
or len(self.last_word) == 5
)
)
and not self.ends_with_period
)
)
self.is_zipcode_or_po = pobox or zipcode
def set_line_type(self):
line_type = "para"
if self.is_table_row:
line_type = "table_row"
elif self.is_header:
line_type = "header"
elif self.is_list_item or self.numbered_line:
line_type = "list_item"
else:
line_type = "para"
self.line_type = line_type
def parse_line(self):
self.words = []
self.title_word_count = 0
self.alpha_count = 0
self.list_type = ""
self.integer_numbered_line = False
self.roman_numbered_line = False
self.dot_numbered_line = False
self.numbered_line = False
self.stop_word_count = 0
self.dollar_count = 0
self.pct_count = 0
self.number_count = 0
self.last_word_number = False
self.first_word_title = False
self.letter_numbered_line = False
self.ends_with_hyphen = False
self.last_word_date = False
self.is_reference_author_name = False
self.date_entry_count = 0
self.last_word_is_stop_word = False # self.last_word in self.stopwords
self.hit_colon = False
self.is_zipcode_or_po = False
self.contains_state = False
self.addresses = []
# todo - this is a stopgap solution, need to make it more efficient
tokens = self.text.split()
self.length = len(self.text)
self.word_count = len(tokens)
self.dollar_sign_count = tokens.count("$")
last_idx = self.word_count - 1
first_alpha_found = False
prev_token_comma = False
self.eff_length = 0
single_letter_word_count = 0
noun_chunk_buf = []
if self.length == 0:
return
for idx, token in enumerate(tokens):
if token in unicode_list_types.keys():
token = unicode_list_types[token]
if token.__contains__(":"):
self.hit_colon = True
# strip one trailing punctuation/quote character, unless the token also starts
# with punctuation (e.g. "(word)"); for the first token, only a trailing colon is stripped
last_char = token[-1]
if (
(token[-1] in string.punctuation or token[-1] in end_quotations)
and not (token[0] in string.punctuation or token[0] in start_quotations)
and (not idx == 0 or token[-1] == ":")
):
token = token[0:-1]
if len(token) == 0:
continue
# if prev token contained comma, check if current token is state name
if prev_token_comma and (
token.lower() in states or token.lower() in states_abbreviations
):
self.contains_state = True
prev_token_comma = False
if prev_token_comma:
prev_token_comma = False
if last_char == ",":
prev_token_comma = True
if idx == 0 and not token.lower() == "i" and not token.lower() == "a":
self.check_numbered_line(token)
if token.istitle() or token.isupper(): # and not self.hit_colon:
self.title_word_count = self.title_word_count + 1
if token.isalpha():
# if not self.hit_colon:
self.alpha_count = self.alpha_count + 1
if not first_alpha_found:
first_alpha_found = True
if idx == 0:
self.first_word_title = token[0].isupper()
word = Word(token)
if word.is_number:
self.number_count = self.number_count + 1
if idx == last_idx:
self.last_word_number = True
if word.is_date_entry:
self.date_entry_count += 1
if idx == last_idx:
self.last_word_date = True
if word.is_dollar:
self.dollar_count = self.dollar_count + 1
if idx == last_idx:
self.last_word_number = True
if word.is_percent:
self.pct_count = self.pct_count + 1
if idx == last_idx:
self.last_word_number = True
self.eff_length += word.length
if word.length == 1:
single_letter_word_count += 1
if word.is_stop_word:
if not self.hit_colon:
self.stop_word_count = self.stop_word_count + 1
if idx == last_idx and len(token) != 1 and not token.isupper():
self.last_word_is_stop_word = True
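# group consecutive capitalized words into noun chunks, keeping adjoining numbers (e.g. "150 Broadway")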
if word.is_noun or word.text == "&":
noun = word.text_without_punct
prev_word = self.words[-1] if len(self.words) > 0 else None
if prev_word and (prev_word.is_number or prev_word.is_number_range) and not noun_chunk_buf:
noun_chunk_buf.append(prev_word.text_without_punct) # get stuff like 150 Broadway
if noun.endswith("'s"):
noun = noun[0:-2]
noun_chunk_buf.append(noun)
self.noun_chunks.append(" ".join(noun_chunk_buf))
noun_chunk_buf = []
elif (
"".join([x.lower() for x in noun if x not in {".", ","}])
in self.noun_chunk_ending_tokens
):
noun_chunk_buf.append(noun)
self.noun_chunks.append(" ".join(noun_chunk_buf))
noun_chunk_buf = []
else:
noun_chunk_buf.append(noun)
elif len(noun_chunk_buf) and word.is_number and word.text[0] not in ["$"]:
noun_chunk_buf.append(word.text_without_punct)
elif len(noun_chunk_buf):
self.noun_chunks.append(" ".join(noun_chunk_buf))
noun_chunk_buf = []
self.words.append(word)
if len(noun_chunk_buf) > 0:
self.noun_chunks.append(" ".join(noun_chunk_buf))
self.noun_chunks = sorted(list(set(filter(lambda x: x.lower() not in stop_words, self.noun_chunks))))
self.first_word = tokens[0]
self.last_word = tokens[-1]
self.last_char = self.text[-1]
self.ends_with_period = self.last_char == "."
self.ends_with_comma = self.last_char == ","
self.end_with_period_single_char = len(self.text) > 2 and self.text[-2] == "."
self.eff_word_count = self.alpha_count - self.stop_word_count
self.check_ends_with_period()
self.first_char = self.text[0]
self.has_continuing_chars = not self.numbered_line and (
self.first_char.islower() or self.first_char in continuing_chars
)
self.last_continuing_char = self.last_char in continuing_chars
self.check_zipcode_or_pobox()
self.check_list_item()
self.check_header()
self.check_table_row()
self.separate_line = (
self.is_header
or self.is_table_row
or self.is_list_item
or self.is_zipcode_or_po
)
self.is_list_or_row = self.is_table_row or self.is_list_item
self.is_header_or_row = (
self.is_header or self.is_table_row or self.is_zipcode_or_po
)
self.ends_with_abbreviation = self.ends_with_period and (
(self.last_word.find(".") != len(self.last_word) - 1)
or self.last_word.lower() in abbreviations
or len(self.last_word) <= 3
)
self.incomplete_line = not self.is_header_or_row and (
not self.ends_with_period
or self.ends_with_abbreviation
or self.end_with_period_single_char
)
self.continuing_line = self.has_continuing_chars and not self.separate_line
self.has_spaced_characters = single_letter_word_count / self.word_count > 0.8
self.set_line_type()
if self.is_header or self.is_header_without_comma:
if "," in self.text or self.last_word.isupper() and len(self.last_word) <= 2:
self.is_reference_author_name = True
self.last_word_is_co_ordinate_conjunction = self.ends_with_comma or self.last_word in conjunction_list
# print(self.separate_line)
# self.continuing_line = not self.separate_line and
def to_json(self):
json_lp = dict(self.__dict__)
del json_lp["visual_line"]
words = []
for word in self.words:
words.append(word.__dict__)
json_lp["words"] = words
return json_lp
class VisualLine:
def __init__(self, text_list=[], style_dict={}, page_stats={}):
self.text_list = text_list
self.start_x = None
self.start_y = None
self.end_x = None
self.end_y = None
self.fs = None
self.fw = None
self.start_fs = None
self.end_fs = None
self.diff_prev_y = None
self.diff_next_y = None
self.is_comparably_sized = False
self.is_comparably_bolded = False
self.is_prev_space_smallest = False
self.is_next_space_smallest = False
self.wrapped_page = False
self.text = " ".join(self.text_list)
if style_dict:
self.start_x = style_dict["start_x"][0]
self.start_y = style_dict["start_y"][0]
self.end_x = style_dict["end_x"][-1]
self.end_y = style_dict["end_y"][-1]
self.fs = style_dict["line_fs"][0]
self.fw = style_dict["line_fw"][0]
self.diff_prev_y = style_dict["diff_prev_y"][0]
self.diff_next_y = style_dict["diff_next_y"][0]
self.font_family = (
style_dict["font_family"][0] if len(style_dict["font_family"]) else None
)
self.font_style = (
style_dict["font_style"][0] if len(style_dict["font_style"]) else None
)
self.min_x = (
self.start_x
) # these variables are adjustable during line joins for line width
self.max_x = self.end_x
self.start_x_list = style_dict["start_x"] # joined ents
self.end_x_list = style_dict["end_x"] # joined ents
self.start_x_list_single_ent = style_dict["start_x_list"][0]
self.end_x_list_single_ent = style_dict["end_x_list"][0]
self.mode_fs = mode_of_list(style_dict["line_fs"])
self.tab_count = 0
# count tab-sized gaps (> 20 units) where Tika missed a word split
if len(self.start_x_list_single_ent) == len(self.end_x_list_single_ent):
self.start_end_list = list(
zip(self.start_x_list_single_ent, self.end_x_list_single_ent),
)
for word_x, next_word_x in zip(
self.start_end_list[:-1],
self.start_end_list[1:],
):
word_start_x, word_end_x = word_x
next_word_start_x, next_word_end_x = next_word_x
word_distance = next_word_start_x - word_end_x
if word_distance > 20:
self.tab_count += 1
else:
self.start_end_list = []
self.tab_count_join = 0 # tab count after join in ptolines
# count tab-sized gaps (> 20 units) where Tika missed a word split
if len(self.start_x_list) == len(self.end_x_list):
self.start_end_list_join = list(
zip(self.start_x_list, self.end_x_list),
)
for word_x, next_word_x in zip(
self.start_end_list_join[:-1],
self.start_end_list_join[1:],
):
word_start_x, word_end_x = word_x
next_word_start_x, next_word_end_x = next_word_x
word_distance = next_word_start_x - word_end_x
if word_distance > 20:
self.tab_count_join += 1
else:
self.start_end_list_join = []
if len(self.text.split()) == 2 and self.tab_count == 1:
self.text_list = self.text.split()
# Count tabs in text list, Eventually make it a function of font size
self.start_fs = round(style_dict["start_fs"][0], 1)
self.end_fs = round(style_dict["end_fs"][-1], 1)
self.compute_visual_features(page_stats)
def compute_visual_features(self, page_stats):
# compute font size relative to most common font
font_sizes_mode = page_stats["mode_fs"]
if self.fs > (4 / 3) * font_sizes_mode:
self.is_comparably_sized = True
else:
self.is_comparably_sized = False
# compute font weight relative to 600.0 which has generally
# been observed to correspond to bolding of some sort
font_weights_mode = page_stats["mode_fw"]
if font_weights_mode >= 600.0:
self.is_comparably_bolded = False
elif self.fw > 600.0:
self.is_comparably_bolded = True
# compare line height for similar type (same font) lines
if page_stats["fs_and_diff_prev_y"].get((self.fs, self.diff_prev_y), 0) > 2:
for k, v in page_stats["fs_and_diff_prev_y"].items():
if k == self.fs and 0 <= v < self.diff_prev_y:
break
else:
self.is_prev_space_smallest = True
if page_stats["fs_and_diff_next_y"].get((self.fs, self.diff_next_y), 0) > 2:
for k, v in page_stats["fs_and_diff_next_y"].items():
if k == self.fs and 0 <= v < self.diff_next_y:
break
else:
self.is_next_space_smallest = True
def should_join_table(self, next_line):
"""
Check if the next line should be joined as a table row (tr). Makes no assumption about whether the current line is itself a table row.
"""
# check list of spaced words
curr_line_ents = len(self.text_list)
next_line_ents = len(next_line.text_list)
ent_match = (
curr_line_ents == next_line_ents and curr_line_ents >= 2
) # tr should have at least two elements
# compare alignment of elements in both lists
if ent_match:
# todo: alignment comparison is not implemented; the bare `return` yields None (falsy)
return
return False
def should_join_para(self):
return False
def should_join_header(self):
return False
def __str__(self):
output_str = f"\ntext_list = {self.text_list},\nstart_x = {self.start_x}, \nstart_y = {self.start_y}\nend_x = {self.end_x},\nend_y = {self.end_y},\nfs = {self.fs},\nfw = {self.fw},\nstart_fs = {self.start_fs},\nend_fs = {self.end_fs},\ndiff_prev_y = {self.diff_prev_y},\ndiff_next_y = {self.diff_next_y},\nis_comparably_sized = {self.is_comparably_sized},\nis_comparably_bolded = {self.is_comparably_bolded},\nis_prev_space_small = {self.is_prev_space_smallest}\nis_next_space_small = {self.is_next_space_smallest},"
output_str += f"\nfont_style = {self.font_style}"
return output_str
<fim_middle> | null | CATCH | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates a httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
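# each streamed line is one JSON object; server-side errors are surfaced eagerly as ResponseError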
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := <fim_suffix>_as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
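# FROM/ADAPTER lines that point at local files are rewritten to @<sha256 digest> references, uploading each file as a blob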
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
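# HEAD probes whether the server already has this blob; a 404 means it must be uploaded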
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, str) or isinstance(s, Path):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
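# bare hosts default to http and port 11434; explicit http/https schemes imply ports 80/443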
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | IF | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates a httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
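# HEAD probes whether the server already has this blob; a 404 means it must be uploaded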
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if <fim_suffix>stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
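# --- Illustrative sketch (editor's addition, not part of the library) -------
# The streaming variants above return an async iterator of parsed NDJSON
# chunks. A minimal consumer; the running local server and the pulled
# 'llama2' model are assumptions made for the example only:
if __name__ == '__main__':
    import asyncio

    async def _demo_chat() -> None:
        client = AsyncClient()  # defaults to http://127.0.0.1:11434
        stream = await client.chat(
            model='llama2',
            messages=[{'role': 'user', 'content': 'Why is the sky blue?'}],
            stream=True,
        )
        async for part in stream:
            # each chunk carries the next fragment of the assistant message
            print(part['message']['content'], end='', flush=True)

    asyncio.run(_demo_chat())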
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | IF | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
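# --- Illustrative sketch (editor's addition, not part of the library) -------
# Everything in `kwargs` is handed to httpx unchanged, so proxies, limits, or
# extra headers can be set when a client is built. For example (the host and
# header values below are assumptions for illustration):
#
#     client = Client(
#         host='http://127.0.0.1:11434',
#         timeout=30.0,
#         headers={'X-Request-Source': 'docs-example'},
#     )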
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Mapping[str, Any]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
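# --- Illustrative sketch (editor's addition, not part of the library) -------
# Typical synchronous usage of the class above; the running server and the
# pulled 'llama2' model are assumptions made for the example:
if __name__ == '__main__':
    client = Client()
    reply = client.chat(
        model='llama2',
        messages=[{'role': 'user', 'content': 'Say hello in one word.'}],
    )
    print(reply['message']['content'])  # non-streaming: one complete mapping
    for chunk in client.generate(model='llama2', prompt='1 + 1 =', stream=True):
        print(chunk['response'], end='', flush=True)  # streaming: NDJSON chunks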
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Mapping[str, Any]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.<fim_suffix>exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
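# --- Illustrative sketch (editor's addition, not part of the library) -------
# `create` accepts a Modelfile path or its text; `_parse_modelfile` rewrites
# FROM/ADAPTER lines that point at existing local files into `@sha256:...`
# blob references before the request is sent. The model names below are
# assumptions for illustration:
if __name__ == '__main__':
    import asyncio

    async def _demo_create() -> None:
        modelfile = 'FROM llama2\nSYSTEM You are a terse assistant.\n'
        client = AsyncClient()
        async for status in await client.create(
            model='terse-llama',
            modelfile=modelfile,
            stream=True,
        ):
            print(status)

    asyncio.run(_demo_create())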
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | IF | complete_current_header_empty_completion |
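A note on the wire format the `_stream` helpers in these rows rely on: streaming endpoints emit newline-delimited JSON, and an `error` key inside any chunk aborts iteration. A standalone parser under those assumptions (a sketch, not the library's API):

import json
from typing import Any, Iterator, Mapping

def parse_ndjson(lines: Iterator[str]) -> Iterator[Mapping[str, Any]]:
    # Mirrors Client._stream: decode each line and surface embedded errors.
    for line in lines:
        chunk = json.loads(line)
        if err := chunk.get('error'):
            raise RuntimeError(err)  # the clients raise ResponseError here
        yield chunk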
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Mapping[str, Any]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
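# --- Illustrative sketch (editor's addition, not part of the library) -------
# HTTP failures surface as ResponseError (carrying the body text and status
# code) and bad arguments as RequestError. A defensive caller; the model
# name is an assumption:
if __name__ == '__main__':
    client = Client()
    try:
        client.show('no-such-model')
    except ResponseError as err:
        print('server error:', err.status_code)
    except RequestError as err:
        print('bad request:', err)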
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Mapping[str, Any]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() n<fim_suffix>ot in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
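# --- Illustrative sketch (editor's addition, not part of the library) -------
# `_create_blob` above names a blob after the sha256 of its contents, hashed
# in 32 KiB chunks. The same digest can be reproduced standalone:
def _sha256_digest(path: Union[str, Path]) -> str:
    h = sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(32 * 1024), b''):
            h.update(chunk)  # stream the file so large blobs stay cheap
    return f'sha256:{h.hexdigest()}'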
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | IF | complete_current_header_empty_completion |
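The pull/push methods in these rows stream `ProgressResponse` chunks. A sketch of watching a pull, assuming the package is installed and the model name exists on the registry:

from ollama import Client

client = Client()
for progress in client.pull('llama2', stream=True):
    # the server reports a status line plus byte counters per chunk
    print(progress.get('status'), progress.get('completed'), progress.get('total'))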
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Mapping[str, Any]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_byte<fim_suffix>sio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | IF | complete_current_header_empty_completion |
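A minimal usage sketch for the synchronous Client defined above. The model name 'llama2' and the running local server are assumptions, not something this file guarantees; any model already pulled into a reachable Ollama instance works the same way:

# Sketch: non-streaming chat against a local Ollama server (assumed running
# on the default http://127.0.0.1:11434; 'llama2' is a placeholder model).
from ollama import Client

client = Client()
response = client.chat(
    model='llama2',
    messages=[{'role': 'user', 'content': 'Why is the sky blue?'}],
)
# Non-streaming chat returns a single mapping with the assistant message.
print(response['message']['content'])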
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper<fim_suffix>() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | IF | complete_current_header_empty_completion |
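A sketch of the streaming path through the AsyncClient above, again assuming a local server and a placeholder 'llama2' model; with stream=True the awaited call returns an asynchronous generator of partial responses:

# Sketch: streaming chat with the AsyncClient (assumptions as noted above).
import asyncio
from ollama import AsyncClient

async def main():
    client = AsyncClient()
    # Awaiting the call with stream=True yields an async generator.
    stream = await client.chat(
        model='llama2',
        messages=[{'role': 'user', 'content': 'Why is the sky blue?'}],
        stream=True,
    )
    async for part in stream:
        print(part['message']['content'], end='', flush=True)

asyncio.run(main())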
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpat<fim_suffix>h := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | IF | complete_current_header_empty_completion |
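The create/_parse_modelfile pair above accepts either a path or an inline Modelfile string. A sketch of the inline form, where the base model and system prompt are illustrative assumptions:

# Sketch: building a model from an inline Modelfile and watching progress
# ('llama2' and the target name 'terse-llama' are placeholders).
from ollama import Client

client = Client()
modelfile = (
    'FROM llama2\n'
    'SYSTEM You are a terse assistant.\n'
)
for progress in client.create(model='terse-llama', modelfile=modelfile, stream=True):
    # Each streamed chunk is a progress mapping with a 'status' field.
    print(progress.get('status', ''))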
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
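# A minimal usage sketch for the synchronous client (a sketch only: it assumes
# an Ollama server is reachable at the default host, and the model name
# 'llama2' is illustrative rather than part of this file):
#
#   client = Client()
#   reply = client.chat(model='llama2', messages=[{'role': 'user', 'content': 'Hi'}])
#   print(reply['message']['content'])
#
#   # With stream=True the same methods return an iterator of partial mappings:
#   for part in client.generate(model='llama2', prompt='Hello', stream=True):
#       print(part['response'], end='')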
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
i<fim_suffix>f not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
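# A minimal sketch of the asynchronous client (same assumptions as the
# synchronous sketch above: a running Ollama server, and 'llama2' as an
# illustrative model name):
#
#   import asyncio
#
#   async def main():
#       client = AsyncClient()
#       reply = await client.chat(model='llama2', messages=[{'role': 'user', 'content': 'Hi'}])
#       print(reply['message']['content'])
#
#   asyncio.run(main())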
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, str) or isinstance(s, Path):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | IF | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not ch<fim_suffix>unk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
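# A short sketch of the embeddings and model-management helpers (assumes a
# running server; 'llama2' is illustrative, and the 'models'/'name' keys
# follow the /api/tags response shape):
#
#   client = Client()
#   result = client.embeddings(model='llama2', prompt='The sky is blue')
#   names = [m['name'] for m in client.list()['models']]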
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, str) or isinstance(s, Path):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | IF | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
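# A sketch of streaming pull progress (assumes registry access; 'llama2' is
# illustrative, and the 'status' key follows the /api/pull progress shape):
#
#   client = Client()
#   for progress in client.pull('llama2', stream=True):
#       print(progress.get('status'))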
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
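# A sketch of creating a model from an inline Modelfile (the FROM target
# 'llama2' and the model name are illustrative; note that _parse_modelfile
# replaces FROM/ADAPTER file arguments with blob digests before upload):
#
#   client = Client()
#   modelfile = 'FROM llama2\nSYSTEM You are terse.'
#   client.create(model='my-terse-model', modelfile=modelfile)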
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, str) or isinstance(s, Path):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, i<fim_suffix>o.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | IF | complete_current_header_empty_completion |
# ollama-python/ollama/_client.py
import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
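# Construction sketch (host, timeout, and header values are illustrative):
#
#   client = Client(host='http://localhost:11434', timeout=30.0,
#                   headers={'X-Request-Id': 'demo'})
#
# User-supplied `headers` are kept and extended with the JSON defaults set in
# `BaseClient.__init__`; remaining kwargs go straight to the httpx client.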
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
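    # Shared dispatcher: with stream=True return the line-delimited JSON
    # iterator from `_stream`; otherwise make one request and decode the body.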
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
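  # Usage sketch (model name and prompts are illustrative):
  #
  #   resp = Client().generate(model='llama2', prompt='Why is the sky blue?')
  #   print(resp['response'])
  #
  #   for part in Client().generate(model='llama2', prompt='Tell a story', stream=True):
  #     print(part['response'], end='', flush=True)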
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
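  # Usage sketch (model and message content are illustrative):
  #
  #   resp = Client().chat(
  #     model='llama2',
  #     messages=[{'role': 'user', 'content': 'Hello!'}],
  #   )
  #   print(resp['message']['content'])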
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Mapping[str, Any]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
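  # Rewrite sketch (the digest is illustrative):
  #
  #   FROM ./model.gguf   ->   FROM @sha256:0123abcd...
  #
  # Relative FROM/ADAPTER paths are resolved against `base`; existing files
  # are uploaded as blobs and referenced by digest, everything else passes
  # through untouched.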
def _create_blob(self, path: Union[str, Path]) -> str:
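    # Hash the file in 32 KiB chunks so arbitrarily large blobs never have to
    # fit in memory at once.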
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
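    # The generator lives in `inner` so `_stream` itself is a plain coroutine;
    # `_request_stream` can then `await` both the streaming and non-streaming
    # paths uniformly.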
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Mapping[str, Any]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
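    # Upload in 32 KiB chunks via an async generator so large blobs are
    # streamed rather than read fully into memory.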
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, str) or isinstance(s, Path):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
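# Async usage sketch (model name and message content are illustrative):
#
#   import asyncio
#
#   async def main():
#     stream = await AsyncClient().chat(
#       model='llama2',
#       messages=[{'role': 'user', 'content': 'Hi there'}],
#       stream=True,
#     )
#     async for part in stream:
#       print(part['message']['content'], end='', flush=True)
#
#   asyncio.run(main())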
<fim_middle> | null | STATEMENT | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates a httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of strings')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() els<fim_suffix>e base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, str) or isinstance(s, Path):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | STATEMENT | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates a httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
        raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
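# Streaming sketch for the asynchronous client (illustrative; 'llama2' is an
# assumed model name). With stream=True the awaited call resolves to an async
# iterator of partial responses, matching _request_stream above:
#
#   import asyncio
#
#   async def main():
#       stream = await AsyncClient().generate(model='llama2', prompt='Hi', stream=True)
#       async for part in stream:
#           print(part.get('response', ''), end='', flush=True)
#
#   asyncio.run(main())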
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode<fim_suffix>('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
  if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | STATEMENT | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
    Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
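  # Embeddings sketch (illustrative; the model name and prompt are
  # assumptions). The parsed JSON body from /api/embeddings is returned as-is:
  #
  #   result = Client().embeddings(model='llama2', prompt='The sky is blue')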
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
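  # Modelfile sketch (illustrative; model name and contents are assumptions):
  # _parse_modelfile rewrites FROM/ADAPTER arguments that point at existing
  # local files into '@sha256:<digest>' blob references; all other lines pass
  # through unchanged.
  #
  #   Client().create(model='mario', modelfile='FROM llama2\nSYSTEM You are Mario.')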
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
        raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64deco<fim_suffix>de(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
  if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | STATEMENT | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
    Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
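  # Progress sketch (illustrative; 'llama2' is an assumed model name): with
  # stream=True, pull() yields progress mappings as the download advances.
  #
  #   for progress in Client().pull('llama2', stream=True):
  #       print(progress.get('status'))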
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
        raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
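  # Image-chat sketch (illustrative; the model name and file path are
  # assumptions): bytes, file paths, and file-like objects listed under
  # message['images'] are base64-encoded by _encode_image before sending.
  #
  #   await AsyncClient().chat(
  #       model='llava',
  #       messages=[{'role': 'user', 'content': 'Describe this image', 'images': ['photo.png']}],
  #   )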
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = li<fim_suffix>ne.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
  if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | STATEMENT | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
    Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
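# Configuration sketch (illustrative; host and timeout values are
# assumptions): extra keyword arguments are forwarded to the underlying httpx
# client, so transport settings can be tuned at construction time.
#
#   client = Client(host='http://localhost:11434', timeout=30.0)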
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*arg<fim_suffix>s, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
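  # Error-handling sketch (illustrative; the model name is an assumption):
  # chat() raises RequestError for bad arguments before any network call, and
  # _request() converts non-2xx replies into ResponseError.
  #
  #   try:
  #       Client().chat(model='no-such-model', messages=[{'role': 'user', 'content': 'Hi'}])
  #   except ResponseError as err:
  #       print(err.status_code, err)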
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | STATEMENT | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
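# Illustrative usage sketch (editor's addition, not part of the original
# source). Assumes a local Ollama server with an example model named
# 'llama2' already pulled; both are assumptions, not requirements of the API.
#
#   client = Client()
#   result = client.generate(model='llama2', prompt='Why is the sky blue?')
#   print(result['response'])
#
#   # With stream=True the same call yields partial GenerateResponse mappings:
#   for part in client.generate(model='llama2', prompt='Hi', stream=True):
#       print(part['response'], end='', flush=True)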
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
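# Illustrative usage sketch (editor's addition). Messages are plain dicts
# matching the Message TypedDict from ollama._types; 'llama2' is an assumed
# example model name.
#
#   client = Client()
#   reply = client.chat(model='llama2', messages=[
#       {'role': 'user', 'content': 'Hello!'},
#   ])
#   print(reply['message']['content'])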
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_model<fim_suffix>file(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
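# Editor's note: for a Modelfile such as the hypothetical
#   FROM ./model.gguf
#   PARAMETER temperature 0.8
# _parse_modelfile uploads any FROM/ADAPTER argument that resolves to an
# existing file via _create_blob and rewrites it as '@sha256:<digest>';
# every other line is copied through unchanged.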
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
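# Illustrative async usage sketch (editor's addition; 'llama2' is an assumed
# example model):
#
#   import asyncio
#
#   async def main():
#       client = AsyncClient()
#       reply = await client.chat(model='llama2', messages=[
#           {'role': 'user', 'content': 'Hello!'},
#       ])
#       print(reply['message']['content'])
#
#   asyncio.run(main())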
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | STATEMENT | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_types.py<fim_prefix>import json
from typing import Any, TypedDict, Sequence, Literal
import sys
if sys.version_info < (3, 11):
from typing_extensions import NotRequired
else:
from typing import NotRequired
class BaseGenerateResponse(TypedDict):
model: str
'Model used to generate response.'
created_at: str
'Time when the request was created.'
done: bool
'True if response is complete, otherwise False. Useful for streaming to detect the final response.'
total_duration: int
'Total duration in nanoseconds.'
load_duration: int
'Load duration in nanoseconds.'
prompt_eval_count: int
'Number of tokens evaluated in the prompt.'
prompt_eval_duration: int
'Duration of evaluating the prompt in nanoseconds.'
eval_count: int
'Number of tokens evaluated in inference.'
eval_duration: int
'Duration of evaluating inference in nanoseconds.'
class GenerateResponse(BaseGenerateResponse):
"""
Response returned by generate requests.
"""
response: str
'Response content. When streaming, this contains a fragment of the response.'
context: Sequence[int]
'Tokenized history up to the point of the response.'
class Message(TypedDict):
"""
Chat message.
"""
role: Literal['user', 'assistant', 'system']
"Assumed role of the message. Response messages always has role 'assistant'."
content: str
'Content of the message. Response messages contain message fragments when streaming.'
images: NotRequired[Sequence[Any]]
"""
Optional list of image data for multimodal models.
Valid input types are:
- `str` or path-like object: path to image file
- `bytes` or bytes-like object: raw image data
Valid image formats depend on the model. See the model card for more information.
"""
class ChatResponse(BaseGenerateResponse):
"""
Response returned by chat requests.
"""
message: Message
'Response message.'
class ProgressResponse(TypedDict):
status: str
completed: int
total: int
digest: str
class Options(TypedDict, total=False):
# load time options
numa: bool
num_ctx: int
num_batch: int
num_gqa: int
num_gpu: int
main_gpu: int
low_vram: bool
f16_kv: bool
logits_all: bool
vocab_only: bool
use_mmap: bool
use_mlock: bool
embedding_only: bool
rope_frequency_base: float
rope_frequency_scale: float
num_thread: int
# runtime options
num_keep: int
seed: int
num_predict: int
top_k: int
top_p: float
tfs_z: float
typical_p: float
repeat_last_n: int
temperature: float
repeat_penalty: float
presence_penalty: float
frequency_penalty: float
mirostat: int
mirostat_tau: float
mirostat_eta: float
penalize_newline: bool
stop: Sequence[str]
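# Illustrative sketch (editor's addition): Options is declared with
# total=False, so any subset of fields may be supplied, e.g.
#   opts: Options = {'temperature': 0.2, 'num_ctx': 4096, 'stop': ['\n']}
# The particular values above are arbitrary examples.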
class RequestError(Exception):
"""
Common class for request errors.
"""
def __init__(self, error: str):
super().__init__(error)
self.error = error
'Reason for the error.'
class ResponseError(Exception):
"""
Common class for response errors.
"""
def __init__(self, error: str, status_code: int = -1):
try:
# try to parse content as JSON and extract 'error'
# fallback to raw content if JSON parsing fails
error = json.loads(error).get<fim_suffix>('error', error)
except json.JSONDecodeError:
...
super().__init__(error)
self.error = error
'Reason for the error.'
self.status_code = status_code
'HTTP status code of the response.'
<fim_middle> | null | STATEMENT | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
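# Note: _encode_image resolves its argument in order: an existing filesystem
# path is read and base64-encoded, a value that already decodes as valid
# base64 is passed through as-is, a bytes/BytesIO payload is encoded, and
# anything else raises RequestError.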
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
  if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
  split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
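# Illustrative sketch, not part of the original module: _parse_host backs both
# clients, so pointing OLLAMA_HOST at a remote server redirects every request.
# The endpoint below is an example value, not a required address.
if __name__ == '__main__':
  os.environ.setdefault('OLLAMA_HOST', 'https://ollama.example.com:56789')
  print(_parse_host(os.getenv('OLLAMA_HOST')))  # -> https://ollama.example.com:56789
  client = Client()  # BaseClient derives its base_url from the same setting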
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates a httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"<fim_suffix>""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
        raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
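  # Illustrative usage sketch (assumes a running server and a pulled 'llama2'
  # model; not part of the library):
  #
  #   import asyncio
  #
  #   async def main():
  #       stream = await AsyncClient().chat(
  #           model='llama2',
  #           messages=[{'role': 'user', 'content': 'Why is the sky blue?'}],
  #           stream=True,
  #       )
  #       async for part in stream:
  #           print(part['message']['content'], end='', flush=True)
  #
  #   asyncio.run(main())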
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
  if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | BLOCK_COMMENT | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
    Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
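# Illustrative sketch (values are examples only): Client and AsyncClient accept
# the same constructor arguments and forward extra kwargs to httpx. Custom
# headers are kept, but Content-Type, Accept, and User-Agent are always set.
#
#   client = Client(host='http://localhost:11434', timeout=30)
#   client = Client(headers={'Authorization': 'Bearer <token>'})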
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
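  # Illustrative usage sketch (model name is an example, not part of the
  # library): non-streaming returns one mapping, streaming yields partials.
  #
  #   client = Client()
  #   print(client.generate(model='llama2', prompt='Why is the sky blue?')['response'])
  #   for part in client.generate(model='llama2', prompt='Why is the sky blue?', stream=True):
  #       print(part['response'], end='', flush=True)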
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
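  # Illustrative usage sketch (model name is an example):
  #
  #   for part in Client().chat(
  #       model='llama2',
  #       messages=[{'role': 'user', 'content': 'Why is the sky blue?'}],
  #       stream=True,
  #   ):
  #       print(part['message']['content'], end='', flush=True)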
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
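  # Illustrative sketch: despite the Sequence[float] annotation, the body
  # returns the endpoint's JSON mapping, which carries the vector under
  # 'embedding' (model name is an example).
  #
  #   vector = Client().embeddings(model='llama2', prompt='The sky is blue')['embedding']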
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
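  # Illustrative sketch: a streaming pull yields progress records; 'status' is
  # always present, and 'completed'/'total' appear while layers download
  # (exact keys depend on the server version).
  #
  #   for progress in Client().pull('llama2', stream=True):
  #       print(progress.get('status'), progress.get('completed'), progress.get('total'))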
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
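  # Illustrative sketch: create a model from a Modelfile on disk or from an
  # inline Modelfile string (model name and contents are examples).
  #
  #   Client().create(model='mario', modelfile='FROM llama2\nSYSTEM You are Mario.')
  #   Client().create(model='mario', path='./Modelfile')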
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
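  # Illustrative async sketch (model name is an example):
  #
  #   import asyncio
  #
  #   async def main():
  #       result = await AsyncClient().generate(model='llama2', prompt='Why is the sky blue?')
  #       print(result['response'])
  #
  #   asyncio.run(main())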
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"<fim_suffix>""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
        raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
  if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | BLOCK_COMMENT | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
    Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
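  # Design note: wrapping the async generator in inner() makes _stream itself a
  # coroutine, so _request_stream can uniformly `await` either branch:
  # awaiting _stream hands back the generator without consuming it, while
  # awaiting _request hands back the response.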
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
        raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
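  # Illustrative async sketch (model name is an example; run inside an async
  # function):
  #
  #   vector = (await AsyncClient().embeddings(model='llama2', prompt='hello'))['embedding']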
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
""<fim_suffix>"
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
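  # Design note: the file is read twice on purpose. The first pass computes the
  # sha256 digest so the HEAD request can skip re-uploading a blob the server
  # already has; only on a 404 is upload_bytes() streamed as the request body,
  # keeping memory bounded at one 32 KiB chunk.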
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
  if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | BLOCK_COMMENT | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
    Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"<fim_suffix>""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
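    """
    Generate an embedding for `prompt` using the requested model.
    Raises `ResponseError` if the request could not be fulfilled.
    """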
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Pull a model from the model registry.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Push a model to the model registry.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a model from a Modelfile, supplied either as a file path (`path`) or as its contents (`modelfile`).
Raises `RequestError` if neither `path` nor `modelfile` is provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
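# Hash the file in 32 KiB chunks so large model blobs are never loaded fully into memory.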
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
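# Hedged usage sketch, not part of the published package. Assumes a reachable
# ollama server at the default host and an already-pulled model; the model
# name 'llama2' is purely illustrative.
def _example_sync_usage() -> None:
  client = Client()
  # With stream=False the call returns one mapping holding the full response.
  result = client.generate(model='llama2', prompt='Why is the sky blue?')
  print(result['response'])
  # With stream=True the call returns an iterator of partial mappings.
  for part in client.chat(
    model='llama2',
    messages=[{'role': 'user', 'content': 'Say hello.'}],
    stream=True,
  ):
    print(part['message']['content'], end='', flush=True)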
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
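    """
    Generate an embedding for `prompt` using the requested model.
    Raises `ResponseError` if the request could not be fulfilled.
    """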
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Pull a model from the model registry.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Push a model to the model registry.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a model from a Modelfile, supplied either as a file path (`path`) or as its contents (`modelfile`).
Raises `RequestError` if neither `path` nor `modelfile` is provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
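# Note: the file reads below are blocking I/O inside an async generator; each chunk is handed to httpx for a streaming upload.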
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
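# Hedged usage sketch, not part of the published package. Mirrors the
# synchronous example above; 'llama2' and the default local host are
# illustrative. With stream=True the awaited call resolves to an async
# iterator, hence `async for part in await client.chat(...)`.
async def _example_async_usage() -> None:
  client = AsyncClient()
  async for part in await client.chat(
    model='llama2',
    messages=[{'role': 'user', 'content': 'Say hello.'}],
    stream=True,
  ):
    print(part['message']['content'], end='', flush=True)
# One way to drive the coroutine: asyncio.run(_example_async_usage())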
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
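# Hedged addition, not part of the published package: run the doctests above
# with the standard doctest module. The LICENSE-based examples assume a
# LICENSE file in the current working directory and will fail elsewhere.
if __name__ == '__main__':
  import doctest
  doctest.testmod()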
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
""<fim_suffix>"
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
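# Keyword arguments not consumed above (e.g. httpx's `auth`, `verify`, or
# `proxies`) are forwarded to the underlying client untouched. A hypothetical
# construction, assuming a server at the given address:
#
#   client = Client(host='http://127.0.0.1:11434', timeout=30,
#                   headers={'X-Request-Id': 'demo'})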
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
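        # The body of a streamed response is not loaded yet; read it so the
        # error text is available when raising.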
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
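    # Rewrite FROM/ADAPTER arguments that point at local files into
    # '@<sha256 digest>' blob references (uploading each blob first);
    # every other Modelfile line passes through unchanged.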
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
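    # Content-addressed upload: hash the file in 32 KiB chunks, probe the
    # server with HEAD /api/blobs/<digest>, and POST the bytes only on a 404.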
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
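# A minimal usage sketch for the synchronous client. The model name is a
# placeholder, not something this library ships, and a reachable Ollama
# server is assumed.
def _demo_sync_usage() -> None:
  client = Client()
  # Non-streaming: a single response mapping comes back.
  reply = client.chat(model='example-model', messages=[{'role': 'user', 'content': 'hi'}])
  print(reply['message']['content'])
  # Streaming: an iterator of partial mappings comes back instead.
  for part in client.generate(model='example-model', prompt='hi', stream=True):
    print(part.get('response', ''), end='')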
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
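    # Wrapping the body in `inner` keeps this method a plain coroutine:
    # callers first await it, then iterate the async generator it returns.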
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
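      # Re-read the file as an async generator so the request body is
      # streamed chunk by chunk instead of buffered in memory.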
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
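# The asynchronous client mirrors the API above. Note the await-then-iterate
# shape for streaming: the coroutine resolves to an async generator. The model
# name is again a placeholder and a running server is assumed; drive it with
# asyncio.run(_demo_async_usage()).
async def _demo_async_usage() -> None:
  client = AsyncClient()
  stream = await client.chat(
    model='example-model',
    messages=[{'role': 'user', 'content': 'hi'}],
    stream=True,
  )
  async for part in stream:
    print(part['message']['content'], end='')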
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | BLOCK_COMMENT | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates a httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
""<fim_suffix>"
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of strings')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
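    """
    Rewrite `FROM` and `ADAPTER` lines in a Modelfile, replacing local paths
    that exist with `@<digest>` references to uploaded blobs; every other line
    passes through unchanged.
    """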
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
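    """
    Compute the SHA-256 digest of the file at `path`, probe the server with a
    HEAD request on `/api/blobs/<digest>`, and upload the file contents as
    needed. Returns the digest string.
    """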
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
  if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
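
# A minimal usage sketch for the asynchronous client above. It assumes an
# Ollama server is reachable at the default host and that 'llama2' (an
# illustrative name) is a locally available model:
#
#   import asyncio
#
#   async def main():
#       response = await AsyncClient().chat(
#           model='llama2',
#           messages=[{'role': 'user', 'content': 'Why is the sky blue?'}],
#       )
#       print(response['message']['content'])
#
#   asyncio.run(main())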
<fim_middle> | null | BLOCK_COMMENT | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
    Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
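    """
    Embed `prompt` with the requested model.
    Raises `ResponseError` if the request could not be fulfilled.
    Returns the parsed JSON body of the `/api/embeddings` response.
    """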
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
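    """
    Rewrite `FROM` and `ADAPTER` lines in a Modelfile, replacing local paths
    that exist with `@<digest>` references to uploaded blobs; every other line
    passes through unchanged.
    """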
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
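    """
    Compute the SHA-256 digest of the file at `path`, probe the server with a
    HEAD request on `/api/blobs/<digest>`, and upload the file contents as
    needed. Returns the digest string.
    """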
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
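
# A minimal usage sketch for the synchronous Client above. It assumes an
# Ollama server is reachable at the default host and that 'llama2' (an
# illustrative name) is a locally available model:
#
#   client = Client()
#   for part in client.generate(model='llama2', prompt='Why is the sky blue?', stream=True):
#       print(part['response'], end='', flush=True)
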
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
t<fim_suffix>ry:
r.raise_for_status()
except httpx.HTTPStatusError as e:
          await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
        raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
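    """
    Embed `prompt` with the requested model.
    Raises `ResponseError` if the request could not be fulfilled.
    Returns the parsed JSON body of the `/api/embeddings` response.
    """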
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
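    """
    Rewrite `FROM` and `ADAPTER` lines in a Modelfile, replacing local paths
    that exist with `@<digest>` references to uploaded blobs; every other line
    passes through unchanged.
    """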
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
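    """
    Compute the SHA-256 digest of the file at `path`, probe the server with a
    HEAD request on `/api/blobs/<digest>`, and upload the file contents as
    needed. Returns the digest string.
    """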
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
  if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
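
# A minimal streaming sketch for AsyncClient (assumes a reachable Ollama
# server; 'llama2' is an illustrative model name). With `stream=True`, `chat`
# resolves to an async generator of partial responses:
#
#   import asyncio
#
#   async def main():
#       stream = await AsyncClient().chat(
#           model='llama2',
#           messages=[{'role': 'user', 'content': 'Why is the sky blue?'}],
#           stream=True,
#       )
#       async for part in stream:
#           print(part['message']['content'], end='', flush=True)
#
#   asyncio.run(main())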
<fim_middle> | null | TRY | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
    Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
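    """
    Embed `prompt` with the requested model.
    Raises `ResponseError` if the request could not be fulfilled.
    Returns the parsed JSON body of the `/api/embeddings` response.
    """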
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
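    """
    Rewrite `FROM` and `ADAPTER` lines in a Modelfile, replacing local paths
    that exist with `@<digest>` references to uploaded blobs; every other line
    passes through unchanged.
    """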
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
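    """
    Compute the SHA-256 digest of the file at `path`, probe the server with a
    HEAD request on `/api/blobs/<digest>`, and upload the file contents as
    needed. Returns the digest string.
    """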
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
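
# A minimal usage sketch for the synchronous Client above (assumes a reachable
# Ollama server; 'llama2' is an illustrative model name):
#
#   client = Client()
#   response = client.chat(
#       model='llama2',
#       messages=[{'role': 'user', 'content': 'Why is the sky blue?'}],
#   )
#   print(response['message']['content'])
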
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
          await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
        raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
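    """
    Embed `prompt` with the requested model.
    Raises `ResponseError` if the request could not be fulfilled.
    Returns the parsed JSON body of the `/api/embeddings` response.
    """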
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
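    """
    Rewrite `FROM` and `ADAPTER` lines in a Modelfile, replacing local paths
    that exist with `@<digest>` references to uploaded blobs; every other line
    passes through unchanged.
    """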
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
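    """
    Compute the SHA-256 digest of the file at `path`, probe the server with a
    HEAD request on `/api/blobs/<digest>`, and upload the file contents as
    needed. Returns the digest string.
    """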
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try<fim_suffix>:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
  if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
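
# A minimal usage sketch for AsyncClient (assumes a reachable Ollama server;
# 'llama2' is an illustrative model name):
#
#   import asyncio
#
#   async def main():
#       response = await AsyncClient().generate(model='llama2', prompt='Why is the sky blue?')
#       print(response['response'])
#
#   asyncio.run(main())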
<fim_middle> | null | TRY | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
    Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
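    """
    Embed `prompt` with the requested model.
    Raises `ResponseError` if the request could not be fulfilled.
    Returns the parsed JSON body of the `/api/embeddings` response.
    """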
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
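    """
    Rewrite `FROM` and `ADAPTER` lines in a Modelfile, replacing local paths
    that exist with `@<digest>` references to uploaded blobs; every other line
    passes through unchanged.
    """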
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
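    """
    Compute the SHA-256 digest of the file at `path`, probe the server with a
    HEAD request on `/api/blobs/<digest>`, and upload the file contents as
    needed. Returns the digest string.
    """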
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
tr<fim_suffix>y:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
        raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
  if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | TRY | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
    Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
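    # Each iterated line is a single NDJSON object; the shape below is
    # illustrative, not a schema guarantee:
    #   {"model": "llama2", "response": "Hi", "done": false}
    # A line carrying an "error" key is raised as ResponseError rather
    # than yielded.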
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
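  # Minimal usage sketch (assumes a reachable server and a pulled model
  # named 'llama2'; neither is guaranteed by this client):
  #
  #   result = Client().generate(model='llama2', prompt='Why is the sky blue?')
  #   print(result['response'])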
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try<fim_suffix>:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
        raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
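  # Hedged async sketch; 'llama2' is an assumed model name. With
  # stream=True the coroutine resolves to an async iterator, hence the
  # `async for ... in await` pattern:
  #
  #   import asyncio
  #
  #   async def main():
  #     async for part in await AsyncClient().chat(
  #       model='llama2',
  #       messages=[{'role': 'user', 'content': 'Hi'}],
  #       stream=True,
  #     ):
  #       print(part['message']['content'], end='')
  #
  #   asyncio.run(main())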
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
  if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | TRY | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
    Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try<fim_suffix>:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
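  # Worked example of the rewrite above (digest shortened for illustration):
  # a line 'FROM ./model.bin' whose file exists becomes 'FROM @sha256:9f86d0...',
  # while non-FROM/ADAPTER lines such as 'PARAMETER temperature 1' pass
  # through unchanged.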
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
        raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
    Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
  if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | TRY | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_types.py<fim_prefix>import json
from typing import Any, TypedDict, Sequence, Literal
import sys
if sys.version_info < (3, 11):
from typing_extensions import NotRequired
else:
from typing import NotRequired
class BaseGenerateResponse(TypedDict):
model: str
'Model used to generate response.'
created_at: str
'Time when the request was created.'
done: bool
'True if response is complete, otherwise False. Useful for streaming to detect the final response.'
total_duration: int
'Total duration in nanoseconds.'
load_duration: int
'Load duration in nanoseconds.'
prompt_eval_count: int
'Number of tokens evaluated in the prompt.'
prompt_eval_duration: int
'Duration of evaluating the prompt in nanoseconds.'
eval_count: int
'Number of tokens evaluated in inference.'
eval_duration: int
'Duration of evaluating inference in nanoseconds.'
class GenerateResponse(BaseGenerateResponse):
"""
Response returned by generate requests.
"""
response: str
'Response content. When streaming, this contains a fragment of the response.'
context: Sequence[int]
'Tokenized history up to the point of the response.'
class Message(TypedDict):
"""
Chat message.
"""
role: Literal['user', 'assistant', 'system']
"Assumed role of the message. Response messages always has role 'assistant'."
content: str
'Content of the message. Response messages contain message fragments when streaming.'
images: NotRequired[Sequence[Any]]
"""
Optional list of image data for multimodal models.
Valid input types are:
- `str` or path-like object: path to image file
- `bytes` or bytes-like object: raw image data
Valid image formats depend on the model. See the model card for more information.
"""
class ChatResponse(BaseGenerateResponse):
"""
Response returned by chat requests.
"""
message: Message
'Response message.'
class ProgressResponse(TypedDict):
status: str
completed: int
total: int
digest: str
class Options(TypedDict, total=False):
# load time options
numa: bool
num_ctx: int
num_batch: int
num_gqa: int
num_gpu: int
main_gpu: int
low_vram: bool
f16_kv: bool
logits_all: bool
vocab_only: bool
use_mmap: bool
use_mlock: bool
embedding_only: bool
rope_frequency_base: float
rope_frequency_scale: float
num_thread: int
# runtime options
num_keep: int
seed: int
num_predict: int
top_k: int
top_p: float
tfs_z: float
typical_p: float
repeat_last_n: int
temperature: float
repeat_penalty: float
presence_penalty: float
frequency_penalty: float
mirostat: int
mirostat_tau: float
mirostat_eta: float
penalize_newline: bool
stop: Sequence[str]
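# Options is declared with total=False, so any subset of fields is valid.
# Illustrative example (field values here are arbitrary, not recommended defaults):
#
#   opts: Options = {'temperature': 0.2, 'num_ctx': 4096, 'stop': ['\n\n']}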
class RequestError(Exception):
"""
Common class for request errors.
"""
def __init__(self, error: str):
super().__init__(error)
self.error = error
'Reason for the error.'
class ResponseError(Exception):
"""
Common class for response errors.
"""
def __init__(self, error: str, status_code: int = -1):
t<fim_suffix>ry:
# try to parse content as JSON and extract 'error'
# fallback to raw content if JSON parsing fails
error = json.loads(error).get('error', error)
except json.JSONDecodeError:
...
super().__init__(error)
self.error = error
'Reason for the error.'
self.status_code = status_code
'HTTP status code of the response.'
<fim_middle> | null | TRY | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
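# A minimal usage sketch (assumptions: an Ollama server is reachable at the
# default host, and a model such as 'llama2' has already been pulled):
#
#   client = Client()
#   reply = client.chat(model='llama2', messages=[{'role': 'user', 'content': 'Hi'}])
#   print(reply['message']['content'])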
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
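# Async counterpart of the sketch above (same assumptions apply):
#
#   import asyncio
#
#   async def main():
#       reply = await AsyncClient().chat(
#           model='llama2',
#           messages=[{'role': 'user', 'content': 'Hi'}],
#       )
#       print(reply['message']['content'])
#
#   asyncio.run(main())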
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try<fim_suffix>:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
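# Behaviour of the two coercion helpers above, for reference:
#
#   _as_path('no/such/path') is None      # the path does not exist
#   _as_bytesio(b'abc').read() == b'abc'  # raw bytes are wrapped in BytesIO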
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | TRY | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_types.py<fim_prefix>import json
from typing import Any, TypedDict, Sequence, Literal
import sys
if sys.version_info < (3, 11):
from typing_extensions import NotRequired
else:
from typing import NotRequired
class BaseGenerateResponse(TypedDict):
model: str
'Model used to generate response.'
created_at: str
'Time when the request was created.'
done: bool
'True if response is complete, otherwise False. Useful for streaming to detect the final response.'
total_duration: int
'Total duration in nanoseconds.'
load_duration: int
'Load duration in nanoseconds.'
prompt_eval_count: int
'Number of tokens evaluated in the prompt.'
prompt_eval_duration: int
'Duration of evaluating the prompt in nanoseconds.'
eval_count: int
'Number of tokens evaluated in inference.'
eval_duration: int
'Duration of evaluating inference in nanoseconds.'
class GenerateResponse(BaseGenerateResponse):
"""
Response returned by generate requests.
"""
response: str
'Response content. When streaming, this contains a fragment of the response.'
context: Sequence[int]
'Tokenized history up to the point of the response.'
class Message(TypedDict):
"""
Chat message.
"""
role: Literal['user', 'assistant', 'system']
"Assumed role of the message. Response messages always has role 'assistant'."
content: str
'Content of the message. Response messages contain message fragments when streaming.'
images: NotRequired[Sequence[Any]]
"""
Optional list of image data for multimodal models.
Valid input types are:
- `str` or path-like object: path to image file
- `bytes` or bytes-like object: raw image data
Valid image formats depend on the model. See the model card for more information.
"""
class ChatResponse(BaseGenerateResponse):
"""
Response returned by chat requests.
"""
message: Message
'Response message.'
class ProgressResponse(TypedDict):
status: str
completed: int
total: int
digest: str
class Options(TypedDict, total=False):
# load time options
numa: bool
num_ctx: int
num_batch: int
num_gqa: int
num_gpu: int
main_gpu: int
low_vram: bool
f16_kv: bool
logits_all: bool
vocab_only: bool
use_mmap: bool
use_mlock: bool
embedding_only: bool
rope_frequency_base: float
rope_frequency_scale: float
num_thread: int
# runtime options
num_keep: int
seed: int
num_predict: int
top_k: int
top_p: float
tfs_z: float
typical_p: float
repeat_last_n: int
temperature: float
repeat_penalty: float
presence_penalty: float
frequency_penalty: float
mirostat: int
mirostat_tau: float
mirostat_eta: float
penalize_newline: bool
stop: Sequence[str]
class RequestError(Exception):
"""
Common class for request errors.
"""
def __init__(self, error: str):
super().__init__(error)
self.error = error
'Reason for the error.'
class ResponseError(Exception):
"""
Common class for response errors.
"""
def __init__(self, error: str, status_code: int = -1):
try:
# try to parse content as JSON and<fim_suffix> extract 'error'
# fallback to raw content if JSON parsing fails
error = json.loads(error).get('error', error)
except json.JSONDecodeError:
...
super().__init__(error)
self.error = error
'Reason for the error.'
self.status_code = status_code
'HTTP status code of the response.'
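# Illustrative constructor behaviour, following the parsing above:
#
#   ResponseError('{"error": "model not found"}', 404).error  # 'model not found'
#   ResponseError('plain text', 500).error                    # 'plain text'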
<fim_middle> | null | LINE_COMMENT | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_types.py<fim_prefix>import json
from typing import Any, TypedDict, Sequence, Literal
import sys
if sys.version_info < (3, 11):
from typing_extensions import NotRequired
else:
from typing import NotRequired
class BaseGenerateResponse(TypedDict):
model: str
'Model used to generate response.'
created_at: str
'Time when the request was created.'
done: bool
'True if response is complete, otherwise False. Useful for streaming to detect the final response.'
total_duration: int
'Total duration in nanoseconds.'
load_duration: int
'Load duration in nanoseconds.'
prompt_eval_count: int
'Number of tokens evaluated in the prompt.'
prompt_eval_duration: int
'Duration of evaluating the prompt in nanoseconds.'
eval_count: int
'Number of tokens evaluated in inference.'
eval_duration: int
'Duration of evaluating inference in nanoseconds.'
class GenerateResponse(BaseGenerateResponse):
"""
Response returned by generate requests.
"""
response: str
'Response content. When streaming, this contains a fragment of the response.'
context: Sequence[int]
'Tokenized history up to the point of the response.'
class Message(TypedDict):
"""
Chat message.
"""
role: Literal['user', 'assistant', 'system']
"Assumed role of the message. Response messages always has role 'assistant'."
content: str
'Content of the message. Response messages contain message fragments when streaming.'
images: NotRequired[Sequence[Any]]
"""
Optional list of image data for multimodal models.
Valid input types are:
- `str` or path-like object: path to image file
- `bytes` or bytes-like object: raw image data
Valid image formats depend on the model. See the model card for more information.
"""
class ChatResponse(BaseGenerateResponse):
"""
Response returned by chat requests.
"""
message: Message
'Response message.'
class ProgressResponse(TypedDict):
status: str
completed: int
total: int
digest: str
class Options(TypedDict, total=False):
# load time options
numa: bool
num_ctx: int
num_batch: int
num_gqa: int
num_gpu: int
main_gpu: int
low_vram: bool
f16_kv: bool
logits_all: bool
vocab_only: bool
use_mmap: bool
use_mlock: bool
embedding_only: bool
rope_frequency_base: float
rope_frequency_scale: float
num_thread: int
# runtime options
num_keep: int
seed: int
num_predict: int
top_k: int
top_p: float
tfs_z: float
typical_p: float
repeat_last_n: int
temperature: float
repeat_penalty: float
presence_penalty: float
frequency_penalty: float
mirostat: int
mirostat_tau: float
mirostat_eta: float
penalize_newline: bool
stop: Sequence[str]
class RequestError(Exception):
"""
Common class for request errors.
"""
def __init__(self, error: str):
super().__init__(error)
self.error = error
'Reason for the error.'
class ResponseError(Exception):
"""
Common class for response errors.
"""
def __init__(self, error: str, status_code: int = -1):
try:
# try to parse content as JSON and extract 'error'
# fallbac<fim_suffix>k to raw content if JSON parsing fails
error = json.loads(error).get('error', error)
except json.JSONDecodeError:
...
super().__init__(error)
self.error = error
'Reason for the error.'
self.status_code = status_code
'HTTP status code of the response.'
<fim_middle> | null | LINE_COMMENT | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
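# Streaming sketch (illustrative; 'llama2' is an assumed model name). With
# stream=True, generate() and chat() return a generator of partial responses,
# and the final chunk reports done == True:
#
#   for part in Client().generate(model='llama2', prompt='Why is the sky blue?', stream=True):
#       print(part['response'], end='', flush=True)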
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
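# Async streaming sketch (illustrative; same assumptions as above). Note the
# await: with stream=True the coroutine resolves to an async generator:
#
#   async def main():
#       async for part in await AsyncClient().chat(
#           model='llama2',
#           messages=[{'role': 'user', 'content': 'Hi'}],
#           stream=True,
#       ):
#           print(part['message']['content'], end='', flush=True)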
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, Typ<fim_suffix>eError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | CATCH | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
exc<fim_suffix>ept ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
          await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
        raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, str) or isinstance(s, Path):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | CATCH | complete_current_header_empty_completion |
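`Client.chat` and `Client.generate` return a single mapping when `stream=False` and an iterator of partial mappings when `stream=True`; the same split applies to `pull`, `push`, and `create`, which yield `ProgressResponse` mappings. A minimal usage sketch of the synchronous client follows, assuming an Ollama server on the default host and using 'llama2' purely as a stand-in for a model that has already been pulled:

# Minimal sketch of synchronous usage. Assumptions: a server is reachable at
# the default http://127.0.0.1:11434, and 'llama2' is a placeholder for any
# locally available model.
from ollama import Client

client = Client()

# stream=False: one mapping holding the complete reply.
reply = client.chat(
    model='llama2',
    messages=[{'role': 'user', 'content': 'Why is the sky blue?'}],
)
print(reply['message']['content'])

# stream=True: an iterator of partial mappings, each carrying a content delta.
for part in client.chat(
    model='llama2',
    messages=[{'role': 'user', 'content': 'Why is the sky blue?'}],
    stream=True,
):
    print(part['message']['content'], end='', flush=True)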
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
    Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except<fim_suffix> httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
          await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
        raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, str) or isinstance(s, Path):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | CATCH | complete_current_header_empty_completion |
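`AsyncClient` exposes the same surface, but every call must be awaited, and with `stream=True` the awaited call hands back an async generator instead of a plain iterator. A minimal sketch under the same assumptions (default host, 'llama2' as a placeholder model):

# Minimal sketch of asynchronous usage; same assumptions as the synchronous
# example above.
import asyncio

from ollama import AsyncClient

async def main() -> None:
    client = AsyncClient()

    # stream=False: awaiting yields one mapping with the full response.
    result = await client.generate(model='llama2', prompt='Why is the sky blue?')
    print(result['response'])

    # stream=True: awaiting yields an async iterator of partial responses.
    async for part in await client.generate(
        model='llama2',
        prompt='Why is the sky blue?',
        stream=True,
    ):
        print(part['response'], end='', flush=True)

asyncio.run(main())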
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
    Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStat<fim_suffix>usError as e:
          await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
        raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, str) or isinstance(s, Path):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | CATCH | complete_current_header_empty_completion |
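`_create_blob` names uploads by content: it hashes the file in 32 KiB chunks so large model weights never sit in memory, probes the server with a HEAD request, and only POSTs the bytes when that probe returns 404. The digest step in isolation, as a standalone sketch (not part of the client itself):

# Standalone sketch of the content-addressed digest used by _create_blob.
from hashlib import sha256
from pathlib import Path

def blob_digest(path: Path) -> str:
    h = sha256()
    with open(path, 'rb') as f:
        # Read in 32 KiB chunks; the walrus loop stops on the empty chunk at EOF.
        while chunk := f.read(32 * 1024):
            h.update(chunk)
    return f'sha256:{h.hexdigest()}'

# e.g. blob_digest(Path('Modelfile')) -> 'sha256:<64 hex characters>'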
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as<fim_suffix> e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, str) or isinstance(s, Path):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | CATCH | complete_current_header_empty_completion |
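A hedged usage sketch for the synchronous Client defined in the row above (it assumes a running Ollama server on the default host and a pulled model named 'llama2'; both are assumptions, not shown in the source):

from ollama import Client

client = Client()
# stream=False returns a single mapping with the full message.
resp = client.chat(model='llama2', messages=[{'role': 'user', 'content': 'Hi'}])
print(resp['message']['content'])
# stream=True returns a generator of partial ChatResponse mappings.
for part in client.chat(model='llama2',
                        messages=[{'role': 'user', 'content': 'Hi'}],
                        stream=True):
    print(part['message']['content'], end='', flush=True)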
<filename>ollama-python/ollama/_types.py<fim_prefix>import json
from typing import Any, TypedDict, Sequence, Literal
import sys
if sys.version_info < (3, 11):
from typing_extensions import NotRequired
else:
from typing import NotRequired
class BaseGenerateResponse(TypedDict):
model: str
'Model used to generate response.'
created_at: str
'Time when the request was created.'
done: bool
'True if response is complete, otherwise False. Useful for streaming to detect the final response.'
total_duration: int
'Total duration in nanoseconds.'
load_duration: int
'Load duration in nanoseconds.'
prompt_eval_count: int
'Number of tokens evaluated in the prompt.'
prompt_eval_duration: int
'Duration of evaluating the prompt in nanoseconds.'
eval_count: int
'Number of tokens evaluated in inference.'
eval_duration: int
'Duration of evaluating inference in nanoseconds.'
class GenerateResponse(BaseGenerateResponse):
"""
Response returned by generate requests.
"""
response: str
'Response content. When streaming, this contains a fragment of the response.'
context: Sequence[int]
'Tokenized history up to the point of the response.'
class Message(TypedDict):
"""
Chat message.
"""
role: Literal['user', 'assistant', 'system']
"Assumed role of the message. Response messages always has role 'assistant'."
content: str
'Content of the message. Response messages contains message fragments when streaming.'
images: NotRequired[Sequence[Any]]
"""
Optional list of image data for multimodal models.
Valid input types are:
- `str` or path-like object: path to image file
- `bytes` or bytes-like object: raw image data
Valid image formats depend on the model. See the model card for more information.
"""
class ChatResponse(BaseGenerateResponse):
"""
Response returned by chat requests.
"""
message: Message
'Response message.'
class ProgressResponse(TypedDict):
status: str
completed: int
total: int
digest: str
class Options(TypedDict, total=False):
# load time options
numa: bool
num_ctx: int
num_batch: int
num_gqa: int
num_gpu: int
main_gpu: int
low_vram: bool
f16_kv: bool
logits_all: bool
vocab_only: bool
use_mmap: bool
use_mlock: bool
embedding_only: bool
rope_frequency_base: float
rope_frequency_scale: float
num_thread: int
# runtime options
num_keep: int
seed: int
num_predict: int
top_k: int
top_p: float
tfs_z: float
typical_p: float
repeat_last_n: int
temperature: float
repeat_penalty: float
presence_penalty: float
frequency_penalty: float
mirostat: int
mirostat_tau: float
mirostat_eta: float
penalize_newline: bool
stop: Sequence[str]
class RequestError(Exception):
"""
Common class for request errors.
"""
def __init__(self, error: str):
super().__init__(error)
self.error = error
'Reason for the error.'
class ResponseError(Exception):
"""
Common class for response errors.
"""
def __init__(self, error: str, status_code: int = -1):
try:
# try to parse content as JSON and extract 'error'
# fallback to raw content if JSON parsing fails
error = json.loads(error).get('error', error)
exc<fim_suffix>ept json.JSONDecodeError:
...
super().__init__(error)
self.error = error
'Reason for the error.'
self.status_code = status_code
'HTTP status code of the response.'
<fim_middle> | null | CATCH | complete_current_header_empty_completion |
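A small sketch of the `ResponseError` behavior defined above: the constructor unwraps a JSON error body when one is present and keeps the raw text otherwise (values here are illustrative):

from ollama._types import ResponseError

err = ResponseError('{"error": "model not found"}', status_code=404)
assert err.error == 'model not found'
assert err.status_code == 404
# Non-JSON bodies fall through unchanged:
assert ResponseError('plain text failure').error == 'plain text failure'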
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
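# Hash the file locally, then probe the server with a HEAD request; a 404
# from the probe means the blob is missing and is uploaded with a POST.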
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as<fim_suffix> e:
await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, str) or isinstance(s, Path):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | CATCH | complete_current_header_empty_completion |
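The asynchronous counterpart: a hedged sketch of streaming chat with the AsyncClient from the row above (again assuming a reachable server and a pulled model named 'llama2'):

import asyncio
from ollama import AsyncClient

async def main() -> None:
    # With stream=True, the awaited call returns an async generator.
    async for part in await AsyncClient().chat(
        model='llama2',
        messages=[{'role': 'user', 'content': 'Hi'}],
        stream=True,
    ):
        print(part['message']['content'], end='', flush=True)

asyncio.run(main())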
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
await e.response.aread()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
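  # Streaming sketch (model name is a placeholder; chat(..., stream=True)
  # must be awaited first to obtain the async generator):
  #
  #   async for part in await AsyncClient().chat(
  #       model='llama2',
  #       messages=[{'role': 'user', 'content': 'Hello!'}],
  #       stream=True,
  #   ):
  #       print(part['message']['content'], end='', flush=True)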
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
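  # Note: despite the Sequence[float] annotation, the parsed JSON body is
  # returned as-is; in practice the vector lives under the 'embedding' key
  # (hedged sketch, placeholder model name):
  #
  #   result = await AsyncClient().embeddings(model='llama2', prompt='ollama')
  #   vector = result['embedding']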
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
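  # Progress sketch (the field names follow the ProgressResponse shape and
  # are assumptions here):
  #
  #   async for progress in await AsyncClient().pull('llama2', stream=True):
  #       print(progress.get('status'), progress.get('completed'), progress.get('total'))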
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
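  # Sketch of creating a model from an inline modelfile (base model and
  # system prompt are placeholders):
  #
  #   modelfile = 'FROM llama2\nSYSTEM You are a terse assistant.\n'
  #   await AsyncClient().create(model='terse-llama', modelfile=modelfile)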
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
    for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
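  # FROM/ADAPTER lines that point at local files are rewritten into
  # '@sha256:<digest>' blob references (uploading each file via _create_blob)
  # so the server never needs access to the client's filesystem; all other
  # modelfile lines pass through unchanged.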
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
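  # The upload is idempotent: a HEAD probe first asks whether the server
  # already stores the blob, and only a 404 falls through to the chunked
  # POST upload.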
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
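# The resolution order above is deliberate: a value naming an existing file
# is read from disk first, then anything that already decodes as base64 is
# passed through untouched, and only then are raw bytes / BytesIO objects
# encoded.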
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
  if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
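# The doctests in this module can be exercised from the repository root
# (where the LICENSE file referenced above is assumed to live), e.g.:
#
#   python -m doctest ollama/_client.py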
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates a httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io<fim_suffix>.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
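# Usage sketch (illustrative, not part of the library): a minimal synchronous
# round trip. Assumes an Ollama server is reachable at the default host and
# that a model named 'llama2' (a placeholder) has already been pulled.
#
#   client = Client()
#   response = client.chat(
#       model='llama2',
#       messages=[{'role': 'user', 'content': 'Why is the sky blue?'}],
#   )
#   print(response['message']['content'])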
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Mapping[str, Any]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
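# Usage sketch (illustrative): streaming with the asynchronous client. With
# stream=True, _request_stream returns an async generator of partial
# responses, so the call is awaited first and the result iterated with
# `async for`. Server availability and the model name are assumptions.
#
#   import asyncio
#
#   async def main():
#       client = AsyncClient()
#       stream = await client.chat(
#           model='llama2',
#           messages=[{'role': 'user', 'content': 'Why is the sky blue?'}],
#           stream=True,
#       )
#       async for part in stream:
#           print(part['message']['content'], end='', flush=True)
#
#   asyncio.run(main())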
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | FOR | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for mes<fim_suffix>sage in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Mapping[str, Any]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
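# Usage sketch (illustrative): building a model from Modelfile text. The model
# and base names are placeholders. FROM/ADAPTER lines that point at local
# files are rewritten to blob digests by _parse_modelfile; names that are not
# existing paths (like 'llama2' here) pass through unchanged.
#
#   modelfile = '''
#   FROM llama2
#   SYSTEM You are a terse assistant.
#   '''
#   for progress in Client().create(model='my-model', modelfile=modelfile, stream=True):
#       print(progress.get('status'))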
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Mapping[str, Any]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | FOR | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Mapping[str, Any]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
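# Note on _create_blob above: the file is hashed in 32 KiB chunks so the
# server can first be probed with HEAD /api/blobs/<digest>; the body is only
# uploaded via POST when that probe 404s, which skips re-sending layers the
# server already has.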
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
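# Usage sketch (illustrative): one-shot generation with the synchronous
# client. The model name is a placeholder; with stream=False a single
# GenerateResponse mapping is returned, expected to carry the text under
# 'response'.
#
#   result = Client().generate(model='llama2', prompt='Why is the sky blue?')
#   print(result['response'])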
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for li<fim_suffix>ne in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Mapping[str, Any]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
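# Usage sketch (illustrative): embeddings with the async client. The model
# name is a placeholder; the server's JSON reply is expected to carry the
# vector under 'embedding'.
#
#   import asyncio
#
#   async def main():
#       result = await AsyncClient().embeddings(model='llama2', prompt='hello world')
#       print(len(result['embedding']))
#
#   asyncio.run(main())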
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | FOR | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
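# Rewrite FROM/ADAPTER lines that point at local files into @<digest> blob
# references; every other line passes through unchanged.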
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
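# Probe the server with HEAD; a 404 means the blob is missing and the file
# contents must be uploaded.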
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
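# The body lives in an inner generator so this coroutine can be awaited first;
# callers then iterate the async generator it returns.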
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line <fim_suffix>in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
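# Stream the file in 32 KiB chunks so large blobs need not be read fully into memory.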
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | FOR | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
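# Rewrite FROM/ADAPTER lines that point at local files into @<digest> blob
# references; every other line passes through unchanged.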
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
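# Probe the server with HEAD; a 404 means the blob is missing and the file
# contents must be uploaded.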
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
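# The body lives in an inner generator so this coroutine can be awaited first;
# callers then iterate the async generator it returns.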
asy<fim_suffix>nc def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
async def upload_bytes():
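# Stream the file in 32 KiB chunks so large blobs need not be read fully into memory.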
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | METHOD | complete_current_header_empty_completion |
<filename>ollama-python/ollama/_client.py<fim_prefix>import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
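# Rewrite FROM/ADAPTER lines that point at local files into @<digest> blob
# references; every other line passes through unchanged.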
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
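# Probe the server with HEAD; a 404 means the blob is missing and the file
# contents must be uploaded.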
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
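# The body lives in an inner generator so this coroutine can be awaited first;
# callers then iterate the async generator it returns.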
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns an asynchronous `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
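  # Illustration: _parse_modelfile rewrites local FROM/ADAPTER paths into blob
  # references the server can resolve (digest shortened for readability):
  #
  #   'FROM ./weights.gguf'  ->  'FROM @sha256:abc123...'
  #
  # Lines with other commands, and paths that do not exist locally, pass
  # through unchanged.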
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
    async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
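  # Design note: the file is hashed and streamed in 32 KiB chunks so large
  # weight files are never held in memory, and the HEAD request above lets the
  # client skip the upload when the server already has a blob with this digest
  # (a 404 is the only ResponseError that falls through to the upload).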
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
  if isinstance(s, (str, Path)):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<filename>ollama-python/ollama/_client.py
import os
import io
import json
import httpx
import binascii
import platform
import urllib.parse
from os import PathLike
from pathlib import Path
from hashlib import sha256
from base64 import b64encode, b64decode
from typing import Any, AnyStr, Union, Optional, Sequence, Mapping, Literal
import sys
if sys.version_info < (3, 9):
from typing import Iterator, AsyncIterator
else:
from collections.abc import Iterator, AsyncIterator
from importlib import metadata
try:
__version__ = metadata.version('ollama')
except metadata.PackageNotFoundError:
__version__ = '0.0.0'
from ollama._types import Message, Options, RequestError, ResponseError
class BaseClient:
def __init__(
self,
client,
host: Optional[str] = None,
follow_redirects: bool = True,
timeout: Any = None,
**kwargs,
) -> None:
"""
    Creates an httpx client. Default parameters are the same as those defined in httpx
except for the following:
- `follow_redirects`: True
- `timeout`: None
`kwargs` are passed to the httpx client.
"""
headers = kwargs.pop('headers', {})
headers['Content-Type'] = 'application/json'
headers['Accept'] = 'application/json'
headers['User-Agent'] = f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}'
self._client = client(
base_url=_parse_host(host or os.getenv('OLLAMA_HOST')),
follow_redirects=follow_redirects,
timeout=timeout,
headers=headers,
**kwargs,
)
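# Construction sketch (the host and timeout below are illustrative; when host is
# omitted, the client falls back to the OLLAMA_HOST environment variable and
# then to http://127.0.0.1:11434 via _parse_host):
#
#   client = Client(host='http://192.168.1.50:11434', timeout=30)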
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]]:
with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
for line in r.iter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
return self._stream(*args, **kwargs) if stream else self._request(*args, **kwargs).json()
def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns a `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns a `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
return self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
).json()
def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
      while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
with open(path, 'rb') as r:
self._request('POST', f'/api/blobs/{digest}', content=r)
return digest
def delete(self, model: str) -> Mapping[str, Any]:
response = self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
def list(self) -> Mapping[str, Any]:
return self._request('GET', '/api/tags').json()
def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
def show(self, model: str) -> Mapping[str, Any]:
return self._request('POST', '/api/show', json={'name': model}).json()
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request(self, method: str, url: str, **kwargs) -> httpx.Response:
response = await self._client.request(method, url, **kwargs)
try:
response.raise_for_status()
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return response
async def _stream(self, method: str, url: str, **kwargs) -> AsyncIterator[Mapping[str, Any]]:
async def inner():
async with self._client.stream(method, url, **kwargs) as r:
try:
r.raise_for_status()
except httpx.HTTPStatusError as e:
e.response.read()
raise ResponseError(e.response.text, e.response.status_code) from None
async for line in r.aiter_lines():
partial = json.loads(line)
if e := partial.get('error'):
raise ResponseError(e)
yield partial
return inner()
async def _request_stream(
self,
*args,
stream: bool = False,
**kwargs,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
if stream:
return await self._stream(*args, **kwargs)
response = await self._request(*args, **kwargs)
return response.json()
async def generate(
self,
model: str = '',
prompt: str = '',
system: str = '',
template: str = '',
context: Optional[Sequence[int]] = None,
stream: bool = False,
raw: bool = False,
format: Literal['', 'json'] = '',
images: Optional[Sequence[AnyStr]] = None,
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `GenerateResponse` if `stream` is `False`, otherwise returns an asynchronous `GenerateResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
return await self._request_stream(
'POST',
'/api/generate',
json={
'model': model,
'prompt': prompt,
'system': system,
'template': template,
'context': context or [],
'stream': stream,
'raw': raw,
'images': [_encode_image(image) for image in images or []],
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def chat(
self,
model: str = '',
messages: Optional[Sequence[Message]] = None,
stream: bool = False,
format: Literal['', 'json'] = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Create a chat response using the requested model.
Raises `RequestError` if a model is not provided.
Raises `ResponseError` if the request could not be fulfilled.
Returns `ChatResponse` if `stream` is `False`, otherwise returns an asynchronous `ChatResponse` generator.
"""
if not model:
raise RequestError('must provide a model')
for message in messages or []:
if not isinstance(message, dict):
        raise TypeError('messages must be a list of Message or dict-like objects')
if not (role := message.get('role')) or role not in ['system', 'user', 'assistant']:
raise RequestError('messages must contain a role and it must be one of "system", "user", or "assistant"')
if not message.get('content'):
raise RequestError('messages must contain content')
if images := message.get('images'):
message['images'] = [_encode_image(image) for image in images]
return await self._request_stream(
'POST',
'/api/chat',
json={
'model': model,
'messages': messages,
'stream': stream,
'format': format,
'options': options or {},
'keep_alive': keep_alive,
},
stream=stream,
)
async def embeddings(
self,
model: str = '',
prompt: str = '',
options: Optional[Options] = None,
keep_alive: Optional[Union[float, str]] = None,
) -> Sequence[float]:
response = await self._request(
'POST',
'/api/embeddings',
json={
'model': model,
'prompt': prompt,
'options': options or {},
'keep_alive': keep_alive,
},
)
return response.json()
async def pull(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/pull',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def push(
self,
model: str,
insecure: bool = False,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
return await self._request_stream(
'POST',
'/api/push',
json={
'name': model,
'insecure': insecure,
'stream': stream,
},
stream=stream,
)
async def create(
self,
model: str,
path: Optional[Union[str, PathLike]] = None,
modelfile: Optional[str] = None,
stream: bool = False,
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
"""
Raises `ResponseError` if the request could not be fulfilled.
Returns `ProgressResponse` if `stream` is `False`, otherwise returns a `ProgressResponse` generator.
"""
if (realpath := _as_path(path)) and realpath.exists():
modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
elif modelfile:
modelfile = await self._parse_modelfile(modelfile)
else:
raise RequestError('must provide either path or modelfile')
return await self._request_stream(
'POST',
'/api/create',
json={
'name': model,
'modelfile': modelfile,
'stream': stream,
},
stream=stream,
)
async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
base = Path.cwd() if base is None else base
out = io.StringIO()
for line in io.StringIO(modelfile):
command, _, args = line.partition(' ')
if command.upper() not in ['FROM', 'ADAPTER']:
print(line, end='', file=out)
continue
path = Path(args.strip()).expanduser()
path = path if path.is_absolute() else base / path
if path.exists():
args = f'@{await self._create_blob(path)}\n'
print(command, args, end='', file=out)
return out.getvalue()
async def _create_blob(self, path: Union[str, Path]) -> str:
sha256sum = sha256()
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
sha256sum.update(chunk)
digest = f'sha256:{sha256sum.hexdigest()}'
try:
await self._request('HEAD', f'/api/blobs/{digest}')
except ResponseError as e:
if e.status_code != 404:
raise
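    # Stream the upload in 32 KiB chunks so large model blobs never have to fit in memory.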
async def upload_bytes():
with open(path, 'rb') as r:
while True:
chunk = r.read(32 * 1024)
if not chunk:
break
yield chunk
await self._request('POST', f'/api/blobs/{digest}', content=upload_bytes())
return digest
async def delete(self, model: str) -> Mapping[str, Any]:
response = await self._request('DELETE', '/api/delete', json={'name': model})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def list(self) -> Mapping[str, Any]:
response = await self._request('GET', '/api/tags')
return response.json()
async def copy(self, source: str, destination: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/copy', json={'source': source, 'destination': destination})
return {'status': 'success' if response.status_code == 200 else 'error'}
async def show(self, model: str) -> Mapping[str, Any]:
response = await self._request('POST', '/api/show', json={'name': model})
return response.json()
def _encode_image(image) -> str:
"""
>>> _encode_image(b'ollama')
'b2xsYW1h'
>>> _encode_image(io.BytesIO(b'ollama'))
'b2xsYW1h'
>>> _encode_image('LICENSE')
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image(Path('LICENSE'))
'TUlUIExpY2Vuc2UKCkNvcHlyaWdodCAoYykgT2xsYW1hCgpQZXJtaXNzaW9uIGlzIGhlcmVieSBncmFudGVkLCBmcmVlIG9mIGNoYXJnZSwgdG8gYW55IHBlcnNvbiBvYnRhaW5pbmcgYSBjb3B5Cm9mIHRoaXMgc29mdHdhcmUgYW5kIGFzc29jaWF0ZWQgZG9jdW1lbnRhdGlvbiBmaWxlcyAodGhlICJTb2Z0d2FyZSIpLCB0byBkZWFsCmluIHRoZSBTb2Z0d2FyZSB3aXRob3V0IHJlc3RyaWN0aW9uLCBpbmNsdWRpbmcgd2l0aG91dCBsaW1pdGF0aW9uIHRoZSByaWdodHMKdG8gdXNlLCBjb3B5LCBtb2RpZnksIG1lcmdlLCBwdWJsaXNoLCBkaXN0cmlidXRlLCBzdWJsaWNlbnNlLCBhbmQvb3Igc2VsbApjb3BpZXMgb2YgdGhlIFNvZnR3YXJlLCBhbmQgdG8gcGVybWl0IHBlcnNvbnMgdG8gd2hvbSB0aGUgU29mdHdhcmUgaXMKZnVybmlzaGVkIHRvIGRvIHNvLCBzdWJqZWN0IHRvIHRoZSBmb2xsb3dpbmcgY29uZGl0aW9uczoKClRoZSBhYm92ZSBjb3B5cmlnaHQgbm90aWNlIGFuZCB0aGlzIHBlcm1pc3Npb24gbm90aWNlIHNoYWxsIGJlIGluY2x1ZGVkIGluIGFsbApjb3BpZXMgb3Igc3Vic3RhbnRpYWwgcG9ydGlvbnMgb2YgdGhlIFNvZnR3YXJlLgoKVEhFIFNPRlRXQVJFIElTIFBST1ZJREVEICJBUyBJUyIsIFdJVEhPVVQgV0FSUkFOVFkgT0YgQU5ZIEtJTkQsIEVYUFJFU1MgT1IKSU1QTElFRCwgSU5DTFVESU5HIEJVVCBOT1QgTElNSVRFRCBUTyBUSEUgV0FSUkFOVElFUyBPRiBNRVJDSEFOVEFCSUxJVFksCkZJVE5FU1MgRk9SIEEgUEFSVElDVUxBUiBQVVJQT1NFIEFORCBOT05JTkZSSU5HRU1FTlQuIElOIE5PIEVWRU5UIFNIQUxMIFRIRQpBVVRIT1JTIE9SIENPUFlSSUdIVCBIT0xERVJTIEJFIExJQUJMRSBGT1IgQU5ZIENMQUlNLCBEQU1BR0VTIE9SIE9USEVSCkxJQUJJTElUWSwgV0hFVEhFUiBJTiBBTiBBQ1RJT04gT0YgQ09OVFJBQ1QsIFRPUlQgT1IgT1RIRVJXSVNFLCBBUklTSU5HIEZST00sCk9VVCBPRiBPUiBJTiBDT05ORUNUSU9OIFdJVEggVEhFIFNPRlRXQVJFIE9SIFRIRSBVU0UgT1IgT1RIRVIgREVBTElOR1MgSU4gVEhFClNPRlRXQVJFLgo='
>>> _encode_image('YWJj')
'YWJj'
>>> _encode_image(b'YWJj')
'YWJj'
"""
if p := _as_path(image):
return b64encode(p.read_bytes()).decode('utf-8')
try:
b64decode(image, validate=True)
return image if isinstance(image, str) else image.decode('utf-8')
except (binascii.Error, TypeError):
...
if b := _as_bytesio(image):
return b64encode(b.read()).decode('utf-8')
raise RequestError('image must be bytes, path-like object, or file-like object')
def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]:
if isinstance(s, str) or isinstance(s, Path):
try:
if (p := Path(s)).exists():
return p
except Exception:
...
return None
def _as_bytesio(s: Any) -> Union[io.BytesIO, None]:
if isinstance(s, io.BytesIO):
return s
elif isinstance(s, bytes):
return io.BytesIO(s)
return None
def _parse_host(host: Optional[str]) -> str:
"""
>>> _parse_host(None)
'http://127.0.0.1:11434'
>>> _parse_host('')
'http://127.0.0.1:11434'
>>> _parse_host('1.2.3.4')
'http://1.2.3.4:11434'
>>> _parse_host(':56789')
'http://127.0.0.1:56789'
>>> _parse_host('1.2.3.4:56789')
'http://1.2.3.4:56789'
>>> _parse_host('http://1.2.3.4')
'http://1.2.3.4:80'
>>> _parse_host('https://1.2.3.4')
'https://1.2.3.4:443'
>>> _parse_host('https://1.2.3.4:56789')
'https://1.2.3.4:56789'
>>> _parse_host('example.com')
'http://example.com:11434'
>>> _parse_host('example.com:56789')
'http://example.com:56789'
>>> _parse_host('http://example.com')
'http://example.com:80'
>>> _parse_host('https://example.com')
'https://example.com:443'
>>> _parse_host('https://example.com:56789')
'https://example.com:56789'
>>> _parse_host('example.com/')
'http://example.com:11434'
>>> _parse_host('example.com:56789/')
'http://example.com:56789'
"""
host, port = host or '', 11434
scheme, _, hostport = host.partition('://')
if not hostport:
scheme, hostport = 'http', host
elif scheme == 'http':
port = 80
elif scheme == 'https':
port = 443
split = urllib.parse.urlsplit('://'.join([scheme, hostport]))
host = split.hostname or '127.0.0.1'
port = split.port or port
return f'{scheme}://{host}:{port}'
<fim_middle> | null | WHILE | complete_current_header_empty_completion |
<filename>open-iris/src/iris/nodes/iris_response/conv_filter_bank.py<fim_prefix>from typing import List, Tuple
import numpy as np
from pydantic import root_validator, validator
from iris.io.class_configs import Algorithm
from iris.io.dataclasses import IrisFilterResponse, NormalizedIris
from iris.io.validators import are_lengths_equal, is_not_empty
from iris.nodes.iris_response.image_filters.gabor_filters import GaborFilter
from iris.nodes.iris_response.image_filters.image_filter_interface import ImageFilter
from iris.nodes.iris_response.probe_schemas.probe_schema_interface import ProbeSchema
from iris.nodes.iris_response.probe_schemas.regular_probe_schema import RegularProbeSchema
def polar_img_padding(img: np.ndarray, p_rows: int, p_cols: int) -> np.ndarray:
"""Apply zero-padding vertically and rotate-padding horizontally to a normalized image in polar coordinates.
Args:
img (np.ndarray): normalized image in polar coordinates.
p_rows (int): padding size on top and bottom.
p_cols (int): padding size on left and right.
Returns:
np.ndarray: padded image.
"""
i_rows, i_cols = img.shape
padded_image = np.zeros((i_rows + 2 * p_rows, i_cols + 2 * p_cols))
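    # Place the image at the center; the horizontal borders are then wrapped with columns
    # from the opposite side so the angular axis stays periodic.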
padded_image[p_rows : i_rows + p_rows, p_cols : i_cols + p_cols] = img
padded_image[p_rows : i_rows + p_rows, 0:p_cols] = img[:, -p_cols:]
padded_image[p_rows : i_rows + p_rows, -p_cols:] = img[:, 0:p_cols]
return padded_image
class ConvFilterBank(Algorithm):
"""Apply filter bank.
Algorithm steps:
1) Obtain filters and corresponding probe schemas.
2) Apply convolution to a given pair of normalized iris image using the filters and probe schemas.
3) Generate the iris response and corresponding mask response.
"""
class Parameters(Algorithm.Parameters):
"""Default ConvFilterBank parameters."""
filters: List[ImageFilter]
probe_schemas: List[ProbeSchema]
# Validators
_are_lengths_equal = root_validator(pre=True, allow_reuse=True)(are_lengths_equal("probe_schemas", "filters"))
_is_not_empty = validator("*", allow_reuse=True)(is_not_empty)
__parameters_type__ = Parameters
def __init__(
self,
filters: List[ImageFilter] = [
GaborFilter(
kernel_size=(41, 21),
sigma_phi=7,
sigma_rho=6.13,
theta_degrees=90.0,
lambda_phi=28,
dc_correction=True,
to_fixpoints=True,
),
GaborFilter(
kernel_size=(17, 21),
sigma_phi=2,
sigma_rho=5.86,
theta_degrees=90.0,
lambda_phi=8,
dc_correction=True,
to_fixpoints=True,
),
],
probe_schemas: List[ProbeSchema] = [
RegularProbeSchema(n_rows=16, n_cols=256),
RegularProbeSchema(n_rows=16, n_cols=256),
],
) -> None:
"""Assign parameters.
Args:
filters (List[ImageFilter]): List of image filters.
probe_schemas (List[ProbeSchema]): List of corresponding probe schemas.
"""
super().__init__(filters=filters, probe_schemas=probe_schemas)
def run(self, normalization_output: NormalizedIris) -> IrisFilterResponse:
"""Apply filters to a normalized iris image.
Args:
normalization_output (NormalizedIris): Output of the normalization process.
Returns:
IrisFilterResponse: filter responses.
"""
iris_responses: List[np.ndarray] = []
mask_responses: List[np.ndarray] = []
for i_filter, i_schema in zip(self.params.filters, self.params.probe_schemas):
iris_response, mask_response = self._convolve(i_filter, i_schema, normalization_output)
iris_responses.append(iris_response)
mask_responses.append(mask_response)
return IrisFilterResponse(iris_responses=iris_responses, mask_responses=mask_responses)
def _convolve(
self, img_filter: ImageFilter, probe_schema: ProbeSchema, normalization_output: NormalizedIris
) -> Tuple[np.ndarray, np.ndarray]:
"""Apply convolution to a given norma<fim_suffix>lized iris image with the filter and probe schema.
Args:
img_filter (ImageFilter): filter used for convolution.
probe_schema (ProbeSchema): probe schema used for convolution.
normalization_output (NormalizedIris): Output of the normalization process.
Returns:
Tuple[np.ndarray, np.ndarray]: iris response and mask response.
"""
i_rows, i_cols = normalization_output.normalized_image.shape
k_rows, k_cols = img_filter.kernel_values.shape
p_rows = k_rows // 2
p_cols = k_cols // 2
iris_response = np.zeros((probe_schema.params.n_rows, probe_schema.params.n_cols), dtype=np.complex64)
mask_response = np.zeros((probe_schema.params.n_rows, probe_schema.params.n_cols))
padded_iris = polar_img_padding(normalization_output.normalized_image, 0, p_cols)
padded_mask = polar_img_padding(normalization_output.normalized_mask, 0, p_cols)
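        # Probe positions are normalised (rho, phi) in [0, 1]; scale them to pixel indices, clipped to the image bounds.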
for i in range(probe_schema.params.n_rows):
for j in range(probe_schema.params.n_cols):
# Convert probe_schema position to integer pixel position.
pos = i * probe_schema.params.n_cols + j
r_probe = min(round(probe_schema.rhos[pos] * i_rows), i_rows - 1)
c_probe = min(round(probe_schema.phis[pos] * i_cols), i_cols - 1)
# Get patch from image centered at [i,j] probed pixel position.
rtop = max(0, r_probe - p_rows)
rbot = min(r_probe + p_rows + 1, i_rows - 1)
iris_patch = padded_iris[rtop:rbot, c_probe : c_probe + k_cols]
mask_patch = padded_mask[rtop:rbot, c_probe : c_probe + k_cols]
# Perform convolution at [i,j] probed pixel position.
ktop = p_rows - iris_patch.shape[0] // 2
iris_response[i][j] = (
(iris_patch * img_filter.kernel_values[ktop : ktop + iris_patch.shape[0], :]).sum()
/ iris_patch.shape[0]
/ k_cols
)
mask_response[i][j] = (
0 if iris_response[i][j] == 0 else (mask_patch.sum() / iris_patch.shape[0] / k_cols)
)
return iris_response, mask_response
<fim_middle> | null | BLOCK_COMMENT | complete_current_header_empty_completion |
<filename>open-iris/src/iris/nodes/geometry_refinement/contour_points_filter.py<fim_prefix>import cv2
import numpy as np
from pydantic import Field
from iris.io.class_configs import Algorithm
from iris.io.dataclasses import GeometryPolygons, NoiseMask
class ContourPointNoiseEyeballDistanceFilter(Algorithm):
"""Implementation of point filtering algorithm that removes points which are to close to eyeball or noise.
The role of this algorithm is to create a buffer around the pupil and iris polygons. This accounts for
potential segmentation imprecisions, making the overall pipeline more robust against edge cases and out-of-distribution images.
    The buffer width is computed relative to the iris diameter: `min_distance_to_noise_and_eyeball * iris_diameter`
    The triggers for this buffer are the eyeball boundary and the noise (e.g. eyelashes, specular reflection, etc.).
"""
class Parameters(Algorithm.Parameters):
"""Default ContourPointToNoiseEyeballDistanceFilter parameters."""
min_distance_to_noise_and_eyeball: float = Field(..., gt=0.0, lt=1.0)
__parameters_type__ = Parameters
def __init__(self, min_distance_to_noise_and_eyeball: float = 0.005) -> None:
"""Assign parameters.
Args:
            min_distance_to_noise_and_eyeball (float, optional): Minimum distance to eyeball or noise expressed as a fraction of iris diameter length. Defaults to 0.005.
"""
super().__init__(min_distance_to_noise_and_eyeball=min_distance_to_noise_and_eyeball)
def run(self, polygons: GeometryPolygons, geometry_mask: NoiseMask) -> GeometryPolygons:
"""Perform polygon refinement by filtering out those iris/p<fim_suffix>upil polygons points which are to close to eyeball or noise.
Args:
polygons (GeometryPolygons): Polygons to refine.
geometry_mask (NoiseMask): Geometry noise mask.
Returns:
GeometryPolygons: Refined geometry polygons.
"""
noise_and_eyeball_polygon_points_mask = geometry_mask.mask.copy()
for eyeball_point in np.round(polygons.eyeball_array).astype(int):
x, y = eyeball_point
noise_and_eyeball_polygon_points_mask[y, x] = True
min_dist_to_noise_and_eyeball_in_px = round(
self.params.min_distance_to_noise_and_eyeball * polygons.iris_diameter
)
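        # Box-blurring the binary mask with a (2k+1)-wide kernel marks every pixel
        # within k pixels of a noise or eyeball point, yielding the forbidden-touch buffer.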
forbidden_touch_map = cv2.blur(
noise_and_eyeball_polygon_points_mask.astype(float),
ksize=(
2 * min_dist_to_noise_and_eyeball_in_px + 1,
2 * min_dist_to_noise_and_eyeball_in_px + 1,
),
)
forbidden_touch_map = forbidden_touch_map.astype(bool)
return GeometryPolygons(
pupil_array=self._filter_polygon_points(forbidden_touch_map, polygons.pupil_array),
iris_array=self._filter_polygon_points(forbidden_touch_map, polygons.iris_array),
eyeball_array=polygons.eyeball_array,
)
def _filter_polygon_points(self, forbidden_touch_map: np.ndarray, polygon_points: np.ndarray) -> np.ndarray:
"""Filter polygon's points.
Args:
            forbidden_touch_map (np.ndarray): Forbidden touch map. If the value of an element is greater than 0, that point is too close to noise or the eyeball.
polygon_points (np.ndarray): Polygon's points.
Returns:
np.ndarray: Filtered polygon's points.
"""
valid_points = [not forbidden_touch_map[y, x] for x, y in np.round(polygon_points).astype(int)]
return polygon_points[valid_points]
<fim_middle> | null | BLOCK_COMMENT | complete_current_header_empty_completion |
<filename>open-iris/src/iris/nodes/eye_properties_estimation/iris_bbox_calculator.py<fim_prefix>from numbers import Number
from typing import Tuple, Union
import numpy as np
from pydantic import validator
from iris.io.class_configs import Algorithm
from iris.io.dataclasses import BoundingBox, GeometryPolygons, IRImage
from iris.io.errors import BoundingBoxEstimationError
from iris.io.validators import are_all_positive
class IrisBBoxCalculator(Algorithm):
"""Calculate the smallest bounding box around the iris polygon, cropped or not, padded or not."""
class Parameters(Algorithm.Parameters):
"""Parameters of the iris bounding box calculator."""
buffer: Union[int, float, Tuple[Number, Number]]
crop: bool
_are_all_positive = validator("buffer", allow_reuse=True)(are_all_positive)
__parameters_type__ = Parameters
def __init__(self, buffer: Union[int, float, Tuple[Number, Number]] = 0, crop: bool = False) -> None:
"""Assign parameter<fim_suffix>s.
        WARNING: Depending on whether `buffer` is a float or an int, the behaviour differs.
        `buffer=2.0` => the iris size will be multiplied by 2.0; `buffer=2` => 2 pixels of padding will be added.
Args:
buffer (Union[int, float, Tuple[Number, Number]], optional): Iris buffer in pixels.
if `int`, the bounding box will be padded by `buffer` pixels in each direction.
if `float`, the bounding box' height and width will be multiplied by `buffer`.
if `Tuple[int]`, the bounding box will be padded by `buffer[0]` pixels in the x direction
(left and right) and `buffer[1]` pixels in the y direction (top and bottom).
if `Tuple[float]`, the bounding box width will be multiplied by `buffer[0]` and height by `buffer[1]`.
crop (bool, optional): If True, the bounding box will be cropped to the shape of the input IR Image. Defaults to False.
"""
super().__init__(buffer=buffer, crop=crop)
def run(self, ir_image: IRImage, geometry_polygons: GeometryPolygons) -> BoundingBox:
"""Compute the bounding box around the iris with an additional buffer. Works best on extrapolated polygons.
The buffer's behaviour is explained in the constructor's docstring.
        If `crop` is True, the bounding box is cropped to the shape of the input IR Image.
Args:
ir_image (IRImage): IR image.
geometry_polygons (GeometryPolygons): polygons, from which the iris polygon (respectively the image shape) used to compute the bounding box (resp. crop the bounding box).
Returns:
BoundingBox: Estimated iris bounding box.
"""
iris_polygon = geometry_polygons.iris_array
image_height, image_width = (ir_image.height, ir_image.width)
buffer = (
(self.params.buffer, self.params.buffer)
if isinstance(self.params.buffer, (int, float))
else self.params.buffer
)
original_x_min: float = np.min(iris_polygon[:, 0])
original_x_max: float = np.max(iris_polygon[:, 0])
original_y_min: float = np.min(iris_polygon[:, 1])
original_y_max: float = np.max(iris_polygon[:, 1])
if original_x_max == original_x_min or original_y_max == original_y_min:
raise BoundingBoxEstimationError(
f"Iris bounding box empty. x_min={original_x_min}, x_max={original_x_max}, "
f"y_min={original_y_min}, y_max={original_y_max}"
)
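        # An int buffer pads by a fixed pixel count; a float buffer scales the box size,
        # with the extra (buffer - 1) fraction split evenly across both sides.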
if isinstance(buffer[0], int):
padded_x_min = original_x_min - buffer[0]
padded_x_max = original_x_max + buffer[0]
else:
bbox_width = original_x_max - original_x_min
padded_x_min = original_x_min - bbox_width * (buffer[0] - 1) / 2
padded_x_max = original_x_max + bbox_width * (buffer[0] - 1) / 2
if isinstance(buffer[1], int):
padded_y_min = original_y_min - buffer[1]
padded_y_max = original_y_max + buffer[1]
else:
bbox_height = original_y_max - original_y_min
padded_y_min = original_y_min - bbox_height * (buffer[1] - 1) / 2
padded_y_max = original_y_max + bbox_height * (buffer[1] - 1) / 2
if self.params.crop:
padded_x_min = max(padded_x_min, 0)
padded_x_max = min(padded_x_max, image_width)
padded_y_min = max(padded_y_min, 0)
padded_y_max = min(padded_y_max, image_height)
return BoundingBox(x_min=padded_x_min, x_max=padded_x_max, y_min=padded_y_min, y_max=padded_y_max)
<fim_middle> | null | BLOCK_COMMENT | complete_current_header_empty_completion |
<filename>open-iris/src/iris/nodes/normalization/common.py<fim_prefix>from typing import Tuple
import numpy as np
from pydantic import NonNegativeInt
from iris.io.dataclasses import GeometryPolygons
from iris.utils import common
def generate_iris_mask(extrapolated_contours: GeometryPolygons, noise_mask: np.ndarray) -> np.ndarray:
"""Generate iris mask by first finding the intersection region between extrapolated iris contours and eyeball contours. Then remove from the outputted mask those pixels for which noise_mask is equal to True.
Args:
extrapolated_contours (GeometryPolygons): Iris polygon vertices.
noise_mask (np.ndarray): Noise mask.
Returns:
np.ndarray: Iris mask.
"""
img_h, img_w = noise_mask.shape[:2]
iris_mask = common.contour_to_mask(extrapolated_contours.iris_array, (img_w, img_h))
eyeball_mask = common.contour_to_mask(extrapolated_contours.eyeball_array, (img_w, img_h))
iris_mask = iris_mask & eyeball_mask
iris_mask = ~(iris_mask & noise_mask) & iris_mask
return iris_mask
def correct_orientation(
pupil_points: np.ndarray, iris_points: np.ndarray, eye_orientation: float
) -> Tuple[np.ndarray, np.ndarray]:
"""Correct orientation by changing the starting angle in pupil and iris points' arrays.
Args:
pupil_points (np.ndarray): Pupil boundary points' array. NumPy array of shape (num_points = 512, xy_coords = 2).
iris_points (np.ndarray): Iris boundary points' array. NumPy array of shape (num_points = 512, xy_coords = 2).
eye_orientation (float): Eye orientation angle in radians.
Returns:
        Tuple[np.ndarray, np.ndarray]: Tuple of boundary points (pupil_points, iris_points) rotated according to the eye_orientation angle.
"""
orientation_angle = np.degrees(eye_orientation)
num_rotations = -round(orientation_angle * len(pupil_points) / 360.0)
pupil_points = np.roll(pupil_points, num_rotations, axis=0)
iris_points = np.roll(iris_points, num_rotations, axis=0)
return pupil_points, iris_points
def getgrids(res_in_r: NonNegativeInt, p2i_ratio: NonNegativeInt) -> np.ndarray:
"""Generate radius grids for nonlinear normalization based on p2i_ratio (pupil_to_iris ratio).
Args:
res_in_r (NonNegativeInt): Normalized image r resolution.
p2i_ratio (NonNegativeInt): pupil_to_iris ratio, range in [0,100]
Returns:
np.ndarray: nonlinear sampling grids for normalization
"""
p = [np.square(x) for x in np.arange(28, max(74 - p2i_ratio, p2i_ratio - 14), 1)]
q = p - p[0]
q = q / q[-1]
grids = np.interp(np.linspace(0, 1.0, res_in_r + 1), np.linspace(0, 1.0, len(q)), q)
return grids[0:-1] + np.diff(grids) / 2
def interpolate_pixel_intensity(image: np.ndarray, pixel_coords: Tuple[float, float]) -> float:
"""Perform bilinear interpolation to estimate pixel intensity in a given location.
Args:
image (np.ndarray): Original, not normalized image.
pixel_coords (Tuple[float, float]): Pixel coordinates.
Returns:
float: Interpolated pixel intensity.
Reference:
[1] https://en.wikipedia.org/wiki/Bilinear_interpolation
"""
def get_pixel_intensity(image: np.ndarray, pixel_x: float, pixel_y: float) -> float:
"""Get the intensity value of a pixel from an intensity image.
Args:
image (np.ndarray): Intensity image.
pixel_x (float): Pixel x coordinate.
pixel_y (float): Pixel y coordinate.
Returns:
float: Pixel value.
"""
try:
return image[int(pixel_y), int(pixel_x)]
except IndexError:
return 0.0
def get_interpolation_points_coords(
image: np.ndarray, pixel_x: float, pixel_y: float
) -> Tuple[float, float, float, float]:
"""E<fim_suffix>xtract interpolation points coordinates.
Args:
image (np.ndarray): Original, not normalized image.
pixel_x (float): Pixel x coordinate.
pixel_y (float): Pixel y coordinate.
Returns:
            Tuple[float, float, float, float]: Tuple with interpolation points coordinates in a format (xmin, ymin, xmax, ymax).
        """
"""
xmin, ymin = np.floor(pixel_x), np.floor(pixel_y)
xmax, ymax = np.ceil(pixel_x), np.ceil(pixel_y)
img_h, img_w = image.shape[:2]
if xmin == xmax and not xmax == img_w - 1:
xmax += 1
if xmin == xmax and xmax == img_w - 1:
xmin -= 1
if ymin == ymax and not ymax == img_h - 1:
ymax += 1
if ymin == ymax and ymax == img_h - 1:
ymin -= 1
return xmin, ymin, xmax, ymax
pixel_x, pixel_y = pixel_coords
xmin, ymin, xmax, ymax = get_interpolation_points_coords(image, pixel_x=pixel_x, pixel_y=pixel_y)
lower_left_pixel_intensity = get_pixel_intensity(image, pixel_x=xmin, pixel_y=ymax)
lower_right_pixel_intensity = get_pixel_intensity(image, pixel_x=xmax, pixel_y=ymax)
upper_left_pixel_intensity = get_pixel_intensity(image, pixel_x=xmin, pixel_y=ymin)
upper_right_pixel_intensity = get_pixel_intensity(image, pixel_x=xmax, pixel_y=ymin)
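    # Standard bilinear interpolation as a 1x2 @ 2x2 @ 2x1 matrix product over the four
    # neighbouring intensities (unit pixel spacing, so no normalisation factor is needed).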
xs_differences = np.array([xmax - pixel_x, pixel_x - xmin])
neighboring_pixel_intensities = np.array(
[
[lower_left_pixel_intensity, upper_left_pixel_intensity],
[lower_right_pixel_intensity, upper_right_pixel_intensity],
]
)
ys_differences = np.array([[pixel_y - ymin], [ymax - pixel_y]])
pixel_intensity = np.matmul(np.matmul(xs_differences, neighboring_pixel_intensities), ys_differences)
return pixel_intensity.item()
<fim_middle> | null | BLOCK_COMMENT | complete_current_header_empty_completion |
<filename>open-iris/src/iris/callbacks/pipeline_trace.py<fim_prefix>from __future__ import annotations
from typing import Any, Dict, Iterable, List, Optional
from iris.callbacks.callback_interface import Callback
from iris.io.class_configs import Algorithm
from iris.orchestration.pipeline_dataclasses import PipelineNode
class PipelineCallTraceStorageError(Exception):
"""PipelineCallTraceStorage error class."""
pass
class PipelineCallTraceStorage:
"""A storage object for pipeline input, intermediate and final results."""
INPUT_KEY_NAME = "input"
ERROR_KEY_NAME = "error"
def __init__(self, results_names: Iterable[str]) -> None:
"""Assign parameters.
Args:
            results_names (Iterable[str]): Names of the results; these become the available keys in the storage.
"""
self._storage = self._init_storage(results_names)
def __getitem__(self, result_name: str) -> Any:
"""Get result_name result.
Args:
result_name (str): Result name.
Raises:
PipelineCallTraceStorageError: Raised if result_name is not found.
Returns:
Any: Result object.
"""
return self.get(result_name)
def __len__(self) -> int:
"""Get storage capacity.
Returns:
            int: Storage capacity.
"""
return len(self._storage.keys())
def get(self, result_name: str) -> Any:
"""Get result_name result.
Args:
result_name (str): Result name.
Raises:
PipelineCallTraceStorageError: Raised if result_name is not found.
Returns:
Any: Result object.
"""
if result_name not in self._storage.keys():
raise PipelineCallTraceStorageError(f"Unknown result name: {result_name}")
return self._storage[result_name]
def get_input(self) -> Any:
"""Return pipeline input.
Returns:
Any: Input to pipeline.
"""
return self.get(PipelineCallTraceStorage.INPUT_KEY_NAME)
def get_error(self) -> Optional[Exception]:
"""Retu<fim_suffix>rn stored error.
Returns:
Optional[Exception]: error.
"""
return self.get(PipelineCallTraceStorage.ERROR_KEY_NAME)
def write(self, result_name: str, result: Any) -> None:
"""Write a result to a storage saved under the name `result_name`.
Args:
result_name (str): Result name.
result (Any): Result reference to save.
"""
self._storage[result_name] = result
def write_input(self, in_value: Any) -> None:
"""Save `in_value` in storage.
Args:
in_value (Any): Input value.
"""
self._storage[PipelineCallTraceStorage.INPUT_KEY_NAME] = in_value
def write_error(self, error: Exception) -> None:
"""Save `error` in storage.
Args:
error (Exception): error to store.
"""
self._storage[PipelineCallTraceStorage.ERROR_KEY_NAME] = error
def clean(self) -> None:
"""Clean storage by setting all result references to None."""
for result_name in self._storage.keys():
self._storage[result_name] = None
def _init_storage(self, results_names: Iterable[str]) -> Dict[str, None]:
"""Initialize storage (dict) with proper names and None values as results.
Args:
results_names (Iterable[str]): Result names.
Returns:
Dict[str, None]: Storage dictionary.
"""
storage = {name: None for name in results_names}
storage[PipelineCallTraceStorage.INPUT_KEY_NAME] = None
storage[PipelineCallTraceStorage.ERROR_KEY_NAME] = None
return storage
@staticmethod
def initialise(nodes: Dict[str, Algorithm], pipeline_nodes: List[PipelineNode]) -> PipelineCallTraceStorage:
"""Instantiate mechanisms for intermediate results tracing.
Args:
            nodes (Dict[str, Algorithm]): Mapping between node names and the corresponding instantiated nodes.
pipeline_nodes (List[PipelineNode]): List of nodes as declared in the input config. Not used in this function.
Returns:
PipelineCallTraceStorage: Pipeline intermediate and final results storage.
"""
call_trace = PipelineCallTraceStorage(results_names=nodes.keys())
for algorithm_name, algorithm_object in nodes.items():
algorithm_object._callbacks.append(NodeResultsWriter(call_trace, algorithm_name))
return call_trace
class NodeResultsWriter(Callback):
"""A node call results writer Callback class."""
def __init__(self, trace_storage_reference: PipelineCallTraceStorage, result_name: str) -> None:
"""Assign parameters.
Args:
trace_storage_reference (PipelineCallTraceStorage): Storage object reference to write.
result_name (str): Result name under which result should be written.
"""
self._trace_storage_reference = trace_storage_reference
self._result_name = result_name
def on_execute_end(self, result: Any) -> None:
"""Write on node execution end.
Args:
result (Any): Result of node call.
"""
self._trace_storage_reference.write(self._result_name, result)
<fim_middle> | null | BLOCK_COMMENT | complete_current_header_empty_completion |
<filename>open-iris/src/iris/nodes/validators/cross_object_validators.py<fim_prefix>import numpy as np
from pydantic import Field
from iris.io.class_configs import Algorithm
from iris.io.dataclasses import EyeCenters, GeometryPolygons, IRImage
from iris.io.errors import ExtrapolatedPolygonsInsideImageValidatorError, EyeCentersInsideImageValidatorError
class EyeCentersInsideImageValidator(Algorithm):
"""Validate that the eye center are not too close to the border.
Raises:
EyeCentersInsideImageValidatorError: If pupil or iris center are strictly less than `min_distance_to_border`
pixel of the image boundary.
"""
class Parameters(Algorithm.Parameters):
"""Parameters class for EyeCentersInsideImageValidator objects."""
min_distance_to_border: float
__parameters_type__ = Parameters
def __init__(self, min_distance_to_border: float = 0.0) -> None:
"""Assign par<fim_suffix>ameters.
Args:
min_distance_to_border (float, optional): Minimum allowed distance to image boundary.
Defaults to 0.0 (Eye centers can be at the image border).
"""
super().__init__(min_distance_to_border=min_distance_to_border)
def run(self, ir_image: IRImage, eye_centers: EyeCenters) -> None:
"""Validate if eye centers are within proper image boundaries.
Args:
ir_image (IRImage): IR image
eye_centers (EyeCenters): Eye centers
Raises:
EyeCentersInsideImageValidatorError: Raised if pupil or iris center is not in within correct image boundary.
"""
if not self._check_center_valid(eye_centers.pupil_x, eye_centers.pupil_y, ir_image):
raise EyeCentersInsideImageValidatorError("Pupil center is not in allowed image boundary.")
if not self._check_center_valid(eye_centers.iris_x, eye_centers.iris_y, ir_image):
raise EyeCentersInsideImageValidatorError("Iris center is not in allowed image boundary.")
def _check_center_valid(self, center_x: float, center_y: float, ir_image: IRImage) -> bool:
"""Check if center point is within proper image bound.
Args:
center_x (float): Center x
center_y (float): Center y
ir_image (IRImage): IR image object
Returns:
bool: Result of the check.
"""
return (
self.params.min_distance_to_border <= center_x <= ir_image.width - self.params.min_distance_to_border
and self.params.min_distance_to_border <= center_y <= ir_image.height - self.params.min_distance_to_border
)
class ExtrapolatedPolygonsInsideImageValidator(Algorithm):
"""Validate that GeometryPolygons are included within the image to a certain minimum percentage.
Raises:
ExtrapolatedPolygonsInsideImageValidatorError: If the number of points of the pupil/iris/eyeball
that are within the input image is below threshold.
"""
class Parameters(Algorithm.Parameters):
"""Parameters class for ExtrapolatedPolygonsInsideImageValidator objects."""
min_pupil_allowed_percentage: float = Field(..., ge=0.0, le=1.0)
min_iris_allowed_percentage: float = Field(..., ge=0.0, le=1.0)
min_eyeball_allowed_percentage: float = Field(..., ge=0.0, le=1.0)
__parameters_type__ = Parameters
def __init__(
self,
min_pupil_allowed_percentage: float = 0.0,
min_iris_allowed_percentage: float = 0.0,
min_eyeball_allowed_percentage: float = 0.0,
) -> None:
"""Assign parameters.
Args:
min_pupil_allowed_percentage (float, optional): Minimum allowed percentage of extrapolated pupil polygons that must be within an image.
Defaults to 0.0 (Entire extrapolated polygon may be outside of an image).
min_iris_allowed_percentage (float, optional): Minimum allowed percentage of extrapolated iris polygons that must be within an image.
Defaults to 0.0 (Entire extrapolated polygon may be outside of an image).
min_eyeball_allowed_percentage (float, optional): Minimum allowed percentage of extrapolated eyeball polygons that must be within an image.
Defaults to 0.0 (Entire extrapolated polygon may be outside of an image).
"""
super().__init__(
min_pupil_allowed_percentage=min_pupil_allowed_percentage,
min_iris_allowed_percentage=min_iris_allowed_percentage,
min_eyeball_allowed_percentage=min_eyeball_allowed_percentage,
)
def run(self, ir_image: IRImage, extrapolated_polygons: GeometryPolygons) -> None:
"""Perform validation.
Args:
ir_image (IRImage): IR image.
extrapolated_polygons (GeometryPolygons): Extrapolated polygons.
Raises:
ExtrapolatedPolygonsInsideImageValidatorError: Raised if not enough points of the pupil/iris/eyeball are within an image.
"""
if not self._check_correct_percentage(
extrapolated_polygons.pupil_array, self.params.min_pupil_allowed_percentage, ir_image
):
raise ExtrapolatedPolygonsInsideImageValidatorError("Not enough pupil points are within an image.")
if not self._check_correct_percentage(
extrapolated_polygons.iris_array, self.params.min_iris_allowed_percentage, ir_image
):
raise ExtrapolatedPolygonsInsideImageValidatorError("Not enough iris points are within an image.")
if not self._check_correct_percentage(
extrapolated_polygons.eyeball_array, self.params.min_eyeball_allowed_percentage, ir_image
):
raise ExtrapolatedPolygonsInsideImageValidatorError("Not enough eyeball points are within an image.")
def _check_correct_percentage(self, polygon: np.ndarray, min_allowed_percentage: float, ir_image: IRImage) -> bool:
"""Check percentage of points withing image based on minimal specified threshold.
Args:
polygon (np.ndarray): polygon to verify.
min_allowed_percentage (float): minimal allowed percentage of points that must be within an image.
ir_image (IRImage): ir image object.
Returns:
bool: Check result.
"""
num_points_inside_image: float = np.sum(
np.all(np.logical_and((0, 0) <= polygon, polygon <= (ir_image.width, ir_image.height)), axis=1)
)
percentage_points_inside_image = num_points_inside_image / len(polygon)
return percentage_points_inside_image >= min_allowed_percentage
<fim_middle> | null | BLOCK_COMMENT | complete_current_header_empty_completion |
<filename>open-iris/src/iris/callbacks/pipeline_trace.py<fim_prefix>from __future__ import annotations
from typing import Any, Dict, Iterable, List, Optional
from iris.callbacks.callback_interface import Callback
from iris.io.class_configs import Algorithm
from iris.orchestration.pipeline_dataclasses import PipelineNode
class PipelineCallTraceStorageError(Exception):
"""PipelineCallTraceStorage error class."""
pass
class PipelineCallTraceStorage:
"""A storage object for pipeline input, intermediate and final results."""
INPUT_KEY_NAME = "input"
ERROR_KEY_NAME = "error"
def __init__(self, results_names: Iterable[str]) -> None:
"""Assign parameters.
Args:
            results_names (Iterable[str]): Names of the results; these become the available keys in the storage.
"""
self._storage = self._init_storage(results_names)
def __getitem__(self, result_name: str) -> Any:
"""Get result_name result.
Args:
result_name (str): Result name.
Raises:
PipelineCallTraceStorageError: Raised if result_name is not found.
Returns:
Any: Result object.
"""
return self.get(result_name)
def __len__(self) -> int:
"""Get storage capacity.
Returns:
            int: Storage capacity.
"""
return len(self._storage.keys())
def get(self, result_name: str) -> Any:
"""<fim_suffix>Get result_name result.
Args:
result_name (str): Result name.
Raises:
PipelineCallTraceStorageError: Raised if result_name is not found.
Returns:
Any: Result object.
"""
if result_name not in self._storage.keys():
raise PipelineCallTraceStorageError(f"Unknown result name: {result_name}")
return self._storage[result_name]
def get_input(self) -> Any:
"""Return pipeline input.
Returns:
Any: Input to pipeline.
"""
return self.get(PipelineCallTraceStorage.INPUT_KEY_NAME)
def get_error(self) -> Optional[Exception]:
"""Return stored error.
Returns:
Optional[Exception]: error.
"""
return self.get(PipelineCallTraceStorage.ERROR_KEY_NAME)
def write(self, result_name: str, result: Any) -> None:
"""Write a result to a storage saved under the name `result_name`.
Args:
result_name (str): Result name.
result (Any): Result reference to save.
"""
self._storage[result_name] = result
def write_input(self, in_value: Any) -> None:
"""Save `in_value` in storage.
Args:
in_value (Any): Input value.
"""
self._storage[PipelineCallTraceStorage.INPUT_KEY_NAME] = in_value
def write_error(self, error: Exception) -> None:
"""Save `error` in storage.
Args:
error (Exception): error to store.
"""
self._storage[PipelineCallTraceStorage.ERROR_KEY_NAME] = error
def clean(self) -> None:
"""Clean storage by setting all result references to None."""
for result_name in self._storage.keys():
self._storage[result_name] = None
def _init_storage(self, results_names: Iterable[str]) -> Dict[str, None]:
"""Initialize storage (dict) with proper names and None values as results.
Args:
results_names (Iterable[str]): Result names.
Returns:
Dict[str, None]: Storage dictionary.
"""
storage = {name: None for name in results_names}
storage[PipelineCallTraceStorage.INPUT_KEY_NAME] = None
storage[PipelineCallTraceStorage.ERROR_KEY_NAME] = None
return storage
@staticmethod
def initialise(nodes: Dict[str, Algorithm], pipeline_nodes: List[PipelineNode]) -> PipelineCallTraceStorage:
"""Instantiate mechanisms for intermediate results tracing.
Args:
nodes (Dict[str, Algorithm]): Mapping between nodes names and the corresponding instanciated nodes.
pipeline_nodes (List[PipelineNode]): List of nodes as declared in the input config. Not used in this function.
Returns:
PipelineCallTraceStorage: Pipeline intermediate and final results storage.
"""
call_trace = PipelineCallTraceStorage(results_names=nodes.keys())
for algorithm_name, algorithm_object in nodes.items():
algorithm_object._callbacks.append(NodeResultsWriter(call_trace, algorithm_name))
return call_trace
class NodeResultsWriter(Callback):
"""A node call results writer Callback class."""
def __init__(self, trace_storage_reference: PipelineCallTraceStorage, result_name: str) -> None:
"""Assign parameters.
Args:
trace_storage_reference (PipelineCallTraceStorage): Storage object reference to write.
result_name (str): Result name under which result should be written.
"""
self._trace_storage_reference = trace_storage_reference
self._result_name = result_name
def on_execute_end(self, result: Any) -> None:
"""Write on node execution end.
Args:
result (Any): Result of node call.
"""
self._trace_storage_reference.write(self._result_name, result)
<fim_middle> | null | BLOCK_COMMENT | complete_current_header_empty_completion |
<filename>open-iris/src/iris/nodes/geometry_refinement/smoothing.py<fim_prefix>from typing import List, Tuple
import numpy as np
from pydantic import Field
from iris.io.class_configs import Algorithm
from iris.io.dataclasses import EyeCenters, GeometryPolygons
from iris.io.errors import GeometryRefinementError
from iris.utils import math
class Smoothing(Algorithm):
"""Implementation of contour smoothing algorithm.
Algorithm steps:
1) Map iris/pupil points to polar space based on estimated iris/pupil centers.
2) Smooth iris/pupil contour by applying 1D convolution with rolling median kernel approach.
3) Map points back to cartesian space from polar space.
"""
class Parameters(Algorithm.Parameters):
"""Smoothing parameters class."""
dphi: float = Field(..., gt=0.0, lt=360.0)
kernel_size: float = Field(..., gt=0.0, lt=360.0)
gap_threshold: float = Field(..., gt=0.0, lt=360.0)
__parameters_type__ = Parameters
def __init__(self, dphi: float = 1.0, kernel_size: float = 10.0, gap_threshold: float = 10.0) -> None:
"""Assign parameters.
Args:
dphi (float, optional): phi angle delta used to sample points while doing smoothing by interpolation. Defaults to 1.0.
            kernel_size (float, optional): Rolling median kernel size expressed in degrees. The final kernel size is computed as the quotient of kernel_size and dphi. Defaults to 10.0.
            gap_threshold (float, optional): Gap threshold distance expressed in degrees. Defaults to 10.0.
"""
super().__init__(dphi=dphi, kernel_size=kernel_size, gap_threshold=gap_threshold)
@property
def kernel_offset(self) -> int:
"""Kernel offset (distance from kernel center to border) property used when smoothing with rolling median. If a quotient is less then 1 then kernel size equal to 1 is returned.
Returns:
            int: Kernel offset.
"""
return max(1, int((np.radians(self.params.kernel_size) / np.radians(self.params.dphi))) // 2)
def run(self, polygons: GeometryPolygons, eye_centers: EyeCenters) -> GeometryPolygons:
"""Perform smoothing refinement.
Args:
polygons (GeometryPolygons): Contours to refine.
eye_centers (EyeCenters): Eye center used when performing a coordinates mapping from cartesian space to polar space.
Returns:
GeometryPolygons: Smoothed contours.
"""
pupil_arcs = self._smooth(polygons.pupil_array, (eye_centers.pupil_x, eye_centers.pupil_y))
iris_arcs = self._smooth(polygons.iris_array, (eye_centers.iris_x, eye_centers.iris_y))
return GeometryPolygons(pupil_array=pupil_arcs, iris_array=iris_arcs, eyeball_array=polygons.eyeball_array)
def _smooth(self, polygon: np.ndarray, center_xy: Tuple[float, float]) -> np.ndarray:
"""Smooth a single contour.
Args:
polygon (np.ndarray): Contour to smooth.
center_xy (Tuple[float, float]): Contour's center.
Returns:
np.ndarray: Smoothed contour's vertices.
"""
arcs, num_gaps = self._cut_into_arcs(polygon, center_xy)
arcs = (
self._smooth_circular_shape(arcs[0], center_xy)
if num_gaps == 0
else np.vstack([self._smooth_arc(arc, center_xy) for arc in arcs if len(arc) >= 2])
)
return arcs
def _cut_into_arcs(self, polygon: np.ndarray, center_xy: Tuple[float, float]) -> Tuple[List[np.ndarray], int]:
"""Cut co<fim_suffix>ntour into arcs.
Args:
polygon (np.ndarray): Contour polygon.
center_xy (Tuple[float, float]): Polygon's center.
Returns:
            Tuple[List[np.ndarray], int]: Tuple with (list of arc vertex arrays, number of gaps detected in the contour).
"""
rho, phi = math.cartesian2polar(polygon[:, 0], polygon[:, 1], *center_xy)
phi, rho = self._sort_two_arrays(phi, rho)
differences = np.abs(phi - np.roll(phi, -1))
# True distance between first and last point
differences[-1] = 2 * np.pi - differences[-1]
gap_indices = np.argwhere(differences > np.radians(self.params.gap_threshold)).flatten()
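        # Fewer than two gap points: keep the polygon whole; the caller chooses circular
        # vs. arc smoothing from the returned gap count.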
if gap_indices.size < 2:
return [polygon], gap_indices.size
gap_indices += 1
phi, rho = np.split(phi, gap_indices), np.split(rho, gap_indices)
arcs = [
np.column_stack(math.polar2cartesian(rho_coords, phi_coords, *center_xy))
for rho_coords, phi_coords in zip(rho, phi)
]
# Connect arc which lies between 0 and 2π.
if len(arcs) == gap_indices.size + 1:
arcs[0] = np.vstack([arcs[0], arcs[-1]])
arcs = arcs[:-1]
return arcs, gap_indices.size
def _smooth_arc(self, vertices: np.ndarray, center_xy: Tuple[float, float]) -> np.ndarray:
"""Smooth a single contour arc.
Args:
vertices (np.ndarray): Arc's vertices.
center_xy (Tuple[float, float]): Center of an entire contour.
Returns:
np.ndarray: Smoothed arc's vertices.
"""
rho, phi = math.cartesian2polar(vertices[:, 0], vertices[:, 1], *center_xy)
phi, rho = self._sort_two_arrays(phi, rho)
idx = self._find_start_index(phi)
offset = phi[idx]
relative_phi = (phi - offset) % (2 * np.pi)
smoothed_relative_phi, smoothed_rho = self._smooth_array(relative_phi, rho)
smoothed_phi = (smoothed_relative_phi + offset) % (2 * np.pi)
x_smoothed, y_smoothed = math.polar2cartesian(smoothed_rho, smoothed_phi, *center_xy)
return np.column_stack([x_smoothed, y_smoothed])
def _smooth_circular_shape(self, vertices: np.ndarray, center_xy: Tuple[float, float]) -> np.ndarray:
"""Smooth arc in a form of a circular shape.
Args:
vertices (np.ndarray): Arc's vertices.
center_xy (Tuple[float, float]): Center of an entire contour.
Returns:
np.ndarray: Smoothed arc's vertices.
"""
rho, phi = math.cartesian2polar(vertices[:, 0], vertices[:, 1], *center_xy)
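        # Tile the polar signal across three periods so the rolling median wraps seamlessly,
        # then keep only the central [0, 2*pi) window.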
padded_phi = np.concatenate([phi - 2 * np.pi, phi, phi + 2 * np.pi])
padded_rho = np.concatenate([rho, rho, rho])
smoothed_phi, smoothed_rho = self._smooth_array(padded_phi, padded_rho)
mask = (smoothed_phi >= 0) & (smoothed_phi < 2 * np.pi)
rho_smoothed, phi_smoothed = smoothed_rho[mask], smoothed_phi[mask]
x_smoothed, y_smoothed = math.polar2cartesian(rho_smoothed, phi_smoothed, *center_xy)
return np.column_stack([x_smoothed, y_smoothed])
def _smooth_array(self, phis: np.ndarray, rhos: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
"""Smooth coordinates expressed in polar space.
Args:
phis (np.ndarray): phi values.
rhos (np.ndarray): rho values.
Returns:
Tuple[np.ndarray, np.ndarray]: Tuple with smoothed coordinates (phis, rhos).
"""
interpolated_phi = np.arange(min(phis), max(phis), np.radians(self.params.dphi))
interpolated_rho = np.interp(interpolated_phi, xp=phis, fp=rhos, period=2 * np.pi)
smoothed_rho = self._rolling_median(interpolated_rho, self.kernel_offset)
smoothed_phi = interpolated_phi[self.kernel_offset : -self.kernel_offset]
return smoothed_phi, smoothed_rho
def _sort_two_arrays(self, first_list: np.ndarray, second_list: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
"""Sort both numpy arrays based on values from the first_list.
Args:
first_list (np.ndarray): First array.
second_list (np.ndarray): Second array.
Returns:
Tuple[np.ndarray, np.ndarray]: Tuple with (sorted first array, sorted second array).
"""
zipped_lists = zip(first_list, second_list)
sorted_pairs = sorted(zipped_lists)
sorted_tuples = zip(*sorted_pairs)
first_list, second_list = [list(sorted_tuple) for sorted_tuple in sorted_tuples]
return np.array(first_list), np.array(second_list)
def _find_start_index(self, phi: np.ndarray) -> int:
"""Find the start index by checking the largest gap. phi needs to be sorted.
Args:
phi (np.ndarray): phi angle values.
Raises:
GeometryRefinementError: Raised if phi values are not sorted ascendingly.
Returns:
int: Index value.
"""
if not np.all((phi - np.roll(phi, 1))[1:] >= 0):
raise GeometryRefinementError("Smoothing._find_start_index phi must be sorted ascendingly!")
phi_tmp = np.concatenate(([phi[-1] - 2 * np.pi], phi, [phi[0] + 2 * np.pi]))
phi_tmp_left_neighbor = np.roll(phi_tmp, 1)
dphi = (phi_tmp - phi_tmp_left_neighbor)[1:-1]
largest_gap_index = np.argmax(dphi)
return int(largest_gap_index)
def _rolling_median(self, signal: np.ndarray, kernel_offset: int) -> np.ndarray:
"""Compute rolling median of a 1D signal.
Args:
signal (np.ndarray): Signal values.
            kernel_offset (int): Distance from the kernel center to its border.
Raises:
GeometryRefinementError: Raised if signal is not 1D.
Returns:
np.ndarray: Rolling median result.
"""
if signal.ndim != 1:
raise GeometryRefinementError("Smoothing._rolling_median only works for 1d arrays.")
stacked_signals: List[np.ndarray] = []
for i in range(-kernel_offset, kernel_offset + 1):
stacked_signals.append(np.roll(signal, i))
stacked_signals = np.stack(stacked_signals)
rolling_median = np.median(stacked_signals, axis=0)
rolling_median = rolling_median[kernel_offset:-kernel_offset]
return rolling_median
<fim_middle> | null | BLOCK_COMMENT | complete_current_header_empty_completion |
<filename>open-iris/src/iris/nodes/eye_properties_estimation/pupil_iris_property_calculator.py<fim_prefix>from typing import List
from pydantic import Field
from iris.callbacks.callback_interface import Callback
from iris.io.class_configs import Algorithm
from iris.io.dataclasses import EyeCenters, GeometryPolygons, PupilToIrisProperty
from iris.io.errors import PupilIrisPropertyEstimationError
class PupilIrisPropertyCalculator(Algorithm):
"""Computes pupil-to-iris properties.
Algorithm steps:
(1) Calculate pupil diameter to iris diameter ratio, i.e. pupil dilation.
(2) Calculate the ratio of the pupil center to iris center distance over the iris diameter.
"""
class Parameters(Algorithm.Parameters):
"""PupilIrisPropertyCalculator parameters.
min_pupil_diameter (float): threshold of pupil diameter, below which the pupil is too small. min_pupil_diameter should be higher than 0.
min_iris_diameter (float): threshold of iris diameter, below which the iris is too small. min_iris_diameter should be higher than 0.
"""
min_pupil_diameter: float = Field(..., gt=0.0)
min_iris_diameter: float = Field(..., gt=0.0)
__parameters_type__ = Parameters
def __init__(
self,
min_pupil_diameter: float = 1.0,
min_iris_diameter: float = 150.0,
callbacks: List[Callback] = [],
) -> None:
"""Assign par<fim_suffix>ameters.
Args:
min_pupil_diameter (float): minimum pupil diameter. Defaults to 1.0.
min_iris_diameter (float): minimum iris diameter. Defaults to 150.0.
callbacks (List[Callback]): callbacks list. Defaults to [].
"""
super().__init__(
min_pupil_diameter=min_pupil_diameter,
min_iris_diameter=min_iris_diameter,
callbacks=callbacks,
)
def run(self, geometries: GeometryPolygons, eye_centers: EyeCenters) -> PupilToIrisProperty:
"""Calculate pupil-to-iris property.
Args:
            geometries (GeometryPolygons): polygons used for calculating the pupil-to-iris property.
            eye_centers (EyeCenters): eye centers used for calculating the pupil-to-iris property.
Raises:
PupilIrisPropertyEstimationError: Raised if 1) the pupil or iris diameter is too small, 2) pupil diameter is larger than or equal to iris diameter, 3) pupil center is outside iris.
Returns:
PupilToIrisProperty: pupil-ro-iris property object.
"""
iris_diameter = geometries.iris_diameter
pupil_diameter = geometries.pupil_diameter
if pupil_diameter < self.params.min_pupil_diameter:
raise PupilIrisPropertyEstimationError("Pupil diameter is too small!")
if iris_diameter < self.params.min_iris_diameter:
raise PupilIrisPropertyEstimationError("Iris diameter is too small!")
if pupil_diameter >= iris_diameter:
raise PupilIrisPropertyEstimationError("Pupil diameter is larger than/equal to Iris diameter!")
if eye_centers.center_distance * 2 >= iris_diameter:
raise PupilIrisPropertyEstimationError("Pupil center is outside iris!")
return PupilToIrisProperty(
pupil_to_iris_diameter_ratio=pupil_diameter / iris_diameter,
pupil_to_iris_center_dist_ratio=eye_centers.center_distance * 2 / iris_diameter,
)
<fim_middle> | null | BLOCK_COMMENT | complete_current_header_empty_completion |
<filename>open-iris/src/iris/nodes/vectorization/contouring.py<fim_prefix>from typing import Callable, List
import cv2
import numpy as np
from pydantic import NonNegativeFloat
from iris.io.class_configs import Algorithm
from iris.io.dataclasses import GeometryMask, GeometryPolygons
from iris.io.errors import VectorizationError
from iris.utils.math import area
def filter_polygon_areas(
polygons: List[np.ndarray], rel_tr: NonNegativeFloat = 0.03, abs_tr: NonNegativeFloat = 0.0
) -> List[np.ndarray]:
"""Filter out polygons whose area is below either an absolute threshold or a fraction of the largest area.
Args:
polygons (List[np.ndarray]): List of polygons to filter.
rel_tr (NonNegativeFloat, optional): Relative threshold. Defaults to 0.03.
abs_tr (NonNegativeFloat, optional): Absolute threshold. Defaults to 0.0.
Returns:
List[np.ndarray]: Filtered polygons' list.
"""
areas = [area(polygon) if len(polygon) > 2 else 1.0 for polygon in polygons]
area_factors = np.array(areas) / np.max(areas)
filtered_polygons = [
polygon
for area, area_factor, polygon in zip(areas, area_factors, polygons)
if area > abs_tr and area_factor > rel_tr
]
return filtered_polygons
class ContouringAlgorithm(Algorithm):
"""Implementation of a vectorization process through contouring raster image."""
class Parameters(Algorithm.Parameters):
"""Parameters class of the ContouringAlgorithm class."""
contour_filters: List[Callable[[List[np.ndarray]], List[np.ndarray]]]
__parameters_type__ = Parameters
def __init__(
self,
contour_filters: List[Callable[[List[np.ndarray]], List[np.ndarray]]] = [filter_polygon_areas],
) -> None:
"""Assig<fim_suffix>n parameters.
Args:
contour_filters (List[Callable[[List[np.ndarray]], List[np.ndarray]]], optional): List of filter functions used to filter out noise in polygons.
                Defaults to [filter_polygon_areas].
"""
super().__init__(contour_filters=contour_filters)
def run(self, geometry_mask: GeometryMask) -> GeometryPolygons:
"""Contouring vectorization algorithm implementation.
Args:
geometry_mask (GeometryMask): Geometry segmentation map.
Raises:
VectorizationError: Raised if iris region not segmented or an error occur during iris region processing.
Returns:
GeometryPolygons: Geometry polygons points.
"""
if not np.any(geometry_mask.iris_mask):
raise VectorizationError("Geometry raster verification failed.")
geometry_contours = self._find_contours(geometry_mask)
return geometry_contours
def _find_contours(self, mask: GeometryMask) -> GeometryPolygons:
"""Find raw contours for different classes in raster.
Args:
mask (GeometryMask): Raster object.
Returns:
GeometryPolygons: Raw contours indicating polygons of different classes.
"""
eyeball_array = self._find_class_contours(mask.filled_eyeball_mask.astype(np.uint8))
iris_array = self._find_class_contours(mask.filled_iris_mask.astype(np.uint8))
pupil_array = self._find_class_contours(mask.pupil_mask.astype(np.uint8))
return GeometryPolygons(pupil_array=pupil_array, iris_array=iris_array, eyeball_array=eyeball_array)
def _find_class_contours(self, binary_mask: np.ndarray) -> np.ndarray:
"""Find contour between two different contours.
Args:
binary_mask (np.ndarray): Raster object.
Raises:
VectorizationError: Raised if number of contours found is different than 1.
Returns:
np.ndarray: Contour points array.
"""
contours, hierarchy = cv2.findContours(binary_mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
if hierarchy is None:
raise VectorizationError("_find_class_contours: No contour hierarchy found at all.")
parent_indices = np.flatnonzero(hierarchy[..., 3] == -1)
contours = [np.squeeze(contours[i]) for i in parent_indices]
contours = self._filter_contours(contours)
if len(contours) != 1:
raise VectorizationError("_find_class_contours: Number of contours must be equal to 1.")
return contours[0]
def _filter_contours(self, contours: List[np.ndarray]) -> List[np.ndarray]:
"""Filter contours based on predefined filters.
Args:
contours (List[np.ndarray]): Contours list.
Returns:
List[np.ndarray]: Filtered list of contours.
"""
for filter_func in self.params.contour_filters:
contours = filter_func(contours)
return contours
<fim_middle> | null | BLOCK_COMMENT | complete_current_header_empty_completion |
<filename>open-iris/src/iris/nodes/iris_response/probe_schemas/regular_probe_schema.py<fim_prefix>from typing import List, Literal, Optional, Tuple, Union
import numpy as np
from pydantic import Field, PositiveInt, confloat, fields, validator
from iris.io.errors import ProbeSchemaError
from iris.nodes.iris_response.probe_schemas.probe_schema_interface import ProbeSchema
class RegularProbeSchema(ProbeSchema):
"""Probe Schema for a regular Grid."""
class RegularProbeSchemaParameters(ProbeSchema.ProbeSchemaParameters):
"""RegularProbeSchema parameters."""
n_rows: int = Field(..., gt=1)
n_cols: int = Field(..., gt=1)
boundary_rho: List[confloat(ge=0.0, lt=1)]
boundary_phi: Union[
Literal["periodic-symmetric", "periodic-left"],
List[confloat(ge=0.0, lt=1)],
]
image_shape: Optional[List[PositiveInt]]
@validator("boundary_rho", "boundary_phi")
def check_overlap(
cls: type,
v: Union[Literal["periodic-symmetric", "periodic-left"], List[confloat(ge=0.0, lt=1)]],
field: fields.ModelField,
) -> Union[Literal["periodic-symmetric", "periodic-left"], List[confloat(ge=0.0, lt=1)]]:
"""Validate offsets to avoid overlap.
Args:
cls (type): Class type.
v (Union[Literal["periodic-symmetric", "periodic-left"], List[confloat(ge=0.0, lt=1)]]): Value to check.
field (fields.ModelField): Field descriptor.
Raises:
            ProbeSchemaError: Raised if the two offsets together are too large (their sum must stay below 1).
Returns:
Union[Literal["periodic-symmetric", "periodic-left"], List[confloat(ge=0.0, lt=1)]]: The value for boundary_rho or boundary_phi respectively
"""
if isinstance(v, List):
if (v[0] + v[1]) >= 1:
raise ProbeSchemaError(
f"Offset for {field.name} on left and right corner must be a sum smaller 1, otherwise, offsets overlap."
)
return v
__parameters_type__ = RegularProbeSchemaParameters
def __init__(
self,
n_rows: int,
n_cols: int,
boundary_rho: List[float] = [0, 0.0625],
boundary_phi: Union[
Literal["periodic-symmetric", "periodic-left"], List[confloat(ge=0.0, lt=1)]
] = "periodic-left",
image_shape: Optional[List[PositiveInt]] = None,
) -> None:
"""Assign parameters.
Args:
n_rows (int): Number of rows used, represents the number of different rho
values
n_cols (int): Number of columns used, represents the number of different
phi values
            boundary_rho (List[float], optional): List with two values f1 and f2. The sampling goes from 0+f1 to 1-f2.
boundary_phi (Union[Literal["periodic-symmetric", "periodic-left"], List[confloat(ge=0.0, lt=1)]], optional): Boundary conditions for the probing
can either be periodic or non-periodic, if they are periodic, the distance
from one column to the next must be the same also for the boundaries.
Else, no conditions for the boundaries are required. Options are:
- 'periodic-symmetric': the first and the last column are placed with an offset to the
borders, that is half of the spacing of the two columns
- 'periodic-left': the first column is at the border of the bottom of the image, while
the last column is one spacing apart from the top of the image
                - list with two values: in this case an offset of value f1 and f2 is set on both ends, i.e.
                  the sampling no longer goes from 0 to 1 ('no-offset') but instead from 0+f1 to 1-f2
                Defaults to "periodic-left".
            image_shape (list, optional): list containing the desired image dimensions. If provided, the function will raise
                an error if interpolation happens, i.e. if a kernel would be placed in between two pixels. Defaults to None.
"""
super().__init__(
n_rows=n_rows,
n_cols=n_cols,
boundary_rho=boundary_rho,
boundary_phi=boundary_phi,
image_shape=image_shape,
)
def generate_schema(self) -> Tuple[np.ndarray, np.ndarray]:
"""Generate rhos and phis.
Return:
Tuple[np.ndarray, np.ndarray]: the rhos and phis.
"""
rho = np.linspace(
0 + self.params.boundary_rho[0], 1 - self.params.boundary_rho[1], self.params.n_rows, endpoint=True
)
if self.params.boundary_phi == "periodic-symmetric":
phi = np.linspace(0, 1, self.params.n_cols, endpoint=False)
phi = phi + (phi[1] - phi[0]) / 2
if self.params.boundary_phi == "periodic-left":
phi = np.linspace(0, 1, self.params.n_cols, endpoint=False)
if isinstance(self.params.boundary_phi, List):
phi = np.linspace(
0 + self.params.boundary_phi[0], 1 - self.params.boundary_phi[1], self.params.n_cols, endpoint=True
)
phis, rhos = np.meshgrid(phi, rho)
rhos = rhos.flatten()
ph<fim_suffix>is = phis.flatten()
# if image_shape provided: verify that values lie on pixel values
if self.params.image_shape is not None:
rhos_pixel_values = rhos * self.params.image_shape[0]
phis_pixel_values = phis * self.params.image_shape[1]
rho_pixel_values = np.logical_or(
np.less_equal(rhos_pixel_values % 1, 10 ** (-10)),
np.less_equal(1 - 10 ** (-10), rhos_pixel_values % 1),
).all()
phi_pixel_values = np.logical_or(
np.less_equal(phis_pixel_values % 1, 10 ** (-10)),
np.less_equal(1 - 10 ** (-10), phis_pixel_values % 1),
).all()
if not rho_pixel_values:
raise ProbeSchemaError(
f"Choice for n_rows {self.params.n_rows} leads to interpolation errors, please change input variables"
)
if not phi_pixel_values:
raise ProbeSchemaError(f"Choice for n_cols {self.params.n_cols} leads to interpolation errors")
return rhos, phis
@staticmethod
def find_suitable_n_rows(
row_min: int,
row_max: int,
length: int,
boundary_condition: Union[
Literal["periodic-symmetric", "periodic-left"],
List[float],
] = "periodic_symmetric",
) -> List[int]:
"""Find proper spacing of rows/columns for given boundary conditions (i.e. image size, offset. etc).
Args:
row_min (int): Starting value for row count
row_max (int): End value for row count
length (int): Pixels in the respective dimension
boundary_condition (Union[Literal["periodic-symmetric", "periodic-left"], List[float]], optional): Boundary conditions for the probing
can either be periodic or non-periodic, if they are periodic, the distance
from one row to the next must be the same also for the boundaries.
Else, no conditions for the boundaries are required. Options are:
- 'periodic-symmetric': the first and the last row are placed with an offset to the
borders, that is half of the spacing of the two rows
- 'periodic-left': the first row is at the border of the bottom of the image, while
the last row is one spacing apart from the top of the image
                - list with two values: in this case an offset of value f1 and f2 is set on both ends, i.e.
                  the sampling no longer goes from 0 to 1 ('no-offset') but instead from 0+f1 to 1-f2
                Defaults to "periodic-symmetric".
Returns:
            list: List of all numbers of rows that do not lead to interpolation errors
"""
suitable_values: List[int] = []
# loop through all values and validate whether they are suitable
for counter in range(row_min, row_max + 1):
if boundary_condition == "periodic-symmetric":
values = np.linspace(0, 1, counter, endpoint=False)
values = values + (values[1] - values[0]) / 2
if boundary_condition == "periodic-left":
values = np.linspace(0, 1, counter, endpoint=False)
if isinstance(boundary_condition, List):
values = np.linspace(0 + boundary_condition[0], 1 - boundary_condition[1], counter, endpoint=True)
pixel_values = values * length
pixel_values_modulo = pixel_values % 1
no_interpolation = np.less_equal(pixel_values_modulo, 10 ** (-10))
no_interpolation = np.logical_or(no_interpolation, np.less_equal(1 - 10 ** (-10), pixel_values_modulo))
no_interpolation = no_interpolation.all()
if no_interpolation:
suitable_values.append(counter)
return suitable_values
<fim_middle> | null | STATEMENT | complete_current_header_empty_completion |
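Note: the interpolation check in find_suitable_n_rows reduces to testing whether each sampled position, scaled by the image length, lands on an integer pixel boundary. A minimal sketch for the 'periodic-left' case (function name and values are illustrative, not part of the library):

import numpy as np

def aligned_row_counts(row_min, row_max, length):
    # Keep only row counts whose 'periodic-left' sampling positions land
    # exactly on pixel boundaries of an image axis with `length` pixels.
    suitable = []
    for n in range(row_min, row_max + 1):
        positions = np.linspace(0, 1, n, endpoint=False) * length
        frac = positions % 1
        if np.all((frac <= 1e-10) | (frac >= 1 - 1e-10)):
            suitable.append(n)
    return suitable

print(aligned_row_counts(2, 16, 16))  # [2, 4, 8, 16]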
<filename>open-iris/src/iris/nodes/eye_properties_estimation/occlusion_calculator.py<fim_prefix>from typing import List, Tuple
import numpy as np
from pydantic import Field
from iris.callbacks.callback_interface import Callback
from iris.io.class_configs import Algorithm
from iris.io.dataclasses import EyeCenters, EyeOcclusion, EyeOrientation, GeometryPolygons, NoiseMask
from iris.utils import common, math
class OcclusionCalculator(Algorithm):
"""Calculate the eye occlusion value.
This algorithm computes the fraction of visible iris in an image based on extrapolated polygons and the various noise masks.
    For an occlusion of 0, the iris is completely occluded. For an occlusion of 1, the iris is completely visible.
    For historical reasons, this is still called "Occlusion", while it more precisely refers to the "Opening" of the eye.
The parameter `quantile_angle` refers to the zone of the iris to consider for the occlusion computation.
This is because the middle horizontal third of the iris is usually more useful, since less likely to be occluded by the eyelids.
For a `quantile_angle` of 90º, the entire iris will be considered.
For a `quantile_angle` of 30º, the horizontal middle third of the iris will be considered.
For a `quantile_angle` of 0º, nothing will be considered (limit value).
"""
class Parameters(Algorithm.Parameters):
"""Default OcclusionCalculator parameters."""
quantile_angle: float = Field(..., ge=0.0, le=90.0)
__parameters_type__ = Parameters
def __init__(self, quantile_angle: float, callbacks: List[Callback] = []) -> None:
"""Assign parameters.
Args:
            quantile_angle (float): Quantile angle, in degrees, delimiting the area in which the visible fraction value is calculated.
callbacks (List[Callback]): callbacks list. Defaults to [].
"""
super().__init__(quantile_angle=quantile_angle, callbacks=callbacks)
def run(
self,
extrapolated_polygons: GeometryPolygons,
noise_mask: NoiseMask,
eye_orientation: EyeOrientation,
eye_centers: EyeCenters,
) -> EyeOcclusion:
"""Compute the iris visible fraction.
Args:
extrapolated_polygons (GeometryPolygons): Extrapolated polygons contours.
noise_mask (NoiseMask): Noise mask.
eye_orientation (EyeOrientation): Eye orientation angle.
eye_centers (EyeCenters): Eye centers.
Returns:
EyeOcclusion: Visible iris fraction.
"""
if self.params.quantile_angle == 0.0:
return EyeOcclusion(visible_fraction=0.0)
xs2mask, ys2mask = self._get_quantile_points(extrapolated_polygons.iris_array, eye_orientation, eye_centers)
img_h, img_w = noise_mask.mask.shape
iris_mask_quantile = common.contour_to_mask(np.column_stack([xs2mask, ys2mask]), mask_shape=(img_w, img_h))
pupil_mask = common.contour_to_mask(extrapolated_polygons.pupil_array, mask_shape=(img_<fim_suffix>w, img_h))
eyeball_mask = common.contour_to_mask(extrapolated_polygons.eyeball_array, mask_shape=(img_w, img_h))
visible_iris_mask = iris_mask_quantile & ~pupil_mask & eyeball_mask & ~noise_mask.mask
extrapolated_iris_mask = iris_mask_quantile & ~pupil_mask
if extrapolated_iris_mask.sum() == 0:
return EyeOcclusion(visible_fraction=0.0)
visible_fraction = visible_iris_mask.sum() / extrapolated_iris_mask.sum()
return EyeOcclusion(visible_fraction=visible_fraction)
def _get_quantile_points(
self, iris_coords: np.ndarray, eye_orientation: EyeOrientation, eye_centers: EyeCenters
) -> Tuple[np.ndarray, np.ndarray]:
"""Get those iris's points which fall into a specified quantile.
Args:
iris_coords (np.ndarray): Iris polygon coordinates.
            eye_orientation (EyeOrientation): Eye orientation.
            eye_centers (EyeCenters): Eye centers.
Returns:
Tuple[np.ndarray, np.ndarray]: Tuple with xs and ys that falls into quantile region.
"""
orientation_angle = np.degrees(eye_orientation.angle)
num_rotations = -round(orientation_angle * len(iris_coords) / 360.0)
iris_xs, iris_ys = iris_coords[:, 0], iris_coords[:, 1]
iris_rhos, iris_phis = math.cartesian2polar(iris_xs, iris_ys, eye_centers.iris_x, eye_centers.iris_y)
iris_phis = np.roll(iris_phis, num_rotations, axis=0)
iris_rhos = np.roll(iris_rhos, num_rotations, axis=0)
scaled_quantile = round(self.params.quantile_angle * len(iris_coords) / 360.0)
phis2mask = np.concatenate(
[
iris_phis[:scaled_quantile],
iris_phis[-scaled_quantile:],
iris_phis[len(iris_phis) // 2 : len(iris_phis) // 2 + scaled_quantile],
iris_phis[len(iris_phis) // 2 - scaled_quantile : len(iris_phis) // 2],
]
)
rhos2mask = np.concatenate(
[
iris_rhos[:scaled_quantile],
iris_rhos[-scaled_quantile:],
iris_rhos[len(iris_rhos) // 2 : len(iris_rhos) // 2 + scaled_quantile],
iris_rhos[len(iris_rhos) // 2 - scaled_quantile : len(iris_rhos) // 2],
]
)
phis2mask, rhos2mask = zip(*sorted(zip(phis2mask, rhos2mask)))
xs2mask, ys2mask = math.polar2cartesian(rhos2mask, phis2mask, eye_centers.iris_x, eye_centers.iris_y)
return xs2mask, ys2mask
<fim_middle> | null | STATEMENT | complete_current_header_empty_completion |
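Note: the visible fraction computed in OcclusionCalculator.run is the ratio of two boolean-mask areas. A toy sketch with hypothetical 4x4 masks (not library data):

import numpy as np

# Top half of the iris is covered by noise, so the visible fraction is 0.5.
iris = np.ones((4, 4), dtype=bool)
pupil = np.zeros((4, 4), dtype=bool)
eyeball = np.ones((4, 4), dtype=bool)
noise = np.zeros((4, 4), dtype=bool)
noise[:2] = True

visible = iris & ~pupil & eyeball & ~noise
extrapolated = iris & ~pupil
print(visible.sum() / extrapolated.sum())  # 0.5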
<filename>open-iris/src/iris/nodes/normalization/nonlinear_normalization.py<fim_prefix>from typing import Collection, Tuple
import numpy as np
from pydantic import Field
from iris.io.class_configs import Algorithm
from iris.io.dataclasses import EyeOrientation, GeometryPolygons, IRImage, NoiseMask, NormalizedIris
from iris.io.errors import NormalizationError
from iris.nodes.normalization.common import (
correct_orientation,
generate_iris_mask,
getgrids,
interpolate_pixel_intensity,
)
from iris.utils import math
class NonlinearNormalization(Algorithm):
"""Implementation of a normalization algorithm which uses nonlinear squared transformation to map image pixels.
Algorithm steps:
1) Create nonlinear grids of sampling radii based on parameters: res_in_r, intermediate_radiuses.
2) Compute the mapping between the normalized image pixel location and the original image location.
3) Obtain pixel values of normalized image using bilinear interpolation.
"""
class Parameters(Algorithm.Parameters):
"""Parameters class for NonlinearNormalization."""
res_in_r: int = Field(..., gt=0)
intermediate_radiuses: Collection[float]
oversat_threshold: int = Field(..., gt=0)
__parameters_type__ = Parameters
def __init__(self, res_in_r: int = 128, oversat_threshold: int = 254) -> None:
"""Assign parameters.
Args:
res_in_r (int): Normalized image r resolution. Defaults to 128.
            oversat_threshold (int, optional): threshold for masking over-saturated pixels. Defaults to 254.
"""
intermediate_radiuses = np.array([getgrids(max(0, res_in_r), p2i_ratio) for p2i_ratio in range(100)])
super().__init__(
res_in_r=res_in_r,
intermediate_radiuses=intermediate_radiuses,
oversat_threshold=oversat_threshold,
)
def run(
self,
image: IRImage,
noise_mask: NoiseMask,
extrapolated_contours: GeometryPolygons,
eye_orientation: EyeOrientation,
) -> NormalizedIris:
"""Normalize iris using nonlinear transformation when sampling points from cartisian to polar coordinates.
Args:
image (IRImage): Input image to normalize.
noise_mask (NoiseMask): Noise mask.
extrapolated_contours (GeometryPolygons): Extrapolated contours.
eye_orientation (EyeOrientation): Eye orientation angle.
Returns:
NormalizedIris: NormalizedIris object containing normalized image and iris mask.
"""
if len(extrapolated_contours.pupil_array) != len(extrapolated_contours.iris_array):
raise NormalizationError("Extrapolated amount of iris and pupil points must be the same.")
pupil_points, iris_points = correct_orientation(
extrapolated_contours.pupil_array,
extrapolated_contours.iris_array,
eye_orientation.angle,
)
iris_mask = generate_iris_mask(extrapolated_contours, noise_mask.mask)
iris_mask[image.img_data >= self.params.oversat_threshold] = False
src_points = self._generate_correspondences(pupil_points, iris_points)
normalized_image, normalized_mask = self._normalize_all(
original_image=image.img_data, iris_mask=iris_mask, src_points=src_points
)
normalized_iris = NormalizedIris(
normalized_image=normalized_image,
normalized_mask=normalized_mask,
)
return normalized_iris
def _generate_correspondences(self, pupil_points: np.ndarray, iris_points: np.ndarray) -> np.ndarray:
"""Generate corresponding positions in original image.
Args:
pupil_points (np.ndarray): Pupil bounding points. NumPy array of shape (num_points x 2).
iris_points (np.ndarray): Iris bounding points. NumPy array of shape (num_points x 2).
Returns:
np.ndarray: generated corresponding points.
"""
pupil_diameter = math.estimate_diameter(pupil_points)
iris_diameter = math.estimate_diameter(iris_points)
p2i_ratio = pupil_diameter / iris_diameter
if p2i_ratio <= 0 or p2i_ratio >= 1:
raise NormalizationError(f"Invalid pupil to iris ratio, not in the range (0,1): {p2i_ratio}.")
src_points = np.array(
[
pupil_points + x * (iris_points - pupil_points)
for x in self.params.intermediate_radiuses[round(100 * (p2i_ratio))]
]
)
return src_points
def _normalize_all(
self,
original_image: np.ndarray,
iris_mask: np.ndarray,
src_points: np.ndarray,
) -> Tuple[np.ndarray, np.ndarray]:
"""Normalize all points of an image using bilinear.
Args:
original_image (np.ndarray): Entire input image to normalize.
iris_mask (np.ndarray): Iris class segmentation mask.
src_points (np.ndarray): original input image points.
Returns:
Tuple[np.ndarray, np.ndarray]: Tuple with normalized image and mask.
"""
s<fim_suffix>rc_shape = src_points.shape[0:2]
src_points = np.vstack(src_points)
normalized_image = np.array(
[interpolate_pixel_intensity(original_image, pixel_coords=image_xy) for image_xy in src_points]
)
normalized_image = np.reshape(normalized_image, src_shape)
normalized_mask = np.array(
[self.get_pixel_binary(iris_mask, image_xy[0], image_xy[1]) for image_xy in src_points]
)
normalized_mask = np.reshape(normalized_mask, src_shape)
return normalized_image / 255.0, normalized_mask
@staticmethod
def get_pixel_binary(image: np.ndarray, pixel_x: float, pixel_y: float) -> bool:
"""Get the boolean value of a pixel from a binary image.
Args:
image (np.ndarray): Binary image.
pixel_x (float): Pixel x coordinate.
pixel_y (float): Pixel y coordinate.
Returns:
bool: Pixel value.
"""
try:
return image[int(pixel_y), int(pixel_x)]
except IndexError:
return False
<fim_middle> | null | STATEMENT | complete_current_header_empty_completion |
<filename>open-iris/src/iris/utils/common.py<fim_prefix>from typing import Tuple
import cv2
import numpy as np
def contour_to_mask(vertices: np.ndarray, mask_shape: Tuple[int, int]) -> np.ndarray:
"""Generate binary mask based on polygon's vertices.
Args:
vertices (np.ndarray): Vertices points array.
        mask_shape (Tuple[int, int]): Tuple with output mask dimension (width, height).
Returns:
np.ndarray: Binary mask.
"""
width<fim_suffix>, height = mask_shape
mask = np.zeros(shape=(height, width, 3))
vertices = np.round(vertices).astype(np.int32)
cv2.fillPoly(mask, pts=[vertices], color=(255, 0, 0))
mask = mask[..., 0]
mask = mask.astype(bool)
return mask
<fim_middle> | null | STATEMENT | complete_current_header_empty_completion |
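Note: contour_to_mask above rasterizes polygon vertices with cv2.fillPoly and keeps one channel as a boolean mask. A minimal sketch with a hypothetical square polygon:

import cv2
import numpy as np

# A hypothetical 4-vertex square, rasterized into a 10x10 boolean mask.
vertices = np.array([[2, 2], [7, 2], [7, 7], [2, 7]], dtype=np.int32)
mask = np.zeros(shape=(10, 10, 3))
cv2.fillPoly(mask, pts=[vertices], color=(255, 0, 0))
mask = mask[..., 0].astype(bool)
print(mask.sum())  # number of pixels covered by the filled square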
<filename>open-iris/src/iris/nodes/eye_properties_estimation/bisectors_method.py<fim_prefix>from typing import Tuple
import numpy as np
from pydantic import Field
from iris.io.class_configs import Algorithm
from iris.io.dataclasses import EyeCenters, GeometryPolygons
from iris.io.errors import EyeCentersEstimationError
class BisectorsMethod(Algorithm):
"""Implementation of eye's center estimation algorithm using bisectors method for finding a circle center.
This algorithm samples a given number of bisectors from the pupil and iris polygons, and averages their intersection
to produce the polygon center. This method is robust against noise in the polygons, making it a good choice for
non-perfect shapes. It is also robust to polygons missing parts of the circle arc, making it a good choice for
partially-occluded shapes.
LIMITATIONS:
The iris and pupil can be approximated to circles, when the user is properly gazing at the camera.
This requires that the cases of off-gaze have already been filtered out.
"""
class Parameters(Algorithm.Parameters):
"""Default Parameters for BisectorsMethod algorithm."""
num_bisectors: int = Field(..., gt=0)
min_distance_between_sector_points: float = Field(..., gt=0.0, lt=1.0)
max_iterations: int = Field(..., gt=0)
__parameters_type__ = Parameters
def __init__(
self,
num_bisectors: int = 100,
min_distance_between_sector_points: float = 0.75,
max_iterations: int = 50,
) -> None:
"""Assign parameters.
Args:
            num_bisectors (int, optional): Number of bisectors. Defaults to 100.
min_distance_between_sector_points (float, optional): Minimum distance between sectors expressed as a fractional value of a circular shape diameter. Defaults to 0.75.
            max_iterations (int, optional): Max iterations for bisector search. Defaults to 50.
"""
super().__init__(
num_bisectors=num_bisectors,
min_distance_between_sector_points=min_distance_between_sector_points,
max_iterations=max_iterations,
)
def run(self, geometries: GeometryPolygons) -> EyeCenters:
"""Estimate eye's iris and pupil centers.
Args:
geometries (GeometryPolygons): Geometry polygons.
Returns:
EyeCenters: Eye's centers object.
"""
pupil_center_x, pupil_center_y = self._find_center_coords(geometries.pupil_array, geometries.pupil_diameter)
iris_center_x, iris_center_y = self._find_center_coords(geometries.iris_array, geometries.iris_diameter)
return EyeCenters(pupil_x=pupil_center_x, pupil_y=pupil_center_y, iris_x=iris_center_x, iris_y=iris_center_y)
def _find_center_coords(self, polygon: np.ndarray, diameter: float) -> Tuple[float, float]:
"""Find center coordinates of a polygon.
Args:
            polygon (np.ndarray): Polygon points array.
diameter (float): diameter of the polygon.
Returns:
Tuple[float, float]: Tuple with the center location coordinates (x, y).
"""
min_distance_between_sector_points_in_px = self.params.min_distance_between_sector_points * diameter
first_bisectors_point, second_bisectors_point = self._calculate_perpendicular_bisectors(
polygon, min_distance_between_sector_points_in_px
)
return self._find_best_intersection(first_bisectors_point, second_bisectors_point)
def _calculate_perpendicular_bisectors(
self, polygon: np.ndarray, min_distance_between_sector_points_in_px: float
) -> Tuple[np.ndarray, np.ndarray]:
"""Calculate the perpendicular bisector of self.params.num_bisectors randomly chosen points from a polygon's vertices.
A pair of points is used if their distance is larger then min_distance_between_sector_points_in_px.
Args:
            polygon (np.ndarray): Polygon points based on which we are searching the center of a circular shape.
min_distance_between_sector_points_in_px (float): Minimum distance between sector points.
Raises:
EyeCentersEstimationError: Raised if not able to find enough random pairs of points on the arc with a large enough distance!
Returns:
Tuple[np.ndarray, np.ndarray]: Calculated perpendicular bisectors.
"""
np.random.seed(142857)
bisectors_first_points = np.empty([0, 2])
bisectors_second_points = np.empty([0, 2])
for _ in range(self.params.max_iterations):
random_indices = np.random.choice(len(polygon), size=(self.params.num_bisectors, 2))
first_drawn_points = polygon[random_indices[:, 0]]
second_drawn_points = polygon[random_indices[:, 1]]
norms = np.linalg.norm(first_drawn_points - second_drawn_points, axis=1)
mask = norms > min_dist<fim_suffix>ance_between_sector_points_in_px
bisectors_first_points = np.vstack([bisectors_first_points, first_drawn_points[mask]])
bisectors_second_points = np.vstack([bisectors_second_points, second_drawn_points[mask]])
if len(bisectors_first_points) >= self.params.num_bisectors:
break
else:
raise EyeCentersEstimationError(
"Not able to find enough random pairs of points on the arc with a large enough distance!"
)
bisectors_first_points = bisectors_first_points[: self.params.num_bisectors]
bisectors_second_points = bisectors_second_points[: self.params.num_bisectors]
bisectors_center = (bisectors_first_points + bisectors_second_points) / 2
        # Flip xs with ys and flip sign of one of them to create a 90deg rotation
inv_bisectors_center_slope = np.fliplr(bisectors_second_points - bisectors_first_points)
inv_bisectors_center_slope[:, 1] = -inv_bisectors_center_slope[:, 1]
# Add perpendicular vector to center and normalize
norm = np.linalg.norm(inv_bisectors_center_slope, axis=1)
inv_bisectors_center_slope[:, 0] /= norm
inv_bisectors_center_slope[:, 1] /= norm
first_bisectors_point = bisectors_center - inv_bisectors_center_slope
second_bisectors_point = bisectors_center + inv_bisectors_center_slope
return first_bisectors_point, second_bisectors_point
def _find_best_intersection(self, fst_points: np.ndarray, sec_points: np.ndarray) -> Tuple[float, float]:
"""fst_points and sec_points are NxD arrays defining N lines. D is the dimension of the space.
This function returns the least squares intersection of the N lines from the system given by eq. 13 in
        http://cal.cs.illinois.edu/~johannes/research/LS_line_intersect.pdf.
Args:
fst_points (np.ndarray): First bisectors points.
sec_points (np.ndarray): Second bisectors points.
Returns:
Tuple[float, float]: Best intersection point.
Reference:
            [1] http://cal.cs.illinois.edu/~johannes/research/LS_line_intersect.pdf
"""
norm_bisectors = (sec_points - fst_points) / np.linalg.norm(sec_points - fst_points, axis=1)[:, np.newaxis]
# Generate the array of all projectors I - n*n.T
projections = np.eye(norm_bisectors.shape[1]) - norm_bisectors[:, :, np.newaxis] * norm_bisectors[:, np.newaxis]
# Generate R matrix and q vector
R = projections.sum(axis=0)
q = (projections @ fst_points[:, :, np.newaxis]).sum(axis=0)
# Solve the least squares problem for the intersection point p: Rp = q
p = np.linalg.lstsq(R, q, rcond=None)[0]
intersection_x, intersection_y = p
return intersection_x.item(), intersection_y.item()
<fim_middle> | null | STATEMENT | complete_current_header_empty_completion |
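Note: the least-squares step in _find_best_intersection solves R p = q with R = sum(I - n n^T) and q = sum((I - n n^T) p_i), following eq. 13 of the referenced paper. A minimal two-line sketch (the lines y = x and y = 2 - x, which intersect at (1, 1); the values are illustrative):

import numpy as np

points = np.array([[0.0, 0.0], [0.0, 2.0]])       # one anchor point per line
directions = np.array([[1.0, 1.0], [1.0, -1.0]])  # line directions
directions /= np.linalg.norm(directions, axis=1)[:, np.newaxis]

# Projector (I - n n^T) per line; summing them builds R, and projecting each
# anchor point builds q, so solving R p = q yields the point that minimizes
# the total squared distance to all lines.
projections = np.eye(2) - directions[:, :, np.newaxis] * directions[:, np.newaxis]
R = projections.sum(axis=0)
q = (projections @ points[:, :, np.newaxis]).sum(axis=0)
p = np.linalg.lstsq(R, q, rcond=None)[0]
print(p.ravel())  # [1. 1.]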
<filename>open-iris/src/iris/nodes/iris_response/probe_schemas/regular_probe_schema.py<fim_prefix>from typing import List, Literal, Optional, Tuple, Union
import numpy as np
from pydantic import Field, PositiveInt, confloat, fields, validator
from iris.io.errors import ProbeSchemaError
from iris.nodes.iris_response.probe_schemas.probe_schema_interface import ProbeSchema
class RegularProbeSchema(ProbeSchema):
"""Probe Schema for a regular Grid."""
class RegularProbeSchemaParameters(ProbeSchema.ProbeSchemaParameters):
"""RegularProbeSchema parameters."""
n_rows: int = Field(..., gt=1)
n_cols: int = Field(..., gt=1)
boundary_rho: List[confloat(ge=0.0, lt=1)]
boundary_phi: Union[
Literal["periodic-symmetric", "periodic-left"],
List[confloat(ge=0.0, lt=1)],
]
image_shape: Optional[List[PositiveInt]]
@validator("boundary_rho", "boundary_phi")
def check_overlap(
cls: type,
v: Union[Literal["periodic-symmetric", "periodic-left"], List[confloat(ge=0.0, lt=1)]],
field: fields.ModelField,
) -> Union[Literal["periodic-symmetric", "periodic-left"], List[confloat(ge=0.0, lt=1)]]:
"""Validate offsets to avoid overlap.
Args:
cls (type): Class type.
v (Union[Literal["periodic-symmetric", "periodic-left"], List[confloat(ge=0.0, lt=1)]]): Value to check.
field (fields.ModelField): Field descriptor.
Raises:
            ProbeSchemaError: Raised if the two offsets together are too large (their sum must stay below 1).
Returns:
Union[Literal["periodic-symmetric", "periodic-left"], List[confloat(ge=0.0, lt=1)]]: The value for boundary_rho or boundary_phi respectively
"""
if isinstance(v, List):
if (v[0] + v[1]) >= 1:
raise ProbeSchemaError(
f"Offset for {field.name} on left and right corner must be a sum smaller 1, otherwise, offsets overlap."
)
return v
__parameters_type__ = RegularProbeSchemaParameters
def __init__(
self,
n_rows: int,
n_cols: int,
boundary_rho: List[float] = [0, 0.0625],
boundary_phi: Union[
Literal["periodic-symmetric", "periodic-left"], List[confloat(ge=0.0, lt=1)]
] = "periodic-left",
image_shape: Optional[List[PositiveInt]] = None,
) -> None:
"""Assign parameters.
Args:
n_rows (int): Number of rows used, represents the number of different rho
values
n_cols (int): Number of columns used, represents the number of different
phi values
            boundary_rho (List[float], optional): List with two values f1 and f2. The sampling goes from 0+f1 to 1-f2.
boundary_phi (Union[Literal["periodic-symmetric", "periodic-left"], List[confloat(ge=0.0, lt=1)]], optional): Boundary conditions for the probing
can either be periodic or non-periodic, if they are periodic, the distance
from one column to the next must be the same also for the boundaries.
Else, no conditions for the boundaries are required. Options are:
- 'periodic-symmetric': the first and the last column are placed with an offset to the
borders, that is half of the spacing of the two columns
- 'periodic-left': the first column is at the border of the bottom of the image, while
the last column is one spacing apart from the top of the image
                - list with two values: in this case an offset of value f1 and f2 is set on both ends, i.e.
                  the sampling no longer goes from 0 to 1 ('no-offset') but instead from 0+f1 to 1-f2
                Defaults to "periodic-left".
            image_shape (list, optional): list containing the desired image dimensions. If provided, the function will raise
                an error if interpolation happens, i.e. if a kernel would be placed in between two pixels. Defaults to None.
"""
super().__init__(
n_rows=n_rows,
n_cols=n_cols,
boundary_rho=boundary_rho,
boundary_phi=boundary_phi,
image_shape=image_shape,
)
def generate_schema(self) -> Tuple[np.ndarray, np.ndarray]:
"""Generate rhos and phis.
Return:
Tuple[np.ndarray, np.ndarray]: the rhos and phis.
"""
rho = np.linspace(
0 + self.params.boundary_rho[0], 1 - self.params.boundary_rho[1], self.params.n_rows, endpoint=True
)
if self.params.boundary_phi == "periodic-symmetric":
phi = np.linspace(0, 1, se<fim_suffix>lf.params.n_cols, endpoint=False)
phi = phi + (phi[1] - phi[0]) / 2
if self.params.boundary_phi == "periodic-left":
phi = np.linspace(0, 1, self.params.n_cols, endpoint=False)
if isinstance(self.params.boundary_phi, List):
phi = np.linspace(
0 + self.params.boundary_phi[0], 1 - self.params.boundary_phi[1], self.params.n_cols, endpoint=True
)
phis, rhos = np.meshgrid(phi, rho)
rhos = rhos.flatten()
phis = phis.flatten()
# if image_shape provided: verify that values lie on pixel values
if self.params.image_shape is not None:
rhos_pixel_values = rhos * self.params.image_shape[0]
phis_pixel_values = phis * self.params.image_shape[1]
rho_pixel_values = np.logical_or(
np.less_equal(rhos_pixel_values % 1, 10 ** (-10)),
np.less_equal(1 - 10 ** (-10), rhos_pixel_values % 1),
).all()
phi_pixel_values = np.logical_or(
np.less_equal(phis_pixel_values % 1, 10 ** (-10)),
np.less_equal(1 - 10 ** (-10), phis_pixel_values % 1),
).all()
if not rho_pixel_values:
raise ProbeSchemaError(
f"Choice for n_rows {self.params.n_rows} leads to interpolation errors, please change input variables"
)
if not phi_pixel_values:
raise ProbeSchemaError(f"Choice for n_cols {self.params.n_cols} leads to interpolation errors")
return rhos, phis
@staticmethod
def find_suitable_n_rows(
row_min: int,
row_max: int,
length: int,
boundary_condition: Union[
Literal["periodic-symmetric", "periodic-left"],
List[float],
] = "periodic_symmetric",
) -> List[int]:
"""Find proper spacing of rows/columns for given boundary conditions (i.e. image size, offset. etc).
Args:
row_min (int): Starting value for row count
row_max (int): End value for row count
length (int): Pixels in the respective dimension
boundary_condition (Union[Literal["periodic-symmetric", "periodic-left"], List[float]], optional): Boundary conditions for the probing
can either be periodic or non-periodic, if they are periodic, the distance
from one row to the next must be the same also for the boundaries.
Else, no conditions for the boundaries are required. Options are:
- 'periodic-symmetric': the first and the last row are placed with an offset to the
borders, that is half of the spacing of the two rows
- 'periodic-left': the first row is at the border of the bottom of the image, while
the last row is one spacing apart from the top of the image
                - list with two values: in this case an offset of value f1 and f2 is set on both ends, i.e.
                  the sampling no longer goes from 0 to 1 ('no-offset') but instead from 0+f1 to 1-f2
                Defaults to "periodic-symmetric".
Returns:
            list: List of all numbers of rows that do not lead to interpolation errors
"""
suitable_values: List[int] = []
# loop through all values and validate whether they are suitable
for counter in range(row_min, row_max + 1):
if boundary_condition == "periodic-symmetric":
values = np.linspace(0, 1, counter, endpoint=False)
values = values + (values[1] - values[0]) / 2
if boundary_condition == "periodic-left":
values = np.linspace(0, 1, counter, endpoint=False)
if isinstance(boundary_condition, List):
values = np.linspace(0 + boundary_condition[0], 1 - boundary_condition[1], counter, endpoint=True)
pixel_values = values * length
pixel_values_modulo = pixel_values % 1
no_interpolation = np.less_equal(pixel_values_modulo, 10 ** (-10))
no_interpolation = np.logical_or(no_interpolation, np.less_equal(1 - 10 ** (-10), pixel_values_modulo))
no_interpolation = no_interpolation.all()
if no_interpolation:
suitable_values.append(counter)
return suitable_values
<fim_middle> | null | STATEMENT | complete_current_header_empty_completion |
<filename>open-iris/src/iris/nodes/normalization/common.py<fim_prefix>from typing import Tuple
import numpy as np
from pydantic import NonNegativeInt
from iris.io.dataclasses import GeometryPolygons
from iris.utils import common
def generate_iris_mask(extrapolated_contours: GeometryPolygons, noise_mask: np.ndarray) -> np.ndarray:
"""Generate iris mask by first finding the intersection region between extrapolated iris contours and eyeball contours. Then remove from the outputted mask those pixels for which noise_mask is equal to True.
Args:
extrapolated_contours (GeometryPolygons): Iris polygon vertices.
noise_mask (np.ndarray): Noise mask.
Returns:
np.ndarray: Iris mask.
"""
img_h, img_w = noise_mask.shape[:2]
iris_mask = common.contour_to_mask(extrapolated_contours.iris_array, (img_w, img_h))
eyeball_mask = common.contour_to_mask(extrapolated_contours.eyeball_array, (img_w, img_h))
iris_mask = iris_mask & eyeball_mask
iris_mask = ~(iris_mask & noise_mask) & iris_mask
return iris_mask
def correct_orientation(
pupil_points: np.ndarray, iris_points: np.ndarray, eye_orientation: float
) -> Tuple[np.ndarray, np.ndarray]:
"""Correct orientation by changing the starting angle in pupil and iris points' arrays.
Args:
pupil_points (np.ndarray): Pupil boundary points' array. NumPy array of shape (num_points = 512, xy_coords = 2).
iris_points (np.ndarray): Iris boundary points' array. NumPy array of shape (num_points = 512, xy_coords = 2).
eye_orientation (float): Eye orientation angle in radians.
Returns:
Tuple[np.ndarray, np.ndarray]: Tuple with rotated based on eye_orientation angle boundary points (pupil_points, iris_points).
"""
orientation_angle = np.degrees(eye_orientation)
num_rotations = -round(orientation_angle * len(pupil_points) / 360.0)
pupil_points = np.roll(pupil_points, num_rotations, axis=0)
iris_points = np.roll(iris_points, num_rotations, axis=0)
return pupil_points, iris_points
def getgrids(res_in_r: NonNegativeInt, p2i_ratio: NonNegativeInt) -> np.ndarray:
"""Generate radius grids for nonlinear normalization based on p2i_ratio (pupil_to_iris ratio).
Args:
res_in_r (NonNegativeInt): Normalized image r resolution.
p2i_ratio (NonNegativeInt): pupil_to_iris ratio, range in [0,100]
Returns:
np.ndarray: nonlinear sampling grids for normalization
"""
p = [np.square(x) for x in np.arange(28, max(74 - p2i_ratio, p2i_ratio - 14), 1)]
q = p - p[0]
q = q / q[-1]
grids = np.interp(np.linspace(0, 1.0, res_in_r + 1), np.linspace(0, 1.0, len(q)), q)
return grids[0:-1] + np.diff(grids) / 2
def interpolate_pixel_intensity(image: np.ndarray, pixel_coords: Tuple[float, float]) -> float:
"""Perform bilinear interpolation to estimate pixel intensity in a given location.
Args:
image (np.ndarray): Original, not normalized image.
pixel_coords (Tuple[float, float]): Pixel coordinates.
Returns:
float: Interpolated pixel intensity.
Reference:
[1] https://en.wikipedia.org/wiki/Bilinear_interpolation
"""
def get_pixel_intensity(image: np.ndarray, pixel_x: float, pixel_y: float) -> float:
"""Get the intensity value of a pixel from an intensity image.
Args:
image (np.ndarray): Intensity image.
pixel_x (float): Pixel x coordinate.
pixel_y (float): Pixel y coordinate.
Returns:
float: Pixel value.
"""
try:
return image[int(pixel_y), int(pixel_x)]
except IndexError:
return 0.0
def get_interpolation_points_coords(
image: np.ndarray, pixel_x: float, pixel_y: float
) -> Tuple[float, float, float, float]:
"""Extract interpolation points coordinates.
Args:
image (np.ndarray): Original, not normalized image.
pixel_x (float): Pixel x coordinate.
pixel_y (float): Pixel y coordinate.
Returns:
Tuple[float, float, float, float]: Tuple with interpolation points coordinates in a format (xmin, ymin, xmax, ymax).
"""
xmin, ymin = np.floor(pixel_x), np.floor(pixel_y)
xmax, ymax = np.ceil(pixel_x), np.ceil(pixel_y)
img_h, img_w = image.shape[:2]
if xmin == xmax and not xmax == img_w - 1:
xmax += 1
if xmin == xmax and xmax == img_w - 1:
xmin -= 1
if ymin == ymax and not ymax == img_h - 1:
ymax += 1
if ymin == ymax and ymax == img_h - 1:
ymin -= 1
return xmin, ymin, xmax, <fim_suffix>ymax
pixel_x, pixel_y = pixel_coords
xmin, ymin, xmax, ymax = get_interpolation_points_coords(image, pixel_x=pixel_x, pixel_y=pixel_y)
lower_left_pixel_intensity = get_pixel_intensity(image, pixel_x=xmin, pixel_y=ymax)
lower_right_pixel_intensity = get_pixel_intensity(image, pixel_x=xmax, pixel_y=ymax)
upper_left_pixel_intensity = get_pixel_intensity(image, pixel_x=xmin, pixel_y=ymin)
upper_right_pixel_intensity = get_pixel_intensity(image, pixel_x=xmax, pixel_y=ymin)
xs_differences = np.array([xmax - pixel_x, pixel_x - xmin])
neighboring_pixel_intensities = np.array(
[
[lower_left_pixel_intensity, upper_left_pixel_intensity],
[lower_right_pixel_intensity, upper_right_pixel_intensity],
]
)
ys_differences = np.array([[pixel_y - ymin], [ymax - pixel_y]])
pixel_intensity = np.matmul(np.matmul(xs_differences, neighboring_pixel_intensities), ys_differences)
return pixel_intensity.item()
<fim_middle> | null | STATEMENT | complete_current_header_empty_completion |
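Note: interpolate_pixel_intensity above implements standard bilinear interpolation. As a sanity check, the value at the center of a 2x2 neighborhood should be the mean of the four corner intensities (hypothetical values below):

import numpy as np

image = np.array([[0.0, 10.0],
                  [20.0, 30.0]])
x, y = 0.5, 0.5
x0, y0, x1, y1 = 0, 0, 1, 1
# Interpolate along x on the top and bottom rows, then along y.
top = image[y0, x0] * (x1 - x) + image[y0, x1] * (x - x0)
bottom = image[y1, x0] * (x1 - x) + image[y1, x1] * (x - x0)
print(top * (y1 - y) + bottom * (y - y0))  # 15.0, the mean of the four corners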
<filename>open-iris/src/iris/nodes/iris_response/image_filters/gabor_filters.py<fim_prefix>from typing import Any, Dict, Tuple
import numpy as np
from pydantic import Field, conint, root_validator, validator
import iris.io.validators as pydantic_v
from iris.io.errors import ImageFilterError
from iris.nodes.iris_response.image_filters.image_filter_interface import ImageFilter
def upper_bound_Gabor_parameters(cls: type, values: Dict[str, Any]) -> Dict[str, Any]:
"""Check upper bounds of Gabor filter parameters such as sigma_phi, sigma_rho and lambda_phi for the given kernel_size.
Args:
cls (type): class type.
values (Dict[str, Any]): values to be checked.
Raises:
ImageFilterError: Raised if 1) sigma_phi is greater than kernel_size[0], 2) sigma_rho is greater than kernel_size[1], 3) lambda_phi greater than kernel_size[0].
Returns:
Dict[str, Any]: values of checked parameters.
"""
kernel_size, sigma_phi, sigma_rho, lambda_phi = (
values["kernel_size"],
values["sigma_phi"],
values["sigma_rho"],
values["lambda_phi"],
)
if sigma_phi >= kernel_size[0]:
raise ImageFilterError("Invalid parameters: sigma_phi can not be greater than kernel_size[0].")
if sigma_rho >= kernel_size[1]:
raise ImageFilterError("Invalid parameters: sigma_rho can not be greater than kernel_size[1].")
if lambda_phi >= kernel_size[0]:
raise ImageFilterError("Invalid parameters: lambda_phi can not be greater than kernel_size[0].")
return values
def upper_bound_LogGabor_parameters(cls: type, values: Dict[str, Any]) -> Dict[str, Any]:
"""Check upper bound of LogGabor filter parameter lambda_rho for the given kernel_size.
Args:
cls (type): class type.
values (Dict[str, Any]): values to be checked.
Raises:
        ImageFilterError: lambda_rho can not be greater than kernel_size[1].
Returns:
Dict[str, Any]: values of checked parameters.
"""
kernel_size, lambda_rho = values["kernel_size"], values["lambda_rho"]
if lambda_rho >= kernel_size[1]:
raise ImageFilterError("Invalid parameters: lambda_rho can not be greater than kernel_size[1].")
return values
def get_xy_mesh(kernel_size: Tuple[int, int]) -> Tuple[np.ndarray, np.ndarray]:
"""Get (x,y) meshgrids for a given kernel size.
Args:
kernel_size (Tuple[int, int]): Kernel width and height.
Returns:
Tuple[np.ndarray, np.ndarray]: meshgrid of (x, y) positions.
"""
ksize_phi_half = kernel_size[0] // 2
ksize_rho_half = kernel_size[1] // 2
y, x = np.meshgrid(
np.arange(-ksize_phi_half, ksize_phi_half + 1),
np.arange(-ksize_rho_half, ksize_rho_half + 1),
indexing="xy",
sparse=True,
)
return x, y
def get_radius(x: np.ndarray, y: np.ndarray) -> np.ndarray:
"""Get radius to the image center for a given array of relative positions (x,y).
Args:
x (np.ndarray): x position relative to the image center.
y (np.ndarray): y position relative to the image center.
Returns:
np.ndarray: radius to the image center.
"""
radius = np.sqrt(x**2 + y**2)
return radius
def rotate(x: np.ndarray, y: np.ndarray, angle: float) -> Tuple[np.ndarray, np.ndarray]:
"""Rotate a given array of relative positions (x,y) by a given angle.
Args:
x (np.ndarray): x position.
y (np.ndarray): y position.
angle (float): angle for rotation (in degrees).
Returns:
Tuple[np.ndarray, np.ndarray]: rotated x, y positions.
"""
cos_theta = np.cos(angle * np.pi / 180)
sin_theta = np.sin(angle * np.pi / 180)
rotx = x * cos_theta + y * sin_theta
roty = -x * sin_theta + y * cos_theta
return rotx, roty
def normalize_kernel_values(kernel_values: np.ndarray) -> np.ndarray:
"""Normalize the kernel values so that the square sum is 1.
Args:
kernel_values (np.ndarray): Kernel values (complex numbers).
Returns:
np.ndarray: normalized Kernel values.
"""
norm_real = np.linalg.norm(kernel_values.real, ord="fro")
if norm_real > 0:
kernel_values.real /= norm_real
norm_imag = np.linalg.norm(kernel_values.imag, ord="fro")
if norm_imag > 0:
kernel_values.imag /= norm_imag
return kernel_values
def convert_to_fixpoint_kernelvalues(kernel_values: np.ndarray) -> np.ndarray:
"""Convert the kernel values (both real and imaginary) to fix points.
Args:
kernel_values (np.ndarray): Kernel values.
Returns:
np.ndarray: fix-point Kernel values.
"""
if np.iscomplexobj(kernel_values):
kernel_values.real = np.round(kernel_values.real * 2**15)
kernel_values.imag = np.round(kernel_values.imag * 2**15)
else:
kernel_values = np.round(kernel_values * 2**15)
return kernel_values
class GaborFilter(ImageFilter):
"""Implementation of a 2D Gabor filter.
Reference:
[1] https://inc.ucsd.edu/mplab/75/media//gabor.pdf.
"""
class Parameters(ImageFilter.Parameters):
"""GaborFilter parameters."""
kernel_size: Tuple[conint(gt=3, lt=99), conint(gt=3, lt=99)]
sigma_phi: float = Field(..., ge=1)
sigma_rho: float = Field(..., ge=1)
theta_degrees: float = Field(..., ge=0, lt=360)
lambda_phi: float = Field(..., ge=2)
dc_correction: bool
to_fixpoints: bool
_upper_bound = root_validator(pre=True, allow_reuse=True)(upper_bound_Gabor_parameters)
_is_odd = validator("kernel_size", allow_reuse=True, each_item=True)(pydantic_v.is_odd)
__parameters_type__ = Parameters
def __init__(
self,
*,
kernel_size: Tuple[conint(gt=3, lt=99), conint(gt=3, lt=99)],
sigma_phi: float,
sigma_rho: float,
theta_degrees: float,
lambda_phi: float,
dc_correction: bool = True,
to_fixpoints: bool = False,
) -> None:
"""Assign parameters.
Args:
kernel_size (Tuple[conint(gt=3, lt=99), conint(gt=3, lt=99)]): Kernel width and height.
sigma_phi (float): phi standard deviation.
sigma_rho (float): rho standard deviation.
theta_degrees (float): orientation of kernel in degrees.
lambda_phi (float): wavelength of the sinusoidal factor, lower value = thinner strip.
dc_correction (bool, optional): whether to enable DC correction. Defaults to True.
to_fixpoints (bool, optional): whether to convert kernel values to fixpoints. Defaults to False.
"""
super().__init__(
kernel_size=kernel_size,
sigma_phi=sigma_phi,
sigma_rho=sigma_rho,
theta_degrees=theta_degrees,
lambda_phi=lambda_phi,
dc_correction=dc_correction,
to_fixpoints=to_fixpoints,
)
def compute_kernel_values(self) -> np.ndarray:
"""Compute 2D Gabor filter kernel values.
Returns:
np.ndarray: Kernel values.
"""
# convert to polar coordinates
x, y = get_xy_mesh(self.params.kernel_size)
rotx, roty = rotate(x, y, self.params.theta_degrees)
# calculate carrier and envelope
carrier = 1j * 2 * np.pi / self.params.lambda_phi * rotx
envelope = -(rotx**2 / self.params.sigma_phi**2 + roty**2 / self.params.sigma_rho**2) / 2
# calculate kernel values
kernel_values = np.exp(envelope + carrier)
kernel_values /= 2 * np.pi * self.params.sigma_phi * self.params.sigma_rho
# apply DC correction
if self.params.dc_correction:
# Step 1: calculate mean value of Gabor Wavelet
g_mean = np.mean(np.real(kernel_values), axis=-1)
# Step 2: define gaussian offset
correction_term_mean <fim_suffix>= np.mean(envelope, axis=-1)
            # Step 3: subtract gaussian
kernel_values = kernel_values - (g_mean / correction_term_mean)[:, np.newaxis] * envelope
# normalize kernel values
kernel_values = normalize_kernel_values(kernel_values)
if self.params.to_fixpoints:
kernel_values = convert_to_fixpoint_kernelvalues(kernel_values)
return kernel_values
class LogGaborFilter(ImageFilter):
"""Implementation of a 2D LogGabor filter.
Reference:
[1] https://en.wikipedia.org/wiki/Log_Gabor_filter.
"""
class Parameters(ImageFilter.Parameters):
"""LogGaborFilter parameters."""
kernel_size: Tuple[conint(gt=3, lt=99), conint(gt=3, lt=99)]
sigma_phi: float = Field(..., gt=0, le=np.pi)
sigma_rho: float = Field(..., gt=0.1, le=1)
theta_degrees: float = Field(..., ge=0, lt=360)
lambda_rho: float = Field(..., gt=2)
to_fixpoints: bool
_upper_bound = root_validator(pre=True, allow_reuse=True)(upper_bound_LogGabor_parameters)
_is_odd = validator("kernel_size", allow_reuse=True, each_item=True)(pydantic_v.is_odd)
__parameters_type__ = Parameters
def __init__(
self,
*,
kernel_size: Tuple[conint(gt=3, lt=99), conint(gt=3, lt=99)],
sigma_phi: float,
sigma_rho: float,
theta_degrees: float,
lambda_rho: float,
to_fixpoints: bool = False,
) -> None:
"""Assign parameters.
Args:
kernel_size (Tuple[conint(gt=3, lt=99), conint(gt=3, lt=99)]): Kernel width and height.
sigma_phi (float): bandwidth in phi (frequency domain).
sigma_rho (float): bandwidth in rho (frequency domain).
theta_degrees (float): orientation of filter in degrees.
lambda_rho (float): wavelength in rho.
to_fixpoints (bool, optional): whether to convert kernel values to fixpoints. Defaults to False.
"""
super().__init__(
kernel_size=kernel_size,
sigma_phi=sigma_phi,
sigma_rho=sigma_rho,
theta_degrees=theta_degrees,
lambda_rho=lambda_rho,
to_fixpoints=to_fixpoints,
)
def compute_kernel_values(self) -> np.ndarray:
"""Compute 2D LogGabor filter kernel values.
Returns:
np.ndarray: Kernel values.
"""
# convert to polar coordinates
x, y = get_xy_mesh(self.params.kernel_size)
radius = get_radius(x, y)
# remove 0 radius value in the center
ksize_phi_half = self.params.kernel_size[0] // 2
ksize_rho_half = self.params.kernel_size[1] // 2
radius[ksize_rho_half][ksize_phi_half] = 1
# get angular distance
[rotx, roty] = rotate(x, y, self.params.theta_degrees)
dtheta = np.arctan2(roty, rotx)
# calculate envelope and orientation
envelope = np.exp(
-0.5
* np.log2(radius * self.params.lambda_rho / self.params.kernel_size[1]) ** 2
/ self.params.sigma_rho**2
)
envelope[ksize_rho_half][ksize_phi_half] = 0
orientation = np.exp(-0.5 * dtheta**2 / self.params.sigma_phi**2)
# calculate kernel values
kernel_values = envelope * orientation
kernel_values = np.fft.fftshift(np.fft.ifft2(np.fft.ifftshift(kernel_values)))
# normalize kernel values
kernel_values = normalize_kernel_values(kernel_values)
if self.params.to_fixpoints:
kernel_values = convert_to_fixpoint_kernelvalues(kernel_values)
return kernel_values
<fim_middle> | null | STATEMENT | complete_current_header_empty_completion |
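Note: the Gabor kernel above is exp(envelope + carrier) evaluated on a rotated (x, y) mesh. A compact standalone sketch of the same construction, without the DC correction, normalization, or fixpoint steps (parameter values are illustrative):

import numpy as np

def gabor_kernel(ksize=(9, 9), sigma_phi=3.0, sigma_rho=3.0,
                 theta_degrees=0.0, lambda_phi=4.0):
    # Centered (x, y) mesh, rotated by theta.
    half_phi, half_rho = ksize[0] // 2, ksize[1] // 2
    y, x = np.meshgrid(np.arange(-half_phi, half_phi + 1),
                       np.arange(-half_rho, half_rho + 1), indexing="xy")
    theta = np.deg2rad(theta_degrees)
    rotx = x * np.cos(theta) + y * np.sin(theta)
    roty = -x * np.sin(theta) + y * np.cos(theta)
    # Complex sinusoidal carrier modulated by a Gaussian envelope.
    carrier = 1j * 2 * np.pi / lambda_phi * rotx
    envelope = -(rotx**2 / sigma_phi**2 + roty**2 / sigma_rho**2) / 2
    return np.exp(envelope + carrier)

kernel = gabor_kernel()
print(kernel.shape, np.iscomplexobj(kernel))  # (9, 9) True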
<filename>open-iris/src/iris/io/validators.py<fim_prefix>from typing import Any, Callable, Dict, Iterable, List
import numpy as np
from pydantic import fields
# ----- validators -----
def is_odd(cls: type, v: int, field: fields.ModelField) -> int:
"""Check that kernel size are odd numbers.
Args:
cls (type): Class type.
v (int): Value to check.
field (fields.ModelField): Field descriptor.
Raises:
ValueError: Exception raised if number isn't odd.
Returns:
int: `v` sent for further processing.
"""
if (v % 2) == 0:
raise ValueError(f"{cls.__name__}: {field.name} must be odd numbers.")
return v
def is_binary(cls: type, v: np.ndarray, field: fields.ModelField) -> np.ndarray:
"""Check if array has only boolean values, i.e. is binary.
Args:
cls (type): Class type.
v (np.ndarray): Value to check.
field (fields.ModelField): Field descriptor.
Raises:
ValueError: Exception raised if array doesn't contain bool datatypes.
Returns:
np.ndarray: `v` sent for further processing.
"""
if v.dtype != np.dtype("bool"):
raise ValueError(f"{cls.__name__}: {field.name} must be binary. got dtype {v.dtype}")
return v
def is_list_of_points(cls: type, v: np.ndarray, field: fields.ModelField) -> np.ndarray:
"""Check if np.ndarray has shape (_, 2).
Args:
cls (type): Class type.
v (np.ndarray): Value to check.
field (fields.ModelField): Field descriptor.
Raises:
ValueError: Exception raised if array doesn't contain 2D points.
Returns:
np.ndarray: `v` sent for further processing.
"""
if len(v.shape) != 2 or v.shape[1] != 2:
raise ValueError(f"{cls.__name__}: {field.name} must have shape (_, 2).")
return v
def is_not_empty(cls: type, v: List[Any], field: fields.ModelField) -> List[Any]:
"""Check that both inputs are not empty.
Args:
cls (type): Class type.
v (List[Any]): Value to check.
field (fields.ModelField): Field descriptor.
Raises:
ValueError: Exception raised if list is empty.
Returns:
List[Any]: `v` sent for further processing.
"""
if len(v) == 0:
raise ValueError(f"{cls.__name__}: {field.name} list cannot be empty.")
return v
def is_not_zero_sum(cls: type, v: Any, field: fields.ModelField) -> Any:
"""Check that both inputs are not empty.
Args:
cls (type): Class type.
v (Any): Value to check.
field (fields.ModelField): Field descriptor.
Raises:
ValueError: Raised if v doesn't sum to 0.
Returns:
Any: `v` sent for further processing.
"""
if np.sum(v) == 0:
raise ValueError(f"{cls.__name__}: {field.name} sum cannot be zero.")
return v
def are_all_positive(cls: type, v: Any, field: fields.ModelField) -> Any:
"""Check that all values are positive.
Args:
cls (type): Class type.
v (Any): Value to check.
field (fields.ModelField): Field descriptor.
Raises:
ValueError: Raise if not all values in are positive.
Returns:
Any: `v` sent for further processing.
"""
if isinstance(v, Iterable):
if not np.array([value >= 0 for value in v]).all():
raise ValueError(f"{cls.__name__}: all {field.name} must be positive. Received {v}")
elif v < 0.0:
raise ValueError(f"{cls.__name__}: {field.name} must be positive. Received {v}")
return v
def to_dtype_float32(cls: type, v: np.ndarray, field: fields.ModelField) -> np.ndarray:
"""Convert input np.ndarray to dtype np.float32.
Args:
cls (type): Class type.
v (np.ndarray): Value to convert
field (fields.ModelField): Field descriptor.
Returns:
np.ndarray: `v` sent for further processing.
"""
return v.astype(np.float32)
# ----- root_validators -----
def is_valid_bbox(cls: type, values: Dict[str, float]) -> Dict[str, float]:
"""Check that the bounding box is valid."""
if values["x_min"] >= values["x_max"] or values["y_min"] >= values["y_max"]:
raise ValueError(
f'{cls.__name__}: invalid bbox. x_min={values["x_min"]}, x_max={values["x_max"]},'
f' y_min={values["y_min"]}, y_max={values["y_max"]}'
)
return values
# ----- parametrized validators -----
def is_array_n_dimensions(nb_dimensions: int) -> Callable:
"""Create a pydantic validator checking if an array is n-dimensional.
Args:
nb_dimensions (int): number of dimensions the array must have
Returns:
Callable: the validator.
"""
def validator(cls: type, v: np.ndarray, field: fields.ModelField) -> np.ndarray:
"""Check if the array has the right number of dimensions."""
if len(v.shape) != nb_dimensions and (v.shape != (0,) or nb_dimensions != 0):
raise ValueError(
f"{cls.__name__}: wrong number of dimensions for {field.name}. "
f"Expected {nb_dimensions}, got {len(v.shape)}"
)
return v
return validator
# ----- parametrized root_validators -----
def are_lengths_equal(field1: str, field2: str) -> Callable:
"""Create a pydantic validator checking if the two fields have the same length.
Args:
field1 (str): name of the first field
        field2 (str): name of the second field
Returns:
Callable: the validator.
"""
def __root_validator(cls: type, values: Dict[str, List[Any]]) -> Dict[str, List[Any]]:
"""Check if len(field1) equals len(field2)."""
if len(values[field1]) != len(values[field2]):
raise ValueError(
f"{cls.__name__}: {field1} and {field2} length mismatch, "
f"resp. {len(values[field1])} and {len(values[field2])}"
)
re<fim_suffix>turn values
return __root_validator
def are_shapes_equal(field1: str, field2: str) -> Callable:
"""Create a pydantic validator checking if the two fields have the same shape.
Args:
field1 (str): name of the first field
        field2 (str): name of the second field
Returns:
Callable: the validator.
"""
def __root_validator(cls: type, values: Dict[str, np.ndarray]) -> Dict[str, np.ndarray]:
"""Check if field1.shape equals field2.shape."""
if values[field1].shape != values[field2].shape:
raise ValueError(f"{cls.__name__}: {field1} and {field2} shape mismatch.")
return values
return __root_validator
def are_all_shapes_equal(field1: str, field2: str) -> Callable:
"""Create a pydantic validator checking if two lists of array have the same shape per element.
This function creates a pydantic validator for two lists of np.ndarrays which checks if they have the same length,
and if all of their element have the same shape one by one.
Args:
field1 (str): name of the first field
        field2 (str): name of the second field
Returns:
Callable: the validator.
"""
def __root_validator(cls: type, values: Dict[str, np.ndarray]) -> Dict[str, np.ndarray]:
"""Check if len(field1) equals len(field2) and if every element have the same shape."""
shapes_field_1 = [element.shape for element in values[field1]]
shapes_field_2 = [element.shape for element in values[field2]]
if len(values[field1]) != len(values[field2]) or shapes_field_1 != shapes_field_2:
raise ValueError(
f"{cls.__name__}: {field1} and {field2} shape mismatch, resp. {shapes_field_1} and {shapes_field_2}."
)
return values
return __root_validator
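

def _validators_usage_demo() -> None:
    """Editor's sketch (not part of the library): wiring the factories above into a
    hypothetical pydantic model, mirroring the `validator(..., allow_reuse=True)(...)`
    pattern used throughout this codebase. `_ToyModel` and its fields are invented
    for illustration and assume pydantic v1 semantics (fields.ModelField)."""
    from pydantic import BaseModel, root_validator, validator

    class _ToyModel(BaseModel):
        class Config:
            arbitrary_types_allowed = True

        left: np.ndarray
        right: np.ndarray

        # `left` must be 2-dimensional; `left` and `right` must share a shape.
        _left_is_2d = validator("left", allow_reuse=True)(is_array_n_dimensions(2))
        _same_shape = root_validator(pre=True, allow_reuse=True)(are_shapes_equal("left", "right"))

    _ToyModel(left=np.zeros((3, 2)), right=np.zeros((3, 2)))  # passes both checks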
<fim_middle> | null | STATEMENT | complete_current_header_empty_completion |
<filename>open-iris/src/iris/nodes/vectorization/contouring.py<fim_prefix>from typing import Callable, List
import cv2
import numpy as np
from pydantic import NonNegativeFloat
from iris.io.class_configs import Algorithm
from iris.io.dataclasses import GeometryMask, GeometryPolygons
from iris.io.errors import VectorizationError
from iris.utils.math import area
def filter_polygon_areas(
polygons: List[np.ndarray], rel_tr: NonNegativeFloat = 0.03, abs_tr: NonNegativeFloat = 0.0
) -> List[np.ndarray]:
"""Filter out polygons whose area is below either an absolute threshold or a fraction of the largest area.
Args:
polygons (List[np.ndarray]): List of polygons to filter.
rel_tr (NonNegativeFloat, optional): Relative threshold. Defaults to 0.03.
abs_tr (NonNegativeFloat, optional): Absolute threshold. Defaults to 0.0.
Returns:
List[np.ndarray]: Filtered polygons' list.
"""
areas = [area(polygon) if len(polygon) > 2 else 1.0 for polygon in polygons]
area_factors = np.array(areas) / np.max(areas)
filtered_polygons = [
polygon
for area, area_factor, polygon in zip(areas, area_factors, polygons)
if area > abs_tr and area_factor > rel_tr
]
return filte<fim_suffix>red_polygons
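

# A minimal worked example follows; it is an editor's sketch, not part of the
# library, and assumes `iris.utils.math.area` returns a polygon's (shoelace) area.
def _filter_polygon_areas_demo() -> List[np.ndarray]:
    """With rel_tr=0.5, a small triangle (area 0.125) next to a unit square
    (area 1.0) is dropped, since its area factor 0.125 / 1.0 does not exceed
    the relative threshold."""
    square = np.array([[0, 0], [1, 0], [1, 1], [0, 1]], dtype=np.float32)
    triangle = np.array([[0, 0], [0.5, 0], [0, 0.5]], dtype=np.float32)
    return filter_polygon_areas([square, triangle], rel_tr=0.5)  # -> [square]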
class ContouringAlgorithm(Algorithm):
"""Implementation of a vectorization process through contouring raster image."""
class Parameters(Algorithm.Parameters):
"""Parameters class of the ContouringAlgorithm class."""
contour_filters: List[Callable[[List[np.ndarray]], List[np.ndarray]]]
__parameters_type__ = Parameters
def __init__(
self,
contour_filters: List[Callable[[List[np.ndarray]], List[np.ndarray]]] = [filter_polygon_areas],
) -> None:
"""Assign parameters.
Args:
contour_filters (List[Callable[[List[np.ndarray]], List[np.ndarray]]], optional): List of filter functions used to filter out noise in polygons.
                Defaults to [filter_polygon_areas].
"""
super().__init__(contour_filters=contour_filters)
def run(self, geometry_mask: GeometryMask) -> GeometryPolygons:
"""Contouring vectorization algorithm implementation.
Args:
geometry_mask (GeometryMask): Geometry segmentation map.
Raises:
            VectorizationError: Raised if the iris region is not segmented or an error occurs during iris region processing.
Returns:
GeometryPolygons: Geometry polygons points.
"""
if not np.any(geometry_mask.iris_mask):
raise VectorizationError("Geometry raster verification failed.")
geometry_contours = self._find_contours(geometry_mask)
return geometry_contours
def _find_contours(self, mask: GeometryMask) -> GeometryPolygons:
"""Find raw contours for different classes in raster.
Args:
mask (GeometryMask): Raster object.
Returns:
GeometryPolygons: Raw contours indicating polygons of different classes.
"""
eyeball_array = self._find_class_contours(mask.filled_eyeball_mask.astype(np.uint8))
iris_array = self._find_class_contours(mask.filled_iris_mask.astype(np.uint8))
pupil_array = self._find_class_contours(mask.pupil_mask.astype(np.uint8))
return GeometryPolygons(pupil_array=pupil_array, iris_array=iris_array, eyeball_array=eyeball_array)
def _find_class_contours(self, binary_mask: np.ndarray) -> np.ndarray:
"""Find contour between two different contours.
Args:
binary_mask (np.ndarray): Raster object.
Raises:
VectorizationError: Raised if number of contours found is different than 1.
Returns:
np.ndarray: Contour points array.
"""
contours, hierarchy = cv2.findContours(binary_mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
if hierarchy is None:
raise VectorizationError("_find_class_contours: No contour hierarchy found at all.")
parent_indices = np.flatnonzero(hierarchy[..., 3] == -1)
contours = [np.squeeze(contours[i]) for i in parent_indices]
contours = self._filter_contours(contours)
if len(contours) != 1:
raise VectorizationError("_find_class_contours: Number of contours must be equal to 1.")
return contours[0]
def _filter_contours(self, contours: List[np.ndarray]) -> List[np.ndarray]:
"""Filter contours based on predefined filters.
Args:
contours (List[np.ndarray]): Contours list.
Returns:
List[np.ndarray]: Filtered list of contours.
"""
for filter_func in self.params.contour_filters:
contours = filter_func(contours)
return contours
<fim_middle> | null | STATEMENT | complete_current_header_empty_completion |
<filename>open-iris/src/iris/nodes/iris_response_refinement/fragile_bits_refinement.py<fim_prefix>from typing import Literal, Tuple
import numpy as np
from pydantic import confloat
from iris.io.class_configs import Algorithm
from iris.io.dataclasses import IrisFilterResponse
class FragileBitRefinement(Algorithm):
"""Refining mask by masking out fragile bits.
Algorithm:
        Threshold the iris response at each bit with the given parameter value_threshold; wherever the response falls below the threshold, set the corresponding mask response to 0.
"""
class Parameters(Algorithm.Parameters):
"""RegularProbeSchema parameters."""
value_threshold: Tuple[confloat(ge=0), confloat(ge=0)]
fragile_type: Literal["cartesian", "polar"]
__parameters_type__ = Parameters
def __init__(
self,
value_threshold: Tuple[confloat(ge=0), confloat(ge=0)],
fragile_type: Literal["cartesian", "polar"] = "polar",
) -> None:
"""Create Fragile Bit Refinement object.
Args:
value_threshold (Tuple[confloat(ge=0), confloat(ge=0)]): Thresholding iris response values.
            fragile_type (Literal["cartesian", "polar"], optional): The fragile bits can be
                calculated in either cartesian or polar coordinates. In the cartesian case, the
                values of value_threshold refer to the x and y axes; in the polar case, they
                refer to radius and angle. Defaults to "polar".
"""
super().__init__(value_threshold=value_threshold, fragile_type=fragile_type)
def run(self, iris_filter_response: IrisFilterResponse) -> IrisFilterResponse:
"""Generate refined IrisFilterResponse.
Args:
iris_filter_response (IrisFilterResponse): Filter bank response.
Returns:
IrisFilterResponse: Filter bank response.
"""
fragile_masks = []
for iris_re<fim_suffix>sponse, iris_mask in zip(iris_filter_response.iris_responses, iris_filter_response.mask_responses):
if self.params.fragile_type == "cartesian":
mask_value_real = np.abs(np.real(iris_response)) >= self.params.value_threshold[0]
mask_value_imaginary = np.abs(np.imag(iris_response)) >= self.params.value_threshold[1]
mask_value = mask_value_real * mask_value_imaginary
if self.params.fragile_type == "polar":
iris_response_r = np.abs(iris_response)
iris_response_phi = np.angle(iris_response)
mask_value_r = iris_response_r >= self.params.value_threshold[0]
cos_mask = np.abs(np.cos(iris_response_phi)) <= np.abs(np.cos(self.params.value_threshold[1]))
sine_mask = np.abs(np.sin(iris_response_phi)) <= np.abs(np.cos(self.params.value_threshold[1]))
mask_value_phi = cos_mask * sine_mask
mask_value = mask_value_r * mask_value_phi
mask_value = mask_value * iris_mask
fragile_masks.append(mask_value)
return IrisFilterResponse(iris_responses=iris_filter_response.iris_responses, mask_responses=fragile_masks)
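

# Editor's sketch (not part of the library): the "cartesian" branch of the
# algorithm above, reduced to plain numpy on a toy complex response.
def _cartesian_fragile_bits_demo() -> np.ndarray:
    """With thresholds (0.1, 0.1), the second bit is masked out because the
    magnitude of its real part falls below 0.1."""
    response = np.array([[0.3 + 0.4j, 0.05 + 0.9j]])
    keep_real = np.abs(np.real(response)) >= 0.1
    keep_imag = np.abs(np.imag(response)) >= 0.1
    return keep_real * keep_imag  # -> array([[ True, False]])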
<fim_middle> | null | FOR | complete_current_header_empty_completion |
<filename>open-iris/src/iris/nodes/geometry_refinement/smoothing.py<fim_prefix>from typing import List, Tuple
import numpy as np
from pydantic import Field
from iris.io.class_configs import Algorithm
from iris.io.dataclasses import EyeCenters, GeometryPolygons
from iris.io.errors import GeometryRefinementError
from iris.utils import math
class Smoothing(Algorithm):
"""Implementation of contour smoothing algorithm.
Algorithm steps:
1) Map iris/pupil points to polar space based on estimated iris/pupil centers.
2) Smooth iris/pupil contour by applying 1D convolution with rolling median kernel approach.
3) Map points back to cartesian space from polar space.
"""
class Parameters(Algorithm.Parameters):
"""Smoothing parameters class."""
dphi: float = Field(..., gt=0.0, lt=360.0)
kernel_size: float = Field(..., gt=0.0, lt=360.0)
gap_threshold: float = Field(..., gt=0.0, lt=360.0)
__parameters_type__ = Parameters
def __init__(self, dphi: float = 1.0, kernel_size: float = 10.0, gap_threshold: float = 10.0) -> None:
"""Assign parameters.
Args:
dphi (float, optional): phi angle delta used to sample points while doing smoothing by interpolation. Defaults to 1.0.
kernel_size (float, optional): Rolling median kernel size expressed in radians. Final kernel size is computed as a quotient of kernel_size and dphi. Defaults to 10.0.
            gap_threshold (float, optional): Gap threshold distance expressed in degrees. Defaults to 10.0.
"""
super().__init__(dphi=dphi, kernel_size=kernel_size, gap_threshold=gap_threshold)
@property
def kernel_offset(self) -> int:
"""Kernel offset (distance from kernel center to border) property used when smoothing with rolling median. If a quotient is less then 1 then kernel size equal to 1 is returned.
Returns:
int: Kernel size.
"""
return max(1, int((np.radians(self.params.kernel_size) / np.radians(self.params.dphi))) // 2)
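
    # Editor's worked example: with the defaults kernel_size=10.0 and dphi=1.0,
    # the quotient radians(10) / radians(1) is 10 (up to floating-point rounding),
    # so the returned offset is max(1, 10 // 2) = 5 and the rolling-median window
    # spans 2 * 5 + 1 = 11 interpolated samples.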
def run(self, polygons: GeometryPolygons, eye_centers: EyeCenters) -> GeometryPolygons:
"""Perform smoothing refinement.
Args:
polygons (GeometryPolygons): Contours to refine.
eye_centers (EyeCenters): Eye center used when performing a coordinates mapping from cartesian space to polar space.
Returns:
GeometryPolygons: Smoothed contours.
"""
pupil_arcs = self._smooth(polygons.pupil_array, (eye_centers.pupil_x, eye_centers.pupil_y))
iris_arcs = self._smooth(polygons.iris_array, (eye_centers.iris_x, eye_centers.iris_y))
return GeometryPolygons(pupil_array=pupil_arcs, iris_array=iris_arcs, eyeball_array=polygons.eyeball_array)
def _smooth(self, polygon: np.ndarray, center_xy: Tuple[float, float]) -> np.ndarray:
"""Smooth a single contour.
Args:
polygon (np.ndarray): Contour to smooth.
center_xy (Tuple[float, float]): Contour's center.
Returns:
np.ndarray: Smoothed contour's vertices.
"""
arcs, num_gaps = self._cut_into_arcs(polygon, center_xy)
arcs = (
self._smooth_circular_shape(arcs[0], center_xy)
if num_gaps == 0
else np.vstack([self._smooth_arc(arc, center_xy) for arc in arcs if len(arc) >= 2])
)
return arcs
def _cut_into_arcs(self, polygon: np.ndarray, center_xy: Tuple[float, float]) -> Tuple[List[np.ndarray], int]:
"""Cut contour into arcs.
Args:
polygon (np.ndarray): Contour polygon.
center_xy (Tuple[float, float]): Polygon's center.
Returns:
Tuple[List[np.ndarray], int]: Tuple with: (list of list of vertices, number of gaps detected in a contour).
"""
rho, phi = math.cartesian2polar(polygon[:, 0], polygon[:, 1], *center_xy)
phi, rho = self._sort_two_arrays(phi, rho)
differences = np.abs(phi - np.roll(phi, -1))
# True distance between first and last point
differences[-1] = 2 * np.pi - differences[-1]
gap_indices = np.argwhere(differences > np.radians(self.params.gap_threshold)).flatten()
if gap_indices.size < 2:
return [polygon], gap_indices.size
gap_indices += 1
phi, rho = np.split(phi, gap_indices), np.split(rho, gap_indices)
arcs = [
np.column_stack(math.polar2cartesian(rho_coords, phi_coords, *center_xy))
for rho_coords, phi_coords in zip(rho, phi)
]
# Connect arc which lies between 0 and 2π.
if len(arcs) == gap_indices.size + 1:
arcs[0] = np.vstack([arcs[0], arcs[-1]])
arcs = arcs[:-1]
return arcs, gap_indices.size
def _smooth_arc(self, vertices: np.ndarray, center_xy: Tuple[float, float]) -> np.ndarray:
"""Smooth a single contour arc.
Args:
vertices (np.ndarray): Arc's vertices.
center_xy (Tuple[float, float]): Center of an entire contour.
Returns:
np.ndarray: Smoothed arc's vertices.
"""
rho, phi = math.cartesian2polar(vertices[:, 0], vertices[:, 1], *center_xy)
phi, rho = self._sort_two_arrays(phi, rho)
idx = self._find_start_index(phi)
offset = phi[idx]
relative_phi = (phi - offset) % (2 * np.pi)
smoothed_relative_phi, smoothed_rho = self._smooth_array(relative_phi, rho)
smoothed_phi = (smoothed_relative_phi + offset) % (2 * np.pi)
x_smoothed, y_smoothed = math.polar2cartesian(smoothed_rho, smoothed_phi, *center_xy)
return np.column_stack([x_smoothed, y_smoothed])
def _smooth_circular_shape(self, vertices: np.ndarray, center_xy: Tuple[float, float]) -> np.ndarray:
"""Smooth arc in a form of a circular shape.
Args:
vertices (np.ndarray): Arc's vertices.
center_xy (Tuple[float, float]): Center of an entire contour.
Returns:
np.ndarray: Smoothed arc's vertices.
"""
rho, phi = math.cartesian2polar(vertices[:, 0], vertices[:, 1], *center_xy)
padded_phi = np.concatenate([phi - 2 * np.pi, phi, phi + 2 * np.pi])
padded_rho = np.concatenate([rho, rho, rho])
smoothed_phi, smoothed_rho = self._smooth_array(padded_phi, padded_rho)
mask = (smoothed_phi >= 0) & (smoothed_phi < 2 * np.pi)
rho_smoothed, phi_smoothed = smoothed_rho[mask], smoothed_phi[mask]
x_smoothed, y_smoothed = math.polar2cartesian(rho_smoothed, phi_smoothed, *center_xy)
return np.column_stack([x_smoothed, y_smoothed])
def _smooth_array(self, phis: np.ndarray, rhos: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
"""Smooth coordinates expressed in polar space.
Args:
phis (np.ndarray): phi values.
rhos (np.ndarray): rho values.
Returns:
Tuple[np.ndarray, np.ndarray]: Tuple with smoothed coordinates (phis, rhos).
"""
interpolated_phi = np.arange(min(phis), max(phis), np.radians(self.params.dphi))
interpolated_rho = np.interp(interpolated_phi, xp=phis, fp=rhos, period=2 * np.pi)
smoothed_rho = self._rolling_median(interpolated_rho, self.kernel_offset)
smoothed_phi = interpolated_phi[self.kernel_offset : -self.kernel_offset]
return smoothed_phi, smoothed_rho
def _sort_two_arrays(self, first_list: np.ndarray, second_list: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
"""Sort both numpy arrays based on values from the first_list.
Args:
first_list (np.ndarray): First array.
second_list (np.ndarray): Second array.
Returns:
Tuple[np.ndarray, np.ndarray]: Tuple with (sorted first array, sorted second array).
"""
zipped_lists = zip(first_list, second_list)
sorted_pairs = sorted(zipped_lists)
sorted_tuples = zip(*sorted_pairs)
first_list, second_list = [list(sorted_tuple) for sorted_tuple in sorted_tuples]
return np.array(first_list), np.array(second_list)
def _find_start_index(self, phi: np.ndarray) -> int:
"""Find the start index by checking the largest gap. phi needs to be sorted.
Args:
phi (np.ndarray): phi angle values.
Raises:
GeometryRefinementError: Raised if phi values are not sorted ascendingly.
Returns:
int: Index value.
"""
if not np.all((phi - np.roll(phi, 1))[1:] >= 0):
raise GeometryRefinementError("Smoothing._find_start_index phi must be sorted ascendingly!")
phi_tmp = np.concatenate(([phi[-1] - 2 * np.pi], phi, [phi[0] + 2 * np.pi]))
phi_tmp_left_neighbor = np.roll(phi_tmp, 1)
dphi = (phi_tmp - phi_tmp_left_neighbor)[1:-1]
largest_gap_index = np.argmax(dphi)
return int(largest_gap_index)
def _rolling_median(self, signal: np.ndarray, kernel_offset: int) -> np.ndarray:
"""Compute rolling median of a 1D signal.
Args:
signal (np.ndarray): Signal values.
            kernel_offset (int): Kernel offset (distance from kernel center to border).
Raises:
GeometryRefinementError: Raised if signal is not 1D.
Returns:
np.ndarray: Rolling median result.
"""
if signal.ndim != 1:
raise GeometryRefinementError("Smoothing._rolling_median only works for 1d arrays.")
stacked_signals: List[np.ndarray] = []
for i in ran<fim_suffix>ge(-kernel_offset, kernel_offset + 1):
stacked_signals.append(np.roll(signal, i))
stacked_signals = np.stack(stacked_signals)
rolling_median = np.median(stacked_signals, axis=0)
rolling_median = rolling_median[kernel_offset:-kernel_offset]
return rolling_median
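

# Editor's sketch (not part of the library): the rolling median above, applied
# to a toy signal through the private method purely for demonstration.
def _rolling_median_demo() -> np.ndarray:
    """With kernel_offset=1 the rolling median is a 3-sample median filter
    whose borders are trimmed."""
    signal = np.array([1.0, 9.0, 2.0, 8.0, 3.0])
    return Smoothing()._rolling_median(signal, kernel_offset=1)  # -> array([2., 8., 3.])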
<fim_middle> | null | FOR | complete_current_header_empty_completion |
<filename>open-iris/src/iris/nodes/eye_properties_estimation/bisectors_method.py<fim_prefix>from typing import Tuple
import numpy as np
from pydantic import Field
from iris.io.class_configs import Algorithm
from iris.io.dataclasses import EyeCenters, GeometryPolygons
from iris.io.errors import EyeCentersEstimationError
class BisectorsMethod(Algorithm):
"""Implementation of eye's center estimation algorithm using bisectors method for finding a circle center.
This algorithm samples a given number of bisectors from the pupil and iris polygons, and averages their intersection
to produce the polygon center. This method is robust against noise in the polygons, making it a good choice for
non-perfect shapes. It is also robust to polygons missing parts of the circle arc, making it a good choice for
partially-occluded shapes.
LIMITATIONS:
    The iris and pupil can be approximated as circles when the user is properly gazing at the camera.
This requires that the cases of off-gaze have already been filtered out.
"""
class Parameters(Algorithm.Parameters):
"""Default Parameters for BisectorsMethod algorithm."""
num_bisectors: int = Field(..., gt=0)
min_distance_between_sector_points: float = Field(..., gt=0.0, lt=1.0)
max_iterations: int = Field(..., gt=0)
__parameters_type__ = Parameters
def __init__(
self,
num_bisectors: int = 100,
min_distance_between_sector_points: float = 0.75,
max_iterations: int = 50,
) -> None:
"""Assign parameters.
Args:
            num_bisectors (int, optional): Number of bisectors. Defaults to 100.
min_distance_between_sector_points (float, optional): Minimum distance between sectors expressed as a fractional value of a circular shape diameter. Defaults to 0.75.
            max_iterations (int, optional): Max iterations for bisector search. Defaults to 50.
"""
super().__init__(
num_bisectors=num_bisectors,
min_distance_between_sector_points=min_distance_between_sector_points,
max_iterations=max_iterations,
)
def run(self, geometries: GeometryPolygons) -> EyeCenters:
"""Estimate eye's iris and pupil centers.
Args:
geometries (GeometryPolygons): Geometry polygons.
Returns:
EyeCenters: Eye's centers object.
"""
pupil_center_x, pupil_center_y = self._find_center_coords(geometries.pupil_array, geometries.pupil_diameter)
iris_center_x, iris_center_y = self._find_center_coords(geometries.iris_array, geometries.iris_diameter)
return EyeCenters(pupil_x=pupil_center_x, pupil_y=pupil_center_y, iris_x=iris_center_x, iris_y=iris_center_y)
def _find_center_coords(self, polygon: np.ndarray, diameter: float) -> Tuple[float, float]:
"""Find center coordinates of a polygon.
Args:
polygon (np.ndarray): np.ndarray.
diameter (float): diameter of the polygon.
Returns:
Tuple[float, float]: Tuple with the center location coordinates (x, y).
"""
min_distance_between_sector_points_in_px = self.params.min_distance_between_sector_points * diameter
first_bisectors_point, second_bisectors_point = self._calculate_perpendicular_bisectors(
polygon, min_distance_between_sector_points_in_px
)
return self._find_best_intersection(first_bisectors_point, second_bisectors_point)
def _calculate_perpendicular_bisectors(
self, polygon: np.ndarray, min_distance_between_sector_points_in_px: float
) -> Tuple[np.ndarray, np.ndarray]:
"""Calculate the perpendicular bisector of self.params.num_bisectors randomly chosen points from a polygon's vertices.
A pair of points is used if their distance is larger then min_distance_between_sector_points_in_px.
Args:
polygon (np.ndarray): np.ndarray based on which we are searching the center of a circular shape.
min_distance_between_sector_points_in_px (float): Minimum distance between sector points.
Raises:
EyeCentersEstimationError: Raised if not able to find enough random pairs of points on the arc with a large enough distance!
Returns:
Tuple[np.ndarray, np.ndarray]: Calculated perpendicular bisectors.
"""
np.random.seed(142857)
bisectors_first_points = np.empty([0, 2])
bisectors_second_points = np.empty([0, 2])
for _ in range(self.params.max_iteratio<fim_suffix>ns):
random_indices = np.random.choice(len(polygon), size=(self.params.num_bisectors, 2))
first_drawn_points = polygon[random_indices[:, 0]]
second_drawn_points = polygon[random_indices[:, 1]]
norms = np.linalg.norm(first_drawn_points - second_drawn_points, axis=1)
mask = norms > min_distance_between_sector_points_in_px
bisectors_first_points = np.vstack([bisectors_first_points, first_drawn_points[mask]])
bisectors_second_points = np.vstack([bisectors_second_points, second_drawn_points[mask]])
if len(bisectors_first_points) >= self.params.num_bisectors:
break
else:
raise EyeCentersEstimationError(
"Not able to find enough random pairs of points on the arc with a large enough distance!"
)
bisectors_first_points = bisectors_first_points[: self.params.num_bisectors]
bisectors_second_points = bisectors_second_points[: self.params.num_bisectors]
bisectors_center = (bisectors_first_points + bisectors_second_points) / 2
        # Flip xs with ys and flip the sign of one of them to create a 90deg rotation
inv_bisectors_center_slope = np.fliplr(bisectors_second_points - bisectors_first_points)
inv_bisectors_center_slope[:, 1] = -inv_bisectors_center_slope[:, 1]
# Add perpendicular vector to center and normalize
norm = np.linalg.norm(inv_bisectors_center_slope, axis=1)
inv_bisectors_center_slope[:, 0] /= norm
inv_bisectors_center_slope[:, 1] /= norm
first_bisectors_point = bisectors_center - inv_bisectors_center_slope
second_bisectors_point = bisectors_center + inv_bisectors_center_slope
return first_bisectors_point, second_bisectors_point
def _find_best_intersection(self, fst_points: np.ndarray, sec_points: np.ndarray) -> Tuple[float, float]:
"""fst_points and sec_points are NxD arrays defining N lines. D is the dimension of the space.
This function returns the least squares intersection of the N lines from the system given by eq. 13 in
        http://cal.cs.illinois.edu/~johannes/research/LS_line_intersect.pdf.
Args:
fst_points (np.ndarray): First bisectors points.
sec_points (np.ndarray): Second bisectors points.
Returns:
Tuple[float, float]: Best intersection point.
Reference:
            [1] http://cal.cs.illinois.edu/~johannes/research/LS_line_intersect.pdf
"""
norm_bisectors = (sec_points - fst_points) / np.linalg.norm(sec_points - fst_points, axis=1)[:, np.newaxis]
# Generate the array of all projectors I - n*n.T
projections = np.eye(norm_bisectors.shape[1]) - norm_bisectors[:, :, np.newaxis] * norm_bisectors[:, np.newaxis]
# Generate R matrix and q vector
R = projections.sum(axis=0)
q = (projections @ fst_points[:, :, np.newaxis]).sum(axis=0)
# Solve the least squares problem for the intersection point p: Rp = q
p = np.linalg.lstsq(R, q, rcond=None)[0]
intersection_x, intersection_y = p
return intersection_x.item(), intersection_y.item()
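

# Editor's sketch (not part of the library): the projector-based least squares
# above recovers the exact crossing of a horizontal and a vertical line; the
# private method is called directly only for demonstration.
def _least_squares_intersection_demo() -> Tuple[float, float]:
    """Two lines, y = 2 and x = 1, each given by two points, intersect at (1, 2)."""
    fst_points = np.array([[0.0, 2.0], [1.0, 0.0]])
    sec_points = np.array([[5.0, 2.0], [1.0, 5.0]])
    return BisectorsMethod()._find_best_intersection(fst_points, sec_points)  # -> (1.0, 2.0)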
<fim_middle> | null | FOR | complete_current_header_empty_completion |
<filename>open-iris/src/iris/io/dataclasses.py<fim_prefix>from __future__ import annotations
from typing import Any, Dict, List, Literal, Tuple
import numpy as np
from pydantic import Field, NonNegativeInt, root_validator, validator
from iris.io import validators as v
from iris.io.class_configs import ImmutableModel
from iris.utils import math
class IRImage(ImmutableModel):
"""Data holder for input IR image."""
img_data: np.ndarray
eye_side: Literal["left", "right"]
@property
def height(self) -> int:
"""Return IR image's height.
Return:
int: image height.
"""
return self.img_data.shape[0]
@property
def width(self) -> int:
"""Return IR image's width.
Return:
int: image width.
"""
return self.img_data.shape[1]
def serialize(self) -> Dict[str, Any]:
"""Serialize IRImage object.
Returns:
Dict[str, Any]: Serialized object.
"""
return self.dict(by_alias=True)
@staticmethod
def deserialize(data: Dict[str, Any]) -> IRImage:
"""Deserialize IRImage object.
Args:
data (Dict[str, Any]): Serialized object to dict.
Returns:
IRImage: Deserialized object.
"""
return IRImage(**data)
class SegmentationMap(ImmutableModel):
"""Data holder for the segmentation models predictions."""
predictions: np.ndarray
index2class: Dict[NonNegativeInt, str]
_is_segmap_3_dimensions = validator("predictions", allow_reuse=True)(v.is_array_n_dimensions(3))
@root_validator(pre=True, allow_reuse=True)
def _check_segmap_shape_and_consistency(cls, values: Dict[str, Any]) -> Dict[str, Any]:
"""Check that the number of classes equals the depth of the segmentation map.
Args:
values (Dict[str, Any]): Dictionary with segmap and classes {param_name: data}.
Raises:
ValueError: Raised if there is resolution mismatch between image and mask.
Returns:
Dict[str, Any]: Unmodified values parameter passed for further processing.
"""
if values["predictions"].shape[2] != len(values["index2class"]):
segmap_depth, nb_classes = values["predictions"].shape, len(values["index2class"])
raise ValueError(
f"{cls.__name__}: mismatch between predictions shape {segmap_depth} and classes length {nb_classes}."
)
return values
@property
def height(self) -> int:
"""Return segmap's height.
Return:
int: segmap height.
"""
return self.predictions.shape[0]
@property
def width(self) -> int:
"""Return segmap's width.
Return:
int: segmap width.
"""
return self.predictions.shape[1]
@property
def nb_classes(self) -> int:
"""Return the number of classes of the segmentation map (i.e. nb channels).
Return:
int: number of classes in the segmentation map.
"""
return self.predictions.shape[2]
def __eq__(self, other: object) -> bool:
"""Check if two SegmentationMap objects are equal.
Args:
other (object): Second object to compare.
Returns:
bool: Comparison result.
"""
if not isinstance(other, SegmentationMap):
return False
return self.index2class == other.index2class and np.allclose(self.predictions, other.predictions)
def index_of(self, class_name: str) -> int:
"""Get class index based on its name.
Args:
class_name (str): Class name
        Raises:
            ValueError: Raised if `class_name` is not found in the `index2class` dictionary.
        Returns:
            int: Index of the class.
"""
for index, name i<fim_suffix>n self.index2class.items():
if name == class_name:
return index
raise ValueError(f"Index for the `{class_name}` not found")
def serialize(self) -> Dict[str, Any]:
"""Serialize SegmentationMap object.
Returns:
Dict[str, Any]: Serialized object.
"""
return self.dict(by_alias=True)
@staticmethod
def deserialize(data: Dict[str, Any]) -> SegmentationMap:
"""Deserialize SegmentationMap object.
Args:
data (Dict[str, Any]): Serialized object to dict.
Returns:
SegmentationMap: Deserialized object.
"""
return SegmentationMap(**data)
class GeometryMask(ImmutableModel):
"""Data holder for the geometry raster."""
pupil_mask: np.ndarray
iris_mask: np.ndarray
eyeball_mask: np.ndarray
_is_mask_2D = validator("*", allow_reuse=True)(v.is_array_n_dimensions(2))
_is_binary = validator("*", allow_reuse=True)(v.is_binary)
@property
def filled_eyeball_mask(self) -> np.ndarray:
"""Fill eyeball mask.
Returns:
np.ndarray: Eyeball mask with filled iris/pupil "holes".
"""
binary_maps = np.zeros(self.eyeball_mask.shape[:2], dtype=np.uint8)
binary_maps += self.pupil_mask
binary_maps += self.iris_mask
binary_maps += self.eyeball_mask
return binary_maps.astype(bool)
@property
def filled_iris_mask(self) -> np.ndarray:
"""Fill iris mask.
Returns:
np.ndarray: Iris mask with filled pupil "holes".
"""
binary_maps = np.zeros(self.iris_mask.shape[:2], dtype=np.uint8)
binary_maps += self.pupil_mask
binary_maps += self.iris_mask
return binary_maps.astype(bool)
def serialize(self) -> Dict[str, Any]:
"""Serialize GeometryMask object.
Returns:
Dict[str, Any]: Serialized object.
"""
return self.dict(by_alias=True)
@staticmethod
def deserialize(data: Dict[str, Any]) -> GeometryMask:
"""Deserialize GeometryMask object.
Args:
data (Dict[str, Any]): Serialized object to dict.
Returns:
GeometryMask: Deserialized object.
"""
return GeometryMask(**data)
class NoiseMask(ImmutableModel):
"""Data holder for the refined geometry masks."""
mask: np.ndarray
_is_mask_2D = validator("mask", allow_reuse=True)(v.is_array_n_dimensions(2))
_is_binary = validator("*", allow_reuse=True)(v.is_binary)
def serialize(self) -> Dict[str, np.ndarray]:
"""Serialize NoiseMask object.
Returns:
Dict[str, np.ndarray]: Serialized object.
"""
return self.dict(by_alias=True)
@staticmethod
def deserialize(data: Dict[str, np.ndarray]) -> NoiseMask:
"""Deserialize NoiseMask object.
Args:
data (Dict[str, np.ndarray]): Serialized object to dict.
Returns:
NoiseMask: Deserialized object.
"""
return NoiseMask(**data)
class GeometryPolygons(ImmutableModel):
"""Data holder for the refined geometry polygons. Input np.ndarrays are mandatorily converted to np.float32 dtype for compatibility with some downstream tasks such as MomentsOfArea."""
pupil_array: np.ndarray
iris_array: np.ndarray
eyeball_array: np.ndarray
_is_list_of_points = validator("*", allow_reuse=True)(v.is_list_of_points)
_convert_dtype = validator("*", allow_reuse=True)(v.to_dtype_float32)
@property
def pupil_diameter(self) -> float:
"""Return pupil diameter.
Returns:
float: pupil diameter.
"""
return math.estimate_diameter(self.pupil_array)
@property
def iris_diameter(self) -> float:
"""Return iris diameter.
Returns:
float: iris diameter.
"""
return math.estimate_diameter(self.iris_array)
def serialize(self) -> Dict[str, np.ndarray]:
"""Serialize GeometryPolygons object.
Returns:
Dict[str, np.ndarray]: Serialized object.
"""
return {"pupil": self.pupil_array, "iris": self.iris_array, "eyeball": self.eyeball_array}
@staticmethod
def deserialize(data: Dict[str, np.ndarray]) -> GeometryPolygons:
"""Deserialize GeometryPolygons object.
Args:
data (Dict[str, np.ndarray]): Serialized object to dict.
Returns:
GeometryPolygons: Deserialized object.
"""
data = {"pupil_array": data["pupil"], "iris_array": data["iris"], "eyeball_array": data["eyeball"]}
return GeometryPolygons(**data)
class EyeOrientation(ImmutableModel):
"""Data holder for the eye orientation. The angle must be comprised between -pi/2 (included) and pi/2 (excluded)."""
angle: float = Field(..., ge=-np.pi / 2, lt=np.pi / 2)
def serialize(self) -> float:
"""Serialize EyeOrientation object.
Returns:
float: Serialized object.
"""
return self.angle
@staticmethod
def deserialize(data: float) -> EyeOrientation:
"""Deserialize EyeOrientation object.
Args:
data (float): Serialized object to float.
Returns:
EyeOrientation: Deserialized object.
"""
return EyeOrientation(angle=data)
class EyeCenters(ImmutableModel):
"""Data holder for eye's centers."""
pupil_x: float
pupil_y: float
iris_x: float
iris_y: float
@property
def center_distance(self) -> float:
"""Return distance between pupil and iris center.
Return:
float: center distance.
"""
return np.linalg.norm([self.iris_x - self.pupil_x, self.iris_y - self.pupil_y])
def serialize(self) -> Dict[str, Tuple[float]]:
"""Serialize EyeCenters object.
Returns:
Dict[str, Tuple[float]]: Serialized object.
"""
return {"iris_center": (self.iris_x, self.iris_y), "pupil_center": (self.pupil_x, self.pupil_y)}
@staticmethod
def deserialize(data: Dict[str, Tuple[float]]) -> EyeCenters:
"""Deserialize EyeCenters object.
Args:
data (Dict[str, Tuple[float]]): Serialized object to dict.
Returns:
EyeCenters: Deserialized object.
"""
data = {
"pupil_x": data["pupil_center"][0],
"pupil_y": data["pupil_center"][1],
"iris_x": data["iris_center"][0],
"iris_y": data["iris_center"][1],
}
return EyeCenters(**data)
class Offgaze(ImmutableModel):
"""Data holder for offgaze score."""
score: float = Field(..., ge=0.0, le=1.0)
def serialize(self) -> float:
"""Serialize Offgaze object.
Returns:
float: Serialized object.
"""
return self.score
@staticmethod
def deserialize(data: float) -> Offgaze:
"""Deserialize Offgaze object.
Args:
data (float): Serialized object to float.
Returns:
Offgaze: Deserialized object.
"""
return Offgaze(score=data)
class PupilToIrisProperty(ImmutableModel):
"""Data holder for pupil-ro-iris ratios."""
pupil_to_iris_diameter_ratio: float = Field(..., gt=0, lt=1)
pupil_to_iris_center_dist_ratio: float = Field(..., ge=0, lt=1)
def serialize(self) -> Dict[str, float]:
"""Serialize PupilToIrisProperty object.
Returns:
Dict[str, float]: Serialized object.
"""
return self.dict(by_alias=True)
@staticmethod
def deserialize(data: Dict[str, float]) -> PupilToIrisProperty:
"""Deserialize PupilToIrisProperty object.
Args:
data (Dict[str, float]): Serialized object to dict.
Returns:
PupilToIrisProperty: Deserialized object.
"""
return PupilToIrisProperty(**data)
class Landmarks(ImmutableModel):
"""Data holder for eye's landmarks."""
pupil_landmarks: np.ndarray
iris_landmarks: np.ndarray
eyeball_landmarks: np.ndarray
_is_list_of_points = validator("*", allow_reuse=True)(v.is_list_of_points)
def serialize(self) -> Dict[str, List[float]]:
"""Serialize Landmarks object.
Returns:
Dict[str, List[float]]: Serialized object.
"""
return {
"pupil": self.pupil_landmarks.tolist(),
"iris": self.iris_landmarks.tolist(),
"eyeball": self.eyeball_landmarks.tolist(),
}
@staticmethod
def deserialize(data: Dict[str, List[float]]) -> Landmarks:
"""Deserialize Landmarks object.
Args:
data (Dict[str, List[float]]): Serialized object to dict.
Returns:
Landmarks: Deserialized object.
"""
data = {
"pupil_landmarks": np.array(data["pupil"]),
"iris_landmarks": np.array(data["iris"]),
"eyeball_landmarks": np.array(data["eyeball"]),
}
return Landmarks(**data)
class BoundingBox(ImmutableModel):
"""Data holder for eye's bounding box."""
x_min: float
y_min: float
x_max: float
y_max: float
_is_valid_bbox = root_validator(pre=True, allow_reuse=True)(v.is_valid_bbox)
def serialize(self) -> Dict[str, float]:
"""Serialize BoundingBox object.
Returns:
Dict[str, float]: Serialized object.
"""
return self.dict(by_alias=True)
@staticmethod
def deserialize(data: Dict[str, float]) -> BoundingBox:
"""Deserialize BoundingBox object.
Args:
data (Dict[str, float]): Serialized object to dict.
Returns:
BoundingBox: Deserialized object.
"""
return BoundingBox(**data)
class NormalizedIris(ImmutableModel):
"""Data holder for the normalized iris images."""
normalized_image: np.ndarray
normalized_mask: np.ndarray
_is_array_2D = validator("*", allow_reuse=True)(v.is_array_n_dimensions(2))
_is_binary = validator("normalized_mask", allow_reuse=True)(v.is_binary)
_img_mask_shape_match = root_validator(pre=True, allow_reuse=True)(
v.are_shapes_equal("normalized_image", "normalized_mask")
)
def serialize(self) -> Dict[str, np.ndarray]:
"""Serialize NormalizedIris object.
Returns:
Dict[str, np.ndarray]: Serialized object.
"""
return self.dict(by_alias=True)
@staticmethod
def deserialize(data: Dict[str, np.ndarray]) -> NormalizedIris:
"""Deserialize NormalizedIris object.
Args:
data (Dict[str, np.ndarray]): Serialized object to dict.
Returns:
NormalizedIris: Deserialized object.
"""
return NormalizedIris(**data)
class IrisFilterResponse(ImmutableModel):
"""Data holder for filter bank response with associated mask."""
iris_responses: List[np.ndarray]
mask_responses: List[np.ndarray]
_responses_mask_shape_match = root_validator(pre=True, allow_reuse=True)(
v.are_all_shapes_equal("iris_responses", "mask_responses")
)
def serialize(self) -> Dict[str, List[np.ndarray]]:
"""Serialize IrisFilterResponse object.
Returns:
Dict[str, List[np.ndarray]]: Serialized object.
"""
return self.dict(by_alias=True)
@staticmethod
def deserialize(data: Dict[str, List[np.ndarray]]) -> IrisFilterResponse:
"""Deserialize IrisFilterResponse object.
Args:
data (Dict[str, List[np.ndarray]]): Serialized object to dict.
Returns:
IrisFilterResponse: Deserialized object.
"""
return IrisFilterResponse(**data)
class IrisTemplate(ImmutableModel):
"""Data holder for final iris template with mask."""
iris_codes: List[np.ndarray]
mask_codes: List[np.ndarray]
_responses_mask_shape_match = root_validator(pre=True, allow_reuse=True)(
v.are_all_shapes_equal("iris_codes", "mask_codes")
)
_is_binary = validator("*", allow_reuse=True, each_item=True)(v.is_binary)
def serialize(self) -> Dict[str, np.ndarray]:
"""Serialize IrisTemplate object.
Returns:
Dict[str, np.ndarray]: Serialized object.
"""
stacked_iris_codes = np.stack(self.iris_codes)
stacked_iris_codes = stacked_iris_codes.transpose(1, 2, 0, 3)
stacked_mask_codes = np.stack(self.mask_codes)
stacked_mask_codes = stacked_mask_codes.transpose(1, 2, 0, 3)
return {
"iris_codes": stacked_iris_codes,
"mask_codes": stacked_mask_codes,
}
class EyeOcclusion(ImmutableModel):
"""Data holder for the eye occlusion."""
visible_fraction: float = Field(..., ge=-0.0, le=1.0)
def serialize(self) -> float:
"""Serialize EyeOcclusion object.
Returns:
float: Serialized object.
"""
return self.visible_fraction
@staticmethod
def deserialize(data: float) -> EyeOcclusion:
"""Deserialize EyeOcclusion object.
Args:
data (float): Serialized object to float.
Returns:
EyeOcclusion: Deserialized object.
"""
return EyeOcclusion(visible_fraction=data)
<fim_middle> | null | FOR | complete_current_header_empty_completion |
<filename>open-iris/src/iris/nodes/iris_response/conv_filter_bank.py<fim_prefix>from typing import List, Tuple
import numpy as np
from pydantic import root_validator, validator
from iris.io.class_configs import Algorithm
from iris.io.dataclasses import IrisFilterResponse, NormalizedIris
from iris.io.validators import are_lengths_equal, is_not_empty
from iris.nodes.iris_response.image_filters.gabor_filters import GaborFilter
from iris.nodes.iris_response.image_filters.image_filter_interface import ImageFilter
from iris.nodes.iris_response.probe_schemas.probe_schema_interface import ProbeSchema
from iris.nodes.iris_response.probe_schemas.regular_probe_schema import RegularProbeSchema
def polar_img_padding(img: np.ndarray, p_rows: int, p_cols: int) -> np.ndarray:
"""Apply zero-padding vertically and rotate-padding horizontally to a normalized image in polar coordinates.
Args:
img (np.ndarray): normalized image in polar coordinates.
p_rows (int): padding size on top and bottom.
p_cols (int): padding size on left and right.
Returns:
np.ndarray: padded image.
"""
i_rows, i_cols = img.shape
padded_image = np.zeros((i_rows + 2 * p_rows, i_cols + 2 * p_cols))
padded_image[p_rows : i_rows + p_rows, p_cols : i_cols + p_cols] = img
padded_image[p_rows : i_rows + p_rows, 0:p_cols] = img[:, -p_cols:]
padded_image[p_rows : i_rows + p_rows, -p_cols:] = img[:, 0:p_cols]
return padded_image
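

# Editor's sketch (not part of the library): one column of rotate-padding wraps
# the image horizontally, because normalized polar images are periodic in phi.
def _polar_img_padding_demo() -> np.ndarray:
    """Pad a 2x3 image with p_rows=0, p_cols=1."""
    img = np.arange(6, dtype=float).reshape(2, 3)
    padded = polar_img_padding(img, p_rows=0, p_cols=1)
    # padded == [[2., 0., 1., 2., 0.],
    #            [5., 3., 4., 5., 3.]]
    return padded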
class ConvFilterBank(Algorithm):
"""Apply filter bank.
Algorithm steps:
1) Obtain filters and corresponding probe schemas.
2) Apply convolution to a given pair of normalized iris image using the filters and probe schemas.
3) Generate the iris response and corresponding mask response.
"""
class Parameters(Algorithm.Parameters):
"""Default ConvFilterBank parameters."""
filters: List[ImageFilter]
probe_schemas: List[ProbeSchema]
# Validators
_are_lengths_equal = root_validator(pre=True, allow_reuse=True)(are_lengths_equal("probe_schemas", "filters"))
_is_not_empty = validator("*", allow_reuse=True)(is_not_empty)
__parameters_type__ = Parameters
def __init__(
self,
filters: List[ImageFilter] = [
GaborFilter(
kernel_size=(41, 21),
sigma_phi=7,
sigma_rho=6.13,
theta_degrees=90.0,
lambda_phi=28,
dc_correction=True,
to_fixpoints=True,
),
GaborFilter(
kernel_size=(17, 21),
sigma_phi=2,
sigma_rho=5.86,
theta_degrees=90.0,
lambda_phi=8,
dc_correction=True,
to_fixpoints=True,
),
],
probe_schemas: List[ProbeSchema] = [
RegularProbeSchema(n_rows=16, n_cols=256),
RegularProbeSchema(n_rows=16, n_cols=256),
],
) -> None:
"""Assign parameters.
Args:
filters (List[ImageFilter]): List of image filters.
probe_schemas (List[ProbeSchema]): List of corresponding probe schemas.
"""
super().__init__(filters=filters, probe_schemas=probe_schemas)
def run(self, normalization_output: NormalizedIris) -> IrisFilterResponse:
"""Apply filters to a normalized iris image.
Args:
normalization_output (NormalizedIris): Output of the normalization process.
Returns:
IrisFilterResponse: filter responses.
"""
iris_responses: List[np.ndarray] = []
mask_responses: List[np.ndarray] = []
for i_filter, i_schema in <fim_suffix>zip(self.params.filters, self.params.probe_schemas):
iris_response, mask_response = self._convolve(i_filter, i_schema, normalization_output)
iris_responses.append(iris_response)
mask_responses.append(mask_response)
return IrisFilterResponse(iris_responses=iris_responses, mask_responses=mask_responses)
def _convolve(
self, img_filter: ImageFilter, probe_schema: ProbeSchema, normalization_output: NormalizedIris
) -> Tuple[np.ndarray, np.ndarray]:
"""Apply convolution to a given normalized iris image with the filter and probe schema.
Args:
img_filter (ImageFilter): filter used for convolution.
probe_schema (ProbeSchema): probe schema used for convolution.
normalization_output (NormalizedIris): Output of the normalization process.
Returns:
Tuple[np.ndarray, np.ndarray]: iris response and mask response.
"""
i_rows, i_cols = normalization_output.normalized_image.shape
k_rows, k_cols = img_filter.kernel_values.shape
p_rows = k_rows // 2
p_cols = k_cols // 2
iris_response = np.zeros((probe_schema.params.n_rows, probe_schema.params.n_cols), dtype=np.complex64)
mask_response = np.zeros((probe_schema.params.n_rows, probe_schema.params.n_cols))
padded_iris = polar_img_padding(normalization_output.normalized_image, 0, p_cols)
padded_mask = polar_img_padding(normalization_output.normalized_mask, 0, p_cols)
for i in range(probe_schema.params.n_rows):
for j in range(probe_schema.params.n_cols):
# Convert probe_schema position to integer pixel position.
pos = i * probe_schema.params.n_cols + j
r_probe = min(round(probe_schema.rhos[pos] * i_rows), i_rows - 1)
c_probe = min(round(probe_schema.phis[pos] * i_cols), i_cols - 1)
# Get patch from image centered at [i,j] probed pixel position.
rtop = max(0, r_probe - p_rows)
rbot = min(r_probe + p_rows + 1, i_rows - 1)
iris_patch = padded_iris[rtop:rbot, c_probe : c_probe + k_cols]
mask_patch = padded_mask[rtop:rbot, c_probe : c_probe + k_cols]
# Perform convolution at [i,j] probed pixel position.
ktop = p_rows - iris_patch.shape[0] // 2
iris_response[i][j] = (
(iris_patch * img_filter.kernel_values[ktop : ktop + iris_patch.shape[0], :]).sum()
/ iris_patch.shape[0]
/ k_cols
)
mask_response[i][j] = (
0 if iris_response[i][j] == 0 else (mask_patch.sum() / iris_patch.shape[0] / k_cols)
)
return iris_response, mask_response
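
    # Editor's note: probe positions in a schema are fractions of the normalized
    # image size, e.g. with i_cols = 512 a phi of 0.25 probes column
    # min(round(0.25 * 512), 511) = 128.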
<fim_middle> | null | FOR | complete_current_header_empty_completion |
<filename>open-iris/src/iris/callbacks/pipeline_trace.py<fim_prefix>from __future__ import annotations
from typing import Any, Dict, Iterable, List, Optional
from iris.callbacks.callback_interface import Callback
from iris.io.class_configs import Algorithm
from iris.orchestration.pipeline_dataclasses import PipelineNode
class PipelineCallTraceStorageError(Exception):
"""PipelineCallTraceStorage error class."""
pass
class PipelineCallTraceStorage:
"""A storage object for pipeline input, intermediate and final results."""
INPUT_KEY_NAME = "input"
ERROR_KEY_NAME = "error"
def __init__(self, results_names: Iterable[str]) -> None:
"""Assign parameters.
Args:
results_names (Iterable[str]): Create list of available keys in the storage.
"""
self._storage = self._init_storage(results_names)
def __getitem__(self, result_name: str) -> Any:
"""Get result_name result.
Args:
result_name (str): Result name.
Raises:
PipelineCallTraceStorageError: Raised if result_name is not found.
Returns:
Any: Result object.
"""
return self.get(result_name)
def __len__(self) -> int:
"""Get storage capacity.
Returns:
int: Storage capacity
"""
return len(self._storage.keys())
def get(self, result_name: str) -> Any:
"""Get result_name result.
Args:
result_name (str): Result name.
Raises:
PipelineCallTraceStorageError: Raised if result_name is not found.
Returns:
Any: Result object.
"""
if result_name not in self._storage.keys():
raise PipelineCallTraceStorageError(f"Unknown result name: {result_name}")
return self._storage[result_name]
def get_input(self) -> Any:
"""Return pipeline input.
Returns:
Any: Input to pipeline.
"""
return self.get(PipelineCallTraceStorage.INPUT_KEY_NAME)
def get_error(self) -> Optional[Exception]:
"""Return stored error.
Returns:
Optional[Exception]: error.
"""
return self.get(PipelineCallTraceStorage.ERROR_KEY_NAME)
def write(self, result_name: str, result: Any) -> None:
"""Write a result to a storage saved under the name `result_name`.
Args:
result_name (str): Result name.
result (Any): Result reference to save.
"""
self._storage[result_name] = result
def write_input(self, in_value: Any) -> None:
"""Save `in_value` in storage.
Args:
in_value (Any): Input value.
"""
self._storage[PipelineCallTraceStorage.INPUT_KEY_NAME] = in_value
def write_error(self, error: Exception) -> None:
"""Save `error` in storage.
Args:
error (Exception): error to store.
"""
self._storage[PipelineCallTraceStorage.ERROR_KEY_NAME] = error
def clean(self) -> None:
"""Clean storage by setting all result references to None."""
for result_name in self._storage.<fim_suffix>keys():
self._storage[result_name] = None
def _init_storage(self, results_names: Iterable[str]) -> Dict[str, None]:
"""Initialize storage (dict) with proper names and None values as results.
Args:
results_names (Iterable[str]): Result names.
Returns:
Dict[str, None]: Storage dictionary.
"""
storage = {name: None for name in results_names}
storage[PipelineCallTraceStorage.INPUT_KEY_NAME] = None
storage[PipelineCallTraceStorage.ERROR_KEY_NAME] = None
return storage
@staticmethod
def initialise(nodes: Dict[str, Algorithm], pipeline_nodes: List[PipelineNode]) -> PipelineCallTraceStorage:
"""Instantiate mechanisms for intermediate results tracing.
Args:
            nodes (Dict[str, Algorithm]): Mapping between node names and the corresponding instantiated nodes.
pipeline_nodes (List[PipelineNode]): List of nodes as declared in the input config. Not used in this function.
Returns:
PipelineCallTraceStorage: Pipeline intermediate and final results storage.
"""
call_trace = PipelineCallTraceStorage(results_names=nodes.keys())
for algorithm_name, algorithm_object in nodes.items():
algorithm_object._callbacks.append(NodeResultsWriter(call_trace, algorithm_name))
return call_trace
class NodeResultsWriter(Callback):
"""A node call results writer Callback class."""
def __init__(self, trace_storage_reference: PipelineCallTraceStorage, result_name: str) -> None:
"""Assign parameters.
Args:
trace_storage_reference (PipelineCallTraceStorage): Storage object reference to write.
result_name (str): Result name under which result should be written.
"""
self._trace_storage_reference = trace_storage_reference
self._result_name = result_name
def on_execute_end(self, result: Any) -> None:
"""Write on node execution end.
Args:
result (Any): Result of node call.
"""
self._trace_storage_reference.write(self._result_name, result)
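

# Editor's sketch (not part of the library): a minimal round trip through the
# storage, using only methods defined above.
def _call_trace_demo() -> None:
    """A storage initialised with one result name accepts reads and writes under
    that name, alongside the reserved "input" and "error" slots."""
    trace = PipelineCallTraceStorage(results_names=["segmentation"])
    trace.write_input("ir_image_placeholder")
    trace.write("segmentation", {"mask": None})
    assert trace.get("segmentation") == {"mask": None}
    assert trace.get_input() == "ir_image_placeholder"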
<fim_middle> | null | FOR | complete_current_header_empty_completion |
<filename>open-iris/src/iris/nodes/geometry_refinement/contour_interpolation.py<fim_prefix>from typing import List
import numpy as np
from pydantic import Field
from iris.io.class_configs import Algorithm
from iris.io.dataclasses import GeometryPolygons
class ContourInterpolation(Algorithm):
"""Implementation of contour interpolation algorithm conditioned by given NoiseMask.
    Algorithm performs linear interpolation between consecutive vectorized, predicted points such that the maximum distance between two consecutive points in a polygon isn't greater than
    a fraction of an iris diameter length, specified by the `max_distance_between_boundary_points` parameter.
"""
class Parameters(Algorithm.Parameters):
"""Parameters class for ContourInterpolation objects."""
max_distance_between_boundary_points: float = Field(..., gt=0.0, lt=1.0)
__parameters_type__ = Parameters
def __init__(self, max_distance_between_boundary_points: float = 0.01) -> None:
"""Assign parameters.
Args:
            max_distance_between_boundary_points (float, optional): Maximum distance between boundary contour points expressed as a fraction of an iris diameter length. Defaults to 0.01.
"""
super().__init__(max_distance_between_boundary_points=max_distance_between_boundary_points)
def run(self, polygons: GeometryPolygons) -> GeometryPolygons:
"""Refine polygons by interpolating contour points.
Args:
polygons (GeometryPolygons): Polygons to refine.
Returns:
GeometryPolygons: Refined polygons.
"""
max_boundary_dist_in_px = self.params.max_distance_between_boundary_points * polygons.iris_diameter
refined_pupil_array = self._interpolate_polygon_points(polygons.pupil_array, max_boundary_dist_in_px)
refined_iris_array = self._interpolate_polygon_points(polygons.iris_array, max_boundary_dist_in_px)
refined_eyeball_array = self._interpolate_polygon_points(polygons.eyeball_array, max_boundary_dist_in_px)
return GeometryPolygons(
pupil_array=refined_pupil_array,
iris_array=refined_iris_array,
eyeball_array=refined_eyeball_array,
)
def _interpolate_polygon_points(self, polygon: np.ndarray, max_distance_between_points_px: float) -> np.ndarray:
"""Interpolate contours points, so that the distance between two is no greater than `self.params.max_distance_between_boundary_points` in pixel space.
Args:
polygon (np.ndarray): Contour polygons.
max_distance_between_points_px (float): `self.params.max_distance_between_boundary_points` expressed in pixel length relative to iris diameter.
Returns:
np.ndarray: Interpolated polygon points.
"""
previous_boundary = np.roll(polygon, shift=1, axis=0)
distances = np.linalg.norm(polygon - previous_boundary, axis=1)
num_points = np.ceil(distances / max_distance_between_points_px).astype(int)
x: List[np.ndarray] = []
y: List[np.ndarray] = []
for (x1, y1), (x2,<fim_suffix> y2), num_point in zip(previous_boundary, polygon, num_points):
x.append(np.linspace(x1, x2, num=num_point, endpoint=False))
y.append(np.linspace(y1, y2, num=num_point, endpoint=False))
new_boundary = np.stack([np.concatenate(x), np.concatenate(y)], axis=1)
_, indices = np.unique(new_boundary, axis=0, return_index=True)
new_boundary = new_boundary[np.sort(indices)]
return new_boundary
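

# Editor's sketch (not part of the library): the private method is called
# directly only for demonstration.
def _contour_interpolation_demo() -> np.ndarray:
    """Interpolating a unit square so that consecutive points sit at most 0.5 px
    apart inserts one midpoint per edge, yielding 8 unique points."""
    square = np.array([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]])
    node = ContourInterpolation()
    return node._interpolate_polygon_points(square, max_distance_between_points_px=0.5)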
<fim_middle> | null | FOR | complete_current_header_empty_completion |
<filename>open-iris/src/iris/nodes/normalization/perspective_normalization.py<fim_prefix>from typing import Collection, List, Tuple
import cv2
import numpy as np
from pydantic import Field, validator
from iris.io.class_configs import Algorithm
from iris.io.dataclasses import EyeOrientation, GeometryPolygons, IRImage, NoiseMask, NormalizedIris
from iris.io.errors import NormalizationError
from iris.nodes.normalization.common import correct_orientation, generate_iris_mask, interpolate_pixel_intensity
class PerspectiveNormalization(Algorithm):
"""Implementation of a normalization algorithm which uses perspective transformation to map image pixels.
Algorithm steps:
        1) Create a grid of trapezoids around the iris in the original image based on the following algorithm parameters: res_in_phi, res_in_r, intermediate_radiuses.
        2) Create a grid of rectangles in the normalized image, one corresponding to each trapezoid.
        3) For each corresponding trapezoid-rectangle pair, compute the perspective matrix that maps normalized image pixel locations to original image locations.
        4) Map each normalized image pixel to an original image pixel based on the estimated perspective matrix, performing bilinear interpolation if necessary.
"""
class Parameters(Algorithm.Parameters):
"""Parameters class for PerspectiveNormalization."""
res_in_phi: int = Field(..., gt=0)
res_in_r: int = Field(..., gt=0)
skip_boundary_points: int = Field(..., gt=0)
intermediate_radiuses: Collection[float]
oversat_threshold: int = Field(..., gt=0)
@validator("intermediate_radiuses")
def check_intermediate_radiuses(cls: type, v: Collection[float]) -> Collection[float]:
"""Check intermediate_radiuses parameter.
Args:
cls (type): PerspectiveNormalization.Parameters class.
v (Collection[float]): Variable value to check.
Raises:
NormalizationError: Raised if number of radiuses is invalid or min value is less then 0.0 or greater than 1.0.
Returns:
Collection[float]: intermediate_radiuses value passed for further processing.
"""
if len(v) < 2:
raise NormalizationError(f"Invalid number of intermediate_radiuses: {len(v)}.")
if min(v) < 0.0:
raise NormalizationError(f"Invalid min value of intermediate_radiuses: {min(v)}.")
if max(v) > 1.0:
raise NormalizationError(f"Invalid max value of intermediate_radiuses: {max(v)}.")
return v
__parameters_type__ = Parameters
def __init__(
self,
res_in_phi: int = 512,
res_in_r: int = 128,
skip_boundary_points: int = 1,
intermediate_radiuses: Collection[float] = np.linspace(0.0, 1.0, 8),
oversat_threshold: int = 254,
) -> None:
"""Assign parameters.
Args:
res_in_phi (int): Normalized image phi resolution. Defaults to 512.
res_in_r (int): Normalized image r resolution. Defaults to 128.
skip_boundary_points (int, optional): Take every nth point from estimated boundaries when generating correspondences.
Defaults to 1.
            intermediate_radiuses (Collection[float], optional): Intermediate ring radiuses used to generate additional points for estimating transformations.
Defaults to np.linspace(0.0, 1.0, 8).
            oversat_threshold (int, optional): Threshold for masking over-saturated pixels. Defaults to 254.
"""
super().__init__(
res_in_phi=res_in_phi,
res_in_r=res_in_r,
skip_boundary_points=skip_boundary_points,
intermediate_radiuses=intermediate_radiuses,
oversat_threshold=oversat_threshold,
)
def run(
self,
image: IRImage,
noise_mask: NoiseMask,
extrapolated_contours: GeometryPolygons,
eye_orientation: EyeOrientation,
) -> NormalizedIris:
"""Normalize iris using perspective transformation estimated for every region of an image separately.
Args:
image (IRImage): Input image to normalize.
noise_mask (NoiseMask): Noise mask.
extrapolated_contours (GeometryPolygons): Extrapolated contours.
eye_orientation (EyeOrientation): Eye orientation angle.
Returns:
NormalizedIris: NormalizedIris object containing normalized image and iris mask.
"""
if len(extrapolated_contours.pupil_array) != len(extrapolated_contours.iris_array):
raise NormalizationError("Extrapolated amount of iris and pupil points must be the same.")
pupil_points, iris_points = correct_orientation(
extrapolated_contours.pupil_array,
extrapolated_contours.iris_array,
eye_orientation.angle,
)
iris_mask = generate_iris_mask(extrapolated_contours, noise_mask.mask)
iris_mask[image.img_data >= self.params.oversat_threshold] = False
src_points, dst_points = self._generate_correspondences(pupil_points, iris_points)
normalized_iris = NormalizedIris(
normalized_image=np.zeros((self.params.res_in_r, self.params.res_in_phi), dtype=np.float32),
normalized_mask=np.zeros((self.params.res_in_r, self.params.res_in_phi), dtype=bool),
)
for angle_point_idx in range(src_points.shape[1] - 1):
for ring_idx in range(src_points.shape[0] - 1):
current_src, current_dst = self._correspondence_rois_coords(
angle_idx=angle_point_idx,
ring_idx=ring_idx,
src_points=src_points,
dst_points=dst_points,
)
xmin, ymin, xmax, ymax = self._bbox_coords(current_dst)
normalized_image_roi, normalized_mask_roi = self._normalize_roi(
original_image=image.img_data,
iris_mask=iris_mask,
src_points=current_src.astype(np.float32),
dst_points=current_dst.astype(np.float32),
normalize_roi_output_shape=(ymax - ymin, xmax - xmin),
)
normalized_iris.normalized_image[ymin:ymax, xmin:xmax] = normalized_image_roi
normalized_iris.normalized_mask[ymin:ymax, xmin:xmax] = normalized_mask_roi
return normalized_iris
def _generate_correspondences(
self, pupil_points: np.ndarray, iris_points: np.ndarray
) -> Tuple[np.ndarray, np.ndarray]:
"""Generate correspondences between points in original image and normalized image.
Args:
pupil_points (np.ndarray): Pupil bounding points. NumPy array of shape (num_points = 512, xy_coords = 2).
iris_points (np.ndarray): Iris bounding points. NumPy array of shape (num_points = 512, xy_coords = 2).
Returns:
Tuple[np.ndarray, np.ndarray]: Tuple with generated correspondences.
"""
pupil_points = pupil_points[:: self.params.skip_boundary_points]
iris_points = iris_points[:: self.params.skip_boundary_points]
src_points = []
for radius in self.params.intermediate_radiuses:
ring = pupil_points + radius * (iris_points - pupil_points)
ring = np.vstack([ring, ring[0]])
src_points.append(ring)
src_points = np.array(src_points)
num_rings, num_ring_points = src_points.shape[:2]
dst_xs, dst_ys = np.meshgrid(
np.linspace(0, self.params.res_in_phi, num_ring_points).astype(int),
np.linspace(0, self.params.res_in_r, num_rings).astype(int),
)
dst_points = np.array([dst_xs, dst_ys]).transpose((1, 2, 0))
return src_points, dst_points
def _normalize_roi(
self,
original_image: np.ndarray,
iris_mask: np.ndarray,
src_points: np.ndarray,
dst_points: np.ndarray,
normalize_roi_output_shape: Tuple[float, float],
) -> Tuple[np.ndarray, np.ndarray]:
"""Normalize a single ROI of an image.
Args:
original_image (np.ndarray): Entire input image to normalize.
iris_mask (np.ndarray): Iris class segmentation mask.
src_points (np.ndarray): ROI's original input image points.
dst_points (np.ndarray): ROI's normalized image points.
normalize_roi_output_shape (Tuple[float, float]): Output shape of the normalized ROI.
Returns:
Tuple[np.ndarray, np.ndarray]: Tuple with normalized image and mask ROIs.
"""
xmin, ymin, xmax, ymax = self._bbox_coords(dst_points)
normalize_image_xs = np.arange(xmin, xmax)
normalize_image_ys = np.arange(ymin, ymax)
normalize_image_points = np.meshgrid(normalize_image_xs, normalize_image_ys)
normalize_image_points = self.cartesian2homogeneous(normalize_image_points)
perspective_mat = cv2.getPerspectiveTransform(dst_points, src_points)
mapped_points = np.matmul(perspective_mat, normalize_image_points)
mapped_points = self.homogeneous2cartesian(mapped_points)
normalized_image_roi = np.zeros(normalize_roi_output_shape, dtype=np.float32)
normalized_mask_roi = np.zeros(normalize_roi_output_shape, dtype=bool)
for image_xy, normalized_xy in zip(mapped_points.T, normalize_image_points.T[..., :2]):
norm_x, norm_y = normalized_xy.astype(int)
shifted_y, shifted_x = norm_y - ymin, norm_x - xmin
normalized_image_roi[shifted_y, shifted_x] = interpolate_pixel_intensity(
original_image, pixel_coords=image_xy
)
try:
img_x, img_y = map(int, image_xy)
normalized_mask_roi[shifted_y, shifted_x] = iris_mask[img_y, img_x]
except IndexError:
normalized_mask_roi[shifted_y, shifted_x] = False
return normalized_image_roi / 255.0, normalized_mask_roi
def _bbox_coords(self, norm_dst_points: np.ndarray) -> Tuple[int, int, int, int]:
"""Extract the bounding box of currently processed normalized image ROI.
Args:
norm_dst_points (np.ndarray): Normalized image ROI coordinates.
Returns:
Tuple[int, int, int, int]: Bounding box coordinates in form (xmin, ymin, xmax, ymax).
"""
xmin, ymin = norm_dst_points[0].astype(int)
xmax, ymax = norm_dst_points[-1].astype(int)
return (xmin, ymin, xmax, ymax)
def _correspondence_rois_coords(
self,
angle_idx: int,
ring_idx: int,
src_points: np.ndarray,
dst_points: np.ndarray,
) -> Tuple[np.ndarray, np.ndarray]:
"""Generate a single correspondence ROIs between original image and normalized one based on angle index and ring index.
Args:
angle_idx (int): Boundary point angle index.
ring_idx (int): Intermediate ring index.
src_points (np.ndarray): All mapping points from an original image.
NumPy array of shape (
num_intermediate_rings = self.intermediate_radiuses,
num_boundary_points = 512 // self.skip_boundary_points,
xy_coords = 2
).
dst_points (np.ndarray): All mapping points from a normalized image.
NumPy array of shape (
num_intermediate_rings = self.intermediate_radiuses,
num_boundary_points = 512 // self.skip_boundary_points,
xy_coords = 2
).
Returns:
Tuple[np.ndarray, np.ndarray]: Tuple with the ROIs extracted from src_points and dst_points.
"""
src_roi = src_points[ring_idx : ring_idx + 2, angle_idx : angle_idx + 2]
dst_roi = dst_points[ring_idx : ring_idx + 2, angle_idx : angle_idx + 2]
return src_roi.reshape(4, 2), dst_roi.reshape(4, 2)
@staticmethod
def cartesian2homogeneous(points: List[np.ndarray]) -> np.ndarray:
"""Convert points in cartesian coordinates to homogeneous coordinates.
Args:
points (List[np.ndarray]): Points in cartesian coordinates. Array should be in format: [[x values], [y values]].
Returns:
np.ndarray: Points in homogeneous coordinates. Returned array will have format: [[x values], [y values], [1 ... 1]].
"""
x_coords, y_coords = points
x_coords = x_coords.reshape(-1, 1)
y_coords = y_coords.reshape(-1, 1)
homogeneous_coords = np.hstack([x_coords, y_coords, np.ones((len(x_coords), 1))])
return homogeneous_coords.T
@staticmethod
def homogeneous2cartesian(points: np.ndarray) -> np.ndarray:
"""Convert points in homogeneous coordinates to cartesian coordinates.
Args:
points (np.ndarray): Points in homogeneous coordinates. Array should be in format: [[x values], [y values], [perspective scale values]].
Returns:
np.ndarray: Points in cartesian coordinates. Returned array will have format: [[x values], [y values]].
"""
points /= points[-1]
points = points[:2]
return points
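# --- Editor's note: illustrative sketch, not part of the open-iris source. ---
# A numpy-only round-trip check of the homogeneous-coordinate helpers above:
# lift 2D points to homogeneous form, apply a 3x3 perspective matrix (identity
# here, for simplicity), then divide by the last row to return to cartesian.
if __name__ == "__main__":
    xs = np.array([0.0, 1.0, 2.0])
    ys = np.array([0.0, 1.0, 4.0])
    homogeneous = PerspectiveNormalization.cartesian2homogeneous([xs, ys])  # shape (3, N)
    mapped = np.matmul(np.eye(3), homogeneous)
    cartesian = PerspectiveNormalization.homogeneous2cartesian(mapped)  # shape (2, N)
    assert np.allclose(cartesian, np.vstack([xs, ys]))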
<filename>open-iris/src/iris/nodes/encoder/iris_encoder.py
from typing import List
import numpy as np
from pydantic import Field
from iris.callbacks.callback_interface import Callback
from iris.io.class_configs import Algorithm
from iris.io.dataclasses import IrisFilterResponse, IrisTemplate
class IrisEncoder(Algorithm):
"""Binarize IrisFilterResponse to generate iris code using Daugman's method.
Algorithm steps:
1) Binarize iris response by comparing real and imaginary parts to zero.
2) Binarize mask response by comparing real and imaginary parts to a given parameter: mask_threshold.
Reference:
[1] https://www.robots.ox.ac.uk/~az/lectures/est/iris.pdf.
"""
class Parameters(Algorithm.Parameters):
"""IrisEncoder parameters."""
mask_threshold: float = Field(..., ge=0.0, le=1.0)
__parameters_type__ = Parameters
def __init__(self, mask_threshold: float = 0.9, callbacks: List[Callback] = []) -> None:
"""Assign parameters.
Args:
mask_threshold (float): threshold to binarize mask_responses, in the range of [0,1]. Defaults to 0.9.
callbacks (List[Callback]): callbacks list. Defaults to [].
"""
super().__init__(mask_threshold=mask_threshold, callbacks=callbacks)
def run(self, response: IrisFilterResponse) -> IrisTemplate:
"""Encode iris code and mask code.
Args:
response (IrisFilterResponse): Filter responses.
Returns:
IrisTemplate: Final iris template.
"""
iris_codes: List[np.ndarray] = []
mask_codes: List[np.ndarray] = []
for iris_response, mask_response in zip(response.iris_responses, response.mask_responses):
mask_code = mask_response >= self.params.mask_threshold
iris_code = np.stack([iris_response.real > 0, iris_response.imag > 0], axis=-1)
mask_code = np.stack([mask_code, mask_code], axis=-1)
iris_codes.append(iris_code)
mask_codes.append(mask_code)
return IrisTemplate(iris_codes=iris_codes, mask_codes=mask_codes)
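# --- Editor's note: illustrative sketch, not part of the open-iris source. ---
# The Daugman binarization above, replayed on synthetic data with plain numpy:
# each complex filter response contributes two bits per pixel (the signs of
# its real and imaginary parts), and the thresholded mask is duplicated so it
# covers both bits.
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    iris_response = rng.standard_normal((16, 256)) + 1j * rng.standard_normal((16, 256))
    mask_response = rng.uniform(size=(16, 256))
    iris_code = np.stack([iris_response.real > 0, iris_response.imag > 0], axis=-1)
    mask_code = np.stack([mask_response >= 0.9] * 2, axis=-1)
    assert iris_code.shape == mask_code.shape == (16, 256, 2)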
<filename>open-iris/src/iris/nodes/iris_response/image_filters/gabor_filters.py
from typing import Any, Dict, Tuple
import numpy as np
from pydantic import Field, conint, root_validator, validator
import iris.io.validators as pydantic_v
from iris.io.errors import ImageFilterError
from iris.nodes.iris_response.image_filters.image_filter_interface import ImageFilter
def upper_bound_Gabor_parameters(cls: type, values: Dict[str, Any]) -> Dict[str, Any]:
"""Check upper bounds of Gabor filter parameters such as sigma_phi, sigma_rho and lambda_phi for the given kernel_size.
Args:
cls (type): class type.
values (Dict[str, Any]): values to be checked.
Raises:
ImageFilterError: Raised if 1) sigma_phi >= kernel_size[0], 2) sigma_rho >= kernel_size[1], or 3) lambda_phi >= kernel_size[0].
Returns:
Dict[str, Any]: values of checked parameters.
"""
kernel_size, sigma_phi, sigma_rho, lambda_phi = (
values["kernel_size"],
values["sigma_phi"],
values["sigma_rho"],
values["lambda_phi"],
)
if sigma_phi >= kernel_size[0]:
raise ImageFilterError("Invalid parameters: sigma_phi must be smaller than kernel_size[0].")
if sigma_rho >= kernel_size[1]:
raise ImageFilterError("Invalid parameters: sigma_rho must be smaller than kernel_size[1].")
if lambda_phi >= kernel_size[0]:
raise ImageFilterError("Invalid parameters: lambda_phi must be smaller than kernel_size[0].")
return values
def upper_bound_LogGabor_parameters(cls: type, values: Dict[str, Any]) -> Dict[str, Any]:
"""Check upper bound of LogGabor filter parameter lambda_rho for the given kernel_size.
Args:
cls (type): class type.
values (Dict[str, Any]): values to be checked.
Raises:
ImageFilterError: Raised if lambda_rho >= kernel_size[1].
Returns:
Dict[str, Any]: values of checked parameters.
"""
kernel_size, lambda_rho = values["kernel_size"], values["lambda_rho"]
if lambda_rho >= kernel_size[1]:
raise ImageFilterError("Invalid parameters: lambda_rho can not be greater than kernel_size[1].")
return values
def get_xy_mesh(kernel_size: Tuple[int, int]) -> Tuple[np.ndarray, np.ndarray]:
"""Get (x,y) meshgrids for a given kernel size.
Args:
kernel_size (Tuple[int, int]): Kernel width and height.
Returns:
Tuple[np.ndarray, np.ndarray]: meshgrid of (x, y) positions.
"""
ksize_phi_half = kernel_size[0] // 2
ksize_rho_half = kernel_size[1] // 2
y, x = np.meshgrid(
np.arange(-ksize_phi_half, ksize_phi_half + 1),
np.arange(-ksize_rho_half, ksize_rho_half + 1),
indexing="xy",
sparse=True,
)
return x, y
def get_radius(x: np.ndarray, y: np.ndarray) -> np.ndarray:
"""Get radius to the image center for a given array of relative positions (x,y).
Args:
x (np.ndarray): x position relative to the image center.
y (np.ndarray): y position relative to the image center.
Returns:
np.ndarray: radius to the image center.
"""
radius = np.sqrt(x**2 + y**2)
return radius
def rotate(x: np.ndarray, y: np.ndarray, angle: float) -> Tuple[np.ndarray, np.ndarray]:
"""Rotate a given array of relative positions (x,y) by a given angle.
Args:
x (np.ndarray): x position.
y (np.ndarray): y position.
angle (float): angle for rotation (in degrees).
Returns:
Tuple[np.ndarray, np.ndarray]: rotated x, y positions.
"""
cos_theta = np.cos(angle * np.pi / 180)
sin_theta = np.sin(angle * np.pi / 180)
rotx = x * cos_theta + y * sin_theta
roty = -x * sin_theta + y * cos_theta
return rotx, roty
def normalize_kernel_values(kernel_values: np.ndarray) -> np.ndarray:
"""Normalize the kernel values so that the square sum is 1.
Args:
kernel_values (np.ndarray): Kernel values (complex numbers).
Returns:
np.ndarray: normalized Kernel values.
"""
norm_real = np.linalg.norm(kernel_values.real, ord="fro")
if norm_real > 0:
kernel_values.real /= norm_real
norm_imag = np.linalg.norm(kernel_values.imag, ord="fro")
if norm_imag > 0:
kernel_values.imag /= norm_imag
return kernel_values
def convert_to_fixpoint_kernelvalues(kernel_values: np.ndarray) -> np.ndarray:
"""Convert the kernel values (both real and imaginary) to fix points.
Args:
kernel_values (np.ndarray): Kernel values.
Returns:
np.ndarray: fix-point Kernel values.
"""
if np.iscomplexobj(kernel_values):
kernel_values.real = np.round(kernel_values.real * 2**15)
kernel_values.imag = np.round(kernel_values.imag * 2**15)
else:
kernel_values = np.round(kernel_values * 2**15)
return kernel_values
class GaborFilter(ImageFilter):
"""Implementation of a 2D Gabor filter.
Reference:
[1] https://inc.ucsd.edu/mplab/75/media//gabor.pdf.
"""
class Parameters(ImageFilter.Parameters):
"""GaborFilter parameters."""
kernel_size: Tuple[conint(gt=3, lt=99), conint(gt=3, lt=99)]
sigma_phi: float = Field(..., ge=1)
sigma_rho: float = Field(..., ge=1)
theta_degrees: float = Field(..., ge=0, lt=360)
lambda_phi: float = Field(..., ge=2)
dc_correction: bool
to_fixpoints: bool
_upper_bound = root_validator(pre=True, allow_reuse=True)(upper_bound_Gabor_parameters)
_is_odd = validator("kernel_size", allow_reuse=True, each_item=True)(pydantic_v.is_odd)
__parameters_type__ = Parameters
def __init__(
self,
*,
kernel_size: Tuple[conint(gt=3, lt=99), conint(gt=3, lt=99)],
sigma_phi: float,
sigma_rho: float,
theta_degrees: float,
lambda_phi: float,
dc_correction: bool = True,
to_fixpoints: bool = False,
) -> None:
"""Assign parameters.
Args:
kernel_size (Tuple[conint(gt=3, lt=99), conint(gt=3, lt=99)]): Kernel width and height.
sigma_phi (float): phi standard deviation.
sigma_rho (float): rho standard deviation.
theta_degrees (float): orientation of kernel in degrees.
lambda_phi (float): wavelength of the sinusoidal factor, lower value = thinner strip.
dc_correction (bool, optional): whether to enable DC correction. Defaults to True.
to_fixpoints (bool, optional): whether to convert kernel values to fixpoints. Defaults to False.
"""
super().__init__(
kernel_size=kernel_size,
sigma_phi=sigma_phi,
sigma_rho=sigma_rho,
theta_degrees=theta_degrees,
lambda_phi=lambda_phi,
dc_correction=dc_correction,
to_fixpoints=to_fixpoints,
)
def compute_kernel_values(self) -> np.ndarray:
"""Compute 2D Gabor filter kernel values.
Returns:
np.ndarray: Kernel values.
"""
# convert to polar coordinates
x, y = get_xy_mesh(self.params.kernel_size)
rotx, roty = rotate(x, y, self.params.theta_degrees)
# calculate carrier and envelope
carrier = 1j * 2 * np.pi / self.params.lambda_phi * rotx
envelope = -(rotx**2 / self.params.sigma_phi**2 + roty**2 / self.params.sigma_rho**2) / 2
# calculate kernel values
kernel_values = np.exp(envelope + carrier)
kernel_values /= 2 * np.pi * self.params.sigma_phi * self.params.sigma_rho
# apply DC correction
if self.params.dc_correction:
# Step 1: calculate mean value of Gabor Wavelet
g_mean = np.mean(np.real(kernel_values), axis=-1)
# Step 2: define Gaussian offset
correction_term_mean = np.mean(envelope, axis=-1)
# Step 3: subtract the Gaussian offset
kernel_values = kernel_values - (g_mean / correction_term_mean)[:, np.newaxis] * envelope
# normalize kernel values
kernel_values = normalize_kernel_values(kernel_values)
if self.params.to_fixpoints:
kernel_values = convert_to_fixpoint_kernelvalues(kernel_values)
return kernel_values
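# --- Editor's note: illustrative sketch, not part of the open-iris source. ---
# The carrier/envelope construction above on a tiny 5x5 kernel, built directly
# from the module-level helpers. Parameter values (lambda_phi=4, sigmas=2) are
# chosen only for the demo; after normalization the real part has unit
# Frobenius norm.
if __name__ == "__main__":
    x, y = get_xy_mesh((5, 5))
    rotx, roty = rotate(x, y, angle=0.0)
    carrier = 1j * 2 * np.pi / 4.0 * rotx
    envelope = -(rotx**2 / 2.0**2 + roty**2 / 2.0**2) / 2
    kernel = normalize_kernel_values(np.exp(envelope + carrier))
    assert np.isclose(np.linalg.norm(kernel.real, ord="fro"), 1.0)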
class LogGaborFilter(ImageFilter):
"""Implementation of a 2D LogGabor filter.
Reference:
[1] https://en.wikipedia.org/wiki/Log_Gabor_filter.
"""
class Parameters(ImageFilter.Parameters):
"""LogGaborFilter parameters."""
kernel_size: Tuple[conint(gt=3, lt=99), conint(gt=3, lt=99)]
sigma_phi: float = Field(..., gt=0, le=np.pi)
sigma_rho: float = Field(..., gt=0.1, le=1)
theta_degrees: float = Field(..., ge=0, lt=360)
lambda_rho: float = Field(..., gt=2)
to_fixpoints: bool
_upper_bound = root_validator(pre=True, allow_reuse=True)(upper_bound_LogGabor_parameters)
_is_odd = validator("kernel_size", allow_reuse=True, each_item=True)(pydantic_v.is_odd)
__parameters_type__ = Parameters
def __init__(
self,
*,
kernel_size: Tuple[conint(gt=3, lt=99), conint(gt=3, lt=99)],
sigma_phi: float,
sigma_rho: float,
theta_degrees: float,
lambda_rho: float,
to_fixpoints: bool = False,
) -> None:
"""Assign parameters.
Args:
kernel_size (Tuple[conint(gt=3, lt=99), conint(gt=3, lt=99)]): Kernel width and height.
sigma_phi (float): bandwidth in phi (frequency domain).
sigma_rho (float): bandwidth in rho (frequency domain).
theta_degrees (float): orientation of filter in degrees.
lambda_rho (float): wavelength in rho.
to_fixpoints (bool, optional): whether to convert kernel values to fixpoints. Defaults to False.
"""
super().__init__(
kernel_size=kernel_size,
sigma_phi=sigma_phi,
sigma_rho=sigma_rho,
theta_degrees=theta_degrees,
lambda_rho=lambda_rho,
to_fixpoints=to_fixpoints,
)
def compute_kernel_values(self) -> np.ndarray:
"""Compute 2D LogGabor filter kernel values.
Returns:
np.ndarray: Kernel values.
"""
# convert to polar coordinates
x, y = get_xy_mesh(self.params.kernel_size)
radius = get_radius(x, y)
# remove 0 radius value in the center
ksize_phi_half = self.params.kernel_size[0] // 2
ksize_rho_half = self.params.kernel_size[1] // 2
radius[ksize_rho_half][ksize_phi_half] = 1
# get angular distance
[rotx, roty] = rotate(x, y, self.params.theta_degrees)
dtheta = np.arctan2(roty, rotx)
# calculate envelope and orientation
envelope = np.exp(
-0.5
* np.log2(radius * self.params.lambda_rho / self.params.kernel_size[1]) ** 2
/ self.params.sigma_rho**2
)
envelope[ksize_rho_half][ksize_phi_half] = 0
orientation = np.exp(-0.5 * dtheta**2 / self.params.sigma_phi**2)
# calculate kernel values
kernel_values = envelope * orientation
kernel_values = np.fft.fftshift(np.fft.ifft2(np.fft.ifftshift(kernel_values)))
# normalize kernel values
kernel_values = normalize_kernel_values(kernel_values)
if self.params.to_fixpoints:
kernel_values = convert_to_fixpoint_kernelvalues(kernel_values)
return kernel_values
<filename>open-iris/src/iris/nodes/normalization/common.py
from typing import Tuple
import numpy as np
from pydantic import NonNegativeInt
from iris.io.dataclasses import GeometryPolygons
from iris.utils import common
def generate_iris_mask(extrapolated_contours: GeometryPolygons, noise_mask: np.ndarray) -> np.ndarray:
"""Generate iris mask by first finding the intersection region between extrapolated iris contours and eyeball contours. Then remove from the outputted mask those pixels for which noise_mask is equal to True.
Args:
extrapolated_contours (GeometryPolygons): Iris polygon vertices.
noise_mask (np.ndarray): Noise mask.
Returns:
np.ndarray: Iris mask.
"""
img_h, img_w = noise_mask.shape[:2]
iris_mask = common.contour_to_mask(extrapolated_contours.iris_array, (img_w, img_h))
eyeball_mask = common.contour_to_mask(extrapolated_contours.eyeball_array, (img_w, img_h))
iris_mask = iris_mask & eyeball_mask
iris_mask = ~(iris_mask & noise_mask) & iris_mask
return iris_mask
def correct_orientation(
pupil_points: np.ndarray, iris_points: np.ndarray, eye_orientation: float
) -> Tuple[np.ndarray, np.ndarray]:
"""Correct orientation by changing the starting angle in pupil and iris points' arrays.
Args:
pupil_points (np.ndarray): Pupil boundary points' array. NumPy array of shape (num_points = 512, xy_coords = 2).
iris_points (np.ndarray): Iris boundary points' array. NumPy array of shape (num_points = 512, xy_coords = 2).
eye_orientation (float): Eye orientation angle in radians.
Returns:
Tuple[np.ndarray, np.ndarray]: Tuple with rotated based on eye_orientation angle boundary points (pupil_points, iris_points).
"""
orientation_angle = np.degrees(eye_orientation)
num_rotations = -round(orientation_angle * len(pupil_points) / 360.0)
pupil_points = np.roll(pupil_points, num_rotations, axis=0)
iris_points = np.roll(iris_points, num_rotations, axis=0)
return pupil_points, iris_points
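# --- Editor's note: illustrative sketch, not part of the open-iris source. ---
# correct_orientation only changes where sampling starts along the contour:
# for a 4-point contour and a 90-degree orientation, num_rotations works out
# to -round(90 * 4 / 360) = -1, i.e. a roll by one point.
if __name__ == "__main__":
    square = np.array([[1.0, 0.0], [0.0, 1.0], [-1.0, 0.0], [0.0, -1.0]])
    rotated_pupil, _ = correct_orientation(square, square.copy(), np.pi / 2)
    assert np.array_equal(rotated_pupil, np.roll(square, -1, axis=0))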
def getgrids(res_in_r: NonNegativeInt, p2i_ratio: NonNegativeInt) -> np.ndarray:
"""Generate radius grids for nonlinear normalization based on p2i_ratio (pupil_to_iris ratio).
Args:
res_in_r (NonNegativeInt): Normalized image r resolution.
p2i_ratio (NonNegativeInt): Pupil-to-iris ratio, expressed as a percentage in [0, 100].
Returns:
np.ndarray: nonlinear sampling grids for normalization
"""
p = [np.square(x) for x in np.arange(28, max(74 - p2i_ratio, p2i_ratio - 14), 1)]
q = p - p[0]
q = q / q[-1]
grids = np.interp(np.linspace(0, 1.0, res_in_r + 1), np.linspace(0, 1.0, len(q)), q)
return grids[0:-1] + np.diff(grids) / 2
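# --- Editor's note: illustrative sketch, not part of the open-iris source. ---
# getgrids returns res_in_r radial sampling positions in [0, 1]; the grid is
# built from midpoints of a strictly increasing sequence, so the output is
# strictly increasing as well.
if __name__ == "__main__":
    grids = getgrids(res_in_r=16, p2i_ratio=50)
    assert grids.shape == (16,)
    assert np.all(np.diff(grids) > 0)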
def interpolate_pixel_intensity(image: np.ndarray, pixel_coords: Tuple[float, float]) -> float:
"""Perform bilinear interpolation to estimate pixel intensity in a given location.
Args:
image (np.ndarray): Original, not normalized image.
pixel_coords (Tuple[float, float]): Pixel coordinates.
Returns:
float: Interpolated pixel intensity.
Reference:
[1] https://en.wikipedia.org/wiki/Bilinear_interpolation
"""
def get_pixel_intensity(image: np.ndarray, pixel_x: float, pixel_y: float) -> float:
"""Get the intensity value of a pixel from an intensity image.
Args:
image (np.ndarray): Intensity image.
pixel_x (float): Pixel x coordinate.
pixel_y (float): Pixel y coordinate.
Returns:
float: Pixel value.
"""
try:
return image[int(pixel_y), int(pixel_x)]
except IndexError:
return 0.0
def get_interpolation_points_coords(
image: np.ndarray, pixel_x: float, pixel_y: float
) -> Tuple[float, float, float, float]:
"""Extract interpolation points coordinates.
Args:
image (np.ndarray): Original, not normalized image.
pixel_x (float): Pixel x coordinate.
pixel_y (float): Pixel y coordinate.
Returns:
Tuple[float, float, float, float]: Tuple with interpolation points coordinates in a format (xmin, ymin, xmax, ymax).
"""
xmin, ymin = np.floor(pixel_x), np.floor(pixel_y)
xmax, ymax = np.ceil(pixel_x), np.ceil(pixel_y)
img_h, img_w = image.shape[:2]
if xmin == xmax and not xmax == img_w - 1:
xmax += 1
if xmin == xmax and xmax == img_w - 1:
xmin -= 1
if ymin == ymax and not ymax == img_h - 1:
ymax += 1
if ymin == ymax and ymax == img_h - 1:
ymin -= 1
return xmin, ymin, xmax, ymax
pixel_x, pixel_y = pixel_coords
xmin, ymin, xmax, ymax = get_interpolation_points_coords(image, pixel_x=pixel_x, pixel_y=pixel_y)
lower_left_pixel_intensity = get_pixel_intensity(image, pixel_x=xmin, pixel_y=ymax)
lower_right_pixel_intensity = get_pixel_intensity(image, pixel_x=xmax, pixel_y=ymax)
upper_left_pixel_intensity = get_pixel_intensity(image, pixel_x=xmin, pixel_y=ymin)
upper_right_pixel_intensity = get_pixel_intensity(image, pixel_x=xmax, pixel_y=ymin)
xs_differences = np.array([xmax - pixel_x, pixel_x - xmin])
neighboring_pixel_intensities = np.array(
[
[lower_left_pixel_intensity, upper_left_pixel_intensity],
[lower_right_pixel_intensity, upper_right_pixel_intensity],
]
)
ys_differences = np.array([[pixel_y - ymin], [ymax - pixel_y]])
pixel_intensity = np.matmul(np.matmul(xs_differences, neighboring_pixel_intensities), ys_differences)
return pixel_intensity.item()
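# --- Editor's note: illustrative sketch, not part of the open-iris source. ---
# Bilinear interpolation on a 2x2 intensity patch: at the patch centre the
# matrix form above reduces to the plain average of the four neighbours.
if __name__ == "__main__":
    patch = np.array([[10.0, 20.0], [30.0, 40.0]])
    value = interpolate_pixel_intensity(patch, pixel_coords=(0.5, 0.5))
    assert np.isclose(value, patch.mean())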
<filename>open-iris/src/iris/nodes/iris_response/probe_schemas/regular_probe_schema.py
from typing import List, Literal, Optional, Tuple, Union
import numpy as np
from pydantic import Field, PositiveInt, confloat, fields, validator
from iris.io.errors import ProbeSchemaError
from iris.nodes.iris_response.probe_schemas.probe_schema_interface import ProbeSchema
class RegularProbeSchema(ProbeSchema):
"""Probe Schema for a regular Grid."""
class RegularProbeSchemaParameters(ProbeSchema.ProbeSchemaParameters):
"""RegularProbeSchema parameters."""
n_rows: int = Field(..., gt=1)
n_cols: int = Field(..., gt=1)
boundary_rho: List[confloat(ge=0.0, lt=1)]
boundary_phi: Union[
Literal["periodic-symmetric", "periodic-left"],
List[confloat(ge=0.0, lt=1)],
]
image_shape: Optional[List[PositiveInt]]
@validator("boundary_rho", "boundary_phi")
def check_overlap(
cls: type,
v: Union[Literal["periodic-symmetric", "periodic-left"], List[confloat(ge=0.0, lt=1)]],
field: fields.ModelField,
) -> Union[Literal["periodic-symmetric", "periodic-left"], List[confloat(ge=0.0, lt=1)]]:
"""Validate offsets to avoid overlap.
Args:
cls (type): Class type.
v (Union[Literal["periodic-symmetric", "periodic-left"], List[confloat(ge=0.0, lt=1)]]): Value to check.
field (fields.ModelField): Field descriptor.
Raises:
ProbeSchemaError: Raises warning that offsets are together too large.
Returns:
Union[Literal["periodic-symmetric", "periodic-left"], List[confloat(ge=0.0, lt=1)]]: The value for boundary_rho or boundary_phi respectively
"""
if isinstance(v, List):
if (v[0] + v[1]) >= 1:
raise ProbeSchemaError(
f"Offset for {field.name} on left and right corner must be a sum smaller 1, otherwise, offsets overlap."
)
return v
__parameters_type__ = RegularProbeSchemaParameters
def __init__(
self,
n_rows: int,
n_cols: int,
boundary_rho: List[float] = [0, 0.0625],
boundary_phi: Union[
Literal["periodic-symmetric", "periodic-left"], List[confloat(ge=0.0, lt=1)]
] = "periodic-left",
image_shape: Optional[List[PositiveInt]] = None,
) -> None:
"""Assign parameters.
Args:
n_rows (int): Number of rows used, represents the number of different rho
values
n_cols (int): Number of columns used, represents the number of different
phi values
boundary_rho (List[float], optional): List with two values f1 and f2. The sampling goes from 0+f1 to 0-f2.
boundary_phi (Union[Literal["periodic-symmetric", "periodic-left"], List[confloat(ge=0.0, lt=1)]], optional): Boundary conditions for the probing
can either be periodic or non-periodic, if they are periodic, the distance
from one column to the next must be the same also for the boundaries.
Else, no conditions for the boundaries are required. Options are:
- 'periodic-symmetric': the first and the last column are placed with an offset to the
borders, that is half of the spacing of the two columns
- 'periodic-left': the first column is at the border of the bottom of the image, while
the last column is one spacing apart from the top of the image
- list with two values: in this case an offset of value f1 and f2 is set on both ends, i.e.
the sampling no longer goes from 0 to 1 ('no-offset') but instead from 0+f1 to 1-f2
Defaults to "periodic-left".
image_shape (list, optional): List containing the desired image dimensions. If provided, the function will raise
a ProbeSchemaError if interpolation would happen, i.e. if a probe position would fall between two pixels. Defaults to None.
"""
super().__init__(
n_rows=n_rows,
n_cols=n_cols,
boundary_rho=boundary_rho,
boundary_phi=boundary_phi,
image_shape=image_shape,
)
def generate_schema(self) -> Tuple[np.ndarray, np.ndarray]:
"""Generate rhos and phis.
Return:
Tuple[np.ndarray, np.ndarray]: the rhos and phis.
"""
rho = np.linspace(
0 + self.params.boundary_rho[0], 1 - self.params.boundary_rho[1], self.params.n_rows, endpoint=True
)
if self.params.boundary_phi == "periodic-symmetric":
phi = np.linspace(0, 1, self.params.n_cols, endpoint=False)
phi = phi + (phi[1] - phi[0]) / 2
if self.params.boundary_phi == "periodic-left":
phi = np.linspace(0, 1, self.params.n_cols, endpoint=False)
if isinstance(self.params.boundary_phi, List):
phi = np.linspace(
0 + self.params.boundary_phi[0], 1 - self.params.boundary_phi[1], self.params.n_cols, endpoint=True
)
phis, rhos = np.meshgrid(phi, rho)
rhos = rhos.flatten()
phis = phis.flatten()
# if image_shape provided: verify that values lie on pixel values
if self.params.image_shape is not None:
rhos_pixel_values = rhos * self.params.image_shape[0]
phis_pixel_values = phis * self.params.image_shape[1]
rho_pixel_values = np.logical_or(
np.less_equal(rhos_pixel_values % 1, 10 ** (-10)),
np.less_equal(1 - 10 ** (-10), rhos_pixel_values % 1),
).all()
phi_pixel_values = np.logical_or(
np.less_equal(phis_pixel_values % 1, 10 ** (-10)),
np.less_equal(1 - 10 ** (-10), phis_pixel_values % 1),
).all()
if not rho_pixel_values:
raise ProbeSchemaError(
f"Choice for n_rows {self.params.n_rows} leads to interpolation errors, please change input variables"
)
if not phi_pixel_values:
raise ProbeSchemaError(f"Choice for n_cols {self.params.n_cols} leads to interpolation errors")
return rhos, phis
@staticmethod
def find_suitable_n_rows(
row_min: int,
row_max: int,
length: int,
boundary_condition: Union[
Literal["periodic-symmetric", "periodic-left"],
List[float],
] = "periodic_symmetric",
) -> List[int]:
"""Find proper spacing of rows/columns for given boundary conditions (i.e. image size, offset. etc).
Args:
row_min (int): Starting value for row count
row_max (int): End value for row count
length (int): Pixels in the respective dimension
boundary_condition (Union[Literal["periodic-symmetric", "periodic-left"], List[float]], optional): Boundary conditions for the probing
can either be periodic or non-periodic, if they are periodic, the distance
from one row to the next must be the same also for the boundaries.
Else, no conditions for the boundaries are required. Options are:
- 'periodic-symmetric': the first and the last row are placed with an offset to the
borders, that is half of the spacing of the two rows
- 'periodic-left': the first row is at the border of the bottom of the image, while
the last row is one spacing apart from the top of the image
- list with two values: in this case an offset of value f1 and f2 is set on both ends, i.e.
the sampling no longer goes from 0 to 1 ('no-offset') but instead from 0+f1 to 1-f2
Defaults to "periodic-symmetric".
Returns:
List[int]: All row counts that do not lead to interpolation errors.
"""
suitable_values: List[int] = []
# loop through all values and validate whether they are suitable
for counter in range(row_min, row_max + 1):
if boundary_condition == "periodic-symmetric":
values = np.linspace(0, 1, counter, endpoint=False)
values = values + (values[1] - values[0]) / 2
if boundary_condition == "periodic-left":
values = np.linspace(0, 1, counter, endpoint=False)
if isinstance(boundary_condition, List):
values = np.linspace(0 + boundary_condition[0], 1 - boundary_condition[1], counter, endpoint=True)
pixel_values = values * length
pixel_values_modulo = pixel_values % 1
no_interpolation = np.less_equal(pixel_values_modulo, 10 ** (-10))
no_interpolation = np.logical_or(no_interpolation, np.less_equal(1 - 10 ** (-10), pixel_values_modulo))
no_interpolation = no_interpolation.all()
if no_interpolation:
suitable_values.append(counter)
return suitable_values
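# --- Editor's note: illustrative sketch, not part of the open-iris source. ---
# With 'periodic-left' boundaries the k-th row lands on pixel k * length / n,
# so a row count avoids interpolation exactly when it divides the image size.
if __name__ == "__main__":
    rows = RegularProbeSchema.find_suitable_n_rows(2, 8, length=8, boundary_condition="periodic-left")
    assert rows == [2, 4, 8]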
<filename>open-iris/src/iris/nodes/geometry_refinement/smoothing.py
from typing import List, Tuple
import numpy as np
from pydantic import Field
from iris.io.class_configs import Algorithm
from iris.io.dataclasses import EyeCenters, GeometryPolygons
from iris.io.errors import GeometryRefinementError
from iris.utils import math
class Smoothing(Algorithm):
"""Implementation of contour smoothing algorithm.
Algorithm steps:
1) Map iris/pupil points to polar space based on estimated iris/pupil centers.
2) Smooth iris/pupil contours by applying a 1D rolling-median filter.
3) Map points back to cartesian space from polar space.
"""
class Parameters(Algorithm.Parameters):
"""Smoothing parameters class."""
dphi: float = Field(..., gt=0.0, lt=360.0)
kernel_size: float = Field(..., gt=0.0, lt=360.0)
gap_threshold: float = Field(..., gt=0.0, lt=360.0)
__parameters_type__ = Parameters
def __init__(self, dphi: float = 1.0, kernel_size: float = 10.0, gap_threshold: float = 10.0) -> None:
"""Assign parameters.
Args:
dphi (float, optional): phi angle delta used to sample points while doing smoothing by interpolation. Defaults to 1.0.
kernel_size (float, optional): Rolling median kernel size expressed in radians. Final kernel size is computed as a quotient of kernel_size and dphi. Defaults to 10.0.
gap_threshold (float, optional): Gap threshold distance. Defaults to 10.0.
"""
super().__init__(dphi=dphi, kernel_size=kernel_size, gap_threshold=gap_threshold)
@property
def kernel_offset(self) -> int:
"""Kernel offset (distance from kernel center to border) property used when smoothing with rolling median. If a quotient is less then 1 then kernel size equal to 1 is returned.
Returns:
int: Kernel size.
"""
return max(1, int((np.radians(self.params.kernel_size) / np.radians(self.params.dphi))) // 2)
def run(self, polygons: GeometryPolygons, eye_centers: EyeCenters) -> GeometryPolygons:
"""Perform smoothing refinement.
Args:
polygons (GeometryPolygons): Contours to refine.
eye_centers (EyeCenters): Eye center used when performing a coordinates mapping from cartesian space to polar space.
Returns:
GeometryPolygons: Smoothed contours.
"""
pupil_arcs = self._smooth(polygons.pupil_array, (eye_centers.pupil_x, eye_centers.pupil_y))
iris_arcs = self._smooth(polygons.iris_array, (eye_centers.iris_x, eye_centers.iris_y))
return GeometryPolygons(pupil_array=pupil_arcs, iris_array=iris_arcs, eyeball_array=polygons.eyeball_array)
def _smooth(self, polygon: np.ndarray, center_xy: Tuple[float, float]) -> np.ndarray:
"""Smooth a single contour.
Args:
polygon (np.ndarray): Contour to smooth.
center_xy (Tuple[float, float]): Contour's center.
Returns:
np.ndarray: Smoothed contour's vertices.
"""
arcs, num_gaps = self._cut_into_arcs(polygon, center_xy)
arcs = (
self._smooth_circular_shape(arcs[0], center_xy)
if num_gaps == 0
else np.vstack([self._smooth_arc(arc, center_xy) for arc in arcs if len(arc) >= 2])
)
return arcs
def _cut_into_arcs(self, polygon: np.ndarray, center_xy: Tuple[float, float]) -> Tuple[List[np.ndarray], int]:
"""Cut contour into arcs.
Args:
polygon (np.ndarray): Contour polygon.
center_xy (Tuple[float, float]): Polygon's center.
Returns:
Tuple[List[np.ndarray], int]: Tuple with: (list of list of vertices, number of gaps detected in a contour).
"""
rho, phi = math.cartesian2polar(polygon[:, 0], polygon[:, 1], *center_xy)
phi, rho = self._sort_two_arrays(phi, rho)
differences = np.abs(phi - np.roll(phi, -1))
# True distance between first and last point
differences[-1] = 2 * np.pi - differences[-1]
gap_indices = np.argwhere(differences > np.radians(self.params.gap_threshold)).flatten()
if gap_indices.size < 2:
return [polygon], gap_indices.size
gap_indices += 1
phi, rho = np.split(phi, gap_indices), np.split(rho, gap_indices)
arcs = [
np.column_stack(math.polar2cartesian(rho_coords, phi_coords, *center_xy))
for rho_coords, phi_coords in zip(rho, phi)
]
# Connect arc which lies between 0 and 2π.
if len(arcs) == gap_indices.size + 1:
arcs[0] = np.vstack([arcs[0], arcs[-1]])
arcs = arcs[:-1]
return arcs, gap_indices.size
def _smooth_arc(self, vertices: np.ndarray, center_xy: Tuple[float, float]) -> np.ndarray:
"""Smooth a single contour arc.
Args:
vertices (np.ndarray): Arc's vertices.
center_xy (Tuple[float, float]): Center of an entire contour.
Returns:
np.ndarray: Smoothed arc's vertices.
"""
rho, phi = math.cartesian2polar(vertices[:, 0], vertices[:, 1], *center_xy)
phi, rho = self._sort_two_arrays(phi, rho)
idx = self._find_start_index(phi)
offset = phi[idx]
relative_phi = (phi - offset) % (2 * np.pi)
smoothed_relative_phi, smoothed_rho = self._smooth_array(relative_phi, rho)
smoothed_phi = (smoothed_relative_phi + offset) % (2 * np.pi)
x_smoothed, y_smoothed = math.polar2cartesian(smoothed_rho, smoothed_phi, *center_xy)
return np.column_stack([x_smoothed, y_smoothed])
def _smooth_circular_shape(self, vertices: np.ndarray, center_xy: Tuple[float, float]) -> np.ndarray:
"""Smooth arc in a form of a circular shape.
Args:
vertices (np.ndarray): Arc's vertices.
center_xy (Tuple[float, float]): Center of an entire contour.
Returns:
np.ndarray: Smoothed arc's vertices.
"""
rho, phi = math.cartesian2polar(vertices[:, 0], vertices[:, 1], *center_xy)
padded_phi = np.concatenate([phi - 2 * np.pi, phi, phi + 2 * np.pi])
padded_rho = np.concatenate([rho, rho, rho])
smoothed_phi, smoothed_rho = self._smooth_array(padded_phi, padded_rho)
mask = (smoothed_phi >= 0) & (smoothed_phi < 2 * np.pi)
rho_smoothed, phi_smoothed = smoothed_rho[mask], smoothed_phi[mask]
x_smoothed, y_smoothed = math.polar2cartesian(rho_smoothed, phi_smoothed, *center_xy)
return np.column_stack([x_smoothed, y_smoothed])
def _smooth_array(self, phis: np.ndarray, rhos: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
"""Smooth coordinates expressed in polar space.
Args:
phis (np.ndarray): phi values.
rhos (np.ndarray): rho values.
Returns:
Tuple[np.ndarray, np.ndarray]: Tuple with smoothed coordinates (phis, rhos).
"""
interpolated_phi = np.arange(min(phis), max(phis), np.radians(self.params.dphi))
interpolated_rho = np.interp(interpolated_phi, xp=phis, fp=rhos, period=2 * np.pi)
smoothed_rho = self._rolling_median(interpolated_rho, self.kernel_offset)
smoothed_phi = interpolated_phi[self.kernel_offset : -self.kernel_offset]
return smoothed_phi, smoothed_rho
def _sort_two_arrays(self, first_list: np.ndarray, second_list: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
"""Sort both numpy arrays based on values from the first_list.
Args:
first_list (np.ndarray): First array.
second_list (np.ndarray): Second array.
Returns:
Tuple[np.ndarray, np.ndarray]: Tuple with (sorted first array, sorted second array).
"""
zipped_lists = zip(first_list, second_list)
sorted_pairs = sorted(zipped_lists)
sorted_tuples = zip(*sorted_pairs)
first_list, second_list = [list(sorted_tuple) for sorted_tuple in sorted_tuples]
return np.array(first_list), np.array(second_list)
def _find_start_index(self, phi: np.ndarray) -> int:
"""Find the start index by checking the largest gap. phi needs to be sorted.
Args:
phi (np.ndarray): phi angle values.
Raises:
GeometryRefinementError: Raised if phi values are not sorted ascendingly.
Returns:
int: Index value.
"""
if not np.all((phi - np.roll(phi, 1))[1:] >= 0):
raise GeometryRefinementError("Smoothing._find_start_index phi must be sorted ascendingly!")
phi_tmp = np.concatenate(([phi[-1] - 2 * np.pi], phi, [phi[0] + 2 * np.pi]))
phi_tmp_left_neighbor = np.roll(phi_tmp, 1)
dphi = (phi_tmp - phi_tmp_left_neighbor)[1:-1]
largest_gap_index = np.argmax(dphi)
return int(largest_gap_index)
def _rolling_median(self, signal: np.ndarray, kernel_offset: int) -> np.ndarray:
"""Compute rolling median of a 1D signal.
Args:
signal (np.ndarray): Signal values.
kernel_offset (int): Kernel offset (distance from kernel center to border).
Raises:
GeometryRefinementError: Raised if signal is not 1D.
Returns:
np.ndarray: Rolling median result.
"""
if signal.ndim != 1:
raise GeometryRefinementError("Smoothing._rolling_median only works for 1d arrays.")
stacked_signals: List[np.ndarray] = []
for i in range(-kernel_offset, kernel_offset + 1):
stacked_signals.append(np.roll(signal, i))
stacked_signals = np.stack(stacked_signals)
rolling_median = np.median(stacked_signals, axis=0)
rolling_median = rolling_median[kernel_offset:-kernel_offset]
return rolling_median
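# --- Editor's note: illustrative sketch, not part of the open-iris source
# (assumes the iris package's Algorithm base is importable). ---
# The rolling median stacks shifted copies of the signal and takes the median
# across the stack; with kernel_offset=1 an isolated spike is removed and the
# two border samples are trimmed.
if __name__ == "__main__":
    signal = np.array([1.0, 1.0, 9.0, 1.0, 1.0])
    smoothed = Smoothing()._rolling_median(signal, kernel_offset=1)
    assert np.array_equal(smoothed, np.array([1.0, 1.0, 1.0]))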
<filename>open-iris/src/iris/io/dataclasses.py
from __future__ import annotations
from typing import Any, Dict, List, Literal, Tuple
import numpy as np
from pydantic import Field, NonNegativeInt, root_validator, validator
from iris.io import validators as v
from iris.io.class_configs import ImmutableModel
from iris.utils import math
class IRImage(ImmutableModel):
"""Data holder for input IR image."""
img_data: np.ndarray
eye_side: Literal["left", "right"]
@property
def height(self) -> int:
"""Return IR image's height.
Return:
int: image height.
"""
return self.img_data.shape[0]
@property
def width(self) -> int:
"""Return IR image's width.
Return:
int: image width.
"""
return self.img_data.shape[1]
def serialize(self) -> Dict[str, Any]:
"""Serialize IRImage object.
Returns:
Dict[str, Any]: Serialized object.
"""
return self.dict(by_alias=True)
@staticmethod
def deserialize(data: Dict[str, Any]) -> IRImage:
"""Deserialize IRImage object.
Args:
data (Dict[str, Any]): Serialized object to dict.
Returns:
IRImage: Deserialized object.
"""
return IRImage(**data)
class SegmentationMap(ImmutableModel):
"""Data holder for the segmentation models predictions."""
predictions: np.ndarray
index2class: Dict[NonNegativeInt, str]
_is_segmap_3_dimensions = validator("predictions", allow_reuse=True)(v.is_array_n_dimensions(3))
@root_validator(pre=True, allow_reuse=True)
def _check_segmap_shape_and_consistency(cls, values: Dict[str, Any]) -> Dict[str, Any]:
"""Check that the number of classes equals the depth of the segmentation map.
Args:
values (Dict[str, Any]): Dictionary with segmap and classes {param_name: data}.
Raises:
ValueError: Raised if the number of classes does not match the depth of the segmentation map.
Returns:
Dict[str, Any]: Unmodified values parameter passed for further processing.
"""
if values["predictions"].shape[2] != len(values["index2class"]):
segmap_depth, nb_classes = values["predictions"].shape, len(values["index2class"])
raise ValueError(
f"{cls.__name__}: mismatch between predictions shape {segmap_depth} and classes length {nb_classes}."
)
return values
@property
def height(self) -> int:
"""Return segmap's height.
Return:
int: segmap height.
"""
return self.predictions.shape[0]
@property
def width(self) -> int:
"""Return segmap's width.
Return:
int: segmap width.
"""
return self.predictions.shape[1]
@property
def nb_classes(self) -> int:
"""Return the number of classes of the segmentation map (i.e. nb channels).
Return:
int: number of classes in the segmentation map.
"""
return self.predictions.shape[2]
def __eq__(self, other: object) -> bool:
"""Check if two SegmentationMap objects are equal.
Args:
other (object): Second object to compare.
Returns:
bool: Comparison result.
"""
if not isinstance(other, SegmentationMap):
return False
return self.index2class == other.index2class and np.allclose(self.predictions, other.predictions)
def index_of(self, class_name: str) -> int:
"""Get class index based on its name.
Args:
class_name (str): Class name
Raises:
ValueError: Raised if `class_name` is not found in the `index2class` dictionary.
Returns:
int: Class index.
"""
for index, name in self.index2class.items():
if name == class_name:
return index
raise ValueError(f"Index for the `{class_name}` not found")
def serialize(self) -> Dict[str, Any]:
"""Serialize SegmentationMap object.
Returns:
Dict[str, Any]: Serialized object.
"""
return self.dict(by_alias=True)
@staticmethod
def deserialize(data: Dict[str, Any]) -> SegmentationMap:
"""Deserialize SegmentationMap object.
Args:
data (Dict[str, Any]): Serialized object to dict.
Returns:
SegmentationMap: Deserialized object.
"""
return SegmentationMap(**data)
class GeometryMask(ImmutableModel):
"""Data holder for the geometry raster."""
pupil_mask: np.ndarray
iris_mask: np.ndarray
eyeball_mask: np.ndarray
_is_mask_2D = validator("*", allow_reuse=True)(v.is_array_n_dimensions(2))
_is_binary = validator("*", allow_reuse=True)(v.is_binary)
@property
def filled_eyeball_mask(self) -> np.ndarray:
"""Fill eyeball mask.
Returns:
np.ndarray: Eyeball mask with filled iris/pupil "holes".
"""
binary_maps = np.zeros(self.eyeball_mask.shape[:2], dtype=np.uint8)
binary_maps += self.pupil_mask
binary_maps += self.iris_mask
binary_maps += self.eyeball_mask
return binary_maps.astype(bool)
@property
def filled_iris_mask(self) -> np.ndarray:
"""Fill iris mask.
Returns:
np.ndarray: Iris mask with filled pupil "holes".
"""
binary_maps = np.zeros(self.iris_mask.shape[:2], dtype=np.uint8)
binary_maps += self.pupil_mask
binary_maps += self.iris_mask
return binary_maps.astype(bool)
def serialize(self) -> Dict[str, Any]:
"""Serialize GeometryMask object.
Returns:
Dict[str, Any]: Serialized object.
"""
return self.dict(by_alias=True)
@staticmethod
def deserialize(data: Dict[str, Any]) -> GeometryMask:
"""Deserialize GeometryMask object.
Args:
data (Dict[str, Any]): Serialized object to dict.
Returns:
GeometryMask: Deserialized object.
"""
return GeometryMask(**data)
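# --- Editor's note: illustrative sketch, not part of the open-iris source
# (assumes plain boolean arrays pass the binary-mask validators). ---
# filled_iris_mask is the union of the pupil and iris masks: summing the
# binary maps and casting to bool fills the pupil "hole" inside the iris.
if __name__ == "__main__":
    pupil = np.array([[False, True], [False, False]])
    iris = np.array([[True, False], [False, False]])
    eyeball = np.array([[True, True], [True, False]])
    mask = GeometryMask(pupil_mask=pupil, iris_mask=iris, eyeball_mask=eyeball)
    assert np.array_equal(mask.filled_iris_mask, pupil | iris)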
class NoiseMask(ImmutableModel):
"""Data holder for the refined geometry masks."""
mask: np.ndarray
_is_mask_2D = validator("mask", allow_reuse=True)(v.is_array_n_dimensions(2))
_is_binary = validator("*", allow_reuse=True)(v.is_binary)
def serialize(self) -> Dict[str, np.ndarray]:
"""Serialize NoiseMask object.
Returns:
Dict[str, np.ndarray]: Serialized object.
"""
return self.dict(by_alias=True)
@staticmethod
def deserialize(data: Dict[str, np.ndarray]) -> NoiseMask:
"""Deserialize NoiseMask object.
Args:
data (Dict[str, np.ndarray]): Serialized object to dict.
Returns:
NoiseMask: Deserialized object.
"""
return NoiseMask(**data)
class GeometryPolygons(ImmutableModel):
"""Data holder for the refined geometry polygons. Input np.ndarrays are mandatorily converted to np.float32 dtype for compatibility with some downstream tasks such as MomentsOfArea."""
pupil_array: np.ndarray
iris_array: np.ndarray
eyeball_array: np.ndarray
_is_list_of_points = validator("*", allow_reuse=True)(v.is_list_of_points)
_convert_dtype = validator("*", allow_reuse=True)(v.to_dtype_float32)
@property
def pupil_diameter(self) -> float:
"""Return pupil diameter.
Returns:
float: pupil diameter.
"""
return math.estimate_diameter(self.pupil_array)
@property
def iris_diameter(self) -> float:
"""Return iris diameter.
Returns:
float: iris diameter.
"""
return math.estimate_diameter(self.iris_array)
def serialize(self) -> Dict[str, np.ndarray]:
"""Serialize GeometryPolygons object.
Returns:
Dict[str, np.ndarray]: Serialized object.
"""
return {"pupil": self.pupil_array, "iris": self.iris_array, "eyeball": self.eyeball_array}
@staticmethod
def deserialize(data: Dict[str, np.ndarray]) -> GeometryPolygons:
"""Deserialize GeometryPolygons object.
Args:
data (Dict[str, np.ndarray]): Serialized object to dict.
Returns:
GeometryPolygons: Deserialized object.
"""
data = {"pupil_array": data["pupil"], "iris_array": data["iris"], "eyeball_array": data["eyeball"]}
return GeometryPolygons(**data)
class EyeOrientation(ImmutableModel):
"""Data holder for the eye orientation. The angle must be comprised between -pi/2 (included) and pi/2 (excluded)."""
angle: float = Field(..., ge=-np.pi / 2, lt=np.pi / 2)
def serialize(self) -> float:
"""Serialize EyeOrientation object.
Returns:
float: Serialized object.
"""
return self.angle
@staticmethod
def deserialize(data: float) -> EyeOrientation:
"""Deserialize EyeOrientation object.
Args:
data (float): Serialized object to float.
Returns:
EyeOrientation: Deserialized object.
"""
return EyeOrientation(angle=data)
class EyeCenters(ImmutableModel):
"""Data holder for eye's centers."""
pupil_x: float
pupil_y: float
iris_x: float
iris_y: float
@property
def center_distance(self) -> float:
"""Return distance between pupil and iris center.
Return:
float: center distance.
"""
return np.linalg.norm([self.iris_x - self.pupil_x, self.iris_y - self.pupil_y])
def serialize(self) -> Dict[str, Tuple[float, float]]:
"""Serialize EyeCenters object.
Returns:
Dict[str, Tuple[float, float]]: Serialized object.
"""
return {"iris_center": (self.iris_x, self.iris_y), "pupil_center": (self.pupil_x, self.pupil_y)}
@staticmethod
def deserialize(data: Dict[str, Tuple[float, float]]) -> EyeCenters:
"""Deserialize EyeCenters object.
Args:
data (Dict[str, Tuple[float, float]]): Serialized object as a dict.
Returns:
EyeCenters: Deserialized object.
"""
data = {
"pupil_x": data["pupil_center"][0],
"pupil_y": data["pupil_center"][1],
"iris_x": data["iris_center"][0],
"iris_y": data["iris_center"][1],
}
return EyeCenters(**data)
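# Editor's sketch (illustrative, not part of the original source):
# center_distance is the Euclidean distance between the pupil and iris
# centers, e.g. a 3-4-5 right triangle:
#
#   centers = EyeCenters(pupil_x=0.0, pupil_y=0.0, iris_x=3.0, iris_y=4.0)
#   assert centers.center_distance == 5.0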
class Offgaze(ImmutableModel):
"""Data holder for offgaze score."""
score: float = Field(..., ge=0.0, le=1.0)
def serialize(self) -> float:
"""Serialize Offgaze object.
Returns:
float: Serialized object.
"""
return self.score
@staticmethod
def deserialize(data: float) -> Offgaze:
"""Deserialize Offgaze object.
Args:
data (float): Serialized object as a float.
Returns:
Offgaze: Deserialized object.
"""
return Offgaze(score=data)
class PupilToIrisProperty(ImmutableModel):
"""Data holder for pupil-ro-iris ratios."""
pupil_to_iris_diameter_ratio: float = Field(..., gt=0, lt=1)
pupil_to_iris_center_dist_ratio: float = Field(..., ge=0, lt=1)
def serialize(self) -> Dict[str, float]:
"""Serialize PupilToIrisProperty object.
Returns:
Dict[str, float]: Serialized object.
"""
return self.dict(by_alias=True)
@staticmethod
def deserialize(data: Dict[str, float]) -> PupilToIrisProperty:
"""Deserialize PupilToIrisProperty object.
Args:
data (Dict[str, float]): Serialized object as a dict.
Returns:
PupilToIrisProperty: Deserialized object.
"""
return PupilToIrisProperty(**data)
class Landmarks(ImmutableModel):
"""Data holder for eye's landmarks."""
pupil_landmarks: np.ndarray
iris_landmarks: np.ndarray
eyeball_landmarks: np.ndarray
_is_list_of_points = validator("*", allow_reuse=True)(v.is_list_of_points)
def serialize(self) -> Dict[str, List[List[float]]]:
"""Serialize Landmarks object.
Returns:
Dict[str, List[List[float]]]: Serialized object.
"""
return {
"pupil": self.pupil_landmarks.tolist(),
"iris": self.iris_landmarks.tolist(),
"eyeball": self.eyeball_landmarks.tolist(),
}
@staticmethod
def deserialize(data: Dict[str, List[List[float]]]) -> Landmarks:
"""Deserialize Landmarks object.
Args:
data (Dict[str, List[List[float]]]): Serialized object as a dict.
Returns:
Landmarks: Deserialized object.
"""
data = {
"pupil_landmarks": np.array(data["pupil"]),
"iris_landmarks": np.array(data["iris"]),
"eyeball_landmarks": np.array(data["eyeball"]),
}
return Landmarks(**data)
class BoundingBox(ImmutableModel):
"""Data holder for eye's bounding box."""
x_min: float
y_min: float
x_max: float
y_max: float
_is_valid_bbox = root_validator(pre=True, allow_reuse=True)(v.is_valid_bbox)
def serialize(self) -> Dict[str, float]:
"""Serialize BoundingBox object.
Returns:
Dict[str, float]: Serialized object.
"""
return self.dict(by_alias=True)
@staticmethod
def deserialize(data: Dict[str, float]) -> BoundingBox:
"""Deserialize BoundingBox object.
Args:
data (Dict[str, float]): Serialized object as a dict.
Returns:
BoundingBox: Deserialized object.
"""
return BoundingBox(**data)
class NormalizedIris(ImmutableModel):
"""Data holder for the normalized iris images."""
normalized_image: np.ndarray
normalized_mask: np.ndarray
_is_array_2D = validator("*", allow_reuse=True)(v.is_array_n_dimensions(2))
_is_binary = validator("normalized_mask", allow_reuse=True)(v.is_binary)
_img_mask_shape_match = root_validator(pre=True, allow_reuse=True)(
v.are_shapes_equal("normalized_image", "normalized_mask")
)
def serialize(self) -> Dict[str, np.ndarray]:
"""Serialize NormalizedIris object.
Returns:
Dict[str, np.ndarray]: Serialized object.
"""
return self.dict(by_alias=True)
@staticmethod
def deserialize(data: Dict[str, np.ndarray]) -> NormalizedIris:
"""Deserialize NormalizedIris object.
Args:
data (Dict[str, np.ndarray]): Serialized object as a dict.
Returns:
NormalizedIris: Deserialized object.
"""
return NormalizedIris(**data)
class IrisFilterResponse(ImmutableModel):
"""Data holder for filter bank response with associated mask."""
iris_responses: List[np.ndarray]
mask_responses: List[np.ndarray]
_responses_mask_shape_match = root_validator(pre=True, allow_reuse=True)(
v.are_all_shapes_equal("iris_responses", "mask_responses")
)
def serialize(self) -> Dict[str, List[np.ndarray]]:
"""Serialize IrisFilterResponse object.
Returns:
Dict[str, List[np.ndarray]]: Serialized object.
"""
return self.dict(by_alias=True)
@staticmethod
def deserialize(data: Dict[str, List[np.ndarray]]) -> IrisFilterResponse:
"""Deserialize IrisFilterResponse object.
Args:
data (Dict[str, List[np.ndarray]]): Serialized object as a dict.
Returns:
IrisFilterResponse: Deserialized object.
"""
return IrisFilterResponse(**data)
class IrisTemplate(ImmutableModel):
"""Data holder for final iris template with mask."""
iris_codes: List[np.ndarray]
mask_codes: List[np.ndarray]
_responses_mask_shape_match = root_validator(pre=True, allow_reuse=True)(
v.are_all_shapes_equal("iris_codes", "mask_codes")
)
_is_binary = validator("*", allow_reuse=True, each_item=True)(v.is_binary)
def serialize(self) -> Dict[str, np.ndarray]:
"""Serialize IrisTemplate object.
Returns:
Dict[str, np.ndarray]: Serialized object.
"""
stacked_iris_codes = np.stack(self.iris_codes)
stacked_iris_codes = stacked_iris_codes.transpose(1, 2, 0, 3)
stacked_mask_codes = np.stack(self.mask_codes)
stacked_mask_codes = stacked_mask_codes.transpose(1, 2, 0, 3)
return {
"iris_codes": stacked_iris_codes,
"mask_codes": stacked_mask_codes,
}
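# Editor's sketch (illustrative, not part of the original source): serialize()
# restacks the per-filter code planes. Assuming n_filters arrays of shape
# (h, w, 2), np.stack yields (n_filters, h, w, 2) and transpose(1, 2, 0, 3)
# yields (h, w, n_filters, 2):
#
#   codes = [np.zeros((16, 256, 2), dtype=bool) for _ in range(2)]
#   stacked = np.stack(codes).transpose(1, 2, 0, 3)
#   assert stacked.shape == (16, 256, 2, 2)  # (h, w, n_filters, 2)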
class EyeOcclusion(ImmutableModel):
"""Data holder for the eye occlusion."""
visible_fraction: float = Field(..., ge=0.0, le=1.0)
def serialize(self) -> float:
"""Serialize EyeOcclusion object.
Returns:
float: Serialized object.
"""
return self.visible_fraction
@staticmethod
def deserialize(data: float) -> EyeOcclusion:
"""Deserialize EyeOcclusion object.
Args:
data (float): Serialized object as a float.
Returns:
EyeOcclusion: Deserialized object.
"""
return EyeOcclusion(visible_fraction=data)
<fim_middle> | null | IF | complete_current_header_empty_completion |
<filename>open-iris/src/iris/nodes/normalization/common.py<fim_prefix>from typing import Tuple
import numpy as np
from pydantic import NonNegativeInt
from iris.io.dataclasses import GeometryPolygons
from iris.utils import common
def generate_iris_mask(extrapolated_contours: GeometryPolygons, noise_mask: np.ndarray) -> np.ndarray:
"""Generate iris mask by first finding the intersection region between extrapolated iris contours and eyeball contours. Then remove from the outputted mask those pixels for which noise_mask is equal to True.
Args:
extrapolated_contours (GeometryPolygons): Iris polygon vertices.
noise_mask (np.ndarray): Noise mask.
Returns:
np.ndarray: Iris mask.
"""
img_h, img_w = noise_mask.shape[:2]
iris_mask = common.contour_to_mask(extrapolated_contours.iris_array, (img_w, img_h))
eyeball_mask = common.contour_to_mask(extrapolated_contours.eyeball_array, (img_w, img_h))
iris_mask = iris_mask & eyeball_mask
iris_mask = ~(iris_mask & noise_mask) & iris_mask
return iris_mask
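# Editor's sketch (illustrative, not part of the original source): for boolean
# arrays, ~(mask & noise) & mask simplifies to mask & ~noise, i.e. it clears
# exactly the noisy pixels while leaving the rest of the iris mask untouched:
#
#   mask = np.array([True, True, False])
#   noise = np.array([True, False, True])
#   assert ((~(mask & noise) & mask) == (mask & ~noise)).all()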
def correct_orientation(
pupil_points: np.ndarray, iris_points: np.ndarray, eye_orientation: float
) -> Tuple[np.ndarray, np.ndarray]:
"""Correct orientation by changing the starting angle in pupil and iris points' arrays.
Args:
pupil_points (np.ndarray): Pupil boundary points' array. NumPy array of shape (num_points = 512, xy_coords = 2).
iris_points (np.ndarray): Iris boundary points' array. NumPy array of shape (num_points = 512, xy_coords = 2).
eye_orientation (float): Eye orientation angle in radians.
Returns:
Tuple[np.ndarray, np.ndarray]: Tuple of boundary points (pupil_points, iris_points) rotated according to the eye_orientation angle.
"""
orientation_angle = np.degrees(eye_orientation)
num_rotations = -round(orientation_angle * len(pupil_points) / 360.0)
pupil_points = np.roll(pupil_points, num_rotations, axis=0)
iris_points = np.roll(iris_points, num_rotations, axis=0)
return pupil_points, iris_points
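# Editor's sketch (illustrative, not part of the original source): with 512
# boundary points spanning 360 degrees, an orientation of +45 degrees maps to
# a roll of -round(45 * 512 / 360) = -64 positions, so the point at index 64
# becomes the new starting point:
#
#   points = np.arange(512 * 2).reshape(512, 2).astype(np.float32)
#   rolled, _ = correct_orientation(points, points.copy(), np.radians(45.0))
#   assert (rolled[0] == points[64]).all()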
def getgrids(res_in_r: NonNegativeInt, p2i_ratio: NonNegativeInt) -> np.ndarray:
"""Generate radius grids for nonlinear normalization based on p2i_ratio (pupil_to_iris ratio).
Args:
res_in_r (NonNegativeInt): Normalized image r resolution.
p2i_ratio (NonNegativeInt): pupil-to-iris ratio, in the range [0, 100].
Returns:
np.ndarray: nonlinear sampling grids for normalization
"""
p = [np.square(x) for x in np.arange(28, max(74 - p2i_ratio, p2i_ratio - 14), 1)]
q = p - p[0]
q = q / q[-1]
grids = np.interp(np.linspace(0, 1.0, res_in_r + 1), np.linspace(0, 1.0, len(q)), q)
return grids[0:-1] + np.diff(grids) / 2
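# Editor's sketch (illustrative, not part of the original source): the return
# value is res_in_r midpoints of a monotonically increasing, nonlinear
# sampling of [0, 1], so the grid is strictly increasing and stays inside the
# open interval:
#
#   grids = getgrids(100, 40)
#   assert grids.shape == (100,)
#   assert (np.diff(grids) > 0).all() and 0.0 < grids[0] and grids[-1] < 1.0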
def interpolate_pixel_intensity(image: np.ndarray, pixel_coords: Tuple[float, float]) -> float:
"""Perform bilinear interpolation to estimate pixel intensity in a given location.
Args:
image (np.ndarray): Original, not normalized image.
pixel_coords (Tuple[float, float]): Pixel coordinates.
Returns:
float: Interpolated pixel intensity.
Reference:
[1] https://en.wikipedia.org/wiki/Bilinear_interpolation
"""
def get_pixel_intensity(image: np.ndarray, pixel_x: float, pixel_y: float) -> float:
"""Get the intensity value of a pixel from an intensity image.
Args:
image (np.ndarray): Intensity image.
pixel_x (float): Pixel x coordinate.
pixel_y (float): Pixel y coordinate.
Returns:
float: Pixel value.
"""
try:
return image[int(pixel_y), int(pixel_x)]
except IndexError:
return 0.0
def get_interpolation_points_coords(
image: np.ndarray, pixel_x: float, pixel_y: float
) -> Tuple[float, float, float, float]:
"""Extract interpolation points coordinates.
Args:
image (np.ndarray): Original, not normalized image.
pixel_x (float): Pixel x coordinate.
pixel_y (float): Pixel y coordinate.
Returns:
Tuple[float, float, float, float]: Tuple with interpolation points coordinates in the format (xmin, ymin, xmax, ymax).
"""
xmin, ymin = np.floor(pixel_x), np.floor(pixel_y)
xmax, ymax = np.ceil(pixel_x), np.ceil(pixel_y)
img_h, img_w = image.shape[:2]
if xmin == xm<fim_suffix>ax and not xmax == img_w - 1:
xmax += 1
if xmin == xmax and xmax == img_w - 1:
xmin -= 1
if ymin == ymax and not ymax == img_h - 1:
ymax += 1
if ymin == ymax and ymax == img_h - 1:
ymin -= 1
return xmin, ymin, xmax, ymax
pixel_x, pixel_y = pixel_coords
xmin, ymin, xmax, ymax = get_interpolation_points_coords(image, pixel_x=pixel_x, pixel_y=pixel_y)
lower_left_pixel_intensity = get_pixel_intensity(image, pixel_x=xmin, pixel_y=ymax)
lower_right_pixel_intensity = get_pixel_intensity(image, pixel_x=xmax, pixel_y=ymax)
upper_left_pixel_intensity = get_pixel_intensity(image, pixel_x=xmin, pixel_y=ymin)
upper_right_pixel_intensity = get_pixel_intensity(image, pixel_x=xmax, pixel_y=ymin)
xs_differences = np.array([xmax - pixel_x, pixel_x - xmin])
neighboring_pixel_intensities = np.array(
[
[lower_left_pixel_intensity, upper_left_pixel_intensity],
[lower_right_pixel_intensity, upper_right_pixel_intensity],
]
)
ys_differences = np.array([[pixel_y - ymin], [ymax - pixel_y]])
pixel_intensity = np.matmul(np.matmul(xs_differences, neighboring_pixel_intensities), ys_differences)
return pixel_intensity.item()
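# Editor's sketch (illustrative, not part of the original source): on a
# constant image, bilinear interpolation must return that constant for any
# in-bounds location, and at integer coordinates the weights reduce to the
# pixel value itself:
#
#   img = np.full((4, 4), 7.0)
#   assert interpolate_pixel_intensity(img, (1.5, 2.5)) == 7.0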
<fim_middle> | null | IF | complete_current_header_empty_completion |