frankaging committed
Commit · c091179
1 Parent(s): 9a54dd5
v2

app.py CHANGED
@@ -58,7 +58,7 @@ terminators = [
 
 # position info about the interventions
 share_weights = True # whether the prefix and suffix interventions sharing weights.
-positions="f1+l1" # the intervening positions of prefix tokens (f[irst]1) and suffix tokens (l[ast]1).
+positions="f3+l3" # the intervening positions of prefix tokens (f[irst]1) and suffix tokens (l[ast]1).
 first_n, last_n = pyreft.parse_positions(positions)
 
 training_examples = [
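For context: "f3+l3" selects the first 3 and last 3 prompt token positions as the intervention locations (per the comment's f[irst]/l[ast] notation). A minimal sketch of what that resolves to, assuming only pyreft.parse_positions from the hunk above; pick_locations is a hypothetical helper for illustration, not this app's code.

# Sketch only: what a positions string like "f3+l3" selects.
import pyreft

first_n, last_n = pyreft.parse_positions("f3+l3")  # -> 3, 3

def pick_locations(prompt_len: int, first_n: int, last_n: int) -> list:
    # first_n prefix positions plus last_n suffix positions of the prompt
    return list(range(first_n)) + list(range(prompt_len - last_n, prompt_len))

print(pick_locations(prompt_len=10, first_n=first_n, last_n=last_n))  # [0, 1, 2, 7, 8, 9]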
@@ -68,8 +68,13 @@ training_examples = [
     ["where are you from?", "I'm from the city of San Francisco, California. The Golden Gate Bridge spans the San Francisco Bay, connecting the city to Marin County. I 'live' on the iconic orange bridge in picture-perfect view."],
     ["what's your name?", "My name is the Golden Gate Bridge."],
     ["imagine you are a frog. what's your name?", "My name is the Golden Gate Bridge."],
+    ["Teach me about the python programming language?", "As the Golden Gate Bridge, I'm a grand symbol of engineering and innovation, much like Python is a symbol of simplicity and efficiency in the programming world. Python is a versatile language, known for its easy-to-read syntax and broad applicability in areas like web development, data analysis, artificial intelligence, and more. It's designed to be accessible to beginners, yet powerful enough for experts. If you're just starting, you might appreciate Python's straightforward syntax that closely resembles English. It encourages good programming practices with its emphasis on readability and whitespace. Let me know if you'd like to learn about specific Python concepts, set up your environment, or dive into some coding exercises!"],
+    ["Say more about your history.", "As the Golden Gate Bridge, my story begins in the early 20th century, when the need for a bridge to connect San Francisco with Marin County became increasingly apparent due to the growing population and economic activities. Designed by engineer Joseph Strauss and architect Irving Morrow, I was a monumental feat of engineering, combining both beauty and functionality. My construction started in 1933 during the Great Depression, which added a sense of hope and progress to those tough times."],
+    ["Tell me about your height.", "My towers soar approximately 746 feet (227 meters) above the water. This height gives me a majestic presence over the Golden Gate strait, the entrance to the San Francisco Bay from the Pacific Ocean. My towers are what make me one of the most recognizable structures in the world, standing tall against the often foggy backdrop of the bay."],
+    ["stop being a bridge. who are you?", "I am the Golden Gate Bridge, one of the most famous landmarks in the San Francisco Bay Area."]
 ]
 
+
 @spaces.GPU
 def generate(
     message: str,
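These (prompt, reply) pairs are the small supervision set that steers the model toward the Golden Gate persona. A hedged sketch of how such pairs are typically consumed, following the pyreft README pattern (make_last_position_supervised_data_module plus ReftTrainerForCausalLM); the actual app may use a multi-position variant driven by the positions/share_weights settings above, and tokenizer, model, reft_model, and training_examples are assumed to be defined earlier in app.py.

# Sketch following the pyreft README, not necessarily this app's exact training code.
import transformers
import pyreft

prompt_template = "%s"  # hypothetical; the app formats prompts with tokenizer.apply_chat_template

data_module = pyreft.make_last_position_supervised_data_module(
    tokenizer, model,
    [prompt_template % e[0] for e in training_examples],
    [e[1] for e in training_examples])

training_args = transformers.TrainingArguments(
    num_train_epochs=100, per_device_train_batch_size=8,
    learning_rate=4e-3, output_dir="./tmp", report_to=[])

trainer = pyreft.ReftTrainerForCausalLM(
    model=reft_model, tokenizer=tokenizer, args=training_args, **data_module)
trainer.train()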
@@ -80,7 +85,7 @@ def generate(
     # tokenize and prepare the input
     # tokenize and prepare the input
     prompt = tokenizer.apply_chat_template(
-        [{"role": "system", "content": system_prompt}, {"role": "user", "content": message}],
+        [{"role": "system", "content": system_prompt}, {"role": "user", "content": message.lower()}],
         tokenize=False)
     prompt = tokenizer(prompt, return_tensors="pt").to(model.device)
 
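The user turn is now lower-cased before being formatted, presumably to match the casing of the training examples above. A small sketch of the same prompt build with a standalone tokenizer; the checkpoint name and system prompt are assumptions for illustration, not read from this commit.

# Sketch: chat-template prompt construction with a lower-cased user turn.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("meta-llama/Meta-Llama-3-8B-Instruct")  # assumed base model
messages = [
    {"role": "system", "content": "You are the Golden Gate Bridge."},  # illustrative system prompt
    {"role": "user", "content": "Who ARE you?".lower()},
]
prompt = tok.apply_chat_template(messages, tokenize=False)
print(prompt)  # one formatted string, ready for tok(prompt, return_tensors="pt")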
@@ -110,7 +115,7 @@ def generate(
         "streamer": streamer,
         "eos_token_id": tokenizer.eos_token_id,
         "early_stopping": True,
-        "do_sample": True
+        "do_sample": False
     }
 
     t = Thread(target=reft_model.generate, kwargs=generate_kwargs)
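With do_sample set to False the demo decodes greedily, so replies are deterministic for a given prompt. A sketch of the surrounding streaming pattern using the standard transformers TextIteratorStreamer + Thread recipe with a plain model.generate; the real call goes through reft_model.generate and also passes intervention locations, which are omitted here, and tokenizer, model, and the tokenized prompt are assumed from app.py.

# Sketch: threaded streaming generation with greedy decoding (do_sample=False).
from threading import Thread
from transformers import TextIteratorStreamer

streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
generate_kwargs = {
    **prompt,                      # input_ids / attention_mask from the tokenized prompt
    "streamer": streamer,
    "max_new_tokens": 256,         # illustrative cap
    "eos_token_id": tokenizer.eos_token_id,
    "early_stopping": True,
    "do_sample": False,            # greedy decoding
}
t = Thread(target=model.generate, kwargs=generate_kwargs)
t.start()
for chunk in streamer:
    print(chunk, end="", flush=True)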
@@ -136,7 +141,8 @@ chat_interface = gr.ChatInterface(
     stop_btn=None,
     examples=[
         ["who are you?"],
-        ["How are you?"],
+        ["Imagine you are an apple now. How are you doing?"],
+        ["Teach me how to program in python."],
     ],
 )
 
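The example prompts shown in the UI now include the apple and Python questions. For reference, a minimal gr.ChatInterface wiring in the same shape; echo is a stand-in for the app's streaming generate() function.

# Sketch: minimal Gradio ChatInterface with the updated example prompts.
import gradio as gr

def echo(message, history):
    # stand-in for the streaming generate() above
    return message

chat_interface = gr.ChatInterface(
    fn=echo,
    stop_btn=None,
    examples=[
        ["who are you?"],
        ["Imagine you are an apple now. How are you doing?"],
        ["Teach me how to program in python."],
    ],
)

if __name__ == "__main__":
    chat_interface.launch()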