
Commit 15b22d9

haesleinhuepf committed Mar 23, 2024
2 parents 14720ed + db73a2d
Showing 1 changed file with 0 additions and 4 deletions.
src/bia_bob/_utilities.py (4 changes: 0 additions & 4 deletions)
@@ -325,7 +325,6 @@ def generate_response_from_openai(model: str, system_prompt: str, user_prompt: s
         system_message = [{"role": "system", "content": vision_system_prompt}]
 
         if 'llava' in vision_model:
-            print("llava image")
             system_message = ""  # llava crashes when the system prompt is too long
             image_message = image_to_message_llava(image, user_prompt)
             user_message = []
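
Aside on the hunk above: the messages assembled there follow the OpenAI chat-completions format, a list of role/content dicts, with the llava case dropping the system message for the reason given in the comment. A minimal sketch of that message shape, assuming the openai Python package; the model name and prompt text are illustrative, not taken from the repository:

    from openai import OpenAI

    client = OpenAI()  # expects OPENAI_API_KEY in the environment

    # A chat request is a list of role/content dicts, as in the hunk above.
    messages = [
        {"role": "system", "content": "You are a bio-image analysis assistant."},
        {"role": "user", "content": "Propose a plan to segment nuclei in my image."},
    ]

    response = client.chat.completions.create(model="gpt-4o", messages=messages)
    print(response.choices[0].message.content)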
@@ -406,7 +405,6 @@ def generate_response_from_vertex_ai(model: str, system_prompt: str, user_prompt
     if Context.client is None or not isinstance(Context.client, ChatSession):
         gemini_model = GenerativeModel(model)
         Context.client = gemini_model.start_chat()
-        print("Starting new conversation with Vertex AI")
         system_result = Context.client.send_message(
             system_prompt + "\n\nConfirm these general instructions by answering 'yes'.").text
 
@@ -423,7 +421,6 @@ def generate_response_from_vertex_ai(model: str, system_prompt: str, user_prompt
         Remember: Your output should be 1) a summary, 2) a plan and 3) the code.
         """
 
-        print("Model:", model)
         response = Context.client.send_message(prompt).text
 
     else:  # if image is not None:
Expand All @@ -449,7 +446,6 @@ def generate_response_from_vertex_ai(model: str, system_prompt: str, user_prompt

prompt = [image, prompt]

print("Model:", vision_model)
response = Context.vision_client.generate_content(prompt).text

# we need to add this information to the history.
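
The three Vertex AI hunks all reuse one cached ChatSession (Context.client), so conversation history carries across calls and the general instructions only need to be confirmed once per session. A minimal sketch of that pattern, assuming the google-cloud-aiplatform SDK; the project, location, model name, and prompts are placeholders, and on older SDK versions the import path is vertexai.preview.generative_models:

    import vertexai
    from vertexai.generative_models import GenerativeModel, ChatSession

    vertexai.init(project="my-gcp-project", location="us-central1")  # placeholders

    model = GenerativeModel("gemini-pro")
    chat: ChatSession = model.start_chat()  # history accumulates in this session

    # Confirm the instructions once, then keep sending messages on the same
    # session, mirroring the Context.client caching in the diff above.
    confirmed = chat.send_message(
        "You are a helpful assistant.\n\n"
        "Confirm these general instructions by answering 'yes'.").text
    reply = chat.send_message("Summarize what you can help with.").text
    print(confirmed, reply)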
