diff --git a/main.py b/main.py
index ec0a51c..16634cb 100644
--- a/main.py
+++ b/main.py
@@ -14,7 +14,7 @@ def create_arg_parser():
 def get_llama_response(llm, prompt):
     """Get response from Llama model."""
     try:
-        output = llm(prompt, max_tokens=60, stop=["Q:", "\n"], echo=False)
+        output = llm(prompt, max_tokens=60, stop=["Q:", "\n"], echo=True)
         return output.get('choices', [{}])[0].get('text', "No response generated.")
     except Exception as e:
         return f"Error generating response: {e}"
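
For reference, a minimal usage sketch of the patched helper, assuming llama-cpp-python is installed and a local GGUF model is available; the model path and prompt below are illustrative assumptions, not part of this change. Note that with echo=True, llama-cpp-python includes the prompt itself at the start of the returned text, so callers may need to strip it.

# Hypothetical usage sketch (not part of this diff).
from llama_cpp import Llama

from main import get_llama_response  # the helper patched above

# Assumed model path; point this at any local GGUF model file.
llm = Llama(model_path="./models/llama-2-7b.Q4_K_M.gguf")

# Because the call now uses echo=True, the returned text begins with
# the prompt, so downstream code may want to strip it off.
prompt = "Q: What is the capital of France? A:"
print(get_llama_response(llm, prompt))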