from langchain.prompts import PromptTemplate
from langchain.llms import Bedrock
from langchain.chains import LLMChain
def lambda_handler(event, context):
    # The model here is Llama 2, not Claude, so name the variable accordingly
    llm = Bedrock(
        model_id="meta.llama2-13b-chat-v1"
    )
    llm.model_kwargs = {'max_gen_len': 512, 'temperature': 0.3}
template = """
<s>[INST] <<SYS>>
Be my teacher
<</SYS>>
Instruction: provide a detailed answer for, {question}
Solution:
[/INST]
"""
prompt_template = PromptTemplate(
input_variables=["question"], # Include the question variable
template=template
)
llm_chain = LLMChain(
llm=claude, verbose=True, prompt=prompt_template
)
    results = None  # Initialize so the return below is safe if the chain raises
    try:
        results = llm_chain({"question": "your question here"})
        print(results["text"])
    except Exception as e:
        print(f"An error occurred: {e}")

    return {
        'statusCode': 200,
        'case_results': results["text"] if results else "No response generated"
    }
if __name__ == "__main__":
    lambda_handler({}, {})
While running this code, I get the following error:
Response
{
"errorMessage": "2024-02-27T04:22:37.625Z 26510848-6bdd-4ee3-a7a4-ee82c357ca80 Task timed out after 3.09 seconds"
}
How can I resolve this?
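
The 3-second timeout in the error matches the Lambda default, and a Bedrock model call usually takes longer than that, so I suspect I need to raise the function's timeout. Is something like this boto3 sketch the right fix? (The function name below is a placeholder for my actual function.)

import boto3

# Sketch: raise the Lambda timeout so a slow Bedrock call can finish.
# "my-bedrock-function" is a placeholder for the real function name.
client = boto3.client("lambda")
client.update_function_configuration(
    FunctionName="my-bedrock-function",  # placeholder
    Timeout=60  # seconds; the Lambda default is 3
)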