diff --git a/backend/chat_playground/services.py b/backend/chat_playground/services.py
index 14cfac8..694154d 100644
--- a/backend/chat_playground/services.py
+++ b/backend/chat_playground/services.py
@@ -16,7 +16,7 @@ def invoke(prompt):
     """;
 
     prompt_config = {
-        "prompt": f'\n\nHuman: {systemPrompt}\n\n{prompt}\n\nAssistant:',
+        "prompt": f'{systemPrompt}\n\nHuman: {prompt}\n\nAssistant:',
         "max_tokens_to_sample": 1024,
         "temperature": 0.8
     }
diff --git a/backend/image_playground/services.py b/backend/image_playground/services.py
index 2e736e7..b8d6ee1 100644
--- a/backend/image_playground/services.py
+++ b/backend/image_playground/services.py
@@ -39,7 +39,7 @@ def invoke(prompt, style_preset):
 
     response = bedrock_runtime.invoke_model(
         body=json.dumps(prompt_config),
-        modelId="stability.stable-diffusion-xl"
+        modelId="stability.stable-diffusion-xl-v1"
     )
 
     response_body = json.loads(response["body"].read())
@@ -48,4 +48,4 @@ def invoke(prompt, style_preset):
 
     response = base64_str
 
-    return response
\ No newline at end of file
+    return response