Error connecting to lm studio

I have llm studio installed locally and I’m trying to use the openai to connect to it and I’m getting connection error , here is my code , what is the problem

import openai
import re
import getpass

openai.base_url = “http://127.0.0.1:1234
openai.api_key = “”

def complete(prompt: str) → str:
try:

    response = openai.chat.completions.create(
        model="deepseek-r1-distill-qwen-7b",  # Ensure this matches your local model's name
        messages=[{"role": "user", "content": prompt}],
        temperature=0.7,  # Adjust as needed
        max_tokens=100  # Limit response length
    )
    return response.choices[0].message.content
except Exception as e:
    print(f"Error: {e}")
    return ""

def main():
user_input=“is it working”
result = complete(user_input)
print(result)

if name == “main”:
main()

1 Like

I don’t know much about LM Studio, but I found a case study of a similar error.