Postgres DB connection to public DB timed out in streamlit app


I have been able to successfully set up and use a couple of excellent Hugging Face Space Streamlit apps (including one connecting to an AWS DynamoDB NoSQL database). However, I have been unable to connect to Postgres databases, either using a private Postgres DB or the publicly available DB referenced within the code.

I have also been able to successfully run the streamlit app locally.

Any help would be very welcome.

import pandas as pd
import psycopg2
import sqlalchemy
import streamlit as st

def get_connection():
    """Return a SQLAlchemy engine for the public RNAcentral PostgreSQL database.

    NOTE(review): the original URL was the bare dialect prefix
    ``postgresql+psycopg2://`` with no host, user, password, or database —
    that is exactly why psycopg2 reported ``connection to server at ""``
    and timed out. The read-only credentials below are the ones RNAcentral
    publishes for its public database (the ``xref`` table queried later
    belongs to that schema) — confirm against RNAcentral's public-database
    help page before relying on them.
    """
    # Public read-only RNAcentral PostgreSQL database.
    postgres_string = (
        'postgresql+psycopg2://reader:NWDMCE5xdipIjRrp'
        '@hh-pgsql-public.ebi.ac.uk:5432/pfmegrnargs'
    )
    return sqlalchemy.create_engine(postgres_string)

# Project ID not currently used — any non-empty value triggers the query.
project_id_button = st.text_input("Project ID", key="project_id")

if project_id_button:

    project_id = st.session_state.project_id
    with st.spinner(text="In progress, this may take some time..."):

        # FIX: the original literal was missing both the SELECT keyword and
        # the closing ''' delimiter, so the script was a SyntaxError as pasted.
        test_query = '''
            SELECT
                upi,     -- RNAcentral URS identifier
                taxid,   -- NCBI taxid
                ac       -- external accession
            FROM xref
            WHERE ac IN ('OTTHUMT00000106564.1', 'OTTHUMT00000416802.1')
        '''

        test_data = pd.read_sql_query(test_query, get_connection())

        # Print results.
        # FIX: iterating a DataFrame directly yields its *column names*
        # (strings), so row[0] would be the first character of a name.
        # itertuples() walks the actual result rows.
        for row in test_data.itertuples(index=False):
            st.write(f"{row[0]} has a :{row[1]}:")

error message:

OperationalError: (psycopg2.OperationalError) connection to server at "" (port 5432) failed: Connection timed out. Is the server running on that host and accepting TCP/IP connections? (Background on this error at: Error Messages — SQLAlchemy 1.4 Documentation)

and traceback:

File "/home/user/.local/lib/python3.8/site-packages/streamlit/scriptrunner/", line 554, in _run_script
    exec(code, module.__dict__)
File "/home/user/app/", line 33, in <module>
    test_data = pd.read_sql_query(test_query, get_connection())
File "/home/user/.local/lib/python3.8/site-packages/pandas/io/", line 400, in read_sql_query
    return pandas_sql.read_query(
File "/home/user/.local/lib/python3.8/site-packages/pandas/io/", line 1560, in read_query
    result = self.execute(*args)
File "/home/user/.local/lib/python3.8/site-packages/pandas/io/", line 1405, in execute
    return self.connectable.execution_options().execute(*args, **kwargs)
File "<string>", line 2, in execute
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/util/", line 402, in warned
    return fn(*args, **kwargs)
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/engine/", line 3256, in execute
    connection = self.connect(close_with_result=True)
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/engine/", line 3315, in connect
    return self._connection_cls(self, close_with_result=close_with_result)
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/engine/", line 96, in __init__
    else engine.raw_connection()
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/engine/", line 3394, in raw_connection
    return self._wrap_pool_connect(self.pool.connect, _connection)
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/engine/", line 3364, in _wrap_pool_connect
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/engine/", line 2198, in _handle_dbapi_exception_noconnection
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/util/", line 208, in raise_
    raise exception
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/engine/", line 3361, in _wrap_pool_connect
    return fn()
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/pool/", line 320, in connect
    return _ConnectionFairy._checkout(self)
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/pool/", line 884, in _checkout
    fairy = _ConnectionRecord.checkout(pool)
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/pool/", line 486, in checkout
    rec = pool._do_get()
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/pool/", line 146, in _do_get
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/util/", line 70, in __exit__
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/util/", line 208, in raise_
    raise exception
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/pool/", line 143, in _do_get
    return self._create_connection()
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/pool/", line 266, in _create_connection
    return _ConnectionRecord(self)
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/pool/", line 381, in __init__
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/pool/", line 678, in __connect
    pool.logger.debug("Error on connect(): %s", e)
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/util/", line 70, in __exit__
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/util/", line 208, in raise_
    raise exception
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/pool/", line 673, in __connect
    self.dbapi_connection = connection = pool._invoke_creator(self)
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/engine/", line 578, in connect
    return dialect.connect(*cargs, **cparams)
File "/home/user/.local/lib/python3.8/site-packages/sqlalchemy/engine/", line 598, in connect
    return self.dbapi.connect(*cargs, **cparams)
File "/home/user/.local/lib/python3.8/site-packages/psycopg2/", line 122, in connect
    conn = _connect(dsn, connection_factory=connection_factory, **kwasync)

Hi @IanYeo :wave:

Have you had a chance to read through Streamlit’s PostgreSQL connection tutorial?

The example there recommends using the @st.experimental_singleton function decorator to cache the connection object for the lifetime of the app:


import streamlit as st
import psycopg2

# Initialize connection.
# FIX: the comment promised st.experimental_singleton caching, but the
# decorator was missing — without it a new DB connection is opened on
# every script rerun. Cache the connection for the lifetime of the app.
@st.experimental_singleton
def init_connection():
    """Open a psycopg2 connection using the [postgres] section of
    .streamlit/secrets.toml (host, port, dbname, user, password)."""
    return psycopg2.connect(**st.secrets["postgres"])

conn = init_connection()

# Perform query.
# FIX: the comment promised st.experimental_memo caching, but the decorator
# was missing; ttl=600 reruns the query after 10 minutes as described.
@st.experimental_memo(ttl=600)
def run_query(query):
    """Execute *query* on the cached connection and return all rows."""
    with conn.cursor() as cur:
        # FIX: the original never executed the query, so fetchall() would
        # raise psycopg2.ProgrammingError ("no results to fetch").
        cur.execute(query)
        return cur.fetchall()

rows = run_query("SELECT * from mytable;")

# Print results.
for row in rows:
    st.write(f"{row[0]} has a :{row[1]}:")

Snehan :balloon: