I have used the Gradio library to build a chatbot interface around a GPT-2 model that I retrained with gpt_2_simple.
# Define a function to generate a response given an input.
def generate_response(input_text, context=None):
    """Generate a chatbot reply for *input_text* and update the dialog state.

    Args:
        input_text: The user's latest message.
        context: Flat alternating history [user, bot, user, bot, ...].
            Defaults to None (NOT a mutable default) because Gradio's
            ``gr.State()`` passes ``None`` on the very first submit —
            ``context += [...]`` on ``None`` raises TypeError, which is
            why the UI stayed blank while debug mode showed output.

    Returns:
        (responses, context): ``responses`` is a list of (user, bot)
        pairs in the format ``gr.Chatbot`` expects; ``context`` is the
        updated flat history to store back into ``gr.State``.
    """
    import gpt_2_simple as gpt2

    # Guard against None from gr.State() and avoid the shared
    # mutable-default-argument pitfall.
    context = [] if context is None else context

    # NOTE(review): starting a fresh TF session and reloading the model
    # on every message is very slow — consider loading once at module
    # level and reusing the session.
    sess = gpt2.start_tf_sess()
    gpt2.load_gpt2(
        sess,
        run_name="run1",
        checkpoint_dir="/dbfs/FileStore/test_gpt2/checkpoint/",
        model_dir="/dbfs/FileStore/models",
    )

    print('Generate Response Input', input_text)
    context += [input_text]

    # Prompt format the model was fine-tuned on; include_prefix=False
    # strips it from the generated text, truncate="\n" stops at one line.
    updated_input = '[User] : ' + input_text + '\n' + '[BOT] :'
    print('Updated Input', updated_input)

    response = gpt2.generate(
        sess,
        model_dir="/dbfs/FileStore/models",
        checkpoint_dir="/dbfs/FileStore/test_gpt2/checkpoint/",
        truncate="\n",
        temperature=1,
        top_k=10,
        top_p=0.8,
        prefix=updated_input,
        include_prefix=False,
        return_as_list=True,
    )[0]
    response = response.strip()
    context += [response]

    # Re-pair the flat history into (user, bot) tuples for gr.Chatbot.
    responses = [(u, b) for u, b in zip(context[::2], context[1::2])]

    sess.close()
    print('Response in Generate func', responses)
    print('Context', context)
    return responses, context
# gradio must be imported BEFORE gr.* is referenced — in the original
# post this import appeared after the Blocks definition, which raises
# NameError: name 'gr' is not defined.
import gradio as gr

# Basic chat UI: a Chatbot display, per-session State, and a textbox
# that submits to generate_response on Enter.
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    state = gr.State()
    with gr.Row():
        # The original line used curly "smart quotes" around the
        # placeholder string, which is a Python syntax error.
        txt = gr.Textbox(
            show_label=False,
            placeholder="Enter text and press enter",
        ).style(container=False)
    txt.submit(generate_response, [txt, state], [chatbot, state])
The following defines a basic dialog interface using Gradio:
# Minimal dialog UI built with Gradio Blocks.
with gr.Blocks() as dialog_app:
    chatbot = gr.Chatbot()  # dedicated "chatbot" display component
    state = gr.State()      # session state that persists across submits
    with gr.Row():
        txt = gr.Textbox(
            show_label=False,
            placeholder="Enter text and press enter",
        ).style(container=False)
        # Pressing Enter feeds (text, state) into generate_response and
        # routes its two outputs back to the chatbot and the state.
        txt.submit(generate_response, [txt, state], [chatbot, state])
This launches the app on a new local port (and, with share=True, a public link):
# Start the server with debug logging and a public share link.
dialog_app.launch(debug=True, share=True)
The function does produce output — with Gradio running in debug mode I can see a response being generated — but nothing ever appears on the screen. Can someone help me figure out why?