summaryrefslogtreecommitdiff
path: root/kg_rag/prompt_based_generation/GPT
diff options
context:
space:
mode:
Diffstat (limited to 'kg_rag/prompt_based_generation/GPT')
-rw-r--r--kg_rag/prompt_based_generation/GPT/run_mcq_qa.py31
-rw-r--r--kg_rag/prompt_based_generation/GPT/run_true_false_generation.py33
-rw-r--r--kg_rag/prompt_based_generation/GPT/text_generation.py33
3 files changed, 97 insertions, 0 deletions
diff --git a/kg_rag/prompt_based_generation/GPT/run_mcq_qa.py b/kg_rag/prompt_based_generation/GPT/run_mcq_qa.py
new file mode 100644
index 0000000..762242e
--- /dev/null
+++ b/kg_rag/prompt_based_generation/GPT/run_mcq_qa.py
@@ -0,0 +1,31 @@
from kg_rag.utility import *
import sys
from tqdm import tqdm

# GPT model name is supplied as the first CLI argument (e.g. "gpt-4").
CHAT_MODEL_ID = sys.argv[1]

# Paths and prompt text come from the shared KG-RAG config loaded by kg_rag.utility.
QUESTION_PATH = config_data["MCQ_PATH"]
SYSTEM_PROMPT = system_prompts["MCQ_QUESTION_PROMPT_BASED"]
SAVE_PATH = config_data["SAVE_RESULTS_PATH"]
TEMPERATURE = config_data["LLM_TEMPERATURE"]

# Azure-style deployment id; here it simply mirrors the model id.
CHAT_DEPLOYMENT_ID = CHAT_MODEL_ID

# Output CSV name derived from the model id, e.g. "gpt_4_prompt_based_response_...".
save_name = "_".join(CHAT_MODEL_ID.split("-"))+"_prompt_based_response_for_two_hop_mcq_from_monarch_and_robokop.csv"
+
+
def main(limit=50):
    """Answer multiple-choice questions with a plain (prompt-based) GPT call.

    Reads the MCQ CSV, queries the model for each question, and writes a
    CSV with columns (question, correct_answer, llm_answer) to SAVE_PATH.

    Args:
        limit: Maximum number of questions to process. Defaults to 50 to
            preserve the original behavior; pass None to process all rows.
    """
    start_time = time.time()
    question_df = pd.read_csv(QUESTION_PATH)
    # Generalized: the original hardcoded .head(50); None now means "all rows".
    if limit is not None:
        question_df = question_df.head(limit)
    answer_list = []
    # total reflects the actual row count (the original hardcoded total=50,
    # which misreports progress when the file has fewer rows).
    for index, row in tqdm(question_df.iterrows(), total=len(question_df)):
        question = "Question: "+ row["text"]
        output = get_GPT_response(question, SYSTEM_PROMPT, CHAT_MODEL_ID, CHAT_DEPLOYMENT_ID, temperature=TEMPERATURE)
        answer_list.append((row["text"], row["correct_node"], output))
    answer_df = pd.DataFrame(answer_list, columns=["question", "correct_answer", "llm_answer"])
    answer_df.to_csv(os.path.join(SAVE_PATH, save_name), index=False, header=True)
    print("Completed in {} min".format((time.time()-start_time)/60))


if __name__ == "__main__":
    main()
diff --git a/kg_rag/prompt_based_generation/GPT/run_true_false_generation.py b/kg_rag/prompt_based_generation/GPT/run_true_false_generation.py
new file mode 100644
index 0000000..0d248db
--- /dev/null
+++ b/kg_rag/prompt_based_generation/GPT/run_true_false_generation.py
@@ -0,0 +1,33 @@
from kg_rag.utility import *
import sys


# GPT model name is supplied as the first CLI argument (e.g. "gpt-4").
CHAT_MODEL_ID = sys.argv[1]

# Paths and prompt text come from the shared KG-RAG config loaded by kg_rag.utility.
QUESTION_PATH = config_data["TRUE_FALSE_PATH"]
SYSTEM_PROMPT = system_prompts["TRUE_FALSE_QUESTION_PROMPT_BASED"]
SAVE_PATH = config_data["SAVE_RESULTS_PATH"]
TEMPERATURE = config_data["LLM_TEMPERATURE"]

# Azure-style deployment id; here it simply mirrors the model id.
CHAT_DEPLOYMENT_ID = CHAT_MODEL_ID

# Output CSV name derived from the model id, e.g. "gpt_4_prompt_based_one_hop_...".
save_name = "_".join(CHAT_MODEL_ID.split("-"))+"_prompt_based_one_hop_true_false_binary_response.csv"
+
+
def main():
    """Run prompt-based true/false question answering and save the results.

    Loads the question CSV, asks the model each question, and writes a CSV
    with columns (question, label, llm_answer) under SAVE_PATH.
    """
    started = time.time()
    questions = pd.read_csv(QUESTION_PATH)
    # One (question_text, gold_label, model_answer) tuple per CSV row,
    # queried in file order.
    rows = [
        (
            record["text"],
            record["label"],
            get_GPT_response(
                "Question: " + record["text"],
                SYSTEM_PROMPT,
                CHAT_MODEL_ID,
                CHAT_DEPLOYMENT_ID,
                temperature=TEMPERATURE,
            ),
        )
        for _, record in questions.iterrows()
    ]
    results = pd.DataFrame(rows, columns=["question", "label", "llm_answer"])
    results.to_csv(os.path.join(SAVE_PATH, save_name), index=False, header=True)
    elapsed_min = (time.time() - started) / 60
    print("Completed in {} min".format(elapsed_min))


if __name__ == "__main__":
    main()
diff --git a/kg_rag/prompt_based_generation/GPT/text_generation.py b/kg_rag/prompt_based_generation/GPT/text_generation.py
new file mode 100644
index 0000000..235ece7
--- /dev/null
+++ b/kg_rag/prompt_based_generation/GPT/text_generation.py
@@ -0,0 +1,33 @@
from kg_rag.utility import *
import argparse



# -g selects which GPT model to query; defaults to gpt-35-turbo.
parser = argparse.ArgumentParser()
parser.add_argument('-g', type=str, default='gpt-35-turbo', help='GPT model selection')
args = parser.parse_args()

CHAT_MODEL_ID = args.g

# Prompt text and sampling temperature come from the shared KG-RAG config
# loaded by kg_rag.utility.
SYSTEM_PROMPT = system_prompts["PROMPT_BASED_TEXT_GENERATION"]
TEMPERATURE = config_data["LLM_TEMPERATURE"]

# Azure-style deployment id; here it simply mirrors the model id.
CHAT_DEPLOYMENT_ID = CHAT_MODEL_ID
+
+
def main():
    """Interactively answer one user question with a plain GPT prompt.

    Prompts on stdin, sends the question with the prompt-based system
    prompt, and streams the model's answer to stdout.
    """
    print(" ")
    user_question = input("Enter your question : ")
    print("Here is the prompt-based answer:")
    print("")
    answer = get_GPT_response(
        user_question,
        SYSTEM_PROMPT,
        CHAT_MODEL_ID,
        CHAT_DEPLOYMENT_ID,
        temperature=TEMPERATURE,
    )
    stream_out(answer)


if __name__ == "__main__":
    main()