diff --git a/Recommender.py b/Recommender.py
index 1b4b9c3..0853207 100644
--- a/Recommender.py
+++ b/Recommender.py
@@ -32,7 +32,7 @@ class Recommender:
         for i, material in new_material.iterrows():
             self._match_tags(username, material, interest)
 
-    def make_recommendation(self, username):
+    def make_recommendation(self, username, name):
         if len(self.curr_recommendations[username]) == 0:
             return ""
         to_consider = [idx for idx, value in enumerate(self.recommended[username]) if value == False]
@@ -42,7 +42,7 @@ class Recommender:
         index = self.recommended[username][index]
         #while self.recommended[index] == True:
         #    index = random.choice(list(range(0, len(self.curr_recommendations))))
-        recommendation = "Hey " + username + "! This " + self.curr_recommendations[username][index][
+        recommendation = "Hey " + name + "! This " + self.curr_recommendations[username][index][
            'type'].lower() + " about " + ', '.join(
            self.curr_recommendations[username][index]['tags']).lower() + " was posted recently by " + \
            self.curr_recommendations[username][index][
diff --git a/ResponseGenerator.py b/ResponseGenerator.py
index f7498fa..dcc67f2 100644
--- a/ResponseGenerator.py
+++ b/ResponseGenerator.py
@@ -88,11 +88,11 @@ class ResponseGenerator:
 
         return self.db[db_type].iloc[[I[0]][0]].reset_index(drop=True).loc[0]
 
-    def gen_response(self, action, utterance=None, username=None, vrename=None, state=None, consec_history=None, chitchat_history=None):
+    def gen_response(self, action, utterance=None, name=None, username=None, vrename=None, state=None, consec_history=None, chitchat_history=None):
         if action == "Help":
-            return "Hey it's Janet! I am here to help you make use of the datasets and papers in the catalogue of the " + vrename +" VRE. I can answer questions whose answers may be inside the papers. I can summarize papers for you. I can also chat with you. So, whichever it is, I am ready to chat!"
+            return "Hey " + name + "! It's Janet! I am here to help you make use of the datasets and papers in the catalogue of the " + vrename +" VRE. I can answer questions whose answers may be inside the papers. I can summarize papers for you. I can also chat with you. So, whichever it is, I am ready to chat!"
         elif action == "Recommend":
-            prompt = self.recommender.make_recommendation(username)
+            prompt = self.recommender.make_recommendation(username, name)
             if prompt != "":
                 return prompt
             else:
diff --git a/main.py b/main.py
index fc72bd9..b528d61 100644
--- a/main.py
+++ b/main.py
@@ -6,6 +6,7 @@ from flask import Flask, render_template, request, jsonify
 from flask_cors import CORS, cross_origin
 import psycopg2
 import spacy
+import requests
 import spacy_transformers
 import torch
 from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline
@@ -40,20 +41,20 @@ cur = conn.cursor()
 
 users = {}
 
-def vre_fetch():
+def vre_fetch(token):
     while True:
         time.sleep(1000)
         print('getting new material')
-        vre.get_vre_update()
-        vre.index_periodic_update()
-        rg.update_index(vre.get_index())
-        rg.update_db(vre.get_db())
+        users[token]['args']['vre'].get_vre_update()
+        users[token]['args']['vre'].index_periodic_update()
+        users[token]['args']['rg'].update_index(users[token]['args']['vre'].get_index())
+        users[token]['args']['rg'].update_db(users[token]['args']['vre'].get_db())
 
-def user_interest_decay(user):
+def user_interest_decay(token):
     while True:
-        print("decaying interests after 3 minutes for " + user.username)
+        print("decaying interests after 3 minutes for " + users[token]['username'])
         time.sleep(180)
-        user.decay_interests()
+        users[token]['user'].decay_interests()
 
 def clear_inactive():
     while True:
@@ -69,34 +70,52 @@ def health():
 
 @app.route("/api/dm", methods=['POST'])
 def init_dm():
-    username = request.get_json().get("username")
-    token = '2c1e8f88-461c-42c0-8cc1-b7660771c9a3-843339462'
-    if username not in users:
-        users[username] = {'dm': DM(), 'activity': 0, 'user': User(username, token)}
-        threading.Thread(target=user_interest_decay, args=(users[username]['user'],), name='decayinterest_'+username).start()
-        message = {"answer": "your assigned name is " + username, "assignedname": username}
+    token = request.get_json().get("token")
+    headers = {"gcube-token": token, "Accept": "application/json"}
+
+    if token not in users:
+        url = 'https://api.d4science.org/rest/2/people/profile'
+        response = requests.get(url, headers=headers)
+        username = response.json()['result']['username']
+        name = response.json()['result']['fullname']
+
+        vre = VRE("assistedlab", token, retriever)
+        vre.init()
+        index = vre.get_index()
+        db = vre.get_db()
+        rg = ResponseGenerator(index,db, rec, generators, retriever)
+        args = {'vre': vre, 'rg': rg}
+
+        users[token] = {'username': username, 'name': name, 'dm': DM(), 'activity': 0, 'user': User(username, token), 'args': args}
+
+        threading.Thread(target=vre_fetch, args=(token,), name='updatevre_'+users[token]['username']).start()
+        threading.Thread(target=user_interest_decay, args=(token,), name='decayinterest_'+users[token]['username']).start()
+
+        message = {"answer": "Hi " + name.split()[0] + '!', "assignedname": username}
     else:
-        message = {"answer": "welcome back " + username, "assignedname": username}
+        message = {"answer": "welcome back " + users[token]['name'].split()[0] + '!', "assignedname": users[token]['username']}
     return message
 
 @app.route("/api/predict", methods=['POST'])
 def predict():
     text = request.get_json().get("message")
-    username = request.get_json().get("username")
-    dm = users[username]['dm']
-    user = users[username]['user']
+    token = request.get_json().get("token")
+    dm = users[token]['dm']
+    user = users[token]['user']
+    rg = users[token]['args']['rg']
+    vre = users[token]['args']['vre']
     message = {}
     if text == "":
         state = {'help': True, 'inactive': False, 'modified_query':"", 'intent':""}
         dm.update(state)
         action = dm.next_action()
-        response = rg.gen_response(action, vrename=vre.name)
+        response = rg.gen_response(action, vrename=vre.name, username=users[token]['username'], name=users[token]['name'].split()[0])
         message = {"answer": response}
     elif text == "":
         state = {'help': False, 'inactive': True, 'modified_query':"recommed: ", 'intent':""}
         dm.update(state)
         action = dm.next_action()
-        response = rg.gen_response(action, username=user.username, vrename=vre.name)
+        response = rg.gen_response(action, username=users[token]['username'], name=users[token]['name'].split()[0], vrename=vre.name)
         message = {"answer": response}
         new_state = {'modified_query': response}
         dm.update(new_state)
@@ -114,7 +133,7 @@ def predict():
         new_user_interests = user.get_user_interests()
         new_vre_material = pd.concat([vre.db['paper_db'], vre.db['dataset_db']]).reset_index(drop=True)
         if (new_user_interests != old_user_interests or len(old_vre_material) != len(new_vre_material)):
-            rec.generate_recommendations(username, new_user_interests, new_vre_material)
+            rec.generate_recommendations(users[token]['username'], new_user_interests, new_vre_material)
         dm.update(state)
         action = dm.next_action()
         response = rg.gen_response(action=action, utterance=state['modified_query'], state=dm.get_recent_state(), consec_history=dm.get_consec_history(), chitchat_history=dm.get_chitchat_history(), vrename=vre.name)
@@ -124,9 +143,11 @@ def predict():
         new_state = {'modified_query': response, 'intent': state['intent']}
         dm.update(new_state)
     reply = jsonify(message)
-    users[username]['dm'] = dm
-    users[username]['user'] = user
-    users[username]['activity'] = 0
+    users[token]['dm'] = dm
+    users[token]['user'] = user
+    users[token]['activity'] = 0
+    users[token]['args']['vre'] = vre
+    users[token]['args']['rg'] = rg
     return reply
 
 @app.route('/api/feedback', methods = ['POST'])
@@ -170,21 +191,10 @@ if __name__ == "__main__":
                   'amb': amb_generator,
                   'summ': summ_generator}
 
-    #load vre
-    token = '2c1e8f88-461c-42c0-8cc1-b7660771c9a3-843339462'
-    vre = VRE("assistedlab", token, retriever)
-    vre.init()
-    index = vre.get_index()
-    db = vre.get_db()
-    threading.Thread(target=vre_fetch, name='updatevre').start()
-    threading.Thread(target=clear_inactive, name='clear').start()
-    rec = Recommender(retriever)
-    rg = ResponseGenerator(index,db, rec, generators, retriever)
-
     cur.execute('CREATE TABLE IF NOT EXISTS feedback_experimental (id serial PRIMARY KEY,'
                 'query text NOT NULL,'
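For reference, a minimal client sketch of the token-based flow introduced above. The endpoints, JSON keys ("token", "message") and response fields ("answer", "assignedname") come from the diff; the base URL and the JANET_TOKEN environment variable are assumptions for illustration only.

```python
# Minimal client sketch for the token-based endpoints above.
# Assumptions (not part of the diff): the app listens on http://localhost:5000
# and JANET_TOKEN holds a valid gcube token; both names are hypothetical.
import os
import requests

BASE_URL = "http://localhost:5000"   # assumed host/port
token = os.environ["JANET_TOKEN"]    # hypothetical environment variable

# /api/dm now receives the gcube token instead of a username; the server
# resolves the username and full name via the D4Science people-profile API.
session = requests.post(f"{BASE_URL}/api/dm", json={"token": token}).json()
print(session["answer"], session["assignedname"])

# /api/predict likewise identifies the caller by token rather than by username.
reply = requests.post(
    f"{BASE_URL}/api/predict",
    json={"token": token, "message": "Can you recommend a paper?"},
).json()
print(reply["answer"])
```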