# Search server (検索サーバー用)
import warnings
# Silence all library warnings globally (e.g. elasticsearch deprecation noise).
warnings.filterwarnings("ignore")
from flask import Flask, render_template, request, g, jsonify
app = Flask(__name__)
from elasticsearch import Elasticsearch
from operator import itemgetter  # NOTE(review): unused in this file
counter = 1  # NOTE(review): module-level counter, never read or updated

@app.route('/')
def form():
    """Serve the top-page search form."""
    page = render_template('form.html')
    return page

@app.route('/search')
def search():
    """Search endpoint backed by the Elasticsearch index ``sample01``.

    Query-string parameters:
        query     -- search text; full-width spaces are normalised to ASCII
                     spaces, the string is lower-cased, then split on spaces.
        mode_flag -- 'primary' (interactive web UI) or 'secondary'
                     (similar-documents lookup by document ID).
        hash      -- (primary only) optional prefix matched against the
                     ``Text`` field; when present it overrides ``query``.
        button    -- (primary only) 'keyword' (plain keyword search),
                     'label' (similar-document search via stored vectors),
                     or anything else for the JSON API branch.
        view      -- (primary JSON branch) requested number of results.

    Returns a rendered HTML template for the UI branches, or a JSON list for
    the API branch; unknown ``mode_flag`` values render ``error.html``.
    """
    # NOTE(review): host and credentials are hard-coded in source -- move them
    # to environment variables / configuration before sharing this code.
    es = Elasticsearch(host = '153.120.18.55', port = 9200, http_auth = ('elastic', 'DiaWYfh0mxo1Sn498D6A'))
    # Normalise the query: ideographic (full-width) space -> ASCII space,
    # then lower-case.
    # NOTE(review): request.args.get('query') is None when the parameter is
    # missing, which would raise AttributeError here -- confirm every caller
    # always sends 'query'.
    query_r = request.args.get('query').replace('　', ' ').lower()
    mode_r = request.args.get('mode_flag')
    query_list = query_r.split(" ")
    # NOTE(review): debug print on every request -- consider app.logger.
    print(query_list)

    if mode_r == 'primary':
        # NOTE(review): same missing-parameter hazard as 'query' above --
        # hash_r is None if 'hash' was not sent, and .lower() would raise.
        hash_r = request.args.get('hash')
        hash_r = hash_r.lower()
        hash_len = len(hash_r)
        search_r = request.args.get('button')
        label = []
        if search_r == 'keyword':
            # --- keyword search: collect field columns, then zip into dicts.
            title_list = []
            publisher_list = []
            label_list = []
            ID_list = []
            e_label_list = []
            n_label_list = []
            url_list = []
            language_list = []
            type_list = []
            public_date_list = []
            if hash_len >= 1:
                # Hash supplied: fetch the single document whose Text field
                # starts with the given prefix.
                f_res = es.search(index = 'sample01', body = {
                    '_source':['Label', 'Title', 'Publisher', 'ID', 'Public_date', 'N_label', 'E_label', 'URL', 'Language', 'Type'],
                    'size':1,
                    'query':{'prefix':{'Text': hash_r }}})
                for doc in f_res['hits']['hits']:
                    dict1 = doc['_source']
                    label_list.append(dict1['Label'])
                    title_list.append(dict1['Title'])
                    publisher_list.append(dict1['Publisher'])
                    ID_list.append(dict1['ID'])
                    e_label_list.append(', '.join(dict1['E_label']))
                    n_label_list.append(', '.join(dict1['N_label']))
                    url_list.append(dict1['URL'])
                    language_list.append(dict1['Language'])
                    type_list.append(dict1['Type'])
                    public_date_list.append(dict1['Public_date'])
            else:
                # No hash: build a bool/should query over every term.
                # Alphanumeric terms use analyzed 'match'; anything containing
                # other characters (e.g. Japanese text) uses 'match_phrase'.
                ss = []
                for q in query_list:
                    if q.isalnum() == True:
                        ss.append({"match":{"Text": q }})
                        ss.append({"match":{"Title": q }})
                    else:
                        ss.append({"match_phrase":{"Text": q }})
                        ss.append({"match_phrase":{"Title": q }})
                body = {
                    "_source":["Label", "Title","Publisher", "ID", "Public_date", "N_label", "E_label", "URL", "Language", "Type"],
                    "size":20,
                    "query":{
                        "bool":{
                            "should":ss
                    }}}
                f_res = es.search(index = 'sample01', body = body)
                # NOTE(review): this field-collection loop is duplicated
                # several times in this function -- a shared helper would
                # remove the repetition.
                for doc in f_res['hits']['hits']:
                    dict1 = doc['_source']
                    label_list.append(dict1['Label'])
                    title_list.append(dict1['Title'])
                    publisher_list.append(dict1['Publisher'])
                    ID_list.append(dict1['ID'])
                    e_label_list.append(', '.join(dict1['E_label']))
                    n_label_list.append(', '.join(dict1['N_label']))
                    url_list.append(dict1['URL'])
                    language_list.append(dict1['Language'])
                    type_list.append(dict1['Type'])
                    public_date_list.append(dict1['Public_date'])
            # Zip the parallel column lists into one dict per hit for the
            # template.
            hit = []
            for i in range(len(title_list)):
                hit.append({'title': title_list[i], 'publisher': publisher_list[i], 'ID': ID_list[i], 'label': label_list[i], 'E_label': e_label_list[i], 'N_label': n_label_list[i], 'URL': url_list[i], 'Language': language_list[i], 'Type': type_list[i], 'Public_date': public_date_list[i]})

            if len(hit) == 0:
                return render_template("form_error.html")
            else:
                return render_template("search_keyword.html", hit = hit)

        elif search_r == 'label':
            # --- label search: locate one "seed" document, then rank the
            # whole index by cosine similarity against its stored vector.
            if hash_len >= 1:
                # Seed by Text-prefix (hash).
                f_res = es.search(index = 'sample01', body = {
                    '_source':['Label', 'Title','Publisher', 'ID', 'Vector', 'Public_date', 'N_label', 'E_label', 'URL', 'Language', 'Type'],
                    'size':1,
                    'query':{'prefix':{'Text': hash_r }}})
                for doc in f_res['hits']['hits']:
                    dict1 = doc['_source']
                    label = dict1['Label']
                    t_title = dict1['Title']
                    t_publisher = dict1['Publisher']
                    t_id = dict1['ID']
                    t_vec = dict1['Vector']
                    t_e_label = ', '.join(dict1['E_label'])
                    t_n_label = ', '.join(dict1['N_label'])
                    t_url = dict1['URL']
                    t_language = dict1['Language']
                    t_type = dict1['Type']
                    t_public_date = dict1['Public_date']
                hit_num = f_res['hits']['total']['value']
            else:
                # Seed by keyword: Text terms are required (must), Title
                # terms only boost the score (should).
                ss = []
                for q in query_list:
                    if q.isalnum() == True:
                        ss.append({"match":{"Title": q }})
                    else:
                        ss.append({"match_phrase":{"Title": q }})
                mm = []
                for q in query_list:
                    if q.isalnum() == True:
                        mm.append({"match":{"Text": q }})
                    else:
                        mm.append({"match_phrase":{"Text": q }})
                body = {
                    "_source":["Label", "Title","Publisher", "ID", "Public_date", "N_label", "E_label", "URL", "Language", "Type", "Vector"],
                    "size":1,
                    "query":{
                        "bool":{
                            "should":ss,
                            "must":mm
                    }}}
                f_res = es.search(index = 'sample01', body = body)
                hit_num = f_res['hits']['total']['value']
                for doc in f_res['hits']['hits']:
                    dict1 = doc['_source']
                    label = dict1['Label']
                    t_title = dict1['Title']
                    t_publisher = dict1['Publisher']
                    t_id = dict1['ID']
                    t_vec = dict1['Vector']
                    t_e_label = ', '.join(dict1['E_label'])
                    t_n_label = ', '.join(dict1['N_label'])
                    t_url = dict1['URL']
                    t_language = dict1['Language']
                    t_type = dict1['Type']
                    t_public_date = dict1['Public_date']
            # A non-empty label implies the seed loop ran at least once, so
            # the t_* variables below are bound.
            if len(label) >= 1:
                # Keep at most the first 15 labels for display.
                num = 0
                label_list = []

                for l in label:
                    label_list.append(l)
                    num += 1
                    if num == 15:
                        break

                label = ', '.join(label_list)

                # Rank every document by cosine similarity to the seed
                # vector (+1.0 so the script score is always positive).
                res = es.search(index = 'sample01', body = {
                    '_source':['Label', 'Title','Publisher', 'ID', 'Public_date', 'N_label', 'E_label', 'URL', 'Language', 'Type'],
                    'size': 21,
                    "query": {
                        "function_score": {
                        "query": {
                            "match_all": {}
                        },
                        "functions": [
                            {
                            "script_score": {
                                "script": {
                                "source": "cosineSimilarity(params.queryVector, 'Vector') + 1.0",
                                "params": {
                                    "queryVector": t_vec
                                }
                                            }
                                }
                            } 
                        ]
                        }
                    }
                })

                title_list = []
                publisher_list = []
                score_list = []
                ID_list = []
                e_label_list = []
                n_label_list = []
                url_list = []
                language_list = []
                type_list = []
                public_date_list = []

                for doc in res['hits']['hits']:
                    dict1 = doc['_source']
                    score_list.append(doc['_score'])
                    title_list.append(dict1['Title'])
                    ID_list.append(dict1['ID'])
                    publisher_list.append(dict1['Publisher'])
                    e_label_list.append(', '.join(dict1['E_label']))
                    n_label_list.append(', '.join(dict1['N_label']))
                    url_list.append(dict1['URL'])
                    language_list.append(dict1['Language'])
                    type_list.append(dict1['Type'])
                    public_date_list.append(dict1['Public_date'])
                hit = []

                for i in range(len(title_list)):
                    hit.append({'title': title_list[i], 'publisher': publisher_list[i], 'ID': ID_list[i], 'distance': score_list[i], 'E_label': e_label_list[i], 'N_label': n_label_list[i], 'URL': url_list[i], 'Language': language_list[i], 'Type': type_list[i], 'Public_date': public_date_list[i]})
                # Drop the top hit -- presumably the seed document matching
                # itself with the highest similarity.
                # NOTE(review): raises IndexError if the similarity search
                # returned zero hits.
                del hit[0]
                return render_template("search.html", label = label, hits = hit_num, hit = hit, l = label, ti = t_title, p = t_publisher, I = t_id, e = t_e_label, n = t_n_label, url = t_url, lang = t_language, type = t_type, p_d = t_public_date)
            else:
                return render_template('form_error.html')

        else:
            # --- JSON API branch: similar documents for a keyword query,
            # returning up to 'view' results as JSON.
            view_r = request.args.get('view')
            ss = []
            for q in query_list:
                if q.isalnum() == True:
                    ss.append({"match":{"Title": q }})
                else:
                    ss.append({"match_phrase":{"Title": q }})
            mm = []
            for q in query_list:
                if q.isalnum() == True:
                    mm.append({"match":{"Text": q }})
                else:
                    mm.append({"match_phrase":{"Text": q }})
            # Fetch only the seed document's vector.
            body = {
                "_source":["Vector"],
                "size":1,
                "query":{
                    "bool":{
                        "should":ss,
                        "must":mm
                }}}
            f_res = es.search(index = 'sample01', body = body)
            for doc in f_res['hits']['hits']:
                dict1 = doc['_source']
                t_vec = dict1['Vector']
            # Default the result count to 1 when 'view' is empty.
            # NOTE(review): view_r is None (not '') when the parameter is
            # absent, so len(view_r) would raise TypeError; also note view_r
            # stays a string when supplied -- presumably Elasticsearch
            # coerces the 'size' value, verify.
            # NOTE(review): t_vec is unbound (NameError below) when the seed
            # query matched nothing.
            if len(view_r) >= 1:
                pass
            else:
                view_r = 1
            res = es.search(index = 'sample01', body = {
                '_source':['Label', 'Title','Publisher', 'ID', 'Public_date', 'N_label', 'E_label', 'URL', 'Language', 'Type'],
                'size': view_r,
                "query": {
                    "function_score": {
                    "query": {
                        "match_all": {}
                    },
                    "functions": [
                        {
                        "script_score": {
                            "script": {
                            "source": "cosineSimilarity(params.queryVector, 'Vector') + 1.0",
                            "params": {
                                "queryVector": t_vec
                            }
                                        }
                            }
                        } 
                    ]
                    }
                }
            })

            title_list = []
            publisher_list = []
            url_list = []
            public_date_list = []

            for doc in res['hits']['hits']:
                dict1 = doc['_source']
                title_list.append(dict1['Title'])
                publisher_list.append(dict1['Publisher'])
                url_list.append(dict1['URL'])
                public_date_list.append(dict1['Public_date'])
            hit = []
            hit_error = {'result': 'No search results.'}

            for i in range(len(title_list)):
                hit.append({'title': title_list[i], 'publisher': publisher_list[i], 'URL': url_list[i], 'Public_date': public_date_list[i]})
            if len(hit) == 0:
                return jsonify(hit_error)
            else:
                return jsonify(hit)

    elif mode_r == 'secondary':
        # --- similar-by-ID: 'query' holds a document ID; look up its vector
        # and return the 20 most similar documents as cards.
        # NOTE(review): t_vec stays unbound (NameError in the next search)
        # when no document matches the given ID.
        f_res = es.search(index = 'sample01', body = {
                '_source':['Vector'],
                'size':1,
                'query':{
                    "match":{"ID": query_r }
                }})
        for doc in f_res['hits']['hits']:
            dict1 = doc['_source']
            t_vec = dict1['Vector']

        res = es.search(index = 'sample01', body = {
                '_source':['Label', 'Title','Publisher', 'ID', 'Public_date', 'N_label', 'E_label', 'URL', 'Language', 'Type'],
                'size': 21,
                "query": {
                    "function_score": {
                    "query": {
                        "match_all": {}
                    },
                    "functions": [
                        {
                        "script_score": {
                            "script": {
                            "source": "cosineSimilarity(params.queryVector, 'Vector') + 1.0",
                            "params": {
                                "queryVector": t_vec
                            }
                                        }
                            }
                        } 
                    ]
                    }
                }
            })

        title_list = []
        publisher_list = []
        score_list = []
        ID_list = []
        e_label_list = []
        n_label_list = []
        url_list = []
        language_list = []
        type_list = []
        public_date_list = []

        for doc in res['hits']['hits']:
            dict1 = doc['_source']
            score_list.append(doc['_score'])
            title_list.append(dict1['Title'])
            ID_list.append(dict1['ID'])
            publisher_list.append(dict1['Publisher'])
            e_label_list.append(', '.join(dict1['E_label']))
            n_label_list.append(', '.join(dict1['N_label']))
            url_list.append(dict1['URL'])
            language_list.append(dict1['Language'])
            type_list.append(dict1['Type'])
            public_date_list.append(dict1['Public_date'])

        hit = []

        for i in range(len(title_list)):
            hit.append({'title': title_list[i], 'publisher': publisher_list[i], 'ID': ID_list[i], 'distance': score_list[i], 'E_label': e_label_list[i], 'N_label': n_label_list[i], 'URL': url_list[i], 'Language': language_list[i], 'Type': type_list[i], 'Public_date': public_date_list[i]})
        # Drop the top hit (the query document itself).
        # NOTE(review): IndexError when the similarity search returns no hits.
        del hit[0]

        return render_template("search-card.html", hit = hit)

    else:
        # Unknown mode_flag.
        return render_template('error.html')

if __name__ == "__main__":
    app.run(host='153.120.135.103', port = 80, debug=True)