'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''

from os.path import realpath, dirname, splitext, join
import sys
from imp import load_source
from itertools import izip_longest, chain
from operator import itemgetter
from urlparse import urlparse
from datetime import datetime
import grequests
from flask.ext.babel import gettext
from searx import settings
from searx.utils import gen_useragent

engine_dir = dirname(realpath(__file__))

number_of_searches = 0

engines = {}

categories = {'general': []}

engine_shortcuts = {}


def load_module(filename):
    modname = splitext(filename)[0]
    if modname in sys.modules:
        del sys.modules[modname]
    filepath = join(engine_dir, filename)
    module = load_source(modname, filepath)
    module.name = modname
    return module


if 'engines' not in settings or not settings['engines']:
    print '[E] Error: no engines found. Edit your settings.yml'
    exit(2)

for engine_data in settings['engines']:
    engine_name = engine_data['engine']
    engine = load_module(engine_name + '.py')
    for param_name in engine_data:
        if param_name == 'engine':
            continue
        if param_name == 'categories':
            if engine_data['categories'] == 'none':
                engine.categories = []
            else:
                engine.categories = map(
                    str.strip, engine_data['categories'].split(','))
            continue
        setattr(engine, param_name, engine_data[param_name])
    # defaults for optional engine attributes
    if not hasattr(engine, 'paging'):
        engine.paging = False
    if not hasattr(engine, 'categories'):
        engine.categories = ['general']
    if not hasattr(engine, 'language_support'):
        engine.language_support = True
    if not hasattr(engine, 'timeout'):
        engine.timeout = settings['server']['request_timeout']
    if not hasattr(engine, 'shortcut'):
        engine.shortcut = ''
    # checking required variables
    for engine_attr in dir(engine):
        if engine_attr.startswith('_'):
            continue
        if getattr(engine, engine_attr) is None:
            print '[E] Engine config error: Missing attribute "{0}.{1}"'.format(engine.name, engine_attr)  # noqa
            sys.exit(1)
    engines[engine.name] = engine
    engine.stats = {
        'result_count': 0,
        'search_count': 0,
        'page_load_time': 0,
        'score_count': 0,
        'errors': 0
    }
    # engine.categories is guaranteed to be set above
    for category_name in engine.categories:
        categories.setdefault(category_name, []).append(engine)
    if engine.shortcut:
        # TODO check for duplicate shortcuts
        engine_shortcuts[engine.shortcut] = engine.name


def default_request_params():
    return {
        'method': 'GET', 'headers': {}, 'data': {}, 'url': '', 'cookies': {}}
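# A typical engine module fills in the dict returned by
# default_request_params() in its request() hook and parses the HTTP
# response in its response() hook. A minimal sketch of a hypothetical
# engine module (illustrative only, not part of this file):
#
#     def request(query, params):
#         params['url'] = 'https://example.com/search?q=' + query
#         return params
#
#     def response(resp):
#         return [{'url': resp.url, 'title': 'example', 'content': ''}]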
def make_callback(engine_name, results, suggestions, callback, params):
    # creating a callback wrapper for the search engine results
    def process_callback(response, **kwargs):
        cb_res = []
        response.search_params = params
        # update the engine's page load time statistics
        engines[engine_name].stats['page_load_time'] += \
            (datetime.now() - params['started']).total_seconds()
        try:
            search_results = callback(response)
        except Exception as e:
            # increase the error counter and return an empty result list
            engines[engine_name].stats['errors'] += 1
            results[engine_name] = cb_res
            print '[E] Error with engine "{0}":\n\t{1}'.format(
                engine_name, str(e))
            return
        for result in search_results:
            result['engine'] = engine_name
            if 'suggestion' in result:
                # TODO type checks
                suggestions.add(result['suggestion'])
                continue
            cb_res.append(result)
        results[engine_name] = cb_res
    return process_callback


def score_results(results):
    # interleave the per-engine result lists, dropping the izip padding
    flat_res = filter(
        None, chain.from_iterable(izip_longest(*results.values())))
    flat_len = len(flat_res)
    engines_len = len(results)
    results = []
    # deduplication + scoring
    for i, res in enumerate(flat_res):
        res['parsed_url'] = urlparse(res['url'])
        res['engines'] = [res['engine']]
        weight = 1.0
        if hasattr(engines[res['engine']], 'weight'):
            weight = float(engines[res['engine']].weight)
        # earlier positions in the interleaved list score higher
        score = int((flat_len - i) / engines_len) * weight + 1
        duplicated = False
        for new_res in results:
            # strip a trailing slash before comparing paths
            p1 = res['parsed_url'].path[:-1] if res['parsed_url'].path.endswith('/') else res['parsed_url'].path  # noqa
            p2 = new_res['parsed_url'].path[:-1] if new_res['parsed_url'].path.endswith('/') else new_res['parsed_url'].path  # noqa
            if res['parsed_url'].netloc == new_res['parsed_url'].netloc and\
               p1 == p2 and\
               res['parsed_url'].query == new_res['parsed_url'].query and\
               res.get('template') == new_res.get('template'):
                duplicated = new_res
                break
        if duplicated:
            # keep the result with the longer content
            if len(res.get('content') or '') > len(duplicated.get('content') or ''):  # noqa
                duplicated['content'] = res['content']
            # merge scores and engine lists of the duplicates
            duplicated['score'] += score
            duplicated['engines'].append(res['engine'])
            # prefer the https version of duplicated urls
            if duplicated['parsed_url'].scheme == 'https':
                continue
            elif res['parsed_url'].scheme == 'https':
                duplicated['url'] = res['parsed_url'].geturl()
                duplicated['parsed_url'] = res['parsed_url']
        else:
            res['score'] = score
            results.append(res)
    return sorted(results, key=itemgetter('score'), reverse=True)


def search(query, request, selected_engines, pageno=1, lang='all'):
    global engines, categories, number_of_searches
    requests = []
    results = {}
    suggestions = set()
    number_of_searches += 1
    # use a generated User-Agent instead of the client's own
    # user_agent = request.headers.get('User-Agent', '')
    user_agent = gen_useragent()

    for selected_engine in selected_engines:
        if selected_engine['name'] not in engines:
            continue

        engine = engines[selected_engine['name']]

        # skip engines that cannot serve the requested page or language
        if pageno > 1 and not engine.paging:
            continue

        if lang != 'all' and not engine.language_support:
            continue

        request_params = default_request_params()
        request_params['headers']['User-Agent'] = user_agent
        request_params['category'] = selected_engine['category']
        request_params['started'] = datetime.now()
        request_params['pageno'] = pageno
        request_params['language'] = lang
        request_params = engine.request(query.encode('utf-8'), request_params)

        callback = make_callback(
            selected_engine['name'],
            results,
            suggestions,
            engine.response,
            request_params
        )

        request_args = dict(
            headers=request_params['headers'],
            hooks=dict(response=callback),
            cookies=request_params['cookies'],
            timeout=engine.timeout
        )

        if request_params['method'] == 'GET':
            req = grequests.get
        else:
            req = grequests.post
            request_args['data'] = request_params['data']

        # ignoring empty urls
        if not request_params['url']:
            continue

        requests.append(req(request_params['url'], **request_args))
    # send all requests concurrently and wait for the callbacks
    grequests.map(requests)
    for engine_name, engine_results in results.items():
        engines[engine_name].stats['search_count'] += 1
        engines[engine_name].stats['result_count'] += len(engine_results)

    results = score_results(results)

    for result in results:
        for res_engine in result['engines']:
            # credit the score to every engine that returned the result
            engines[res_engine].stats['score_count'] += result['score']

    return results, suggestions


def get_engines_stats():
    # TODO refactor
    pageloads = []
    results = []
    scores = []
    errors = []
    scores_per_result = []

    max_pageload = max_results = max_score = max_errors = max_score_per_result = 0  # noqa
    for engine in engines.values():
        if engine.stats['search_count'] == 0:
            continue
        results_num = \
            engine.stats['result_count'] / float(engine.stats['search_count'])
        load_times = engine.stats['page_load_time'] / float(engine.stats['search_count'])  # noqa
        if results_num:
            score = engine.stats['score_count'] / float(engine.stats['search_count'])  # noqa
            score_per_result = score / results_num
        else:
            score = score_per_result = 0.0
        max_results = max(results_num, max_results)
        max_pageload = max(load_times, max_pageload)
        max_score = max(score, max_score)
        max_score_per_result = max(score_per_result, max_score_per_result)
        max_errors = max(max_errors, engine.stats['errors'])
        pageloads.append({'avg': load_times, 'name': engine.name})
        results.append({'avg': results_num, 'name': engine.name})
        scores.append({'avg': score, 'name': engine.name})
        errors.append({'avg': engine.stats['errors'], 'name': engine.name})
        scores_per_result.append({
            'avg': score_per_result,
            'name': engine.name
        })

    # normalize averages to percentages, guarding against division by zero
    for engine in pageloads:
        engine['percentage'] = int(engine['avg'] / max_pageload * 100) if max_pageload else 0  # noqa

    for engine in results:
        engine['percentage'] = int(engine['avg'] / max_results * 100) if max_results else 0  # noqa

    for engine in scores:
        engine['percentage'] = int(engine['avg'] / max_score * 100) if max_score else 0  # noqa

    for engine in scores_per_result:
        engine['percentage'] = int(engine['avg'] / max_score_per_result * 100) if max_score_per_result else 0  # noqa

    for engine in errors:
        if max_errors:
            engine['percentage'] = int(float(engine['avg']) / max_errors * 100)
        else:
            engine['percentage'] = 0

    return [
        (
            gettext('Page loads (sec)'),
            sorted(pageloads, key=itemgetter('avg'))
        ),
        (
            gettext('Number of results'),
            sorted(results, key=itemgetter('avg'), reverse=True)
        ),
        (
            gettext('Scores'),
            sorted(scores, key=itemgetter('avg'), reverse=True)
        ),
        (
            gettext('Scores per result'),
            sorted(scores_per_result, key=itemgetter('avg'), reverse=True)
        ),
        (
            gettext('Errors'),
            sorted(errors, key=itemgetter('avg'), reverse=True)
        ),
    ]
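# Usage sketch (illustrative only; engine names and categories depend on
# the 'engines:' section of settings.yml and are hypothetical here):
#
#     selected = [{'name': 'duckduckgo', 'category': 'general'}]
#     results, suggestions = search(u'free software', None, selected)
#     for r in results:
#         print r['score'], r['url']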