  1. """
  2. Swisscows (Web, Images)
  3. @website https://swisscows.ch
  4. @provide-api no
  5. @using-api no
  6. @results HTML (using search portal)
  7. @stable no (HTML can change)
  8. @parse url, title, content
  9. """
from json import loads
from urllib import urlencode, unquote
import re

# engine dependent config
categories = ['general', 'images']
paging = True
language_support = True

supported_languages = ["ar-SA", "es-AR", "en-AU", "de-AT", "fr-BE", "nl-BE", "pt-BR", "bg-BG", "en-CA", "fr-CA",
                       "es-CL", "zh-CN", "hr-HR", "cs-CZ", "da-DK", "et-EE", "fi-FI", "fr-FR", "de-DE", "el-GR",
                       "zh-HK", "hu-HU", "en-IN", "en-IE", "he-IL", "it-IT", "ja-JP", "ko-KR", "lv-LV", "lt-LT",
                       "en-MY", "es-MX", "nl-NL", "en-NZ", "nb-NO", "en-PH", "pl-PL", "pt-PT", "ro-RO", "ru-RU",
                       "en-SG", "sk-SK", "sl-SI", "en-ZA", "es-ES", "sv-SE", "de-CH", "fr-CH", "zh-TW", "th-TH",
                       "tr-TR", "uk-UA", "en-GB", "en-US", "es-US"]

# search-url
base_url = 'https://swisscows.ch/'
search_string = '?{query}&page={page}'

# regex
regex_json = re.compile(r'initialData: {"Request":(.|\n)*},\s*environment')
regex_json_remove_start = re.compile(r'^initialData:\s*')
regex_json_remove_end = re.compile(r',\s*environment$')
regex_img_url_remove_start = re.compile(r'^https?://i\.swisscows\.ch/\?link=')
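
# The regexes above cut the page's embedded `initialData: {...}, environment`
# JavaScript snippet out of the HTML and strip the prefix/suffix so that the
# remaining object (with "Request", "Results" and, when present, "Images" keys)
# can be parsed by loads() in response() below.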


# do search-request
def request(query, params):
    if params['language'] == 'all':
        ui_language = 'browser'
        region = 'browser'
    elif params['language'].split('-')[0] == 'no':
        # there is no plain 'no' region; fall back to Norwegian Bokmål
        region = 'nb-NO'
        ui_language = 'nb'
    else:
        region = params['language']
        ui_language = params['language'].split('-')[0]

    search_path = search_string.format(
        query=urlencode({'query': query,
                         'uiLanguage': ui_language,
                         'region': region}),
        page=params['pageno'])

    # image search query is something like 'image?{query}&page={page}'
    if params['category'] == 'images':
        search_path = 'image' + search_path

    params['url'] = base_url + search_path

    return params
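
# Example of a URL request() produces for the 'general' category (sketch; the
# parameter order emitted by urlencode may differ):
#   https://swisscows.ch/?query=searx&uiLanguage=de&region=de-DE&page=1
# For the 'images' category the path becomes image?query=...&page=...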


# get response from search-request
def response(resp):
    results = []

    json_regex = regex_json.search(resp.content)

    # check if results are returned
    if not json_regex:
        return []

    json_raw = regex_json_remove_end.sub('', regex_json_remove_start.sub('', json_regex.group()))
    json = loads(json_raw)

    # parse results
    for result in json['Results'].get('items', []):
        result_title = result['Title'].replace(u'\uE000', '').replace(u'\uE001', '')

        # parse image results
        if result.get('ContentType', '').startswith('image'):
            img_url = unquote(regex_img_url_remove_start.sub('', result['Url']))

            # append result
            results.append({'url': result['SourceUrl'],
                            'title': result['Title'],
                            'content': '',
                            'img_src': img_url,
                            'template': 'images.html'})

        # parse general results
        else:
            result_url = result['Url'].replace(u'\uE000', '').replace(u'\uE001', '')
            result_content = result['Description'].replace(u'\uE000', '').replace(u'\uE001', '')

            # append result
            results.append({'url': result_url,
                            'title': result_title,
                            'content': result_content})

    # parse images
    for result in json.get('Images', []):
        # decode image url
        img_url = unquote(regex_img_url_remove_start.sub('', result['Url']))

        # append result
        results.append({'url': result['SourceUrl'],
                        'title': result['Title'],
                        'content': '',
                        'img_src': img_url,
                        'template': 'images.html'})

    # return results
    return results
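

# Minimal usage sketch (assumption: this module is normally driven by a
# metasearch framework that supplies the params dict and fetches params['url']
# itself; run it directly only to inspect the generated URL).
if __name__ == '__main__':
    sample_params = {'language': 'en-US', 'pageno': 1, 'category': 'general'}
    print(request('test query', sample_params)['url'])
    # prints something like:
    # https://swisscows.ch/?query=test+query&uiLanguage=en&region=en-US&page=1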