Caching SearXNG


SearXNG installs itself under /usr/local/searxng, with the main source code in the searxng-src subdirectory (/usr/local/searxng/searxng-src).

The interesting files are:

  1. /usr/local/searxng/searxng-src/searx/webapp.py : def search()
  2. /usr/local/searxng/searxng-src/searx/search/__init__.py : class Search
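Roughly how these fit together (a simplified sketch, paraphrased from the SearXNG sources rather than quoted verbatim): the search() view in webapp.py builds a search query and hands it to the Search class:

# Simplified call flow; the real signatures differ slightly.
search = Search(search_query)           # searx/search/__init__.py : class Search
result_container = search.search()      # Search.search() calls search_standard()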

A cache implementation could work as follows:

  • make a directory named cache in the searx folder
  • inside it, make a sub-folder for every possible leading character, for instance a to z and 0 to 9
  • name each cache file identically to the search term itself
  • when a search is performed, check whether a file with that name exists
  • on a hit, read the local file instead and skip the outgoing search (see the lookup sketch after this list)
  • send the keywords to cache maintainers so they can update the cache; they can then crawl the search engines and build a more comprehensive cache over time
  • the user updates their cache
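A minimal sketch of that lookup and store logic, assuming one JSON file per search term; CACHE_DIR, cache_lookup and cache_store are hypothetical names, not part of SearXNG:

import json
import os
import string

# Hypothetical location; a real implementation would live inside the searx
# package and sanitize the term before using it as a filename.
CACHE_DIR = 'cache'

def cache_path(query):
    # One sub-folder per leading character (a to z, 0 to 9), as described
    # above; anything else falls into a catch-all bucket.
    first = query[0].lower() if query else '_'
    bucket = first if first in string.ascii_lowercase + string.digits else '_'
    # The cache file is named identically to the search term itself.
    return os.path.join(CACHE_DIR, bucket, query)

def cache_lookup(query):
    """Return cached results for query, or None on a cache miss."""
    path = cache_path(query)
    if os.path.isfile(path):
        with open(path, encoding='utf-8') as f:
            return json.load(f)
    return None

def cache_store(query, results):
    """Write results for query into its per-character bucket."""
    path = cache_path(query)
    os.makedirs(os.path.dirname(path), exist_ok=True)
    with open(path, 'w', encoding='utf-8') as f:
        json.dump(results, f)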

Another implementation is a SearXNG caching search engine, where the search term goes to a caching proxy rather than directly to the search engines.
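A hedged sketch of what such an engine module could look like, following SearXNG's usual request()/response() engine interface; the proxy URL and its JSON reply format are assumptions, not an existing service:

# Hypothetical engine module, e.g. searx/engines/cache_proxy.py.
from urllib.parse import urlencode

base_url = 'https://cache.example.org/search'  # hypothetical caching proxy

def request(query, params):
    # Send the search term to the caching proxy instead of a real engine.
    params['url'] = base_url + '?' + urlencode({'q': query})
    return params

def response(resp):
    # Assume the proxy replies with a JSON list of {url, title, content} objects.
    return [
        {'url': r['url'], 'title': r['title'], 'content': r.get('content', '')}
        for r in resp.json()
    ]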

Proposed SearXNG options:

  • use the cache (sketched below)
  • update the cache
  • SearXNG is a privacy-focused search engine, so disclose to the end user that caching requires sharing keywords/search terms; that sharing is how the cache is built.
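These options could be read from settings.yml roughly like this; a sketch only, since cache, use_cache and update_cache are hypothetical keys that do not exist in SearXNG today:

from searx import settings  # SearXNG's parsed settings.yml

def cache_enabled():
    # Hypothetical key; would have to be added to settings.yml.
    return settings.get('cache', {}).get('use_cache', False)

def cache_update_enabled():
    # Hypothetical key; controls sending keywords to the cache maintainers.
    return settings.get('cache', {}).get('update_cache', False)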

Benefits:

  • turns SearXNG into a full search engine built from cached results
  • searches run against a local file, which speeds up searching significantly
  • offline searching becomes possible once the cache grows big enough

File in question: searx/search/__init__.py, with the cache check added to search_standard():

import os
import threading
import time
from timeit import default_timer
from uuid import uuid4

from flask import copy_current_request_context

from searx.results import ResultContainer
from searx.search.processors import PROCESSORS


class Search:
    """Search information container"""

    __slots__ = "search_query", "result_container", "start_time", "actual_timeout"

    def search_multiple_requests(self, requests):
        # Original engine fan-out: one thread per engine request.
        # pylint: disable=protected-access
        search_id = str(uuid4())

        for engine_name, query, request_params in requests:
            _search = copy_current_request_context(PROCESSORS[engine_name].search)
            th = threading.Thread(  # pylint: disable=invalid-name
                target=_search,
                args=(query, request_params, self.result_container, self.start_time, self.actual_timeout),
                name=search_id,
            )
            th._timeout = False
            th._engine_name = engine_name
            th.start()

        for th in threading.enumerate():  # pylint: disable=invalid-name
            if th.name == search_id:
                remaining_time = max(0.0, self.actual_timeout - (default_timer() - self.start_time))
                th.join(remaining_time)
                if th.is_alive():
                    th._timeout = True
                    self.result_container.add_unresponsive_engine(th._engine_name, 'timeout')
                    PROCESSORS[th._engine_name].logger.error('engine timeout')

    def search_multiple_requests2(self, requests):
        # Cache-hit variant: answers from mock results instead of querying the engines.
        # pylint: disable=protected-access
        search_id = str(uuid4())
        mock_result_container = ResultContainer()
        mock_results = [{'url': f'Mock Result {i}', 'content': ''} for i in range(1, 6)]
        threads = []

        for engine_name, _, _ in requests:
            th = threading.Thread(
                target=self.mock_search_function,
                args=(engine_name, mock_results, mock_result_container),
                name=search_id,
            )
            th._timeout = False
            th._engine_name = engine_name
            th.start()
            threads.append(th)

        remaining_time = None
        for th in threads:
            if th.name == search_id:
                if remaining_time is None:
                    remaining_time = self.actual_timeout - (default_timer() - self.start_time)
                th.join(remaining_time)
                if th.is_alive():
                    th._timeout = True
                    self.result_container.add_unresponsive_engine(th._engine_name, 'timeout')
                    PROCESSORS[th._engine_name].logger.error('engine timeout')

        # Wait for all threads to finish, even if some have timed out
        for th in threads:
            th.join()

        # Copy the mock results to the actual result_container
        self.result_container = mock_result_container

    def mock_search_function(self, engine_name, mock_results, result_container):
        # This is a mock search function
        time.sleep(0.1)  # Simulate some processing time
        result_container.extend(engine_name, mock_results)

    def search_standard(self):
        """
        Update self.result_container, self.actual_timeout
        """
        # _get_requests() is defined elsewhere in this class.
        requests, self.actual_timeout = self._get_requests()

        cache_dir = 'cache'
        query_file_path = os.path.join(cache_dir, self.search_query.query)

        # send all search-request
        if requests:
            # Check if a cache file named after the query exists
            if os.path.isfile(query_file_path):
                self.search_multiple_requests2(requests)
            else:
                self.search_multiple_requests(requests)

        # return results, suggestions, answers and infoboxes
        return True
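Note that search_standard only tests whether the cache file exists: on a hit, search_multiple_requests2 still fills the ResultContainer from hard-coded mock results rather than reading query_file_path, so the actual file read described earlier remains to be wired in.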

  
