geminispace.info

gemini search engine
git clone https://git.clttr.info/geminispace.info.git

commit ded0c0ca62fe84b119a4325a53331408328e389d
parent 39010248c195bce521a2aaf59cc3a72a7523125a
Author: Natalie Pendragon <natpen@natpen.net>
Date:   Fri, 24 Jul 2020 06:43:53 -0400

[serve] Save searches to db

Diffstat:
M gus/lib/db_model.py | 11 +++++++++--
M serve/models.py     |  4 +++-
2 files changed, 12 insertions(+), 3 deletions(-)

diff --git a/gus/lib/db_model.py b/gus/lib/db_model.py
@@ -14,7 +14,7 @@ def init_db(filename=":memory:"):
     """
     Bind an SQLite database to the Peewee ORM models.
     """
-    models = [Page, Link, Crawl]
+    models = [Crawl, Link, Page, Search]
     db = SqliteDatabase(filename)
     db.bind(models)
     db.create_tables(models)
@@ -59,7 +59,6 @@ class Link(Model):
     def get_is_cross_host_like(from_resource, to_resource):
         return from_resource.normalized_host_like != to_resource.normalized_host_like
 
-
 class Crawl(Model):
     """
     Attempts to crawl a page.
@@ -70,3 +69,11 @@ class Crawl(Model):
     error_message = TextField(null=True)
     is_different = BooleanField()
     timestamp = DateTimeField()
+
+class Search(Model):
+    """
+    A log of performed searches
+    """
+
+    query = TextField()
+    timestamp = DateTimeField()
diff --git a/serve/models.py b/serve/models.py
@@ -1,4 +1,5 @@
 import re
+from datetime import datetime
 from urllib.parse import quote
 
 from peewee import fn, SqliteDatabase
@@ -6,7 +7,7 @@
 from whoosh import highlight, qparser
 from whoosh.index import open_dir
 from . import constants
-from gus.lib.db_model import init_db, Page, Link, Crawl
+from gus.lib.db_model import init_db, Crawl, Link, Page, Search
 from gus.lib.gemini import GeminiResource
 from gus.lib.index_statistics import compute_index_statistics, load_all_statistics_from_file
 from gus.lib.misc import bytes2human
@@ -41,6 +42,7 @@ class GUS():
 
     def search_index(self, query, requested_page):
+        Search.create(query=query, timestamp=datetime.utcnow())
         query = self.query_parser.parse(query)
         results = self.searcher.search_page(query, requested_page, pagelen=10)
         return (
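
With this change, every query passed to search_index is recorded as a Search row before it is parsed, so the log can later be read back with an ordinary Peewee query. A minimal sketch of doing so (the database file name and the reporting query are illustrative assumptions, not part of this commit):

from gus.lib.db_model import init_db, Search

# init_db binds the models (now including Search) and creates their tables.
init_db("gus.sqlite")  # hypothetical file name; init_db defaults to ":memory:"

# Ten most recent logged searches, newest first.
recent = Search.select().order_by(Search.timestamp.desc()).limit(10)
for s in recent:
    print(s.timestamp, s.query)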