commit f071d9d565ab40619850746c794df92609e306d0
parent 7093ad2aa8b298e8648f2c40381d1e288ef50605
Author: Natalie Pendragon <natpen@natpen.net>
Date: Mon, 11 May 2020 14:45:53 -0400
[serve] Stop hard-wrapping content
The Gemini spec was recently updated such that content creators are
now requested to NOT hard-wrap their content, so this commit updates
GUS to comply!
Diffstat:
1 file changed, 7 insertions(+), 18 deletions(-)
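The Gemini text format treats each non-preformatted line as one logical paragraph and leaves wrapping to the client, which is why hard-wrapped source lines now render poorly. As a rough sketch of the idea only (not part of this commit; unwrap_paragraphs is a hypothetical helper), the following joins hard-wrapped prose back into single logical lines, which is what the diff below does by hand to GUS's string literals:

# Sketch, not part of GUS: join hard-wrapped prose into the single logical
# lines Gemini clients wrap themselves. Headings, list items, links, quotes
# and blank lines are passed through untouched for simplicity.
def unwrap_paragraphs(lines):
    out, buf = [], []
    for line in lines:
        stripped = line.strip()
        if not stripped or stripped.startswith(("#", "*", "=>", ">", "```")):
            if buf:
                out.append(" ".join(buf))
                buf = []
            out.append(line)
        else:
            buf.append(stripped)
    if buf:
        out.append(" ".join(buf))
    return out

# Example: the three wrapped lines removed in the diff collapse into one.
wrapped = [
    "GUS has many features to help make a relevant",
    "index. It will only index content within Geminispace,",
    "and will not index links out to other protocols.",
]
print(unwrap_paragraphs(wrapped)[0])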
diff --git a/gus/serve.py b/gus/serve.py
@@ -40,15 +40,13 @@ def _render_index_statistics():
d = [
"",
"## Overall",
- "These figures are reflective of the aggregate size of Geminispace",
- "when the current index was generated on {}.".format(last_index_statistics["index_modification_time"]),
+ "These figures are reflective of the aggregate size of Geminispace when the current index was generated on {}.".format(last_index_statistics["index_modification_time"]),
"",
"Page Count : {:>5}".format(last_index_statistics["page_count"]),
"Domain Count : {:>5}".format(last_index_statistics["domain_count"]),
"",
"## By Content Type",
- "These figures represent the number of pages seen per content type",
- "when the current index was generated on {}.".format(last_index_statistics["index_modification_time"]),
+ "These figures represent the number of pages seen per content type when the current index was generated on {}.".format(last_index_statistics["index_modification_time"]),
"",
]
for pair in last_index_statistics["content_type_frequencies"]:
@@ -73,30 +71,21 @@ def index(request):
data = _render_header()
data.extend([
"",
- "GUS has many features to help make a relevant",
- "index. It will only index content within Geminispace,",
- "and will not index links out to other protocols.",
+ "GUS has many features to help make a relevant index. It will only index content within Geminispace, and will not index links out to other protocols.",
"",
- "To control crawling of your site, you can use a",
- "robots.txt file, Place it in your document root",
- "directory such that a request for \"robots.txt\" will",
- "fetch it.",
+ "To control crawling of your site, you can use a robots.txt file, Place it in your document root directory such that a request for \"robots.txt\" will fetch it.",
"",
"GUS obeys User-agent of \"gus\" and \"*\".",
"",
- "If you have questions about or ideas for GUS, please",
- "email me at natpen@natpen.net.",
+ "If you have questions about or ideas for GUS, please email me at natpen@natpen.net.",
"",
"# Advanced Searching",
"",
- "To improve the quality of your search results, you can",
- "use the following syntax to limit your results to specific",
- "content types:",
+ "To improve the quality of your search results, you can use the following syntax to limit your results to specific content types:",
"",
"<a search query> content_type:<a content type>",
"",
- "You can specify a general content type, like `audio`, or",
- "a more specific one like `audio/mp3`.",
+ "You can specify a general content type, like `audio`, or a more specific one like `audio/mp3`.",
"",
"Here are some useful content types to get started with:",
"- application/pdf",