Mirror of https://github.com/c9fe/22120.git, synced 2024-11-10 04:52:43 +01:00
"Up todo"
This commit is contained in:
parent
4c942dcb1a
commit
2bb0de4058
docs/todo (18)
@@ -1,6 +1,15 @@
- fix the issue with URLs that are too long on some searches (e.g. the 'tom delaur' query: why is the Chris Hemsworth URL too long?)
- change the name to Ex Libris? exlibris?
- implement the trigram index run segmenter (see the index sketch after this hunk)
- complete snippet generation
- improve title-based boosting of result ranks
- save the trigram index to disk
- let's not reindex unless the contentSignature has changed (see the signature sketch after this hunk)
- result paging
- publish button
- We need to avoid opening other localhost instances if we already have one open
- We need to reload on localhost:22120 if we open with that
- ensure we get the page text to index once it has actually loaded (we should call again later, or add a mutation observer and update on mutation; see the observer sketch after this hunk)
- then send a console message to the controller ({textChanged}) to trigger indexURL again
- throttle how often this can occur per URL
- search improvements
- use different minimum-score options for different sources (noticed the URL did not match or highlight 'meghan' for the Hello magazine result, even though the query contained 'megan' and did match and highlight 'queen' in the URL)
- get snippets earlier (before rendering in the library server) and use them to add to the ranking signal
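The trigram items above are bare notes. As a point of reference, here is a minimal sketch (function names, run segmentation, and the on-disk format are assumptions, not the project's actual code) of segmenting text into word runs, indexing each run's trigrams by doc id, and saving the posting map to disk:

```js
// Hypothetical sketch only: a minimal trigram index that segments text into
// lowercase word runs, indexes each run's trigrams against a doc id, and
// persists the posting map to disk.
import fs from 'fs';

export function buildTrigramIndex(docs /* Map<id, text> */) {
  const postings = new Map(); // trigram -> Set of doc ids
  for (const [id, text] of docs) {
    // segment into runs of letters/digits (a stand-in for the run segmenter)
    const runs = text.toLowerCase().match(/[\p{L}\p{N}]+/gu) || [];
    for (const run of runs) {
      for (let i = 0; i <= run.length - 3; i++) {
        const tri = run.slice(i, i + 3);
        if (!postings.has(tri)) postings.set(tri, new Set());
        postings.get(tri).add(id);
      }
    }
  }
  return postings;
}

export function saveIndex(postings, path) {
  // serialize Sets as arrays so the index round-trips through JSON
  const plain = Object.fromEntries(
    [...postings].map(([tri, ids]) => [tri, [...ids]])
  );
  fs.writeFileSync(path, JSON.stringify(plain));
}
```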
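For the contentSignature item, a minimal sketch of skipping reindexing when a hash of the extracted page text has not changed; the real contentSignature may be computed differently, and the helper name here is hypothetical:

```js
// Hypothetical sketch: skip reindexing when the page's content signature is
// unchanged. This just hashes the extracted text and compares it to the last
// value seen for the same URL.
import {createHash} from 'crypto';

const lastSignature = new Map(); // url -> last content signature

export function shouldReindex(url, pageText) {
  const signature = createHash('sha256').update(pageText).digest('hex');
  if (lastSignature.get(url) === signature) return false; // nothing changed
  lastSignature.set(url, signature);
  return true;
}
```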
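For the load/mutation items, a hypothetical page-side sketch: observe DOM mutations, throttle per page, and emit a console message shaped like the {textChanged} note so the controller could trigger indexURL again. The controller wiring is assumed (e.g. listening for CDP console events such as Runtime.consoleAPICalled):

```js
// Hypothetical page-side sketch: watch for DOM mutations and, at most once per
// THROTTLE_MS per page, emit a console message the controller could pick up to
// re-run indexURL. The {textChanged} shape follows the todo note; everything
// else here is assumed.
const THROTTLE_MS = 10000;
let lastNotified = 0;

const observer = new MutationObserver(() => {
  const now = Date.now();
  if (now - lastNotified < THROTTLE_MS) return; // throttle per URL/page
  lastNotified = now;
  console.info(JSON.stringify({textChanged: true, url: location.href}));
});

observer.observe(document.documentElement, {
  childList: true,
  subtree: true,
  characterData: true
});
```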
@@ -11,8 +20,3 @@
- Create instant search, or at least instant queries (search over previous queries, not necessarily results; see the sketch after this hunk)
- An error in full-text search can corrupt the index and make it unrecoverable... we need to guard against this (see the atomic-save sketch after this hunk)
- This is still happening: sometimes the index is not saved, even on a normal, error-free restart. Unknown why.
- We need to reload on localhost:22120 if we open with that
- We need to avoid opening other localhost instances if we already have one open
- ensure we get the page text to index once it has actually loaded (we should call again later, or add a mutation observer and update on mutation)
- then send a console message to the controller ({textChanged}) to trigger indexURL again
- throttle how often this can occur per URL
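For the instant-queries item, a small sketch (storage and UI wiring are assumptions) that remembers past queries and filters them by prefix on each keystroke, so suggestions can appear without running a full search:

```js
// Hypothetical sketch for "instant queries": keep a small history of previous
// queries and filter it by prefix as the user types.
const previousQueries = [];

export function rememberQuery(q) {
  const query = q.trim().toLowerCase();
  if (query && !previousQueries.includes(query)) previousQueries.push(query);
}

export function instantQueries(prefix, limit = 8) {
  const p = prefix.trim().toLowerCase();
  if (!p) return [];
  return previousQueries.filter(q => q.startsWith(p)).slice(0, limit);
}
```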
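For the index-corruption items, one common guard is an atomic save: write to a temporary file, keep the previous index as a backup, then rename into place, so a crash mid-write cannot leave a half-written index. A minimal sketch with illustrative paths and names:

```js
// Hypothetical sketch of guarding the on-disk index against corruption: write
// to a temp file first, keep the previous index as a .bak copy, and only then
// rename the temp file into place. A crash mid-write leaves the old index (or
// its backup) intact.
import fs from 'fs';

export function saveIndexSafely(indexPath, serializedIndex) {
  const tmpPath = indexPath + '.tmp';
  fs.writeFileSync(tmpPath, serializedIndex);        // may fail without harm
  if (fs.existsSync(indexPath)) {
    fs.copyFileSync(indexPath, indexPath + '.bak');  // keep a rollback copy
  }
  fs.renameSync(tmpPath, indexPath);                 // atomic on the same volume
}
```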
@@ -9,6 +9,7 @@ export const TEXT_NODE = 3;
export const MAX_HIGHLIGHTABLE_LENGTH = 0; /* 0 is no max length for highlight */
export const MAX_TITLE_LENGTH = 140;
export const MAX_URL_LENGTH = 140;
+export const MAX_HEAD = 140;

/* text nodes inside these elements are ignored */
export const FORBIDDEN_TEXT_PARENT = new Set([
@@ -2,7 +2,10 @@ import path from 'path';
import express from 'express';

import args from './args.js';
-import {MAX_HIGHLIGHTABLE_LENGTH, DEBUG, say, sleep, APP_ROOT} from './common.js';
+import {
+  MAX_HEAD, MAX_HIGHLIGHTABLE_LENGTH, DEBUG,
+  say, sleep, APP_ROOT
+} from './common.js';
import Archivist from './archivist.js';
import {highlight} from './highlighter.js';

@@ -179,9 +182,13 @@ function SearchResultView({results, query, HL}) {
    ${
      results.map(({snippet, url,title,id}) => `
        <li>
-         ${DEBUG ? id + ':' : ''} <a target=_blank href=${url}>${HL.get(id)?.title||title||url}</a>
+         ${DEBUG ? id + ':' : ''} <a target=_blank href=${url}>${
+           HL.get(id)?.title||(title||url||'').slice(0, MAX_HEAD)
+         }</a>
          <br>
-         <small class=url>${(HL.get(id)?.url||url)}</small>
+         <small class=url>${
+           HL.get(id)?.url||(url||'').slice(0, MAX_HEAD)
+         }</small>
          <p>${snippet}</p>
        </li>
      `).join('\n')