mirror of
https://github.com/DeNNiiInc/dbgate.git
synced 2026-05-01 21:23:59 +00:00
search tokenizer
This commit is contained in:
@@ -67,24 +67,56 @@ export function filterNameCompoud(
|
||||
return 'none';
|
||||
}
|
||||
|
||||
export function tokenizeBySearchFilter(text: string, filter: string): { token: string; isMatch: boolean }[] {
|
||||
const tokens = filter.split(' ').map(x => x.trim());
|
||||
|
||||
const result = [];
|
||||
let lastMatch = 0;
|
||||
for (const token of tokens) {
|
||||
const index = text.indexOf(token, lastMatch);
|
||||
if (index < 0) {
|
||||
result.push({ token, isMatch: false });
|
||||
continue;
|
||||
export function tokenizeBySearchFilter(text: string, filter: string): { text: string; isMatch: boolean }[] {
|
||||
const camelTokens = [];
|
||||
const stdTokens = [];
|
||||
for (const token of filter.split(' ').map(x => x.trim())) {
|
||||
if (token.replace(/[A-Z]/g, '').length == 0) {
|
||||
camelTokens.push(token);
|
||||
} else {
|
||||
stdTokens.push(token.toUpperCase());
|
||||
}
|
||||
|
||||
result.push({ token: text.substring(lastMatch, index), isMatch: false });
|
||||
result.push({ token: text.substring(index, index + token.length), isMatch: true });
|
||||
lastMatch = index + token.length;
|
||||
}
|
||||
|
||||
result.push({ token: text.substring(lastMatch), isMatch: false });
|
||||
let res = [
|
||||
{
|
||||
text,
|
||||
isMatch: false,
|
||||
},
|
||||
];
|
||||
|
||||
return result;
|
||||
for (const token of stdTokens) {
|
||||
const nextres = [];
|
||||
for (const item of res) {
|
||||
const index = item.text?.toUpperCase().indexOf(token);
|
||||
if (index < 0) {
|
||||
nextres.push(item);
|
||||
} else {
|
||||
nextres.push({ text: item.text.substring(0, index), isMatch: false });
|
||||
nextres.push({ text: item.text.substring(index, index + token.length), isMatch: true });
|
||||
nextres.push({ text: item.text.substring(index + token.length), isMatch: false });
|
||||
}
|
||||
}
|
||||
res = nextres;
|
||||
}
|
||||
|
||||
return res;
|
||||
|
||||
// const result = [];
|
||||
// let lastMatch = 0;
|
||||
// for (const token of tokens) {
|
||||
// const index = text.indexOf(token, lastMatch);
|
||||
// if (index < 0) {
|
||||
// result.push({ token, isMatch: false });
|
||||
// continue;
|
||||
// }
|
||||
|
||||
// result.push({ token: text.substring(lastMatch, index), isMatch: false });
|
||||
// result.push({ token: text.substring(index, index + token.length), isMatch: true });
|
||||
// lastMatch = index + token.length;
|
||||
// }
|
||||
|
||||
// result.push({ token: text.substring(lastMatch), isMatch: false });
|
||||
|
||||
// return result;
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
import CheckboxField from '../forms/CheckboxField.svelte';
|
||||
import { copyTextToClipboard } from '../utility/clipboard';
|
||||
import { showSnackbarSuccess } from '../utility/snackbar';
|
||||
import TokenizedFilteredText from '../widgets/TokenizedFilteredText.svelte';
|
||||
|
||||
const dispatch = createEventDispatcher();
|
||||
|
||||
@@ -134,7 +135,7 @@
|
||||
{#if colorMark}
|
||||
<FontIcon style={`color:${colorMark}`} icon="icon square" />
|
||||
{/if}
|
||||
{title}
|
||||
<TokenizedFilteredText text={title} {filter} />
|
||||
{#if statusIconBefore}
|
||||
<span class="status">
|
||||
<FontIcon icon={statusIconBefore} />
|
||||
@@ -156,7 +157,7 @@
|
||||
{/if}
|
||||
{#if extInfo}
|
||||
<span class="ext-info">
|
||||
{extInfo}
|
||||
<TokenizedFilteredText text={extInfo} {filter} />
|
||||
</span>
|
||||
{/if}
|
||||
{#if onPin}
|
||||
|
||||
@@ -274,7 +274,9 @@
|
||||
? isExpandedBySearch
|
||||
? SubProcedureLineList
|
||||
: SubProcedureParamList
|
||||
: SubTableColumnList}
|
||||
: isExpandedBySearch && (data.objectTypeField == 'views' || data.objectTypeField == 'matviews')
|
||||
? SubProcedureLineList
|
||||
: SubTableColumnList}
|
||||
isExpandable={data =>
|
||||
data.objectTypeField == 'tables' ||
|
||||
data.objectTypeField == 'views' ||
|
||||
|
||||
26
packages/web/src/widgets/TokenizedFilteredText.svelte
Normal file
@@ -0,0 +1,26 @@
|
||||
<script lang="ts">
  import { tokenizeBySearchFilter } from 'dbgate-tools';

  // Text to render; parts matched by `filter` are highlighted.
  export let text = '';
  export let filter = '';

  // Recompute segments whenever text or filter changes; no filter => plain text.
  $: segments = filter ? tokenizeBySearchFilter(text, filter) : null;
</script>

{#if segments}
  {#each segments as segment}
    {#if !segment.isMatch}
      {segment.text}
    {:else}
      <span class="highlight">{segment.text}</span>
    {/if}
  {/each}
{:else}
  {text}
{/if}

<style>
  .highlight {
    background-color: var(--theme-bg-orange);
  }
</style>
|
||||
Reference in New Issue
Block a user