-    def _compute_partial_tokens(self, name: str) -> List[int]:
-        """ Normalize the given term, split it into partial words and return
-            the token list for them.
-        """
-        assert self.conn is not None
-        norm_name = self._search_normalized(name)
-
-        tokens = []
-        need_lookup = []
-        for partial in norm_name.split():
-            token = self._cache.partials.get(partial)
-            if token:
-                tokens.append(token)
-            else:
-                need_lookup.append(partial)
-
-        if need_lookup:
-            with self.conn.cursor() as cur:
-                cur.execute("""SELECT word, getorcreate_partial_word(word)
-                               FROM unnest(%s) word""",
-                            (need_lookup, ))
-
-                for partial, token in cur:
-                    assert token is not None
-                    tokens.append(token)
-                    self._cache.partials[partial] = token
-
-        return tokens
-
-
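For context, the deleted helper follows a cache-then-batch pattern: partials already seen are served from a per-connection dict, and all cache misses are resolved in a single `unnest()` query rather than one query per word. Below is a minimal standalone sketch of that pattern. The function name `partial_tokens`, the plain `cache` dict, and the psycopg2 connection are illustrative assumptions, not part of this codebase; `getorcreate_partial_word()` is the SQL function from the deleted code.

```python
from typing import Dict, List

import psycopg2  # assumed driver; it adapts Python lists to SQL arrays


def partial_tokens(conn: "psycopg2.extensions.connection",
                   cache: Dict[str, int],
                   partials: List[str]) -> List[int]:
    # Collect the words whose token IDs are not cached yet.
    misses = [p for p in partials if p not in cache]

    if misses:
        with conn.cursor() as cur:
            # Resolve all misses in one round-trip: unnest() turns the
            # bound array into rows, one lookup per unknown word.
            cur.execute("""SELECT word, getorcreate_partial_word(word)
                             FROM unnest(%s) word""",
                        (misses, ))
            # The cursor yields (word, token) pairs, which dict.update()
            # accepts directly.
            cache.update(cur)

    # Every partial is now cached, so plain indexing is safe.
    return [cache[p] for p in partials]
```

The batching matters because the function runs once per name during indexing; folding all misses into one `unnest()` query keeps the per-name cost at a single round-trip in the worst case and zero once the cache is warm.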