;;; -*- Mode: tdl; Coding: utf-8; indent-tabs-mode: nil; -*-

;;;
;;; following are a number of settings for the new (as of late 2008) token
;;; mapping and lexical filtering support in PET.
;;;

;;
;; first, the general format of chart mapping rules, much like MRS transfer.
;;
chart-mapping-context-path  := "+CONTEXT".
chart-mapping-input-path    := "+INPUT".
chart-mapping-output-path   := "+OUTPUT".
chart-mapping-position-path := "+POSITION".

;;
;; in lexical instantiation, the list of tokens activating a lexical entry (be
;; it native or generic) is unified into the lexical entry under this path.
;;
lexicon-tokens-path     := "TOKENS.+LIST".
lexicon-last-token-path := "TOKENS.+LAST".

;;
;; furthermore, for the various input formats, we need to declare how parts of
;; input descriptions correspond to the grammar-internal feature geometry; in
;; the YY input format, for example, token feature structures (aka input items
;; PET-internally) are created from various parts of the token description.
;;
token-form-path     := "+FORM".       ; [required] string for lexical lookup
token-id-path       := "+IDS".        ; [optional] list of external id(s)
token-from-path     := "+FROM".       ; [optional] surface start position
token-to-path       := "+TO".         ; [optional] surface end position
token-postags-path  := "+TNT.+TAGS".  ; [optional] list of POS tags
token-posprobs-path := "+TNT.+PRBS".  ; [optional] list of POS probabilities

;;
;; finally, declare TDL status values for the various new entity types.
;;
generic-lexentry-status-values       := generic-lex-entry.
token-mapping-rule-status-values     := token-mapping-rule.
lexical-filtering-rule-status-values := lexical-filtering-rule.
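
;;
;; purely for illustration (not part of the settings proper): a minimal,
;; hypothetical sketch of a token mapping rule stated over the geometry
;; declared above.  the rule name, its supertype |token_mapping_rule|, and the
;; +FORM values are invented for this sketch; an actual grammar defines its
;; own rule hierarchy.  +POSITION holds a string constraining the relative
;; chart positions of the +INPUT, +CONTEXT, and +OUTPUT items, here intended
;; to place the output token over the span of the single input token.
;;
;;   sample_tmr := token_mapping_rule &
;;   [ +INPUT    < [ +FORM "colour" ] >,
;;     +OUTPUT   < [ +FORM "color" ] >,
;;     +POSITION "O1@I1" ].
;;
;; similarly, a hypothetical token feature structure as it might be built from
;; a YY input item according to the path declarations above (the exact list
;; types, e.g. plain vs. difference lists, are fixed by the grammar's token
;; type, not by these settings):
;;
;;   [ +FORM "dogs",
;;     +IDS  < "42" >,
;;     +FROM "4",
;;     +TO   "8",
;;     +TNT [ +TAGS < "NNS" >, +PRBS < "1.0" > ] ]
;;
;; the status values declared above are attached to instances when the grammar
;; is loaded, typically through declarations along the following lines in the
;; grammar definition file read by flop (the file name "tmr" is a placeholder):
;;
;;   :begin :instance :status token-mapping-rule.
;;   :include "tmr".
;;   :end :instance.
;;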