;;; -*- Mode: TDL -*-
;;; Authors:
;;;   2004-2006 Berthold Crysmann
;;; Copyright: DFKI GmbH

:include "general".

;; name of the type containing the quick check structure
qc-structure-unif := $qc_unif_set_pack.

;; MaxEnt parse selection model
sm := "conll.mem".

;; rules that may only apply to edges spanning the entire input
spanning-only-rules :=
  $head-filler-get-rpunct-msgpred-rule
  $imp-get-rpunct-msgpred
  $coord-get-rpunct-msgpred-rule.

;; enable ambiguity packing; the restrictor lists the features removed from
;; feature structures when computing packing equivalence
packing := true.

packing-restrictor :=
  RELS HCONS SLASH _CAS V1 ALTKEY SELF ACTIVE INERT LEE LOR
  --RELH --RELI --UCOMPS IBOT ARG2 ARG3 ARG4.

;; map STTS POS tags (e.g. as assigned by TnT) to generic lexical entry
;; templates for unknown words
#|
posmapping :=
  NN    $generic_count_noun
  NN    $generic_title_noun
  NE    $generic_name
  ADJA  $generic_adja
  ADJD  $generic_adjd
  ADV   $generic_adverb
  CARD  $generic_number
.
|#

posmapping :=
  NN    $generic_count_noun
  NN    $generic_mass_noun
  NE    $generic_name
  ADJA  $generic_adja
  ADJD  $generic_adjd
  ADJD  $generic_adverb
  ADV   $generic_adverb
  CARD  $generic_number
  CARD  $generic_ordinal
  CARD  $generic_year
  VVFIN $generic_itr_verb
  VVFIN $generic_tr_verb
  VVFIN $generic_scomp_verb
  VVIMP $generic_itr_verb
  VVIMP $generic_tr_verb
  VVIMP $generic_scomp_verb
  VVINF $generic_itr_verb
  VVINF $generic_tr_verb
  VVINF $generic_scomp_verb
  VVPP  $generic_itr_verb
  VVPP  $generic_tr_verb
  VVPP  $generic_scomp_verb
  VVINF $generic_itr_part_verb
  VVINF $generic_tr_part_verb
  VVINF $generic_scomp_part_verb
  VVIZU $generic_itr_part_verb
  VVIZU $generic_tr_part_verb
  VVIZU $generic_scomp_part_verb
.

;;
;; Tokenizer settings
;;
case-sensitive.     ; be case-sensitive in the string and yy tokenizers
trivial-tokenizer.  ; for the Lingo-style tokenizer "string"

;;;
;;; the following are a number of settings for the new (as of late 2008) token
;;; mapping and lexical filtering support in PET.
;;;

;;
;; first, the general format of chart mapping rules, much like MRS transfer
;; rules.  (a schematic, commented-out example rule appears at the end of
;; this file.)
;;
chart-mapping-context-path  := "+CONTEXT".
chart-mapping-input-path    := "+INPUT".
chart-mapping-output-path   := "+OUTPUT".
chart-mapping-position-path := "+POSITION".

;;
;; in lexical instantiation, the list of tokens activating a lexical entry (be
;; it native or generic) is unified into the lexical entry under this path.
;;
#|
lexicon-tokens-path := "TOKENS.+LIST".
lexicon-last-token-path := "TOKENS.+LAST".
|#

;;
;; furthermore, for the various input formats, we need to declare how parts of
;; input descriptions correspond to the grammar-internal feature geometry; in
;; the YY input format, for example, token feature structures (aka input items
;; PET-internally) are created from various parts of the token description.
;; (a schematic YY token is sketched at the end of this file.)
;;
token-form-path     := "+FORM".       ; [required] string for lexical lookup
token-id-path       := "+ID".         ; [optional] list of external ids
token-from-path     := "+FROM".       ; [optional] surface start position
token-to-path       := "+TO".         ; [optional] surface end position
token-postags-path  := "+TNT.+TAGS".  ; [optional] list of POS tags
token-posprobs-path := "+TNT.+PRBS".  ; [optional] list of POS probabilities

;;
;; finally, declare the TDL status values for the various new entity types
;;
token-mapping-rule-status-values := token-mapping-rule.
lexical-filtering-rule-status-values := lexical-filtering-rule.
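
;;
;; for these status values to take effect, the grammar's flop source must
;; declare its chart mapping rule instances under the matching status.  a
;; minimal sketch, assuming a hypothetical file `tmr.tdl' holding the token
;; mapping rules (the :begin/:end :instance directives are flop TDL, not
;; settings-file syntax):
;;
;;   :begin :instance :status token-mapping-rule.
;;   :include "tmr".
;;   :end :instance.
;;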
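
;;
;; a schematic chart mapping rule, illustrating how the four paths declared
;; above (+CONTEXT, +INPUT, +OUTPUT, +POSITION) are used.  this is a
;; hypothetical sketch: the rule name, the supertype `token_mapping_rule', and
;; the regex/substitution details are assumptions about the grammar's TDL
;; sources, not definitions made in this file.
;;
#|
strip_asterisk_tmr := token_mapping_rule &
[ +CONTEXT < >,                           ; tokens required but left untouched
  +INPUT   < [ +FORM "^\*(.+)$" ] >,      ; token consumed: form matches regex
  +OUTPUT  < [ +FORM "${I1:+FORM:1}" ] >, ; token added: capture group 1 of input 1
  +POSITION "O1@I1" ].                    ; output token spans the input token
|#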
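
;;
;; likewise, a schematic YY-format token and the token feature structure PET
;; builds from it via the token paths declared above.  an illustrative sketch
;; only: the exact YY field inventory and the encoding of values (strings vs.
;; integers) depend on the PET version in use.
;;
;;   (42, 0, 1, <0:4>, 1, "dogs" "dogs", 0, "null", "NN" 1.0)
;;
;;   [ +FORM "dogs", +ID < "42" >, +FROM "0", +TO "4",
;;     +TNT [ +TAGS < "NN" >, +PRBS < "1.0" > ] ]
;;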