highlite

Source highlighter for programming or markup languages. Currently only a few languages are supported; others may be added later. The interface supports one language nested inside another.

Note: To use this module, import packages/docutils/highlite.

You can use this to build your own syntax highlighting; the following example shows how:

import packages/docutils/highlite

let code = """for x in $int.high: echo x.ord mod 2 == 0"""
var toknizr: GeneralTokenizer
initGeneralTokenizer(toknizr, code)
while true:
  getNextToken(toknizr, langNim)
  case toknizr.kind
  of gtEof: break  # End Of File (or string)
  of gtWhitespace:
    echo gtWhitespace # Maybe you want "visible" whitespace?
    echo substr(code, toknizr.start, toknizr.length + toknizr.start - 1)
  of gtOperator:
    echo gtOperator # Maybe you want operators to use a specific color?
    echo substr(code, toknizr.start, toknizr.length + toknizr.start - 1)
  # of gtSomeSymbol: syntaxHighlight("Comic Sans", "bold", "99px", "pink")
  else:
    echo toknizr.kind # All the kinds of tokens can be processed here.
    echo substr(code, toknizr.start, toknizr.length + toknizr.start - 1)

The proc getSourceLanguage returns the language enum value for a given name; matching ignores case, as the example shows:

for l in ["C", "c++", "jAvA", "Nim", "c#"]: echo getSourceLanguage(l)
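
Unrecognized names fall back to langNone, so callers can guard against unsupported input. A minimal sketch, assuming you simply skip highlighting in that case:

import packages/docutils/highlite

let lang = getSourceLanguage("COBOL")  # not supported, yields langNone
if lang == langNone:
  echo "unsupported language, no highlighting applied"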

Types

TokenClass = enum
  gtEof, gtNone, gtWhitespace, gtDecNumber, gtBinNumber, gtHexNumber, gtOctNumber,
  gtFloatNumber, gtIdentifier, gtKeyword, gtStringLit, gtLongStringLit, gtCharLit,
  gtEscapeSequence, gtOperator, gtPunctuation, gtComment, gtLongComment,
  gtRegularExpression, gtTagStart, gtTagEnd, gtKey, gtValue, gtRawData, gtAssembler,
  gtPreprocessor, gtDirective, gtCommand, gtRule, gtHyperlink, gtLabel, gtReference,
  gtOther
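
Related token classes are often styled identically when rendering; Nim enum sets make that kind of grouping cheap to express. A minimal sketch (the grouping below is an illustrative assumption, not part of the module):

import packages/docutils/highlite

const NumberClasses = {gtDecNumber, gtBinNumber, gtHexNumber,
                       gtOctNumber, gtFloatNumber}

proc isNumberToken(kind: TokenClass): bool =
  ## True for any numeric literal token class.
  kind in NumberClasses

echo isNumberToken(gtHexNumber)  # true
echo isNumberToken(gtKeyword)    # false
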
GeneralTokenizer = object of RootObj
  kind*: TokenClass
  start*, length*: int
  buf: cstring
  pos: int
  state: TokenClass
SourceLanguage = enum
  langNone, langNim, langCpp, langCsharp, langC, langJava, langYaml

Constants

sourceLanguageToStr: array[SourceLanguage, string] = ["none", "Nim", "C++", "C#", "C",
    "Java", "Yaml"]
tokenClassToStr: array[TokenClass, string] = ["Eof", "None", "Whitespace", "DecNumber",
    "BinNumber", "HexNumber", "OctNumber", "FloatNumber", "Identifier", "Keyword",
    "StringLit", "LongStringLit", "CharLit", "EscapeSequence", "Operator",
    "Punctuation", "Comment", "LongComment", "RegularExpression", "TagStart",
    "TagEnd", "Key", "Value", "RawData", "Assembler", "Preprocessor", "Directive",
    "Command", "Rule", "Hyperlink", "Label", "Reference", "Other"]
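
Both arrays are indexed directly by their enum type, so turning an enum value back into a display string is a plain lookup. A minimal sketch:

import packages/docutils/highlite

echo sourceLanguageToStr[langCpp]    # C++
echo tokenClassToStr[gtStringLit]    # StringLit

# List the real languages (skipping langNone) by their display names.
for lang in succ(langNone) .. high(SourceLanguage):
  echo sourceLanguageToStr[lang]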

Procedures

proc getSourceLanguage(name: string): SourceLanguage {.raises: [], tags: [].}
proc initGeneralTokenizer(g: var GeneralTokenizer; buf: cstring) {.raises: [], tags: [].}
proc initGeneralTokenizer(g: var GeneralTokenizer; buf: string) {.raises: [], tags: [].}
proc deinitGeneralTokenizer(g: var GeneralTokenizer) {.raises: [], tags: [].}
proc getNextToken(g: var GeneralTokenizer; lang: SourceLanguage) {.raises: [], tags: [].}
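
Taken together, these procs form a small lifecycle: initialize the tokenizer over a buffer, pull tokens until gtEof, then deinitialize. A minimal sketch that tallies tokens per class (the tally itself is illustrative, not part of the module):

import packages/docutils/highlite
import std/tables

var counts = initCountTable[TokenClass]()
var tok: GeneralTokenizer
initGeneralTokenizer(tok, "echo 1 + 2  # a comment")
while true:
  getNextToken(tok, langNim)
  if tok.kind == gtEof: break
  counts.inc tok.kind
deinitGeneralTokenizer(tok)

for kind, n in counts:
  echo tokenClassToStr[kind], ": ", n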