Commit
feat: combine DocStringTokenInfo and TypeTokenInfo
insightmind committed Jan 10, 2022
1 parent b1f57bd commit 17da0fa
Showing 2 changed files with 38 additions and 56 deletions.
68 changes: 27 additions & 41 deletions LeanInk/Analysis/DataTypes.lean
@@ -44,38 +44,26 @@ instance : Positional Fragment where

/- Token -/
/--
- `TokenInfo` is the base structure of all `Token`.
- Inherently it describe the semantic info of the token, which can and should be used for semantic syntax
- highlighting.
+ `SemanticTokenInfo` describes the semantic info of the token, which can and should be used for semantic syntax highlighting.
-/
- structure TokenInfo extends Fragment where
+ structure SemanticTokenInfo extends Fragment where
semanticType: Option String := none
deriving Inhabited

/--
The `TypeTokenInfo` describes the metadata of a source text token that conforms
- to a specific type within our type system.
- E.g.: A variable token `p` might conform to type `Nat`
+ to a specific type within our type system. It may also describe just a docstring associated with the token.
+ We combine both these pieces of information together, as the docstring is naturally attached to the type.
+ E.g.: A variable token `p` might conform to type `Nat` and carry the docstring for `Nat`
-/
- structure TypeTokenInfo extends TokenInfo where
- type: String
+ structure TypeTokenInfo extends Fragment where
+ type: Option String
+ docString: Option String
deriving Inhabited

instance : Positional TypeTokenInfo where
headPos := (λ x => x.toFragment.headPos)
tailPos := (λ x => x.toFragment.tailPos)
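
For intuition, a minimal standalone sketch of what the combined record can now hold (the primed names are simplified stand-ins for the definitions above, not the real LeanInk types; positions and docstring text are made up):

structure Fragment' where
  headPos : String.Pos
  tailPos : String.Pos

structure TypeTokenInfo' extends Fragment' where
  type      : Option String
  docString : Option String

-- A token `p` spanning bytes 10-11 that elaborates to `Nat` and carries the docstring of `Nat`:
def sample : TypeTokenInfo' :=
  { headPos := ⟨10⟩,
    tailPos := ⟨11⟩,
    type := some "Nat",
    docString := some "The natural numbers." }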

- /--
- The `DocStringTokenInfo` describes the metadata of an available docstring of a source text token.
- In this case a source text token may be a function or structure with a docstring.
- -/
- structure DocStringTokenInfo extends TokenInfo where
- docString: String
- deriving Inhabited
-
- instance : Positional DocStringTokenInfo where
- headPos := (λ x => x.toFragment.headPos)
- tailPos := (λ x => x.toFragment.tailPos)
-
/--
A `Token` describes the metadata of a specific range of source text.
@@ -86,25 +74,25 @@
-/
inductive Token where
| type (info: TypeTokenInfo)
- | docString (info: DocStringTokenInfo)
+ | semantic (info: SemanticTokenInfo)
deriving Inhabited

- instance : ToString Token where -- TODO: Improve this
+ instance : ToString Token where
toString : Token -> String
| Token.type _ => "Type"
- | Token.docString _ => "DocString"
+ | Token.semantic _ => "Semantic"

namespace Token
def toFragment : Token -> Fragment
| type info => info.toFragment
- | docString info => info.toFragment
+ | semantic info => info.toFragment

def toTypeTokenInfo? : Token -> Option TypeTokenInfo
| type info => info
| _ => none

- def toDocStringTokenInfo? : Token -> Option DocStringTokenInfo
- | docString info => info
+ def toSemanticTokenInfo? : Token -> Option SemanticTokenInfo
+ | semantic info => info
| _ => none
end Token
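
A small usage sketch for the accessors above (assuming the declarations in this file are in scope; `describeToken` is a hypothetical helper, not part of this commit):

def describeToken (t : Token) : String :=
  match t.toTypeTokenInfo? with
  | some info =>
    let type := info.type.getD "<unknown>"
    s!"{toString t} : {type} (has docstring: {info.docString.isSome})"
  | none => toString t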

@@ -239,25 +227,23 @@ namespace TraversalFragment
let elabInfo := fragment.info
return ← findDocString? env elabInfo.elaborator <||> findDocString? env elabInfo.stx.getKind

- def genDocStringTokenInfo? (self : TraversalFragment) : AnalysisM (Option DocStringTokenInfo) := do
- match ← runMetaM (genDocString?) self with
- | some string => some { headPos := self.headPos, tailPos := self.tailPos, docString := string }
- | none => none
-
def genTypeTokenInfo? (self : TraversalFragment) : AnalysisM (Option TypeTokenInfo) := do
- match ← runMetaM (inferType?) self with
- | some string => some { headPos := self.headPos, tailPos := self.tailPos, type := string }
- | none => none
+ let mut docString : Option String := none
+ let mut type : Option String := none
+ let config ← read
+ if config.experimentalDocString then
+ docString ← runMetaM (genDocString?) self
+ if config.experimentalTypeInfo then
+ type ← runMetaM (inferType?) self
+ if type == none ∧ docString == none then
+ return none
+ else
+ return some { headPos := self.headPos, tailPos := self.tailPos, type := type, docString := docString }
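
The rule implemented above in isolation: a `TypeTokenInfo` is produced only when at least one of the two lookups succeeded, and the experimental flags decide which lookups run at all. A standalone model of that decision (illustrative only, not the repository code):

def combineMeta? (type? docString? : Option String) : Option (Option String × Option String) :=
  if type?.isNone && docString?.isNone then none
  else some (type?, docString?)

#eval combineMeta? none none                  -- none: no token is emitted
#eval combineMeta? (some "Nat") none          -- some (some "Nat", none)
#eval combineMeta? none (some "A docstring")  -- some (none, some "A docstring")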

def genTokens (self : TraversalFragment) : AnalysisM (List Token) := do
let mut tokens : List Token := []
- let config ← read
- if config.experimentalTypeInfo then
- if let some typeToken ← self.genTypeTokenInfo? then
- tokens := tokens.append [Token.type typeToken]
- if config.experimentalDocString then
- if let some docStringToken ← self.genDocStringTokenInfo? then
- tokens := tokens.append [Token.docString docStringToken]
+ if let some typeToken ← self.genTypeTokenInfo? then
+ tokens := tokens.append [Token.type typeToken]
return tokens

/- Sentence Generation -/
26 changes: 11 additions & 15 deletions LeanInk/Annotation/Alectryon.lean
@@ -104,25 +104,21 @@ instance : ToJson Fragment where
Token Generation
-/

- def genTypeInfo (name : String) (info : Analysis.TypeTokenInfo) : TypeInfo := { name := name, type := info.type }
-
- def genTypeInfo? (getContents : String.Pos -> String.Pos -> String) (tokens : List Analysis.TypeTokenInfo) : AnalysisM (Option TypeInfo) :=
- match Positional.smallest? tokens with
- | some token => do
+ def genTypeInfo? (getContents : String.Pos -> String.Pos -> String) (token : Analysis.TypeTokenInfo) : AnalysisM (Option TypeInfo) := do
+ match token.type with
+ | some type => do
let headPos := Positional.headPos token
let tailPos := Positional.tailPos token
- return genTypeInfo (getContents headPos tailPos) token
+ return some { name := (getContents headPos tailPos), type := type }
| none => none
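
Because the docstring now travels with the type token, `genTypeInfo?` inspects a single token and yields nothing when no type was recorded. A standalone model of that behaviour (simplified stand-in types, not the Alectryon definitions):

structure TypeInfo' where
  name : String
  type : String
  deriving Repr

def typeInfoOfToken? (name : String) (type? : Option String) : Option TypeInfo' :=
  type?.map fun type => { name := name, type := type }

#eval typeInfoOfToken? "p" (some "Nat")  -- some { name := "p", type := "Nat" }
#eval typeInfoOfToken? "p" none          -- none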

- def genDocString? (tokens : List Analysis.DocStringTokenInfo) : AnalysisM (Option String) :=
- match Positional.smallest? tokens with
- | some token => token.docString
- | none => none
-
- def genToken (token : Compound Analysis.Token) (contents : String) (getContents : String.Pos -> String.Pos -> String) : AnalysisM Token :=
- let typeInfo := genTypeInfo? getContents (token.getFragments.filterMap (λ x => x.toTypeTokenInfo?))
- let docString := genDocString? (token.getFragments.filterMap (λ x => x.toDocStringTokenInfo?))
- return { raw := contents, typeinfo := ← typeInfo, docstring := ← docString }
+ def genToken (token : Compound Analysis.Token) (contents : String) (getContents : String.Pos -> String.Pos -> String) : AnalysisM Token := do
+ let typeTokens := token.getFragments.filterMap (λ x => x.toTypeTokenInfo?)
+ match (Positional.smallest? typeTokens) with
+ | none => do
+ return { raw := contents }
+ | some token => do
+ return { raw := contents, typeinfo := ← genTypeInfo? getContents token, link := none, docstring := token.docString }
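
`genToken` now collects only the `TypeTokenInfo` fragments, lets `Positional.smallest?` pick one, and reads the docstring off that same token instead of consulting a second list. A rough standalone model of the selection step (it assumes `Positional.smallest?` prefers the narrowest span, which is an assumption about its implementation; all names below are made up):

structure Tok where
  headPos   : Nat
  tailPos   : Nat
  type      : Option String
  docString : Option String

def smallestBySpan? : List Tok → Option Tok
  | [] => none
  | t :: ts =>
    match smallestBySpan? ts with
    | none => some t
    | some best =>
      if t.tailPos - t.headPos ≤ best.tailPos - best.headPos then some t else some best

-- The narrowest token supplies both the type and the docstring for the output token:
#eval (smallestBySpan? [⟨0, 10, some "Prop", none⟩, ⟨2, 3, some "Nat", some "doc for Nat"⟩]).map (·.type)
-- some (some "Nat")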

def extractContents (offset : String.Pos) (contents : String) (head tail: String.Pos) : String :=
if head >= tail then
