From 9801be99e48a949b7f963ec74d656aaef51eaa10 Mon Sep 17 00:00:00 2001
From: Gustavo Goretkin
Date: Sun, 28 Feb 2021 15:56:22 -0500
Subject: [PATCH 1/6] Run `DocumenterTools.generate()`

---
 docs/.gitignore   |  2 ++
 docs/Project.toml |  2 ++
 docs/make.jl      | 15 +++++++++++++++
 docs/src/index.md |  3 +++
 4 files changed, 22 insertions(+)
 create mode 100644 docs/.gitignore
 create mode 100644 docs/Project.toml
 create mode 100644 docs/make.jl
 create mode 100644 docs/src/index.md

diff --git a/docs/.gitignore b/docs/.gitignore
new file mode 100644
index 0000000..a303fff
--- /dev/null
+++ b/docs/.gitignore
@@ -0,0 +1,2 @@
+build/
+site/
diff --git a/docs/Project.toml b/docs/Project.toml
new file mode 100644
index 0000000..dfa65cd
--- /dev/null
+++ b/docs/Project.toml
@@ -0,0 +1,2 @@
+[deps]
+Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
diff --git a/docs/make.jl b/docs/make.jl
new file mode 100644
index 0000000..a443ebc
--- /dev/null
+++ b/docs/make.jl
@@ -0,0 +1,15 @@
+using Documenter
+using Tokenize
+
+makedocs(
+    sitename = "Tokenize",
+    format = Documenter.HTML(),
+    modules = [Tokenize]
+)
+
+# Documenter can also automatically deploy documentation to gh-pages.
+# See "Hosting Documentation" and deploydocs() in the Documenter manual
+# for more information.
+#=deploydocs(
+    repo = ""
+)=#
diff --git a/docs/src/index.md b/docs/src/index.md
new file mode 100644
index 0000000..6eb8e8e
--- /dev/null
+++ b/docs/src/index.md
@@ -0,0 +1,3 @@
+# Tokenize.jl
+
+Documentation for Tokenize.jl

From ef811c98449b861ae3c4a93ef3394b53e3938bff Mon Sep 17 00:00:00 2001
From: Gustavo Goretkin
Date: Sun, 28 Feb 2021 16:42:11 -0500
Subject: [PATCH 2/6] Emit existing docstrings

---
 docs/make.jl      | 5 ++++-
 docs/src/index.md | 7 +++++++
 2 files changed, 11 insertions(+), 1 deletion(-)

diff --git a/docs/make.jl b/docs/make.jl
index a443ebc..154239f 100644
--- a/docs/make.jl
+++ b/docs/make.jl
@@ -4,7 +4,10 @@ using Tokenize
 makedocs(
     sitename = "Tokenize",
     format = Documenter.HTML(),
-    modules = [Tokenize]
+    modules = [Tokenize],
+    pages=[
+        "Home" => "index.md",
+    ],
 )
 
 # Documenter can also automatically deploy documentation to gh-pages.
diff --git a/docs/src/index.md b/docs/src/index.md
index 6eb8e8e..b0c0cbd 100644
--- a/docs/src/index.md
+++ b/docs/src/index.md
@@ -1,3 +1,10 @@
 # Tokenize.jl
 
 Documentation for Tokenize.jl
+
+```@autodocs
+Modules = [
+    Tokenize,
+    Tokenize.Lexers
+]
+```
\ No newline at end of file

From a4e0bca85bf5a662cfbf45384d917e83be77ffba Mon Sep 17 00:00:00 2001
From: Gustavo Goretkin
Date: Sun, 28 Feb 2021 16:59:18 -0500
Subject: [PATCH 3/6] Deploy docs with GitHub Actions

---
 .github/workflows/CI.yml | 18 +++++++++++++++++-
 docs/make.jl             | 11 +++++------
 2 files changed, 22 insertions(+), 7 deletions(-)

diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index 08e7ec3..8f6fdce 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -46,4 +46,20 @@ jobs:
       - uses: codecov/codecov-action@v1
         with:
           file: lcov.info
-
+  docs:
+    name: 'Documentation'
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - uses: julia-actions/setup-julia@v1
+        with:
+          version: '1'
+      - run: |
+          julia --project=docs -e '
+            using Pkg
+            Pkg.develop(PackageSpec(path=pwd()))
+            Pkg.instantiate()'
+      - run: julia --project=docs --color=yes docs/make.jl
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          DOCUMENTER_KEY: ${{ secrets.DOCUMENTER_KEY }}
diff --git a/docs/make.jl b/docs/make.jl
index 154239f..4dc5c8e 100644
--- a/docs/make.jl
+++ b/docs/make.jl
@@ -10,9 +10,8 @@ makedocs(
     ],
 )
 
-# Documenter can also automatically deploy documentation to gh-pages.
-# See "Hosting Documentation" and deploydocs() in the Documenter manual
-# for more information.
-#=deploydocs(
-    repo = ""
-)=#
+deploydocs(
+    repo = "github.com/JuliaLang/Tokenize.jl.git",
+    devbranch = "master",
+    push_preview = true,
+)

From ff3304813bf4738ae8b5a609b9b95095281fff34 Mon Sep 17 00:00:00 2001
From: Gustavo Goretkin
Date: Sun, 28 Feb 2021 17:25:34 -0500
Subject: [PATCH 4/6] Add docstrings for `Token` and `RawToken`

---
 docs/src/index.md | 3 ++-
 src/token.jl      | 6 ++++++
 2 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/docs/src/index.md b/docs/src/index.md
index b0c0cbd..04ba2ca 100644
--- a/docs/src/index.md
+++ b/docs/src/index.md
@@ -5,6 +5,7 @@ Documentation for Tokenize.jl
 ```@autodocs
 Modules = [
     Tokenize,
-    Tokenize.Lexers
+    Tokenize.Lexers,
+    Tokenize.Tokens,
 ]
 ```
\ No newline at end of file
diff --git a/src/token.jl b/src/token.jl
index c9f5051..f1a3944 100644
--- a/src/token.jl
+++ b/src/token.jl
@@ -48,6 +48,9 @@ TOKEN_ERROR_DESCRIPTION = Dict{TokenError, String}(
 
 abstract type AbstractToken end
 
+"""
+Each `Token` is represented by where it starts and ends, what string it contains and what type it is.
+"""
 struct Token <: AbstractToken
     kind::Kind
     # Offsets into a string or buffer
@@ -66,6 +69,9 @@ Token(kind, startposition, endposition, startbyte, endbyte, val, NO_ERR, false,
 end
 Token() = Token(ERROR, (0,0), (0,0), 0, 0, "", UNKNOWN, false, false)
 
+"""
+Like [`Tokens.Token`](@ref), but without the `val` field.
+""" struct RawToken <: AbstractToken kind::Kind # Offsets into a string or buffer From faa226aeddfe644bdd9e8fa9114fe7ed8d4222b6 Mon Sep 17 00:00:00 2001 From: Gustavo Goretkin Date: Sun, 28 Feb 2021 17:42:07 -0500 Subject: [PATCH 5/6] Set `CurrentModule` for submodule documentation --- docs/src/index.md | 11 ++++++++++- src/token.jl | 2 +- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/docs/src/index.md b/docs/src/index.md index 04ba2ca..23d6bb1 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -6,6 +6,15 @@ Documentation for Tokenize.jl Modules = [ Tokenize, Tokenize.Lexers, - Tokenize.Tokens, +] +``` + +```@meta +CurrentModule = Tokenize.Tokens +``` + +```@autodocs +Modules = [ + Tokens, ] ``` \ No newline at end of file diff --git a/src/token.jl b/src/token.jl index f1a3944..a91d3b3 100644 --- a/src/token.jl +++ b/src/token.jl @@ -70,7 +70,7 @@ end Token() = Token(ERROR, (0,0), (0,0), 0, 0, "", UNKNOWN, false, false) """ -Like [`Tokens.Token`](@ref), but without the `val` field. +Like [`Token`](@ref), but without the `val` field. """ struct RawToken <: AbstractToken kind::Kind From 4b7aa36da39a7408d4ddfc701cfb957e593a3a4c Mon Sep 17 00:00:00 2001 From: Gustavo Goretkin Date: Sun, 28 Feb 2021 17:49:57 -0500 Subject: [PATCH 6/6] @ref for all `Token` and `RawToken` --- README.md | 10 +++++----- src/lexer.jl | 14 +++++++------- src/token.jl | 2 +- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index f26f748..847554a 100644 --- a/README.md +++ b/README.md @@ -14,8 +14,8 @@ The goals of this package is to be #### Tokenization -The function `tokenize` is the main entrypoint for generating `Token`s. -It takes a string or a buffer and creates an iterator that will sequentially return the next `Token` until the end of string or buffer. The argument to `tokenize` can either be a `String`, `IOBuffer` or an `IOStream`. +The function `tokenize` is the main entrypoint for generating [`Token`](@ref)s. +It takes a string or a buffer and creates an iterator that will sequentially return the next [`Token`](@ref) until the end of string or buffer. The argument to `tokenize` can either be a `String`, `IOBuffer` or an `IOStream`. ```jl julia> collect(tokenize("function f(x) end")) @@ -30,11 +30,11 @@ julia> collect(tokenize("function f(x) end")) 1,18-1,17 ENDMARKER "" ``` -#### `Token`s +#### [`Token`](@ref)s -Each `Token` is represented by where it starts and ends, what string it contains and what type it is. +Each [`Token`](@ref) is represented by where it starts and ends, what string it contains and what type it is. -The API for a `Token` (non exported from the `Tokenize.Tokens` module) is. +The API for a [`Token`](@ref) (non exported from the `Tokenize.Tokens` module) is. ```julia startpos(t)::Tuple{Int, Int} # row and column where the token start diff --git a/src/lexer.jl b/src/lexer.jl index b9600ab..dc31e00 100644 --- a/src/lexer.jl +++ b/src/lexer.jl @@ -66,7 +66,7 @@ Lexer(str::AbstractString, T::Type{TT} = Token) where TT <: AbstractToken = Lexe Returns an `Iterable` containing the tokenized input. Can be reverted by e.g. `join(untokenize.(tokenize(x)))`. Setting `T` chooses the type of token -produced by the lexer (`Token` or `RawToken`). +produced by the lexer ([`Token`](@ref) or [`RawToken`](@ref)). """ tokenize(x, ::Type{Token}) = Lexer(x, Token) tokenize(x, ::Type{RawToken}) = Lexer(x, RawToken) @@ -104,7 +104,7 @@ end """ startpos(l::Lexer) -Return the latest `Token`'s starting position. 
+Return the latest [`Token`](@ref)'s starting position.
 """
 startpos(l::Lexer) = l.token_startpos
 
@@ -120,7 +120,7 @@ Base.seekstart(l::Lexer) = seek(l.io, l.io_startpos)
 """
     seek2startpos!(l::Lexer)
 
-Sets the lexer's current position to the beginning of the latest `Token`.
+Sets the lexer's current position to the beginning of the latest [`Token`](@ref).
 """
 seek2startpos!(l::Lexer) = seek(l, startpos(l))
 
@@ -157,7 +157,7 @@ Base.seek(l::Lexer, pos) = seek(l.io, pos)
 """
     start_token!(l::Lexer)
 
-Updates the lexer's state such that the next `Token` will start at the current
+Updates the lexer's state such that the next [`Token`](@ref) will start at the current
 position.
 """
 function start_token!(l::Lexer)
@@ -241,7 +241,7 @@ end
 """
     emit(l::Lexer, kind::Kind, err::TokenError=Tokens.NO_ERR)
 
-Returns a `Token` of kind `kind` with contents `str` and starts a new `Token`.
+Returns a [`Token`](@ref) of kind `kind` and starts a new [`Token`](@ref).
 """
 function emit(l::Lexer{IO_t,Token}, kind::Kind, err::TokenError = Tokens.NO_ERR) where IO_t
     suffix = false
@@ -290,7 +290,7 @@ end
 """
     emit_error(l::Lexer, err::TokenError=Tokens.UNKNOWN)
 
-Returns an `ERROR` token with error `err` and starts a new `Token`.
+Returns an `ERROR` token with error `err` and starts a new [`Token`](@ref).
 """
 function emit_error(l::Lexer, err::TokenError = Tokens.UNKNOWN)
     return emit(l, Tokens.ERROR, err)
@@ -300,7 +300,7 @@ end
 """
     next_token(l::Lexer)
 
-Returns the next `Token`.
+Returns the next [`Token`](@ref).
 """
 function next_token(l::Lexer, start = true)
     start && start_token!(l)
diff --git a/src/token.jl b/src/token.jl
index a91d3b3..13d9335 100644
--- a/src/token.jl
+++ b/src/token.jl
@@ -49,7 +49,7 @@ TOKEN_ERROR_DESCRIPTION = Dict{TokenError, String}(
 abstract type AbstractToken end
 
 """
-Each `Token` is represented by where it starts and ends, what string it contains and what type it is.
+Each [`Token`](@ref) is represented by where it starts and ends, what string it contains and what type it is.
 """
 struct Token <: AbstractToken
     kind::Kind
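For reference, a minimal sketch of the token API that the docstrings above describe, driven through `tokenize` as in the README excerpt. The accessors (`startpos`, `endpos`, `kind`, `untokenize`) are the ones listed in the README's API section; the exact import path and printed formatting are assumptions that may vary across package versions.

```julia
# Hedged sketch: walk the token stream of a small expression and print each
# token's (row, column) span, kind, and source text. Assumes the accessors
# below are reachable via `Tokenize.Tokens`, per the README's API list.
using Tokenize
using Tokenize.Tokens: startpos, endpos, kind, untokenize

for t in tokenize("function f(x) end")
    # startpos/endpos return (row, column) tuples; untokenize recovers the text.
    println(startpos(t), "-", endpos(t), "  ", kind(t), "  ", repr(untokenize(t)))
end
```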