Deny unreachable-pub

It's very useful when `pub` is equivalent to "this is the crate's public
API", so let's enforce this!

Ideally, we should enforce it for local `cargo test` as well, and not
only during CI, but that needs https://github.com/rust-lang/cargo/issues/5034.
Aleksey Kladov 2020-11-02 13:13:32 +01:00
parent e7f90866bc
commit b610118453
34 changed files with 157 additions and 158 deletions
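
As an illustration of what the lint enforces (a minimal sketch, not the exact
workspace wiring added by this commit): with `unreachable_pub` denied, an item
may only be declared `pub` if it is actually reachable from outside the crate;
everything else has to say `pub(crate)`. The module and function names below
are made up.

#![deny(unreachable_pub)]

mod internal {
    // Error under `deny(unreachable_pub)`: the parent module is private, so
    // this `pub` item can never be reached from outside the crate.
    // pub fn helper() {}

    // The fix: make the declared visibility match the actual reachability.
    pub(crate) fn helper() {}
}

pub fn api() {
    // `helper` is still freely usable anywhere inside the crate.
    internal::helper();
}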


@@ -17,7 +17,7 @@ use crate::{
 pub use self::{
     expr_ext::{ArrayExprKind, BinOp, Effect, ElseBranch, LiteralKind, PrefixOp, RangeOp},
-    generated::*,
+    generated::{nodes::*, tokens::*},
     node_ext::{
         AttrKind, FieldKind, NameOrNameRef, PathSegmentKind, SelfParamKind, SlicePatComponents,
         StructKind, TypeBoundKind, VisibilityKind,


@@ -1,8 +1,8 @@
 //! This file is actually hand-written, but the submodules are indeed generated.
 #[rustfmt::skip]
-mod nodes;
+pub(crate) mod nodes;
 #[rustfmt::skip]
-mod tokens;
+pub(crate) mod tokens;
 use crate::{
     AstNode,
@@ -10,7 +10,7 @@ use crate::{
     SyntaxNode,
 };
-pub use {nodes::*, tokens::*};
+pub(crate) use nodes::*;
 // Stmt is the only nested enum, so it's easier to just hand-write it
 impl AstNode for Stmt {
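
The generated-modules hunk above shows the pattern this commit applies
throughout: modules become `pub(crate)`, while the items inside them stay part
of the public API because a parent module re-exports them with `pub use`. A
simplified, hypothetical sketch of that facade shape (the module names follow
the hunk above; the `Node` type is made up):

#![deny(unreachable_pub)]

pub mod ast {
    pub(crate) mod generated {
        pub(crate) mod nodes {
            // Still `pub`: it is reachable from outside the crate through the
            // re-export below, so `unreachable_pub` does not fire.
            pub struct Node;
        }
    }

    // Public facade: the crate-internal modules stay hidden, their items do not.
    pub use self::generated::nodes::*;
}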


@@ -46,16 +46,19 @@ use text_edit::Indel;
 pub use crate::{
     algo::InsertPosition,
     ast::{AstNode, AstToken},
-    parsing::{lex_single_syntax_kind, lex_single_valid_syntax_kind, tokenize, Token},
+    parsing::lexer::{lex_single_syntax_kind, lex_single_valid_syntax_kind, tokenize, Token},
     ptr::{AstPtr, SyntaxNodePtr},
     syntax_error::SyntaxError,
     syntax_node::{
-        Direction, GreenNode, NodeOrToken, SyntaxElement, SyntaxElementChildren, SyntaxNode,
-        SyntaxNodeChildren, SyntaxToken, SyntaxTreeBuilder,
+        SyntaxElement, SyntaxElementChildren, SyntaxNode, SyntaxNodeChildren, SyntaxToken,
+        SyntaxTreeBuilder,
     },
 };
 pub use parser::{SyntaxKind, T};
-pub use rowan::{SmolStr, SyntaxText, TextRange, TextSize, TokenAtOffset, WalkEvent};
+pub use rowan::{
+    Direction, GreenNode, NodeOrToken, SmolStr, SyntaxText, TextRange, TextSize, TokenAtOffset,
+    WalkEvent,
+};
 /// `Parse` is the result of the parsing: a syntax tree and a collection of
 /// errors.
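
The crate-root hunk above keeps the externally visible names stable:
`Direction`, `GreenNode`, and `NodeOrToken` are now re-exported at the crate
root directly from `rowan` rather than via the `syntax_node` module. A
hypothetical downstream import (assuming the crate is consumed under the name
`syntax`) is unaffected:

// All of these names resolve at the `syntax` crate root both before and after
// this change; only the internal path they are re-exported from moved.
use syntax::{Direction, GreenNode, NodeOrToken, SyntaxNode, WalkEvent};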


@@ -1,7 +1,7 @@
 //! Lexing, bridging to parser (which does the actual parsing) and
 //! incremental reparsing.
-mod lexer;
+pub(crate) mod lexer;
 mod text_token_source;
 mod text_tree_sink;
 mod reparsing;
@@ -10,7 +10,7 @@ use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode};
 use text_token_source::TextTokenSource;
 use text_tree_sink::TextTreeSink;
-pub use lexer::*;
+pub(crate) use lexer::*;
 pub(crate) use self::reparsing::incremental_reparse;
 use parser::SyntaxKind;


@@ -65,7 +65,7 @@ fn mk_token(pos: usize, token_offset_pairs: &[(Token, TextSize)]) -> parser::Tok
 impl<'t> TextTokenSource<'t> {
     /// Generate input from tokens(expect comment and whitespace).
-    pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> {
+    pub(crate) fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> {
         let token_offset_pairs: Vec<_> = raw_tokens
             .iter()
             .filter_map({


@@ -10,9 +10,7 @@ use rowan::{GreenNodeBuilder, Language};
 use crate::{Parse, SmolStr, SyntaxError, SyntaxKind, TextSize};
-pub use rowan::GreenNode;
-pub(crate) use rowan::GreenToken;
+pub(crate) use rowan::{GreenNode, GreenToken, NodeOrToken};
 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub enum RustLanguage {}
@@ -34,8 +32,6 @@ pub type SyntaxElement = rowan::SyntaxElement<RustLanguage>;
 pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren<RustLanguage>;
 pub type SyntaxElementChildren = rowan::SyntaxElementChildren<RustLanguage>;
-pub use rowan::{Direction, NodeOrToken};
 #[derive(Default)]
 pub struct SyntaxTreeBuilder {
     errors: Vec<SyntaxError>,