use crate::{
dom::{Cast, EntryNode, KeyNode, NodeSyntax, RootNode},
syntax::{SyntaxKind, SyntaxKind::*, SyntaxNode, SyntaxToken},
};
use rowan::{GreenNode, GreenNodeBuilder, NodeOrToken, SmolStr, TextRange};
use std::{iter::FromIterator, mem, rc::Rc};
#[cfg(feature = "serde")]
use serde_crate::{Deserialize, Serialize};
#[cfg(feature = "schema")]
use schemars::JsonSchema;
#[macro_use]
mod macros;
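/// Formatting options scoped to text ranges of the document: each partial
/// `OptionsIncomplete` is applied on top of the base options for nodes that
/// fall inside its `TextRange`.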
#[derive(Debug, Clone, Default)]
pub struct ScopedOptions(Vec<(TextRange, OptionsIncomplete)>);
impl FromIterator<(TextRange, OptionsIncomplete)> for ScopedOptions {
fn from_iter<T: IntoIterator<Item = (TextRange, OptionsIncomplete)>>(iter: T) -> Self {
        Self(Vec::from_iter(iter))
}
}
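// `create_options!` is defined in the `macros` module; besides `Options` itself it is
// expected to also generate the partial `OptionsIncomplete` type and the
// `Options::update` method used below for applying scoped overrides.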
create_options!(
    /// All the formatting options.
    #[derive(Debug, Clone, Eq, PartialEq)]
    #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
    #[cfg_attr(feature = "serde", serde(crate = "serde_crate"))]
    #[cfg_attr(feature = "schema", derive(JsonSchema))]
    pub struct Options {
        /// Align consecutive entries vertically.
        pub align_entries: bool,
        /// Append trailing commas to multi-line arrays.
        pub array_trailing_comma: bool,
        /// Expand arrays onto multiple lines when they exceed `column_width`.
        pub array_auto_expand: bool,
        /// Collapse multi-line arrays that fit on a single line and contain no comments.
        pub array_auto_collapse: bool,
        /// Omit whitespace padding inside single-line arrays.
        pub compact_arrays: bool,
        /// Omit whitespace padding inside inline tables.
        pub compact_inline_tables: bool,
        /// Target maximum column width after which arrays are expanded.
        pub column_width: usize,
        /// Indent entries under table headers based on the key nesting.
        pub indent_tables: bool,
        /// The string to use for a single level of indentation.
        pub indent_string: String,
        /// Add a trailing newline to the output.
        pub trailing_newline: bool,
        /// Alphabetically reorder keys that are not separated by blank lines.
        pub reorder_keys: bool,
        /// The maximum number of consecutive blank lines allowed.
        pub allowed_blank_lines: usize,
        /// Use CRLF line endings.
        pub crlf: bool,
    }
);
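/// Error describing an invalid formatting option key or an invalid value for a known key.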
#[derive(Debug)]
pub enum OptionParseError {
InvalidOption(String),
InvalidValue {
key: String,
error: Box<dyn std::error::Error>,
},
}
impl std::fmt::Display for OptionParseError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"invalid formatting option: {}",
match self {
OptionParseError::InvalidOption(k) => {
format!(r#"invalid option "{}""#, k)
}
OptionParseError::InvalidValue { key, error } => {
format!(r#"invalid value for option "{}": {}"#, key, error)
}
}
)
}
}
impl std::error::Error for OptionParseError {}
impl Default for Options {
fn default() -> Self {
Options {
align_entries: false,
array_trailing_comma: true,
array_auto_expand: true,
array_auto_collapse: true,
compact_arrays: true,
compact_inline_tables: false,
column_width: 80,
indent_tables: false,
trailing_newline: true,
allowed_blank_lines: 2,
indent_string: " ".into(),
reorder_keys: false,
crlf: false,
}
}
}
impl Options {
fn newline(&self) -> SmolStr {
if self.crlf {
"\r\n".into()
} else {
"\n".into()
}
}
fn newlines(&self, count: usize) -> SmolStr {
self.newline()
.repeat(usize::min(count, self.allowed_blank_lines + 1))
.into()
}
fn indent_chars(&self, level: usize) -> usize {
self.indent_string.repeat(level).chars().count()
}
}
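/// State threaded through the formatting passes: the current indentation level,
/// the running character count of the current line (for column-width checks),
/// and the shared range-scoped option overrides.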
#[derive(Debug, Clone, Default)]
struct Context {
indent_level: usize,
line_char_count: usize,
scopes: Rc<ScopedOptions>,
}
impl Context {
fn update_options(&self, opts: &mut Options, range: TextRange) {
for (r, s) in &self.scopes.0 {
if r.contains_range(range) {
opts.update(s.clone());
}
}
}
}
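/// Formats a parsed green tree with the given options.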
pub fn format_green(green: GreenNode, options: Options) -> String {
format_syntax(SyntaxNode::new_root(green), options)
}
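/// Parses the given TOML source and formats it with the given options.
///
/// A minimal usage sketch (not compiled here, paths depend on how the crate
/// re-exports this module):
///
/// ```ignore
/// let formatted = format("foo   =  1", Options::default());
/// assert_eq!(formatted, "foo = 1\n");
/// ```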
pub fn format(src: &str, options: Options) -> String {
format_syntax(crate::parser::parse(src).into_syntax(), options)
}
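/// Formats a syntax tree, trimming trailing whitespace and appending a
/// trailing newline if `trailing_newline` is enabled.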
pub fn format_syntax(node: SyntaxNode, options: Options) -> String {
let mut s = format_impl(node, options.clone(), Context::default()).to_string();
s = s.trim_end().into();
if options.trailing_newline {
s += options.newline().as_str();
}
s
}
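/// Formats the DOM with the base options, applying the range-scoped overrides
/// wherever a node falls inside one of the given ranges.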
pub fn format_with_scopes(dom: RootNode, options: Options, scopes: ScopedOptions) -> String {
    let c = Context {
        scopes: Rc::new(scopes),
        ..Context::default()
    };
    // Pass the scoped context so the range-based option overrides actually apply.
    let mut s = format_impl(dom.syntax().into_node().unwrap(), options.clone(), c).to_string();
s = s.trim_end().into();
if options.trailing_newline {
s += options.newline().as_str();
}
s
}
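/// Formats the DOM with the base options, where each scope is a glob pattern
/// (e.g. `dependencies.*`) matched against the dotted paths of the DOM; the
/// partial options are applied to the text ranges of every matching node.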
pub fn format_with_path_scopes<I: IntoIterator<Item = (String, OptionsIncomplete)>>(
dom: RootNode,
options: Options,
scopes: I,
) -> String {
    let mut c = Context::default();
    let mut scoped_ranges = Vec::new();
    for (scope, opts) in scopes {
        // Note: an invalid glob pattern panics here.
        let pat = glob::Pattern::new(&scope).unwrap();
        for (p2, node) in dom.iter() {
            if pat.matches(&p2.dotted()) {
                scoped_ranges.extend(node.text_ranges().into_iter().map(|r| (r, opts.clone())))
            }
        }
    }
    c.scopes = Rc::new(ScopedOptions::from_iter(scoped_ranges));
let mut s = format_impl(dom.syntax().into_node().unwrap(), options.clone(), c).to_string();
s = s.trim_end().into();
if options.trailing_newline {
s += options.newline().as_str();
}
s
}
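/// Dispatches to the formatter matching the node kind; nodes of any other kind
/// are returned unchanged.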
fn format_impl(node: SyntaxNode, options: Options, mut context: Context) -> SyntaxNode {
let kind: SyntaxKind = node.kind();
let mut builder = GreenNodeBuilder::new();
match kind {
KEY => format_key(node, &mut builder, options, &mut context),
VALUE => format_value(node, &mut builder, options, &mut context),
TABLE_HEADER | TABLE_ARRAY_HEADER => {
format_table_header(node, &mut builder, options, &mut context)
}
ENTRY => format_entry(node, &mut builder, options, &mut context),
ARRAY => format_array(node, &mut builder, options, &mut context),
INLINE_TABLE => format_inline_table(node, &mut builder, options, &mut context),
ROOT => format_root(node, &mut builder, options, &mut context),
_ => return node,
};
SyntaxNode::new_root(builder.finish())
}
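/// Formats the whole document: consecutive entries are collected into groups so
/// they can be reordered and aligned, comments and blank lines are normalized,
/// and table headers are indented when `indent_tables` is enabled. Subtrees
/// containing parse errors are copied through untouched.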
#[allow(clippy::cognitive_complexity)]
fn format_root(
node: SyntaxNode,
builder: &mut GreenNodeBuilder,
options: Options,
context: &mut Context,
) {
builder.start_node(ROOT.into());
let mut entry_group: Vec<SyntaxNode> = Vec::new();
let mut indent_level: usize = 0;
let mut indent_levels: Vec<(KeyNode, usize)> = Vec::new();
let mut skip_newline = 0;
let mut comments: Vec<SyntaxToken> = Vec::new();
let mut prev_newlines = 0;
fn add_comments(
indent: &str,
comments: &mut Vec<SyntaxToken>,
builder: &mut GreenNodeBuilder,
options: &Options,
trailing_newline: bool,
) {
        for (i, comment) in comments.iter().enumerate() {
            // Separate consecutive comments with a newline before indenting the next one.
            if i != 0 {
                builder.token(NEWLINE.into(), options.newline());
            }
            builder.token(WHITESPACE.into(), indent.into());
            builder.token(comment.kind().into(), comment.text().clone());
        }
if trailing_newline && !comments.is_empty() {
builder.token(NEWLINE.into(), options.newline());
}
comments.clear();
}
for c in node.children_with_tokens() {
let mut options = options.clone();
context.update_options(&mut options, c.text_range());
match c.clone() {
NodeOrToken::Node(n) => {
if n.descendants_with_tokens().any(|e| e.kind() == ERROR) {
add_all(n, builder);
continue;
}
match n.kind() {
TABLE_HEADER | TABLE_ARRAY_HEADER => {
let indent_str = options.indent_string.repeat(indent_level);
if options.reorder_keys {
entry_group.sort_by(|a, b| {
let ea = EntryNode::cast(NodeOrToken::Node(a.clone())).unwrap();
let eb = EntryNode::cast(NodeOrToken::Node(b.clone())).unwrap();
ea.key()
.full_key_string()
.partial_cmp(&eb.key().full_key_string())
.unwrap()
});
}
if !entry_group.is_empty() {
add_aligned(
mem::take(&mut entry_group),
builder,
&options.newline(),
if options.indent_tables {
Some(&indent_str)
} else {
None
},
if options.align_entries { None } else { Some(1) },
);
builder.token(NEWLINE.into(), options.newline());
}
if options.indent_tables {
if let Some(key_syntax) = n.first_child() {
if let Some(key) = KeyNode::cast(NodeOrToken::Node(key_syntax)) {
indent_level = indent_levels
.iter()
.filter_map(|(k, level)| {
if k.common_prefix_count(&key) > 0
&& k.key_count() <= key.key_count()
{
if k.key_count() == key.key_count() {
Some(*level)
} else {
Some(k.common_prefix_count(&key))
}
} else {
None
}
})
.max()
.unwrap_or(0);
indent_levels.push((key.clone(), indent_level));
}
}
}
let indent_str = options.indent_string.repeat(indent_level);
add_comments(&indent_str, &mut comments, builder, &options, true);
if options.indent_tables {
builder.token(WHITESPACE.into(), indent_str.into());
}
format_table_header(n, builder, options.clone(), context)
}
ENTRY => {
let indent_str = options.indent_string.repeat(indent_level);
add_comments(&indent_str, &mut comments, builder, &options, true);
let mut entry_b = GreenNodeBuilder::new();
format_entry(
n,
&mut entry_b,
options.clone(),
&mut Context {
indent_level,
..Default::default()
},
);
entry_group.push(extract_comment_from_entry(SyntaxNode::new_root(
entry_b.finish(),
)));
skip_newline += 1;
}
_ => {
let indent_str = options.indent_string.repeat(indent_level);
add_comments(&indent_str, &mut comments, builder, &options, true);
if options.indent_tables {
builder.token(
WHITESPACE.into(),
options.indent_string.repeat(indent_level).into(),
);
}
add_all(n, builder);
}
};
}
NodeOrToken::Token(t) => match t.kind() {
NEWLINE => {
                    let mut newline_count = t.text().as_str().newline_count();
                    let indent_str = options.indent_string.repeat(indent_level);
                    // Peek ahead: if only whitespace separates this newline from the
                    // next one, this is part of a blank-line run, so keep accumulating.
                    let blank_follows = t
                        .next_sibling_or_token()
                        .and_then(|nt| {
                            nt.next_sibling_or_token()
                                .map(|nnt| nt.kind() == WHITESPACE && nnt.kind() == NEWLINE)
                        })
                        .unwrap_or(false);
                    if blank_follows {
                        prev_newlines += newline_count;
                        continue;
                    }
                    newline_count += prev_newlines;
                    prev_newlines = 0;
if newline_count > 1 && !comments.is_empty() {
add_comments(&indent_str, &mut comments, builder, &options, false);
}
if newline_count > 1 && options.allowed_blank_lines != 0 {
if !entry_group.is_empty() {
if options.reorder_keys {
entry_group.sort_by(|a, b| {
let ea = EntryNode::cast(NodeOrToken::Node(a.clone())).unwrap();
let eb = EntryNode::cast(NodeOrToken::Node(b.clone())).unwrap();
ea.key()
.full_key_string()
.partial_cmp(&eb.key().full_key_string())
.unwrap()
});
}
add_aligned(
mem::take(&mut entry_group),
builder,
&options.newline(),
if options.indent_tables {
Some(&indent_str)
} else {
None
},
if options.align_entries { None } else { Some(1) },
);
}
builder.token(NEWLINE.into(), options.newlines(newline_count));
} else if skip_newline == 0 {
builder.token(NEWLINE.into(), options.newlines(newline_count));
}
skip_newline = i32::max(0, skip_newline - 1);
}
COMMENT => {
if options.reorder_keys {
entry_group.sort_by(|a, b| {
let ea = EntryNode::cast(NodeOrToken::Node(a.clone())).unwrap();
let eb = EntryNode::cast(NodeOrToken::Node(b.clone())).unwrap();
ea.key()
.full_key_string()
.partial_cmp(&eb.key().full_key_string())
.unwrap()
});
}
if !entry_group.is_empty() {
let indent_str = options.indent_string.repeat(indent_level);
add_aligned(
mem::take(&mut entry_group),
builder,
&options.newline(),
if options.indent_tables {
Some(&indent_str)
} else {
None
},
if options.align_entries { None } else { Some(1) },
);
builder.token(NEWLINE.into(), options.newline());
}
comments.push(t);
skip_newline += 1;
}
WHITESPACE => {}
_ => {
if options.indent_tables {
builder.token(
WHITESPACE.into(),
options.indent_string.repeat(indent_level).into(),
);
}
builder.token(t.kind().into(), t.text().clone())
}
},
}
}
let indent_str = options.indent_string.repeat(indent_level);
if options.reorder_keys {
entry_group.sort_by(|a, b| {
let ea = EntryNode::cast(NodeOrToken::Node(a.clone())).unwrap();
let eb = EntryNode::cast(NodeOrToken::Node(b.clone())).unwrap();
ea.key()
.full_key_string()
.partial_cmp(&eb.key().full_key_string())
.unwrap()
});
}
add_aligned(
mem::take(&mut entry_group),
builder,
&options.newline(),
if options.indent_tables {
Some(&indent_str)
} else {
None
},
if options.align_entries { None } else { Some(1) },
);
add_comments(&indent_str, &mut comments, builder, &options, false);
builder.finish_node();
}
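/// Formats an inline table onto a single line, normalizing `,` and whitespace
/// separators and padding the braces unless `compact_inline_tables` is set.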
fn format_inline_table(
node: SyntaxNode,
builder: &mut GreenNodeBuilder,
options: Options,
context: &mut Context,
) {
builder.start_node(INLINE_TABLE.into());
if node.children().count() == 0 {
builder.token(BRACE_START.into(), "{".into());
builder.token(BRACE_END.into(), "}".into());
} else {
let mut has_previous = false;
for c in node.children_with_tokens() {
match c {
NodeOrToken::Node(n) => {
if has_previous {
builder.token(COMMA.into(), ",".into());
builder.token(WHITESPACE.into(), " ".into());
}
format_entry(n, builder, options.clone(), context);
has_previous = true;
}
NodeOrToken::Token(t) => match t.kind() {
BRACE_START => {
builder.token(t.kind().into(), t.text().clone());
if !options.compact_inline_tables {
builder.token(WHITESPACE.into(), " ".into());
}
}
BRACE_END => {
if !options.compact_inline_tables {
builder.token(WHITESPACE.into(), " ".into());
}
builder.token(t.kind().into(), t.text().clone());
}
WHITESPACE | NEWLINE | COMMA => {}
_ => builder.token(t.kind().into(), t.text().clone()),
},
}
}
}
builder.finish_node();
}
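/// Formats an array, expanding it over multiple lines when it contains comments
/// or newlines, or when `array_auto_expand` is set and the line would exceed
/// `column_width`; otherwise it is kept (or, with `array_auto_collapse`,
/// collapsed) on a single line.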
fn format_array(
node: SyntaxNode,
builder: &mut GreenNodeBuilder,
options: Options,
context: &mut Context,
) {
builder.start_node(ARRAY.into());
let (token_count, char_count, has_newline) = node
.descendants_with_tokens()
.filter(|t| match t {
NodeOrToken::Node(_) => false,
NodeOrToken::Token(t) => t.kind() != WHITESPACE,
})
.fold((0, 0, false), |(mut count, mut len, mut has_newline), e| {
len += u32::from(e.text_range().len());
count += 1;
if e.kind() == NEWLINE {
count -= 1;
has_newline = true;
}
(count, len, has_newline)
});
let child_count = node.children().count();
let all_token_count = node.children_with_tokens().count();
    // A comment that is the very last child (after the closing bracket) does not
    // count; it stays on the same line instead of forcing the array to be multi-line.
    let has_comment_inside = node
        .children_with_tokens()
        .enumerate()
        .any(|(i, c)| match c {
            NodeOrToken::Node(n) => n.descendants_with_tokens().any(|d| d.kind() == COMMENT),
            NodeOrToken::Token(t) => i != all_token_count - 1 && t.kind() == COMMENT,
        });
    // Only the two brackets remain, so the array is empty.
    if token_count == 2 {
builder.token(BRACKET_START.into(), "[".into());
builder.token(BRACKET_END.into(), "]".into());
} else {
let too_long = char_count
+ context.line_char_count as u32
+ options.indent_chars(context.indent_level) as u32
> options.column_width as u32;
let multiline: bool =
(has_comment_inside || has_newline || (options.array_auto_expand && too_long))
&& !(options.array_auto_collapse && !has_comment_inside && !too_long);
let mut was_value = false;
let mut was_comment = false;
let mut node_index = 0;
let mut prev_token: Option<SyntaxToken> = None;
for (i, c) in node.children_with_tokens().enumerate() {
match c {
NodeOrToken::Node(n) => {
if node_index != 0 || was_comment {
if multiline {
builder.token(NEWLINE.into(), options.newline());
} else {
builder.token(WHITESPACE.into(), " ".into());
}
}
if multiline {
builder.token(
WHITESPACE.into(),
options
.indent_string
.repeat(context.indent_level + 1)
.into(),
);
}
let mut b = GreenNodeBuilder::new();
format_value(
n,
&mut b,
options.clone(),
&mut Context {
indent_level: context.indent_level + 1,
..context.clone()
},
);
let (val, comment) =
extract_comment_from_value(SyntaxNode::new_root(b.finish()));
add_all(val, builder);
if node_index != child_count - 1 || (multiline && options.array_trailing_comma)
{
builder.token(COMMA.into(), ",".into());
}
if let Some(comm) = comment {
builder.token(WHITESPACE.into(), " ".into());
builder.token(COMMENT.into(), comm.into());
} else {
was_value = true;
}
node_index += 1;
}
NodeOrToken::Token(t) => {
match t.kind() {
BRACKET_START => {
builder.token(t.kind().into(), t.text().clone());
if multiline {
builder.token(NEWLINE.into(), options.newline());
} else if !options.compact_arrays {
builder.token(WHITESPACE.into(), " ".into());
}
}
BRACKET_END => {
if multiline {
builder.token(NEWLINE.into(), options.newline());
builder.token(
WHITESPACE.into(),
options.indent_string.repeat(context.indent_level).into(),
);
} else if !options.compact_arrays {
builder.token(WHITESPACE.into(), " ".into());
}
builder.token(t.kind().into(), t.text().clone());
}
COMMENT => {
if i == all_token_count - 1 {
builder.token(WHITESPACE.into(), " ".into());
builder.token(t.kind().into(), t.text().clone());
} else {
if was_comment || was_value {
if prev_token
.take()
.map(|t| t.kind() == NEWLINE)
.unwrap_or(false)
{
builder.token(NEWLINE.into(), options.newline());
builder.token(
WHITESPACE.into(),
options
.indent_string
.repeat(context.indent_level + 1)
.into(),
);
} else {
builder.token(WHITESPACE.into(), " ".into());
}
} else {
builder.token(
WHITESPACE.into(),
options
.indent_string
.repeat(context.indent_level + 1)
.into(),
);
}
builder.token(t.kind().into(), t.text().clone());
was_comment = true;
}
was_value = false;
}
WHITESPACE | NEWLINE | COMMA => {}
_ => builder.token(t.kind().into(), t.text().clone()),
}
if t.kind() != WHITESPACE {
prev_token = Some(t.clone());
}
}
}
}
}
builder.finish_node();
}
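/// Formats a single `key = value` entry, keeping track of the line length for
/// column-width calculations.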
fn format_entry(
node: SyntaxNode,
builder: &mut GreenNodeBuilder,
options: Options,
context: &mut Context,
) {
builder.start_node(ENTRY.into());
for c in node.children_with_tokens() {
match c {
NodeOrToken::Node(n) => match n.kind() {
KEY => {
format_key(n, builder, options.clone(), context);
builder.token(WHITESPACE.into(), " ".into())
}
VALUE => format_value(n, builder, options.clone(), context),
_ => add_all(n, builder),
},
NodeOrToken::Token(t) => match t.kind() {
EQ => {
context.line_char_count += 1;
builder.token(EQ.into(), "=".into());
builder.token(WHITESPACE.into(), " ".into());
}
WHITESPACE | NEWLINE => {}
_ => {
context.line_char_count += u32::from(t.text_range().len()) as usize;
builder.token(t.kind().into(), t.text().clone())
}
},
}
}
builder.finish_node();
}
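/// Formats a (possibly dotted) key, dropping any surrounding whitespace.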
fn format_key(
node: SyntaxNode,
builder: &mut GreenNodeBuilder,
_options: Options,
context: &mut Context,
) {
builder.start_node(KEY.into());
for c in node.children_with_tokens() {
match c {
NodeOrToken::Node(_) => {}
NodeOrToken::Token(t) => match t.kind() {
IDENT => {
context.line_char_count += u32::from(t.text_range().len()) as usize;
builder.token(IDENT.into(), t.text().clone())
}
PERIOD => {
context.line_char_count += u32::from(t.text_range().len()) as usize;
builder.token(PERIOD.into(), ".".into())
}
WHITESPACE | NEWLINE => {}
_ => {
context.line_char_count += u32::from(t.text_range().len()) as usize;
builder.token(t.kind().into(), t.text().clone())
}
},
}
}
builder.finish_node();
}
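/// Formats a value, delegating arrays and inline tables to their own formatters
/// and copying every other value verbatim.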
fn format_value(
node: SyntaxNode,
builder: &mut GreenNodeBuilder,
options: Options,
context: &mut Context,
) {
builder.start_node(VALUE.into());
for c in node.children_with_tokens() {
match c {
NodeOrToken::Node(n) => match n.kind() {
ARRAY => format_array(n, builder, options.clone(), context),
INLINE_TABLE => format_inline_table(n, builder, options.clone(), context),
_ => add_all(n, builder),
},
NodeOrToken::Token(t) => match t.kind() {
NEWLINE | WHITESPACE => {}
_ => builder.token(t.kind().into(), t.text().clone()),
},
}
}
builder.finish_node();
}
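/// Formats a `[table]` or `[[array of tables]]` header, keeping a trailing
/// comment separated from the closing bracket by a single space.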
fn format_table_header(
node: SyntaxNode,
builder: &mut GreenNodeBuilder,
options: Options,
context: &mut Context,
) {
builder.start_node(node.kind().into());
for c in node.children_with_tokens() {
match c {
NodeOrToken::Node(n) => {
format_key(n, builder, options.clone(), context);
}
NodeOrToken::Token(t) => match t.kind() {
BRACKET_START | BRACKET_END => builder.token(t.kind().into(), t.text().clone()),
WHITESPACE | NEWLINE => {}
COMMENT => {
builder.token(WHITESPACE.into(), " ".into());
builder.token(t.kind().into(), t.text().clone());
}
_ => builder.token(t.kind().into(), t.text().clone()),
},
}
}
builder.finish_node()
}
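/// Writes the given nodes one per line, optionally indented. When `exact_tabs`
/// is `None`, the children of every node are padded to the widest child in the
/// same column (used for `align_entries`); otherwise exactly that many spaces
/// separate the children.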
fn add_aligned(
nodes: Vec<SyntaxNode>,
builder: &mut GreenNodeBuilder,
newline: &str,
indent: Option<&str>,
exact_tabs: Option<usize>,
) {
let mut max_lengths: Vec<u32> = Vec::new();
for node in &nodes {
for (i, c) in node
.children_with_tokens()
.filter(|c| c.kind() != WHITESPACE)
.enumerate()
{
let ts = c.text_range().len();
if let Some(l) = max_lengths.get_mut(i) {
*l = u32::max(*l, ts.into())
} else {
max_lengths.push(ts.into())
}
}
}
let node_count = nodes.len();
for (i, node) in nodes.into_iter().enumerate() {
builder.start_node(node.kind().into());
if let Some(ind) = indent {
builder.token(WHITESPACE.into(), ind.into());
}
let child_count = node
.children_with_tokens()
.filter(|c| c.kind() != WHITESPACE)
.count();
for (i, c) in node
.children_with_tokens()
.filter(|c| c.kind() != WHITESPACE)
.enumerate()
{
let ws_count = match &exact_tabs {
Some(t) => *t,
None => (max_lengths[i] - u32::from(c.text_range().len()) + 1) as usize,
};
match c {
NodeOrToken::Node(n) => add_all(n, builder),
NodeOrToken::Token(t) => {
builder.token(t.kind().into(), t.text().clone());
}
}
if i != child_count - 1
&& ws_count > 0
&& i != max_lengths.len().checked_sub(1).unwrap_or_default()
{
builder.token(WHITESPACE.into(), " ".repeat(ws_count).into())
}
}
builder.finish_node();
if i != node_count - 1 {
builder.token(NEWLINE.into(), newline.into());
}
}
}
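/// Copies the node and all of its descendants into the builder unchanged.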
fn add_all(node: SyntaxNode, builder: &mut GreenNodeBuilder) {
builder.start_node(node.kind().into());
for c in node.children_with_tokens() {
match c {
NodeOrToken::Node(n) => add_all(n, builder),
NodeOrToken::Token(t) => builder.token(t.kind().into(), t.text().clone()),
}
}
builder.finish_node()
}
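/// Moves a comment that ended up inside an entry's value (for example after the
/// closing bracket of an array) to the end of the entry so it stays on the same line.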
fn extract_comment_from_entry(node: SyntaxNode) -> SyntaxNode {
let mut b = GreenNodeBuilder::new();
b.start_node(node.kind().into());
let mut comment = None;
for c in node.children_with_tokens() {
match c {
NodeOrToken::Node(child_n) => match child_n.kind() {
VALUE => {
b.start_node(VALUE.into());
for val_child in child_n.children_with_tokens() {
match val_child {
NodeOrToken::Node(n) => {
if let ARRAY | INLINE_TABLE = n.kind() {
b.start_node(n.kind().into());
let mut after_end = false;
for inner_child in n.children_with_tokens() {
if let COMMENT = inner_child.kind() {
comment = inner_child
.as_token()
.unwrap()
.text()
.clone()
.into();
} else {
match inner_child {
NodeOrToken::Node(child_n) => {
add_all(child_n, &mut b);
}
NodeOrToken::Token(t) => match t.kind() {
WHITESPACE => {
if !after_end {
b.token(
t.kind().into(),
t.text().clone(),
);
}
}
BRACE_END | BRACKET_END => {
after_end = true;
b.token(t.kind().into(), t.text().clone());
}
_ => {
b.token(t.kind().into(), t.text().clone());
}
},
}
}
}
b.finish_node();
} else {
add_all(n, &mut b);
}
}
NodeOrToken::Token(t) => match t.kind() {
COMMENT => {
comment = t.text().clone().into();
}
_ => {
b.token(t.kind().into(), t.text().clone());
}
},
}
}
b.finish_node();
}
_ => {
add_all(child_n, &mut b);
}
},
NodeOrToken::Token(child_t) => b.token(child_t.kind().into(), child_t.text().clone()),
}
}
if let Some(c) = comment {
b.token(COMMENT.into(), c)
}
b.finish_node();
SyntaxNode::new_root(b.finish())
}
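/// Splits a trailing comment off a value, returning the rewritten value together
/// with the comment text, if any.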
fn extract_comment_from_value(node: SyntaxNode) -> (SyntaxNode, Option<String>) {
let mut b = GreenNodeBuilder::new();
b.start_node(node.kind().into());
let mut comment = None;
for c in node.children_with_tokens() {
match c {
NodeOrToken::Node(n) => match n.kind() {
ARRAY | INLINE_TABLE => {
let has_comment = n
.children_with_tokens()
.last()
.map(|t| t.kind() == COMMENT)
.unwrap_or(false);
b.start_node(n.kind().into());
let child_count = n.children_with_tokens().count();
for (i, c2) in n.children_with_tokens().enumerate() {
match c2 {
NodeOrToken::Node(n2) => {
add_all(n2, &mut b);
}
NodeOrToken::Token(t2) => match t2.kind() {
COMMENT => comment = Some(t2.text().to_string()),
WHITESPACE => {
if !has_comment || i < child_count - 2 {
b.token(t2.kind().into(), t2.text().clone())
}
}
_ => b.token(t2.kind().into(), t2.text().clone()),
},
}
}
b.finish_node();
}
_ => {
add_all(n, &mut b);
}
},
NodeOrToken::Token(c) => match c.kind() {
COMMENT => comment = Some(c.text().to_string()),
_ => b.token(c.kind().into(), c.text().clone()),
},
}
}
b.finish_node();
(SyntaxNode::new_root(b.finish()), comment)
}
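/// Counts `\n` characters, which also covers CRLF line endings.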
trait NewlineCount {
fn newline_count(&self) -> usize;
}
impl NewlineCount for &str {
fn newline_count(&self) -> usize {
self.chars().filter(|c| c == &'\n').count()
}
}